Remove bad output

Eugen Betke 2018-10-23 10:44:54 +02:00
parent e3090c5c23
commit b02eb4099c
22 changed files with 2 additions and 1440 deletions
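
Note: every deleted log below aborts on the same IOR parameter check, reported as
"ior ERROR: block size must be a multiple of transfer size" (ior.c:2293), so none of
the runs produced usable measurements. The snippet below is a minimal sketch written
for this note, not code taken from the IOR sources: it re-runs the divisibility check
with the -t and -b values that appear in the mpiexec command lines (the block size is
141180272640 bytes divided by the processes per node) and shows that each per-process
block size leaves a non-zero remainder against the 102400-byte transfer size.

/* sketch_block_check.c -- hypothetical helper, reconstructs the failing check */
#include <stdio.h>
#include <stdint.h>

int main(void) {
    const uint64_t transfer = 102400ULL;   /* ior -t 102400 */
    const uint64_t blocks[] = {
        141180272640ULL,                   /* ior -b at PPN 1 */
        70590136320ULL,                    /* ior -b at PPN 2 */
        35295068160ULL,                    /* ior -b at PPN 4 */
        23530045440ULL,                    /* ior -b at PPN 6 */
        17647534080ULL                     /* ior -b at PPN 8 */
    };
    for (size_t i = 0; i < sizeof blocks / sizeof blocks[0]; i++) {
        uint64_t rem = blocks[i] % transfer;   /* IOR requires rem == 0 */
        printf("-b %llu, -t %llu: remainder %llu -> %s\n",
               (unsigned long long)blocks[i],
               (unsigned long long)transfer,
               (unsigned long long)rem,
               rem == 0 ? "accepted" : "rejected (block size not a multiple)");
    }
    return 0;
}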

@@ -1,34 +0,0 @@
+ /opt/ddn/mvapich/bin/mpiexec -ppn 1 -np 1 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c04 /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 102400 -b 141180272640 -D 120 -a MPIIO -e -g -z -k -o /esfs/jtacquaviva/ioperf/file_write -w
+ tee -a ./output/COUNT:1#NN:1#PPN:1#API:MPIIO#T:102400.txt
IOR-3.0.1: MPI Coordinated Test of Parallel I/O
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
[cli_0]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 0
===================================================================================
= BAD TERMINATION OF ONE OF YOUR APPLICATION PROCESSES
= PID 24654 RUNNING AT isc17-c04
= EXIT CODE: 255
= CLEANING UP REMAINING PROCESSES
= YOU CAN IGNORE THE BELOW CLEANUP MESSAGES
===================================================================================
+ /opt/ddn/mvapich/bin/mpiexec -ppn 1 -np 1 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c04 /esfs/jtacquaviva/git/ime-evaluation/drop_caches.sh
+ /opt/ddn/mvapich/bin/mpiexec -ppn 1 -np 1 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c04 /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 102400 -b 141180272640 -D 120 -a MPIIO -e -g -z -k -o /esfs/jtacquaviva/file_read -r
+ tee -a ./output/COUNT:1#NN:1#PPN:1#API:MPIIO#T:102400.txt
IOR-3.0.1: MPI Coordinated Test of Parallel I/O
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
[cli_0]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 0
===================================================================================
= BAD TERMINATION OF ONE OF YOUR APPLICATION PROCESSES
= PID 24662 RUNNING AT isc17-c04
= EXIT CODE: 255
= CLEANING UP REMAINING PROCESSES
= YOU CAN IGNORE THE BELOW CLEANUP MESSAGES
===================================================================================
+ set +x
/esfs/jtacquaviva/ioperf
stripe_count: 2 stripe_size: 1048576 stripe_offset: -1

@@ -1,34 +0,0 @@
+ /opt/ddn/mvapich/bin/mpiexec -ppn 1 -np 1 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c04 /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 102400 -b 141180272640 -D 120 -a POSIX -F -e -g -z -k -o /esfs/jtacquaviva/ioperf/file_write -w
+ tee -a ./output/COUNT:1#NN:1#PPN:1#API:POSIX#T:102400.txt
IOR-3.0.1: MPI Coordinated Test of Parallel I/O
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
[cli_0]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 0
===================================================================================
= BAD TERMINATION OF ONE OF YOUR APPLICATION PROCESSES
= PID 24625 RUNNING AT isc17-c04
= EXIT CODE: 255
= CLEANING UP REMAINING PROCESSES
= YOU CAN IGNORE THE BELOW CLEANUP MESSAGES
===================================================================================
+ /opt/ddn/mvapich/bin/mpiexec -ppn 1 -np 1 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c04 /esfs/jtacquaviva/git/ime-evaluation/drop_caches.sh
+ tee -a ./output/COUNT:1#NN:1#PPN:1#API:POSIX#T:102400.txt
+ /opt/ddn/mvapich/bin/mpiexec -ppn 1 -np 1 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c04 /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 102400 -b 141180272640 -D 120 -a POSIX -F -e -g -z -k -o /esfs/jtacquaviva/indread1/file -r
IOR-3.0.1: MPI Coordinated Test of Parallel I/O
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
[cli_0]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 0
===================================================================================
= BAD TERMINATION OF ONE OF YOUR APPLICATION PROCESSES
= PID 24633 RUNNING AT isc17-c04
= EXIT CODE: 255
= CLEANING UP REMAINING PROCESSES
= YOU CAN IGNORE THE BELOW CLEANUP MESSAGES
===================================================================================
+ set +x
/esfs/jtacquaviva/ioperf
stripe_count: 2 stripe_size: 1048576 stripe_offset: -1

@@ -1,40 +0,0 @@
+ /opt/ddn/mvapich/bin/mpiexec -ppn 2 -np 2 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c04 /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 102400 -b 70590136320 -D 120 -a MPIIO -e -g -z -k -o /esfs/jtacquaviva/ioperf/file_write -w
+ tee -a ./output/COUNT:1#NN:1#PPN:2#API:MPIIO#T:102400.txt
IOR-3.0.1: MPI Coordinated Test of Parallel I/O
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
[cli_0]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 0
[cli_1]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 1
===================================================================================
= BAD TERMINATION OF ONE OF YOUR APPLICATION PROCESSES
= PID 24594 RUNNING AT isc17-c04
= EXIT CODE: 255
= CLEANING UP REMAINING PROCESSES
= YOU CAN IGNORE THE BELOW CLEANUP MESSAGES
===================================================================================
+ /opt/ddn/mvapich/bin/mpiexec -ppn 2 -np 2 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c04 /esfs/jtacquaviva/git/ime-evaluation/drop_caches.sh
+ /opt/ddn/mvapich/bin/mpiexec -ppn 2 -np 2 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c04 /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 102400 -b 70590136320 -D 120 -a MPIIO -e -g -z -k -o /esfs/jtacquaviva/file_read -r
+ tee -a ./output/COUNT:1#NN:1#PPN:2#API:MPIIO#T:102400.txt
IOR-3.0.1: MPI Coordinated Test of Parallel I/O
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
[cli_0]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 0
[cli_1]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 1
===================================================================================
= BAD TERMINATION OF ONE OF YOUR APPLICATION PROCESSES
= PID 24605 RUNNING AT isc17-c04
= EXIT CODE: 255
= CLEANING UP REMAINING PROCESSES
= YOU CAN IGNORE THE BELOW CLEANUP MESSAGES
===================================================================================
+ set +x
/esfs/jtacquaviva/ioperf
stripe_count: 2 stripe_size: 1048576 stripe_offset: -1

@@ -1,40 +0,0 @@
+ /opt/ddn/mvapich/bin/mpiexec -ppn 2 -np 2 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c04 /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 102400 -b 70590136320 -D 120 -a POSIX -F -e -g -z -k -o /esfs/jtacquaviva/ioperf/file_write -w
+ tee -a ./output/COUNT:1#NN:1#PPN:2#API:POSIX#T:102400.txt
IOR-3.0.1: MPI Coordinated Test of Parallel I/O
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
[cli_1]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 1
[cli_0]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 0
===================================================================================
= BAD TERMINATION OF ONE OF YOUR APPLICATION PROCESSES
= PID 24555 RUNNING AT isc17-c04
= EXIT CODE: 255
= CLEANING UP REMAINING PROCESSES
= YOU CAN IGNORE THE BELOW CLEANUP MESSAGES
===================================================================================
+ /opt/ddn/mvapich/bin/mpiexec -ppn 2 -np 2 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c04 /esfs/jtacquaviva/git/ime-evaluation/drop_caches.sh
+ /opt/ddn/mvapich/bin/mpiexec -ppn 2 -np 2 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c04 /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 102400 -b 70590136320 -D 120 -a POSIX -F -e -g -z -k -o /esfs/jtacquaviva/indread1/file -r
+ tee -a ./output/COUNT:1#NN:1#PPN:2#API:POSIX#T:102400.txt
IOR-3.0.1: MPI Coordinated Test of Parallel I/O
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
[cli_1]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 1
[cli_0]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 0
===================================================================================
= BAD TERMINATION OF ONE OF YOUR APPLICATION PROCESSES
= PID 24566 RUNNING AT isc17-c04
= EXIT CODE: 255
= CLEANING UP REMAINING PROCESSES
= YOU CAN IGNORE THE BELOW CLEANUP MESSAGES
===================================================================================
+ set +x
/esfs/jtacquaviva/ioperf
stripe_count: 2 stripe_size: 1048576 stripe_offset: -1

@@ -1,54 +0,0 @@
+ /opt/ddn/mvapich/bin/mpiexec -ppn 4 -np 4 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c04 /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 102400 -b 35295068160 -D 120 -a MPIIO -e -g -z -k -o /esfs/jtacquaviva/ioperf/file_write -w
+ tee -a ./output/COUNT:1#NN:1#PPN:4#API:MPIIO#T:102400.txt
IOR-3.0.1: MPI Coordinated Test of Parallel I/O
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
[cli_1]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 1
[cli_3]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 3
[cli_0]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 0
[cli_2]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 2
tee: standard outputior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
: Resource temporarily unavailable
===================================================================================
= BAD TERMINATION OF ONE OF YOUR APPLICATION PROCESSES
= PID 24513 RUNNING AT isc17-c04
= EXIT CODE: 255
= CLEANING UP REMAINING PROCESSES
= YOU CAN IGNORE THE BELOW CLEANUP MESSAGES
===================================================================================
tee: write error
+ /opt/ddn/mvapich/bin/mpiexec -ppn 4 -np 4 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c04 /esfs/jtacquaviva/git/ime-evaluation/drop_caches.sh
+ /opt/ddn/mvapich/bin/mpiexec -ppn 4 -np 4 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c04 /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 102400 -b 35295068160 -D 120 -a MPIIO -e -g -z -k -o /esfs/jtacquaviva/file_read -r
+ tee -a ./output/COUNT:1#NN:1#PPN:4#API:MPIIO#T:102400.txt
IOR-3.0.1: MPI Coordinated Test of Parallel I/O
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
[cli_0]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 0
[cli_1]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 1
[cli_2]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 2
[cli_3]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 3
===================================================================================
= BAD TERMINATION OF ONE OF YOUR APPLICATION PROCESSES
= PID 24532 RUNNING AT isc17-c04
= EXIT CODE: 255
= CLEANING UP REMAINING PROCESSES
= YOU CAN IGNORE THE BELOW CLEANUP MESSAGES
===================================================================================
+ set +x
/esfs/jtacquaviva/ioperf
stripe_count: 2 stripe_size: 1048576 stripe_offset: -1

@@ -1,52 +0,0 @@
+ /opt/ddn/mvapich/bin/mpiexec -ppn 4 -np 4 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c04 /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 102400 -b 35295068160 -D 120 -a POSIX -F -e -g -z -k -o /esfs/jtacquaviva/ioperf/file_write -w
+ tee -a ./output/COUNT:1#NN:1#PPN:4#API:POSIX#T:102400.txt
IOR-3.0.1: MPI Coordinated Test of Parallel I/O
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
[cli_1]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 1
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
[cli_3]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 3
[cli_2]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 2
[cli_0]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 0
===================================================================================
= BAD TERMINATION OF ONE OF YOUR APPLICATION PROCESSES
= PID 24469 RUNNING AT isc17-c04
= EXIT CODE: 255
= CLEANING UP REMAINING PROCESSES
= YOU CAN IGNORE THE BELOW CLEANUP MESSAGES
===================================================================================
+ /opt/ddn/mvapich/bin/mpiexec -ppn 4 -np 4 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c04 /esfs/jtacquaviva/git/ime-evaluation/drop_caches.sh
+ /opt/ddn/mvapich/bin/mpiexec -ppn 4 -np 4 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c04 /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 102400 -b 35295068160 -D 120 -a POSIX -F -e -g -z -k -o /esfs/jtacquaviva/indread1/file -r
+ tee -a ./output/COUNT:1#NN:1#PPN:4#API:POSIX#T:102400.txt
IOR-3.0.1: MPI Coordinated Test of Parallel I/O
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
[cli_0]: [cli_2]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 2
aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 0
[cli_1]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 1
[cli_3]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 3
===================================================================================
= BAD TERMINATION OF ONE OF YOUR APPLICATION PROCESSES
= PID 24487 RUNNING AT isc17-c04
= EXIT CODE: 255
= CLEANING UP REMAINING PROCESSES
= YOU CAN IGNORE THE BELOW CLEANUP MESSAGES
===================================================================================
+ set +x
/esfs/jtacquaviva/ioperf
stripe_count: 2 stripe_size: 1048576 stripe_offset: -1

@@ -1,64 +0,0 @@
+ /opt/ddn/mvapich/bin/mpiexec -ppn 6 -np 6 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c04 /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 102400 -b 23530045440 -D 120 -a MPIIO -e -g -z -k -o /esfs/jtacquaviva/ioperf/file_write -w
+ tee -a ./output/COUNT:1#NN:1#PPN:6#API:MPIIO#T:102400.txt
IOR-3.0.1: MPI Coordinated Test of Parallel I/O
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
[cli_1]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 1
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
[cli_3]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 3
[cli_5]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 5
[cli_0]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 0
[cli_2]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 2
[cli_4]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 4
===================================================================================
= BAD TERMINATION OF ONE OF YOUR APPLICATION PROCESSES
= PID 24411 RUNNING AT isc17-c04
= EXIT CODE: 255
= CLEANING UP REMAINING PROCESSES
= YOU CAN IGNORE THE BELOW CLEANUP MESSAGES
===================================================================================
+ /opt/ddn/mvapich/bin/mpiexec -ppn 6 -np 6 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c04 /esfs/jtacquaviva/git/ime-evaluation/drop_caches.sh
+ /opt/ddn/mvapich/bin/mpiexec -ppn 6 -np 6 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c04 /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 102400 -b 23530045440 -D 120 -a MPIIO -e -g -z -k -o /esfs/jtacquaviva/file_read -r
+ tee -a ./output/COUNT:1#NN:1#PPN:6#API:MPIIO#T:102400.txt
IOR-3.0.1: MPI Coordinated Test of Parallel I/O
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
[cli_1]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 1
[cli_3]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 3
[cli_5]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 5
[cli_2]: [cli_4]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 4
[cli_0]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 0
aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 2
===================================================================================
= BAD TERMINATION OF ONE OF YOUR APPLICATION PROCESSES
= PID 24440 RUNNING AT isc17-c04
= EXIT CODE: 255
= CLEANING UP REMAINING PROCESSES
= YOU CAN IGNORE THE BELOW CLEANUP MESSAGES
===================================================================================
+ set +x
/esfs/jtacquaviva/ioperf
stripe_count: 2 stripe_size: 1048576 stripe_offset: -1

@@ -1,66 +0,0 @@
+ /opt/ddn/mvapich/bin/mpiexec -ppn 6 -np 6 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c04 /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 102400 -b 23530045440 -D 120 -a POSIX -F -e -g -z -k -o /esfs/jtacquaviva/ioperf/file_write -w
+ tee -a ./output/COUNT:1#NN:1#PPN:6#API:POSIX#T:102400.txt
IOR-3.0.1: MPI Coordinated Test of Parallel I/O
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
[cli_1]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 1
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
[cli_3]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 3
[cli_5]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 5
[cli_0]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 0
[cli_2]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 2
[cli_4]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 4
===================================================================================
= BAD TERMINATION OF ONE OF YOUR APPLICATION PROCESSES
= PID 24325 RUNNING AT isc17-c04
= EXIT CODE: 255
= CLEANING UP REMAINING PROCESSES
= YOU CAN IGNORE THE BELOW CLEANUP MESSAGES
===================================================================================
+ /opt/ddn/mvapich/bin/mpiexec -ppn 6 -np 6 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c04 /esfs/jtacquaviva/git/ime-evaluation/drop_caches.sh
+ /opt/ddn/mvapich/bin/mpiexec -ppn 6 -np 6 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c04 /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 102400 -b 23530045440 -D 120 -a POSIX -F -e -g -z -k -o /esfs/jtacquaviva/indread1/file -r
+ tee -a ./output/COUNT:1#NN:1#PPN:6#API:POSIX#T:102400.txt
IOR-3.0.1: MPI Coordinated Test of Parallel I/O
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
[cli_1]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 1
[cli_3]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 3
[cli_5]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 5
[cli_0]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 0
[cli_2]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 2
[cli_4]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 4
tee: standard outputior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
: Resource temporarily unavailable
===================================================================================
= BAD TERMINATION OF ONE OF YOUR APPLICATION PROCESSES
= PID 24351 RUNNING AT isc17-c04
= EXIT CODE: 255
= CLEANING UP REMAINING PROCESSES
= YOU CAN IGNORE THE BELOW CLEANUP MESSAGES
===================================================================================
tee: write error
+ set +x
/esfs/jtacquaviva/ioperf
stripe_count: 2 stripe_size: 1048576 stripe_offset: -1

@@ -1,76 +0,0 @@
+ /opt/ddn/mvapich/bin/mpiexec -ppn 8 -np 8 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c04 /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 102400 -b 17647534080 -D 120 -a MPIIO -e -g -z -k -o /esfs/jtacquaviva/ioperf/file_write -w
+ tee -a ./output/COUNT:1#NN:1#PPN:8#API:MPIIO#T:102400.txt
IOR-3.0.1: MPI Coordinated Test of Parallel I/O
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
[cli_1]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 1
[cli_7]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 7
[cli_0]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 0
[cli_2]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 2
[cli_3]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 3
[cli_4]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 4
[cli_5]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 5
[cli_6]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 6
===================================================================================
= BAD TERMINATION OF ONE OF YOUR APPLICATION PROCESSES
= PID 24253 RUNNING AT isc17-c04
= EXIT CODE: 255
= CLEANING UP REMAINING PROCESSES
= YOU CAN IGNORE THE BELOW CLEANUP MESSAGES
===================================================================================
+ /opt/ddn/mvapich/bin/mpiexec -ppn 8 -np 8 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c04 /esfs/jtacquaviva/git/ime-evaluation/drop_caches.sh
+ /opt/ddn/mvapich/bin/mpiexec -ppn 8 -np 8 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c04 /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 102400 -b 17647534080 -D 120 -a MPIIO -e -g -z -k -o /esfs/jtacquaviva/file_read -r
+ tee -a ./output/COUNT:1#NN:1#PPN:8#API:MPIIO#T:102400.txt
IOR-3.0.1: MPI Coordinated Test of Parallel I/O
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
[cli_2]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 2
[cli_3]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 3
[cli_4]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 4
[cli_5]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 5
[cli_6]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 6
[cli_7]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 7
[cli_0]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 0
[cli_1]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 1
===================================================================================
= BAD TERMINATION OF ONE OF YOUR APPLICATION PROCESSES
= PID 24290 RUNNING AT isc17-c04
= EXIT CODE: 255
= CLEANING UP REMAINING PROCESSES
= YOU CAN IGNORE THE BELOW CLEANUP MESSAGES
===================================================================================
+ set +x
/esfs/jtacquaviva/ioperf
stripe_count: 2 stripe_size: 1048576 stripe_offset: -1

@@ -1,76 +0,0 @@
+ /opt/ddn/mvapich/bin/mpiexec -ppn 8 -np 8 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c04 /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 102400 -b 17647534080 -D 120 -a POSIX -F -e -g -z -k -o /esfs/jtacquaviva/ioperf/file_write -w
+ tee -a ./output/COUNT:1#NN:1#PPN:8#API:POSIX#T:102400.txt
IOR-3.0.1: MPI Coordinated Test of Parallel I/O
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
[cli_1]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 1
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
[cli_5]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 5
[cli_0]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 0
[cli_2]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 2
[cli_3]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 3
[cli_4]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 4
[cli_6]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 6
[cli_7]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 7
===================================================================================
= BAD TERMINATION OF ONE OF YOUR APPLICATION PROCESSES
= PID 24169 RUNNING AT isc17-c04
= EXIT CODE: 255
= CLEANING UP REMAINING PROCESSES
= YOU CAN IGNORE THE BELOW CLEANUP MESSAGES
===================================================================================
+ /opt/ddn/mvapich/bin/mpiexec -ppn 8 -np 8 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c04 /esfs/jtacquaviva/git/ime-evaluation/drop_caches.sh
+ tee -a ./output/COUNT:1#NN:1#PPN:8#API:POSIX#T:102400.txt
+ /opt/ddn/mvapich/bin/mpiexec -ppn 8 -np 8 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c04 /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 102400 -b 17647534080 -D 120 -a POSIX -F -e -g -z -k -o /esfs/jtacquaviva/indread1/file -r
IOR-3.0.1: MPI Coordinated Test of Parallel I/O
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
[cli_1]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 1
[cli_7]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 7
[cli_3]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 3
[cli_0]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 0
[cli_2]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 2
[cli_6]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 6
[cli_4]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 4
[cli_5]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 5
===================================================================================
= BAD TERMINATION OF ONE OF YOUR APPLICATION PROCESSES
= PID 24220 RUNNING AT isc17-c04
= EXIT CODE: 255
= CLEANING UP REMAINING PROCESSES
= YOU CAN IGNORE THE BELOW CLEANUP MESSAGES
===================================================================================
+ set +x
/esfs/jtacquaviva/ioperf
stripe_count: 2 stripe_size: 1048576 stripe_offset: -1

@@ -1,46 +0,0 @@
+ /opt/ddn/mvapich/bin/mpiexec -ppn 1 -np 2 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c04,isc17-c05 /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 102400 -b 141180272640 -D 120 -a MPIIO -e -g -z -k -o /esfs/jtacquaviva/ioperf/file_write -w
+ tee -a ./output/COUNT:1#NN:2#PPN:1#API:MPIIO#T:102400.txt
IOR-3.0.1: MPI Coordinated Test of Parallel I/O
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
[cli_0]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 0
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
[cli_1]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 1
===================================================================================
= BAD TERMINATION OF ONE OF YOUR APPLICATION PROCESSES
= PID 31995 RUNNING AT isc17-c05
= EXIT CODE: 255
= CLEANING UP REMAINING PROCESSES
= YOU CAN IGNORE THE BELOW CLEANUP MESSAGES
===================================================================================
[proxy:0:0@isc17-c04] HYDU_sock_write (utils/sock/sock.c:286): write error (Broken pipe)
[proxy:0:0@isc17-c04] main (pm/pmiserv/pmip.c:265): unable to send EXIT_STATUS command upstream
[mpiexec@isc17-c04] HYDT_bscu_wait_for_completion (tools/bootstrap/utils/bscu_wait.c:76): one of the processes terminated badly; aborting
[mpiexec@isc17-c04] HYDT_bsci_wait_for_completion (tools/bootstrap/src/bsci_wait.c:23): launcher returned error waiting for completion
[mpiexec@isc17-c04] HYD_pmci_wait_for_completion (pm/pmiserv/pmiserv_pmci.c:218): launcher returned error waiting for completion
[mpiexec@isc17-c04] main (ui/mpich/mpiexec.c:344): process manager error waiting for completion
+ /opt/ddn/mvapich/bin/mpiexec -ppn 1 -np 2 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c04,isc17-c05 /esfs/jtacquaviva/git/ime-evaluation/drop_caches.sh
+ /opt/ddn/mvapich/bin/mpiexec -ppn 1 -np 2 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c04,isc17-c05 /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 102400 -b 141180272640 -D 120 -a MPIIO -e -g -z -k -o /esfs/jtacquaviva/file_read -r
+ tee -a ./output/COUNT:1#NN:2#PPN:1#API:MPIIO#T:102400.txt
IOR-3.0.1: MPI Coordinated Test of Parallel I/O
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
[cli_0]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 0
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
[cli_1]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 1
===================================================================================
= BAD TERMINATION OF ONE OF YOUR APPLICATION PROCESSES
= PID 30373 RUNNING AT isc17-c04
= EXIT CODE: 255
= CLEANING UP REMAINING PROCESSES
= YOU CAN IGNORE THE BELOW CLEANUP MESSAGES
===================================================================================
+ set +x
/esfs/jtacquaviva/ioperf
stripe_count: 4 stripe_size: 1048576 stripe_offset: -1

@@ -1,46 +0,0 @@
+ /opt/ddn/mvapich/bin/mpiexec -ppn 1 -np 2 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c04,isc17-c05 /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 102400 -b 141180272640 -D 120 -a POSIX -F -e -g -z -k -o /esfs/jtacquaviva/ioperf/file_write -w
+ tee -a ./output/COUNT:1#NN:2#PPN:1#API:POSIX#T:102400.txt
IOR-3.0.1: MPI Coordinated Test of Parallel I/O
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
[cli_0]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 0
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
[cli_1]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 1
===================================================================================
= BAD TERMINATION OF ONE OF YOUR APPLICATION PROCESSES
= PID 30323 RUNNING AT isc17-c04
= EXIT CODE: 255
= CLEANING UP REMAINING PROCESSES
= YOU CAN IGNORE THE BELOW CLEANUP MESSAGES
===================================================================================
+ /opt/ddn/mvapich/bin/mpiexec -ppn 1 -np 2 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c04,isc17-c05 /esfs/jtacquaviva/git/ime-evaluation/drop_caches.sh
+ /opt/ddn/mvapich/bin/mpiexec -ppn 1 -np 2 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c04,isc17-c05 /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 102400 -b 141180272640 -D 120 -a POSIX -F -e -g -z -k -o /esfs/jtacquaviva/indread2/file -r
+ tee -a ./output/COUNT:1#NN:2#PPN:1#API:POSIX#T:102400.txt
IOR-3.0.1: MPI Coordinated Test of Parallel I/O
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
[cli_0]: [cli_1]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 1
aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 0
===================================================================================
= BAD TERMINATION OF ONE OF YOUR APPLICATION PROCESSES
= PID 31974 RUNNING AT isc17-c05
= EXIT CODE: 255
= CLEANING UP REMAINING PROCESSES
= YOU CAN IGNORE THE BELOW CLEANUP MESSAGES
===================================================================================
[proxy:0:0@isc17-c04] HYDU_sock_write (utils/sock/sock.c:286): write error (Broken pipe)
[proxy:0:0@isc17-c04] main (pm/pmiserv/pmip.c:265): unable to send EXIT_STATUS command upstream
[mpiexec@isc17-c04] HYDT_bscu_wait_for_completion (tools/bootstrap/utils/bscu_wait.c:76): one of the processes terminated badly; aborting
[mpiexec@isc17-c04] HYDT_bsci_wait_for_completion (tools/bootstrap/src/bsci_wait.c:23): launcher returned error waiting for completion
[mpiexec@isc17-c04] HYD_pmci_wait_for_completion (pm/pmiserv/pmiserv_pmci.c:218): launcher returned error waiting for completion
[mpiexec@isc17-c04] main (ui/mpich/mpiexec.c:344): process manager error waiting for completion
+ set +x
/esfs/jtacquaviva/ioperf
stripe_count: 4 stripe_size: 1048576 stripe_offset: -1

@@ -1,59 +0,0 @@
+ /opt/ddn/mvapich/bin/mpiexec -ppn 2 -np 4 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c04,isc17-c05 /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 102400 -b 70590136320 -D 120 -a MPIIO -e -g -z -k -o /esfs/jtacquaviva/ioperf/file_write -w
+ tee -a ./output/COUNT:1#NN:2#PPN:2#API:MPIIO#T:102400.txt
IOR-3.0.1: MPI Coordinated Test of Parallel I/O
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
[cli_0]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 0
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
[cli_1]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 1
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
[cli_3]: ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 3
[cli_2]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 2
===================================================================================
= BAD TERMINATION OF ONE OF YOUR APPLICATION PROCESSES
= PID 30284 RUNNING AT isc17-c04
= EXIT CODE: 255
= CLEANING UP REMAINING PROCESSES
= YOU CAN IGNORE THE BELOW CLEANUP MESSAGES
===================================================================================
+ /opt/ddn/mvapich/bin/mpiexec -ppn 2 -np 4 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c04,isc17-c05 /esfs/jtacquaviva/git/ime-evaluation/drop_caches.sh
+ /opt/ddn/mvapich/bin/mpiexec -ppn 2 -np 4 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c04,isc17-c05 /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 102400 -b 70590136320 -D 120 -a MPIIO -e -g -z -k -o /esfs/jtacquaviva/file_read -r
+ tee -a ./output/COUNT:1#NN:2#PPN:2#API:MPIIO#T:102400.txt
IOR-3.0.1: MPI Coordinated Test of Parallel I/O
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
[cli_0]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 0
[cli_1]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 1
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
[cli_3]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 3
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
[cli_2]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 2
===================================================================================
= BAD TERMINATION OF ONE OF YOUR APPLICATION PROCESSES
= PID 31909 RUNNING AT isc17-c05
= EXIT CODE: 255
= CLEANING UP REMAINING PROCESSES
= YOU CAN IGNORE THE BELOW CLEANUP MESSAGES
===================================================================================
[proxy:0:0@isc17-c04] HYD_pmcd_pmip_control_cmd_cb (pm/pmiserv/pmip_cb.c:912): assert (!closed) failed
[proxy:0:0@isc17-c04] HYDT_dmxu_poll_wait_for_event (tools/demux/demux_poll.c:76): callback returned error status
[proxy:0:0@isc17-c04] main (pm/pmiserv/pmip.c:256): demux engine error waiting for event
[mpiexec@isc17-c04] HYDT_bscu_wait_for_completion (tools/bootstrap/utils/bscu_wait.c:76): one of the processes terminated badly; aborting
[mpiexec@isc17-c04] HYDT_bsci_wait_for_completion (tools/bootstrap/src/bsci_wait.c:23): launcher returned error waiting for completion
[mpiexec@isc17-c04] HYD_pmci_wait_for_completion (pm/pmiserv/pmiserv_pmci.c:218): launcher returned error waiting for completion
[mpiexec@isc17-c04] main (ui/mpich/mpiexec.c:344): process manager error waiting for completion
+ set +x
/esfs/jtacquaviva/ioperf
stripe_count: 4 stripe_size: 1048576 stripe_offset: -1

@@ -1,59 +0,0 @@
+ /opt/ddn/mvapich/bin/mpiexec -ppn 2 -np 4 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c04,isc17-c05 /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 102400 -b 70590136320 -D 120 -a POSIX -F -e -g -z -k -o /esfs/jtacquaviva/ioperf/file_write -w
+ tee -a ./output/COUNT:1#NN:2#PPN:2#API:POSIX#T:102400.txt
IOR-3.0.1: MPI Coordinated Test of Parallel I/O
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
[cli_0]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 0
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
[cli_1]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 1
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
[cli_2]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 2
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
[cli_3]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 3
===================================================================================
= BAD TERMINATION OF ONE OF YOUR APPLICATION PROCESSES
= PID 30245 RUNNING AT isc17-c04
= EXIT CODE: 255
= CLEANING UP REMAINING PROCESSES
= YOU CAN IGNORE THE BELOW CLEANUP MESSAGES
===================================================================================
+ /opt/ddn/mvapich/bin/mpiexec -ppn 2 -np 4 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c04,isc17-c05 /esfs/jtacquaviva/git/ime-evaluation/drop_caches.sh
+ /opt/ddn/mvapich/bin/mpiexec -ppn 2 -np 4 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c04,isc17-c05 /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 102400 -b 70590136320 -D 120 -a POSIX -F -e -g -z -k -o /esfs/jtacquaviva/indread2/file -r
+ tee -a ./output/COUNT:1#NN:2#PPN:2#API:POSIX#T:102400.txt
IOR-3.0.1: MPI Coordinated Test of Parallel I/O
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
[cli_0]: [cli_1]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 1
[cli_3]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 3
aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 0
[cli_2]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 2
===================================================================================
= BAD TERMINATION OF ONE OF YOUR APPLICATION PROCESSES
= PID 30260 RUNNING AT isc17-c04
= EXIT CODE: 255
= CLEANING UP REMAINING PROCESSES
= YOU CAN IGNORE THE BELOW CLEANUP MESSAGES
===================================================================================
[proxy:0:1@isc17-c05] HYD_pmcd_pmip_control_cmd_cb (pm/pmiserv/pmip_cb.c:912): assert (!closed) failed
[proxy:0:1@isc17-c05] HYDT_dmxu_poll_wait_for_event (tools/demux/demux_poll.c:76): callback returned error status
[proxy:0:1@isc17-c05] main (pm/pmiserv/pmip.c:256): demux engine error waiting for event
[mpiexec@isc17-c04] HYDT_bscu_wait_for_completion (tools/bootstrap/utils/bscu_wait.c:76): one of the processes terminated badly; aborting
[mpiexec@isc17-c04] HYDT_bsci_wait_for_completion (tools/bootstrap/src/bsci_wait.c:23): launcher returned error waiting for completion
[mpiexec@isc17-c04] HYD_pmci_wait_for_completion (pm/pmiserv/pmiserv_pmci.c:218): launcher returned error waiting for completion
[mpiexec@isc17-c04] main (ui/mpich/mpiexec.c:344): process manager error waiting for completion
+ set +x
/esfs/jtacquaviva/ioperf
stripe_count: 4 stripe_size: 1048576 stripe_offset: -1

@@ -1,85 +0,0 @@
+ /opt/ddn/mvapich/bin/mpiexec -ppn 4 -np 8 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c04,isc17-c05 /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 102400 -b 35295068160 -D 120 -a MPIIO -e -g -z -k -o /esfs/jtacquaviva/ioperf/file_write -w
+ tee -a ./output/COUNT:1#NN:2#PPN:4#API:MPIIO#T:102400.txt
IOR-3.0.1: MPI Coordinated Test of Parallel I/O
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
[cli_0]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 0
[cli_1]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 1
[cli_2]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 2
[cli_3]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 3
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
[cli_4]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 4
[cli_5]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 5
[cli_6]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 6
[cli_7]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 7
tee: standard outputior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
: Resource temporarily unavailable
===================================================================================
= BAD TERMINATION OF ONE OF YOUR APPLICATION PROCESSES
= PID 30192 RUNNING AT isc17-c04
= EXIT CODE: 255
= CLEANING UP REMAINING PROCESSES
= YOU CAN IGNORE THE BELOW CLEANUP MESSAGES
===================================================================================
tee: write error
+ /opt/ddn/mvapich/bin/mpiexec -ppn 4 -np 8 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c04,isc17-c05 /esfs/jtacquaviva/git/ime-evaluation/drop_caches.sh
+ /opt/ddn/mvapich/bin/mpiexec -ppn 4 -np 8 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c04,isc17-c05 /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 102400 -b 35295068160 -D 120 -a MPIIO -e -g -z -k -o /esfs/jtacquaviva/file_read -r
+ tee -a ./output/COUNT:1#NN:2#PPN:4#API:MPIIO#T:102400.txt
IOR-3.0.1: MPI Coordinated Test of Parallel I/O
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
[cli_5]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 5
[cli_7]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 7
[cli_4]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 4
[cli_6]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 6
[cli_0]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 0
[cli_1]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 1
[cli_2]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 2
[cli_3]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 3
===================================================================================
= BAD TERMINATION OF ONE OF YOUR APPLICATION PROCESSES
= PID 30218 RUNNING AT isc17-c04
= EXIT CODE: 255
= CLEANING UP REMAINING PROCESSES
= YOU CAN IGNORE THE BELOW CLEANUP MESSAGES
===================================================================================
[proxy:0:1@isc17-c05] HYD_pmcd_pmip_control_cmd_cb (pm/pmiserv/pmip_cb.c:912): assert (!closed) failed
[proxy:0:1@isc17-c05] HYDT_dmxu_poll_wait_for_event (tools/demux/demux_poll.c:76): callback returned error status
[proxy:0:1@isc17-c05] main (pm/pmiserv/pmip.c:256): demux engine error waiting for event
[mpiexec@isc17-c04] HYDT_bscu_wait_for_completion (tools/bootstrap/utils/bscu_wait.c:76): one of the processes terminated badly; aborting
[mpiexec@isc17-c04] HYDT_bsci_wait_for_completion (tools/bootstrap/src/bsci_wait.c:23): launcher returned error waiting for completion
[mpiexec@isc17-c04] HYD_pmci_wait_for_completion (pm/pmiserv/pmiserv_pmci.c:218): launcher returned error waiting for completion
[mpiexec@isc17-c04] main (ui/mpich/mpiexec.c:344): process manager error waiting for completion
+ set +x
/esfs/jtacquaviva/ioperf
stripe_count: 4 stripe_size: 1048576 stripe_offset: -1

@@ -1,91 +0,0 @@
+ /opt/ddn/mvapich/bin/mpiexec -ppn 4 -np 8 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c04,isc17-c05 /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 102400 -b 35295068160 -D 120 -a POSIX -F -e -g -z -k -o /esfs/jtacquaviva/ioperf/file_write -w
+ tee -a ./output/COUNT:1#NN:2#PPN:4#API:POSIX#T:102400.txt
IOR-3.0.1: MPI Coordinated Test of Parallel I/O
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
[cli_0]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 0
[cli_1]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 1
[cli_2]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 2
[cli_3]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 3
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
[cli_5]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 5
[cli_7]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 7
[cli_4]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 4
[cli_6]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 6
tee: standard outputior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
: Resource temporarily unavailable
===================================================================================
= BAD TERMINATION OF ONE OF YOUR APPLICATION PROCESSES
= PID 31610 RUNNING AT isc17-c05
= EXIT CODE: 255
= CLEANING UP REMAINING PROCESSES
= YOU CAN IGNORE THE BELOW CLEANUP MESSAGES
===================================================================================
[proxy:0:0@isc17-c04] HYDU_sock_write (utils/sock/sock.c:286): write error (Broken pipe)
[proxy:0:0@isc17-c04] main (pm/pmiserv/pmip.c:265): unable to send EXIT_STATUS command upstream
[mpiexec@isc17-c04] HYDT_bscu_wait_for_completion (tools/bootstrap/utils/bscu_wait.c:76): one of the processes terminated badly; aborting
[mpiexec@isc17-c04] HYDT_bsci_wait_for_completion (tools/bootstrap/src/bsci_wait.c:23): launcher returned error waiting for completion
[mpiexec@isc17-c04] HYD_pmci_wait_for_completion (pm/pmiserv/pmiserv_pmci.c:218): launcher returned error waiting for completion
[mpiexec@isc17-c04] main (ui/mpich/mpiexec.c:344): process manager error waiting for completion
tee: write error
+ /opt/ddn/mvapich/bin/mpiexec -ppn 4 -np 8 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c04,isc17-c05 /esfs/jtacquaviva/git/ime-evaluation/drop_caches.sh
+ /opt/ddn/mvapich/bin/mpiexec -ppn 4 -np 8 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c04,isc17-c05 /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 102400 -b 35295068160 -D 120 -a POSIX -F -e -g -z -k -o /esfs/jtacquaviva/indread2/file -r
+ tee -a ./output/COUNT:1#NN:2#PPN:4#API:POSIX#T:102400.txt
IOR-3.0.1: MPI Coordinated Test of Parallel I/O
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
[cli_5]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 5
[cli_7]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 7
[cli_4]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 4
[cli_6]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 6
[cli_0]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 0
[cli_1]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 1
[cli_3]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 3
[cli_2]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 2
===================================================================================
= BAD TERMINATION OF ONE OF YOUR APPLICATION PROCESSES
= PID 31673 RUNNING AT isc17-c05
= EXIT CODE: 255
= CLEANING UP REMAINING PROCESSES
= YOU CAN IGNORE THE BELOW CLEANUP MESSAGES
===================================================================================
[proxy:0:0@isc17-c04] HYD_pmcd_pmip_control_cmd_cb (pm/pmiserv/pmip_cb.c:912): assert (!closed) failed
[proxy:0:0@isc17-c04] HYDT_dmxu_poll_wait_for_event (tools/demux/demux_poll.c:76): callback returned error status
[proxy:0:0@isc17-c04] main (pm/pmiserv/pmip.c:256): demux engine error waiting for event
[mpiexec@isc17-c04] HYDT_bscu_wait_for_completion (tools/bootstrap/utils/bscu_wait.c:76): one of the processes terminated badly; aborting
[mpiexec@isc17-c04] HYDT_bsci_wait_for_completion (tools/bootstrap/src/bsci_wait.c:23): launcher returned error waiting for completion
[mpiexec@isc17-c04] HYD_pmci_wait_for_completion (pm/pmiserv/pmiserv_pmci.c:218): launcher returned error waiting for completion
[mpiexec@isc17-c04] main (ui/mpich/mpiexec.c:344): process manager error waiting for completion
+ set +x
/esfs/jtacquaviva/ioperf
stripe_count: 4 stripe_size: 1048576 stripe_offset: -1
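Note: the aborts above are a parameter mismatch rather than a storage fault. With -t 102400 the per-process block size 35295068160 (the old run.sh formula 132*1024*1024*1020 bytes divided by PPN=4) is not an exact multiple of the transfer size, so ior refuses to start. A minimal check of the values taken from the command line above, pasted into a shell:

t=102400          # transfer size in bytes (-t)
b=35295068160     # per-process block size in bytes (-b), i.e. 132*1024*1024*1020/4
echo $(( b % t )) # prints 40960: not a multiple, hence "block size must be a multiple of transfer size"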
@@ -1,105 +0,0 @@
+ /opt/ddn/mvapich/bin/mpiexec -ppn 6 -np 12 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c04,isc17-c05 /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 102400 -b 23530045440 -D 120 -a MPIIO -e -g -z -k -o /esfs/jtacquaviva/ioperf/file_write -w
+ tee -a ./output/COUNT:1#NN:2#PPN:6#API:MPIIO#T:102400.txt
IOR-3.0.1: MPI Coordinated Test of Parallel I/O
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
[cli_7]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 7
[cli_3]: [cli_9]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 9
[cli_5]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 5
[cli_11]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 11
[cli_1]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 1
[cli_6]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 6
aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 3
[cli_8]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 8
[cli_2]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 2
[cli_10]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 10
[cli_4]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 4
[cli_0]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 0
===================================================================================
= BAD TERMINATION OF ONE OF YOUR APPLICATION PROCESSES
= PID 31487 RUNNING AT isc17-c05
= EXIT CODE: 255
= CLEANING UP REMAINING PROCESSES
= YOU CAN IGNORE THE BELOW CLEANUP MESSAGES
===================================================================================
[proxy:0:0@isc17-c04] HYDU_sock_write (utils/sock/sock.c:286): write error (Broken pipe)
[proxy:0:0@isc17-c04] main (pm/pmiserv/pmip.c:265): unable to send EXIT_STATUS command upstream
+ /opt/ddn/mvapich/bin/mpiexec -ppn 6 -np 12 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c04,isc17-c05 /esfs/jtacquaviva/git/ime-evaluation/drop_caches.sh
+ /opt/ddn/mvapich/bin/mpiexec -ppn 6 -np 12 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c04,isc17-c05 /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 102400 -b 23530045440 -D 120 -a MPIIO -e -g -z -k -o /esfs/jtacquaviva/file_read -r
+ tee -a ./output/COUNT:1#NN:2#PPN:6#API:MPIIO#T:102400.txt
IOR-3.0.1: MPI Coordinated Test of Parallel I/O
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
[cli_6]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 6
[cli_7]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 7
[cli_8]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 8
[cli_10]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 10
[cli_11]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 11
[cli_9]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 9
[cli_1]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 1
[cli_5]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 5
[cli_0]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 0
[cli_2]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 2
[cli_3]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 3
[cli_4]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 4
===================================================================================
= BAD TERMINATION OF ONE OF YOUR APPLICATION PROCESSES
= PID 31572 RUNNING AT isc17-c05
= EXIT CODE: 255
= CLEANING UP REMAINING PROCESSES
= YOU CAN IGNORE THE BELOW CLEANUP MESSAGES
===================================================================================
[proxy:0:0@isc17-c04] HYD_pmcd_pmip_control_cmd_cb (pm/pmiserv/pmip_cb.c:912): assert (!closed) failed
[proxy:0:0@isc17-c04] HYDT_dmxu_poll_wait_for_event (tools/demux/demux_poll.c:76): callback returned error status
[proxy:0:0@isc17-c04] main (pm/pmiserv/pmip.c:256): demux engine error waiting for event
+ set +x
/esfs/jtacquaviva/ioperf
stripe_count: 4 stripe_size: 1048576 stripe_offset: -1
@@ -1,107 +0,0 @@
+ /opt/ddn/mvapich/bin/mpiexec -ppn 6 -np 12 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c04,isc17-c05 /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 102400 -b 23530045440 -D 120 -a POSIX -F -e -g -z -k -o /esfs/jtacquaviva/ioperf/file_write -w
+ tee -a ./output/COUNT:1#NN:2#PPN:6#API:POSIX#T:102400.txt
IOR-3.0.1: MPI Coordinated Test of Parallel I/O
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
[cli_0]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 0
[cli_1]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 1
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
[cli_4]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 4
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
[cli_5]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 5
[cli_6]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 6
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
[cli_2]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 2
[cli_7]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 7
[cli_3]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 3
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
[cli_11]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 11
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
[cli_8]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 8
[cli_10]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 10
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
[cli_9]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 9
===================================================================================
= BAD TERMINATION OF ONE OF YOUR APPLICATION PROCESSES
= PID 29916 RUNNING AT isc17-c04
= EXIT CODE: 255
= CLEANING UP REMAINING PROCESSES
= YOU CAN IGNORE THE BELOW CLEANUP MESSAGES
===================================================================================
+ /opt/ddn/mvapich/bin/mpiexec -ppn 6 -np 12 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c04,isc17-c05 /esfs/jtacquaviva/git/ime-evaluation/drop_caches.sh
+ /opt/ddn/mvapich/bin/mpiexec -ppn 6 -np 12 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c04,isc17-c05 /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 102400 -b 23530045440 -D 120 -a POSIX -F -e -g -z -k -o /esfs/jtacquaviva/indread2/file -r
+ tee -a ./output/COUNT:1#NN:2#PPN:6#API:POSIX#T:102400.txt
IOR-3.0.1: MPI Coordinated Test of Parallel I/O
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
[cli_7]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 7
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
[cli_11]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 11
[cli_9]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 9
[cli_6]: [cli_8]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 8
[cli_10]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 10
aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 6
[cli_0]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 0
[cli_1]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 1
[cli_2]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 2
[cli_3]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 3
[cli_4]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 4
[cli_5]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 5
===================================================================================
= BAD TERMINATION OF ONE OF YOUR APPLICATION PROCESSES
= PID 31445 RUNNING AT isc17-c05
= EXIT CODE: 255
= CLEANING UP REMAINING PROCESSES
= YOU CAN IGNORE THE BELOW CLEANUP MESSAGES
===================================================================================
[proxy:0:0@isc17-c04] HYD_pmcd_pmip_control_cmd_cb (pm/pmiserv/pmip_cb.c:912): assert (!closed) failed
[proxy:0:0@isc17-c04] HYDT_dmxu_poll_wait_for_event (tools/demux/demux_poll.c:76): callback returned error status
[proxy:0:0@isc17-c04] main (pm/pmiserv/pmip.c:256): demux engine error waiting for event
[mpiexec@isc17-c04] HYDT_bscu_wait_for_completion (tools/bootstrap/utils/bscu_wait.c:76): one of the processes terminated badly; aborting
[mpiexec@isc17-c04] HYDT_bsci_wait_for_completion (tools/bootstrap/src/bsci_wait.c:23): launcher returned error waiting for completion
[mpiexec@isc17-c04] HYD_pmci_wait_for_completion (pm/pmiserv/pmiserv_pmci.c:218): launcher returned error waiting for completion
[mpiexec@isc17-c04] main (ui/mpich/mpiexec.c:344): process manager error waiting for completion
+ set +x
/esfs/jtacquaviva/ioperf
stripe_count: 4 stripe_size: 1048576 stripe_offset: -1
@@ -1,128 +0,0 @@
+ /opt/ddn/mvapich/bin/mpiexec -ppn 8 -np 16 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c04,isc17-c05 /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 102400 -b 17647534080 -D 120 -a MPIIO -e -g -z -k -o /esfs/jtacquaviva/ioperf/file_write -w
+ tee -a ./output/COUNT:1#NN:2#PPN:8#API:MPIIO#T:102400.txt
IOR-3.0.1: MPI Coordinated Test of Parallel I/O
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
[cli_8]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 8
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
[cli_10]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 10
[cli_12]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 12
[cli_14]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 14
[cli_9]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 9
[cli_15]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 15
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
[cli_11]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 11
[cli_13]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 13
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
[cli_2]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 2
[cli_0]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 0
[cli_1]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 1
[cli_3]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 3
[cli_4]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 4
[cli_5]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 5
[cli_6]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 6
[cli_7]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 7
===================================================================================
= BAD TERMINATION OF ONE OF YOUR APPLICATION PROCESSES
= PID 31253 RUNNING AT isc17-c05
= EXIT CODE: 255
= CLEANING UP REMAINING PROCESSES
= YOU CAN IGNORE THE BELOW CLEANUP MESSAGES
===================================================================================
[proxy:0:0@isc17-c04] HYDU_sock_write (utils/sock/sock.c:286): write error (Broken pipe)
[proxy:0:0@isc17-c04] main (pm/pmiserv/pmip.c:265): unable to send EXIT_STATUS command upstream
+ /opt/ddn/mvapich/bin/mpiexec -ppn 8 -np 16 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c04,isc17-c05 /esfs/jtacquaviva/git/ime-evaluation/drop_caches.sh
+ /opt/ddn/mvapich/bin/mpiexec -ppn 8 -np 16 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c04,isc17-c05 /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 102400 -b 17647534080 -D 120 -a MPIIO -e -g -z -k -o /esfs/jtacquaviva/file_read -r
+ tee -a ./output/COUNT:1#NN:2#PPN:8#API:MPIIO#T:102400.txt
IOR-3.0.1: MPI Coordinated Test of Parallel I/O
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
[cli_8]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 8
[cli_9]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 9
[cli_10]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 10
[cli_11]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 11
[cli_12]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 12
[cli_13]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 13
[cli_14]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 14
[cli_15]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 15
[cli_0]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 0
[cli_1]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 1
[cli_6]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 6
[cli_7]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 7
[cli_2]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 2
[cli_3]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 3
[cli_5]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 5
[cli_4]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 4
===================================================================================
= BAD TERMINATION OF ONE OF YOUR APPLICATION PROCESSES
= PID 31334 RUNNING AT isc17-c05
= EXIT CODE: 255
= CLEANING UP REMAINING PROCESSES
= YOU CAN IGNORE THE BELOW CLEANUP MESSAGES
===================================================================================
[proxy:0:0@isc17-c04] HYDU_sock_write (utils/sock/sock.c:286): write error (Broken pipe)
[proxy:0:0@isc17-c04] main (pm/pmiserv/pmip.c:265): unable to send EXIT_STATUS command upstream
+ set +x
/esfs/jtacquaviva/ioperf
stripe_count: 4 stripe_size: 1048576 stripe_offset: -1
@@ -1,132 +0,0 @@
+ /opt/ddn/mvapich/bin/mpiexec -ppn 8 -np 16 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c04,isc17-c05 /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 102400 -b 17647534080 -D 120 -a POSIX -F -e -g -z -k -o /esfs/jtacquaviva/ioperf/file_write -w
+ tee -a ./output/COUNT:1#NN:2#PPN:8#API:POSIX#T:102400.txt
IOR-3.0.1: MPI Coordinated Test of Parallel I/O
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
[cli_1]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 1
[cli_3]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 3
[cli_6]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 6
[cli_0]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 0
[cli_2]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 2
[cli_4]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 4
[cli_5]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 5
[cli_7]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 7
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
[cli_8]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 8
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
[cli_9]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 9
[cli_10]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 10
[cli_11]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 11
[cli_12]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 12
[cli_13]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 13
[cli_14]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 14
[cli_15]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 15
===================================================================================
= BAD TERMINATION OF ONE OF YOUR APPLICATION PROCESSES
= PID 31097 RUNNING AT isc17-c05
= EXIT CODE: 255
= CLEANING UP REMAINING PROCESSES
= YOU CAN IGNORE THE BELOW CLEANUP MESSAGES
===================================================================================
[proxy:0:0@isc17-c04] HYDU_sock_write (utils/sock/sock.c:286): write error (Broken pipe)
[proxy:0:0@isc17-c04] main (pm/pmiserv/pmip.c:265): unable to send EXIT_STATUS command upstream
[mpiexec@isc17-c04] HYDT_bscu_wait_for_completion (tools/bootstrap/utils/bscu_wait.c:76): one of the processes terminated badly; aborting
[mpiexec@isc17-c04] HYDT_bsci_wait_for_completion (tools/bootstrap/src/bsci_wait.c:23): launcher returned error waiting for completion
[mpiexec@isc17-c04] HYD_pmci_wait_for_completion (pm/pmiserv/pmiserv_pmci.c:218): launcher returned error waiting for completion
[mpiexec@isc17-c04] main (ui/mpich/mpiexec.c:344): process manager error waiting for completion
+ /opt/ddn/mvapich/bin/mpiexec -ppn 8 -np 16 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c04,isc17-c05 /esfs/jtacquaviva/git/ime-evaluation/drop_caches.sh
+ /opt/ddn/mvapich/bin/mpiexec -ppn 8 -np 16 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c04,isc17-c05 /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 102400 -b 17647534080 -D 120 -a POSIX -F -e -g -z -k -o /esfs/jtacquaviva/indread2/file -r
+ tee -a ./output/COUNT:1#NN:2#PPN:8#API:POSIX#T:102400.txt
IOR-3.0.1: MPI Coordinated Test of Parallel I/O
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
[cli_8]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 8
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
[cli_9]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 9
[cli_10]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 10
[cli_11]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 11
[cli_12]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 12
[cli_13]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 13
[cli_14]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 14
[cli_15]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 15
[cli_1]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 1
[cli_2]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 2
[cli_3]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 3
[cli_4]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 4
[cli_5]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 5
[cli_6]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 6
[cli_7]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 7
[cli_0]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 0
===================================================================================
= BAD TERMINATION OF ONE OF YOUR APPLICATION PROCESSES
= PID 31211 RUNNING AT isc17-c05
= EXIT CODE: 255
= CLEANING UP REMAINING PROCESSES
= YOU CAN IGNORE THE BELOW CLEANUP MESSAGES
===================================================================================
[proxy:0:0@isc17-c04] HYDU_sock_write (utils/sock/sock.c:286): write error (Broken pipe)
[proxy:0:0@isc17-c04] main (pm/pmiserv/pmip.c:265): unable to send EXIT_STATUS command upstream
+ set +x
/esfs/jtacquaviva/ioperf
stripe_count: 4 stripe_size: 1048576 stripe_offset: -1
run.sh
@@ -41,7 +41,7 @@ API_ARR=( "POSIX" "MPIIO" )
#NN_ARR=( 4 2 1 8 10 16)
NN_ARR=( 2 1 )
PPN_ARR=( 8 6 4 2 1 )
-T_ARR=( $((10240*1024)) $((1024*1024)) $((100*1024)) $((16*1024)) )
+T_ARR=( $((10*1024*1024)) $((1*1024*1024)) $((100*1024)) $((16*1024)) )
for COUNT in $(seq 1); do
for NN in ${NN_ARR[@]}; do
@@ -68,7 +68,7 @@ for API in ${API_ARR[@]}; do
fi
-IOR_PARAMS="-i $ITERATIONS -s 1 -t $T -b $((132 * 1024 * 1024 * 1020 / $PPN)) -D $((120)) -a $API $IOR_API_OPTS -e -g -z -k"
+IOR_PARAMS="-i $ITERATIONS -s 1 -t $T -b $((4800 * 1024 * 1024 * 32 / $PPN)) -D $((120)) -a $API $IOR_API_OPTS -e -g -z -k"
ENVVAR="-genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter"
MPIEXEC_PARAMS=" -ppn $PPN -np $(($NN * $PPN)) $ENVVAR --hosts $(hosts $NN) "
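This is the actual fix behind the removed output: the old per-process block size 132*1024*1024*1020/PPN is not an exact multiple of the 100 KiB transfer size for any of the PPN values in PPN_ARR (which is what made the deleted ior runs abort), while the new 4800*1024*1024*32/PPN divides evenly by every entry in T_ARR; the T_ARR edit itself only rewrites 10240*1024 as the equivalent 10*1024*1024. A quick divisibility check, as a standalone sketch rather than part of run.sh:

for PPN in 8 6 4 2 1; do
  B=$((4800 * 1024 * 1024 * 32 / PPN))
  for T in $((10*1024*1024)) $((1024*1024)) $((100*1024)) $((16*1024)); do
    echo "PPN=$PPN T=$T remainder=$((B % T))"   # remainder is 0 for every combination
  done
done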
@@ -1,44 +0,0 @@
#!/bin/bash
function join_by() {
local IFS="$1"; shift; echo "$*";
}
cx4nodes=( isc17-c0{1..9} isc17-c{10..23} )
node_list=$( join_by , "${cx4nodes[@]}" )
IFS='' read -r -d '' script <<"EOF"
if [ -d /esfs/jtacquaviva ]; then
cx4="$(lspci | grep ConnectX-4)"
if [[ "" != $cx4 ]]; then
echo 'ok'
else
echo "no cx4"
fi
else
echo "/esfs/ is not mounted"
fi
EOF
declare -a good_nodes
for node in ${cx4nodes[@]}; do
#echo "ping $node"
status=$(ping -c1 $node | grep Unreachable)
if [[ "" == $status ]]; then
check=$(ssh $node "$script")
if [[ "ok" == ${check} ]]; then
good_nodes=( ${good_nodes[@]} $node )
else
echo "$node failed with: $check"
fi
else
echo "$node is unreachable"
fi
done
echo "GOOD NODES:"
echo "${good_nodes[@]}"