Merge branch 'master' of https://github.com/joobog/ime-evaluation
Conflicts: output/COUNT:1#NN:16#PPN:8#API:POSIX#T:10485760.txt
This commit is contained in:
commit
a1c4bcdeab
|
@ -30,8 +30,8 @@ connection = dbConnect(SQLite(), dbname=file_db)
|
|||
#dbdata = dbGetQuery(connection,'select * from p where tag=="mpio-individual"')
|
||||
#dbdata = dbGetQuery(connection,'select *, (x*y*z) as blocksize from p where count=8')
|
||||
#dbdata = dbGetQuery(connection,'select * from p where count<5')
|
||||
dbdata = dbGetQuery(connection,'select * from p where ppn==1 or ppn=4 or ppn=8')
|
||||
dbdata[,"blocksize"] = dbdata$x * dbdata$y * dbdata$z * 4
|
||||
dbdata = dbGetQuery(connection,'select * from p')
|
||||
dbdata[,"blocksize"] = dbdata$tsize
|
||||
|
||||
|
||||
summary(dbdata)
|
||||
|
@ -57,30 +57,26 @@ dims_list = data.frame(h, w, event) # df is a data frame
|
|||
fss = unique(dbdata$fs)
|
||||
for (fs in fss) {
|
||||
data1 = dbdata[fs == dbdata$fs, ]
|
||||
ifaces = unique(data1$iface)
|
||||
apis = unique(data1$api)
|
||||
|
||||
for (iface in ifaces) {
|
||||
data2 = data1[iface == data1$iface, ]
|
||||
print(fs)
|
||||
|
||||
for (api in apis) {
|
||||
data2 = data1[api == data1$api, ]
|
||||
apps = unique(data2$app)
|
||||
|
||||
print(api)
|
||||
|
||||
for (app in apps) {
|
||||
data3 = data2[app == data2$app, ]
|
||||
types = unique(data3$type)
|
||||
|
||||
print(app)
|
||||
|
||||
for (type in types) {
|
||||
data4 = data3[type == data3$type, ]
|
||||
chunkeds = unique(data4$chunked)
|
||||
data = data3[type == data3$type, ]
|
||||
|
||||
for (chunked in chunkeds) {
|
||||
data5 = data4[chunked == data4$chunked, ]
|
||||
filleds = unique(data4$filled)
|
||||
|
||||
for (filled in filleds) {
|
||||
data6 = data5[filled == data5$filled, ]
|
||||
unlimiteds = unique(data5$unlimited)
|
||||
|
||||
for (unlimited in unlimiteds) {
|
||||
data = data6[unlimited == data5$unlimited, ]
|
||||
print(type)
|
||||
|
||||
ggplot(data=data, aes(x=nn, y=write, colour=as.factor(blocksize/1024), group=blocksize), ymin=0) +
|
||||
#ggtitle("Write") +
|
||||
|
@ -93,13 +89,13 @@ data = data6[unlimited == data5$unlimited, ]
|
|||
scale_x_continuous(breaks = c(unique(data$nn))) +
|
||||
scale_color_manual(name="Blocksize in KiB: ", values=c('#999999','#E69F00', '#56B4E9', '#000000'), breaks=sort(unique(data$blocksize)/1024)) +
|
||||
#stat_summary(fun.y="median", geom="line", aes(group=factor(blocksize))) +
|
||||
stat_summary(fun.y="max", geom="line", aes(group=factor(blocksize))) +
|
||||
stat_summary(fun.y="mean", geom="line", aes(group=factor(blocksize))) +
|
||||
#geom_boxplot()
|
||||
geom_point()
|
||||
filename_eps = sprintf("%s/performance_%s_%s_%s_%s_CHUNK:%s_FILL:%s_LIM:%s_%s.eps", folder_out, app, fs, iface, type, chunked, filled, unlimited, "write")
|
||||
filename_png = sprintf("%s/performance_%s_%s_%s_%s_CHUNK:%s_FILL:%s_LIM:%s_%s.png", folder_out, app, fs, iface, type, chunked, filled, unlimited, "write")
|
||||
ggsave(filename_png, width = 6, height = 4)
|
||||
ggsave(filename_eps, width = 6, height = 4)
|
||||
filename_eps = sprintf("%s/performance_%s_%s_%s_%s_%s.eps", folder_out, app, fs, api, type, "write")
|
||||
filename_png = sprintf("%s/performance_%s_%s_%s_%s_%s.png", folder_out, app, fs, api, type, "write")
|
||||
ggsave(filename_png, width = 6, height = 10)
|
||||
ggsave(filename_eps, width = 6, height = 10)
|
||||
system(sprintf("epstopdf %s", filename_eps))
|
||||
system(sprintf("rm %s", filename_eps))
|
||||
|
||||
|
@ -114,30 +110,14 @@ data = data6[unlimited == data5$unlimited, ]
|
|||
scale_x_continuous(breaks = c(unique(data$nn))) +
|
||||
scale_color_manual(name="Blocksize in KiB: ", values=c('#999999','#E69F00', '#56B4E9', '#000000'), breaks=sort(unique(data$blocksize)/1024)) +
|
||||
#stat_summary(fun.y="median", geom="line", aes(group=factor(blocksize))) +
|
||||
stat_summary(fun.y="max", geom="line", aes(group=factor(blocksize))) +
|
||||
stat_summary(fun.y="mean", geom="line", aes(group=factor(blocksize))) +
|
||||
#geom_boxplot()
|
||||
geom_point()
|
||||
filename_eps = sprintf("%s/performance_%s_%s_%s_%s_CHUNK:%s_FILL:%s_LIM:%s_%s.eps", folder_out, app, fs, iface, type, chunked, filled, unlimited, "read")
|
||||
filename_png = sprintf("%s/performance_%s_%s_%s_%s_CHUNK:%s_FILL:%s_LIM:%s_%s.png", folder_out, app, fs, iface, type, chunked, filled, unlimited, "read")
|
||||
ggsave(filename_png, width = 6, height = 4)
|
||||
ggsave(filename_eps, width = 6, height = 4)
|
||||
filename_eps = sprintf("%s/performance_%s_%s_%s_%s_%s.eps", folder_out, app, fs, api, type, "read")
|
||||
filename_png = sprintf("%s/performance_%s_%s_%s_%s_%s.png", folder_out, app, fs, api, type, "read")
|
||||
ggsave(filename_png, width = 6, height = 10)
|
||||
ggsave(filename_eps, width = 6, height = 10)
|
||||
system(sprintf("epstopdf %s", filename_eps))
|
||||
system(sprintf("rm %s", filename_eps))
|
||||
|
||||
|
||||
#ggplot(data=data, aes(x=blocksize, y=read, colour=app, group=blocksize)) +
|
||||
# ggtitle("Read") +
|
||||
# facet_grid(ppn ~ nn, labeller = labeller(nn = as_labeller(nn_lab), ppn = as_labeller(ppn_lab))) +
|
||||
# xlab("Blocksize in KiB") +
|
||||
# ylab("Performance in MiB/s") +
|
||||
# theme(axis.text.x=element_text(angle=90, hjust=0.95, vjust=0.5)) +
|
||||
# scale_y_log10() +
|
||||
# scale_x_log10(breaks = breaks, labels=breaks/1024) +
|
||||
# geom_boxplot()
|
||||
##geom_line() +
|
||||
##geom_point()
|
||||
#filename_eps = sprintf("%s/performance_%s_%s_%s_%s_%s.eps", folder_out, app, fs, iface, type, "read")
|
||||
#ggsave(filename_eps, width = 8, height = 6)
|
||||
##system(sprintf("epstopdf %s", filename_eps))
|
||||
|
||||
}}}}}}}
|
||||
}}}}
|
||||
|
|
7
mkdb.py
7
mkdb.py
|
@ -33,6 +33,9 @@ def parse(filename, conn):
|
|||
metadata["ppn"] = int(m.group(3))
|
||||
metadata["api"] = m.group(4)
|
||||
metadata["tsize"] = m.group(5)
|
||||
metadata["fs"] = "lustre"
|
||||
metadata["app"] = "ior-default"
|
||||
metadata["type"] = "random"
|
||||
|
||||
else:
|
||||
print('couldn\'t parse', os.path.basename(filename))
|
||||
|
@ -75,7 +78,7 @@ def parse(filename, conn):
|
|||
|
||||
|
||||
for iteration,entry in data.items():
|
||||
if len(entry) == 19:
|
||||
if len(entry) == 22:
|
||||
print("Success")
|
||||
columns = ", ".join(entry.keys())
|
||||
placeholders = ':' + ', :'.join(entry.keys())
|
||||
|
@ -103,6 +106,8 @@ try:
|
|||
nn int, \
|
||||
ppn int, \
|
||||
api text, \
|
||||
fs text, \
|
||||
type text, \
|
||||
tsize float, \
|
||||
fsize float, \
|
||||
fsize_ctl txt, \
|
||||
|
|
|
@ -1,34 +0,0 @@
|
|||
+ /opt/ddn/mvapich/bin/mpiexec -ppn 1 -np 1 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c04 /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 102400 -b 141180272640 -D 120 -a MPIIO -e -g -z -k -o /esfs/jtacquaviva/ioperf/file_write -w
|
||||
+ tee -a ./output/COUNT:1#NN:1#PPN:1#API:MPIIO#T:102400.txt
|
||||
IOR-3.0.1: MPI Coordinated Test of Parallel I/O
|
||||
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
[cli_0]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 0
|
||||
|
||||
===================================================================================
|
||||
= BAD TERMINATION OF ONE OF YOUR APPLICATION PROCESSES
|
||||
= PID 24654 RUNNING AT isc17-c04
|
||||
= EXIT CODE: 255
|
||||
= CLEANING UP REMAINING PROCESSES
|
||||
= YOU CAN IGNORE THE BELOW CLEANUP MESSAGES
|
||||
===================================================================================
|
||||
+ /opt/ddn/mvapich/bin/mpiexec -ppn 1 -np 1 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c04 /esfs/jtacquaviva/git/ime-evaluation/drop_caches.sh
|
||||
+ /opt/ddn/mvapich/bin/mpiexec -ppn 1 -np 1 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c04 /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 102400 -b 141180272640 -D 120 -a MPIIO -e -g -z -k -o /esfs/jtacquaviva/file_read -r
|
||||
+ tee -a ./output/COUNT:1#NN:1#PPN:1#API:MPIIO#T:102400.txt
|
||||
IOR-3.0.1: MPI Coordinated Test of Parallel I/O
|
||||
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
[cli_0]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 0
|
||||
|
||||
===================================================================================
|
||||
= BAD TERMINATION OF ONE OF YOUR APPLICATION PROCESSES
|
||||
= PID 24662 RUNNING AT isc17-c04
|
||||
= EXIT CODE: 255
|
||||
= CLEANING UP REMAINING PROCESSES
|
||||
= YOU CAN IGNORE THE BELOW CLEANUP MESSAGES
|
||||
===================================================================================
|
||||
+ set +x
|
||||
/esfs/jtacquaviva/ioperf
|
||||
stripe_count: 2 stripe_size: 1048576 stripe_offset: -1
|
|
@ -1,34 +0,0 @@
|
|||
+ /opt/ddn/mvapich/bin/mpiexec -ppn 1 -np 1 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c04 /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 102400 -b 141180272640 -D 120 -a POSIX -F -e -g -z -k -o /esfs/jtacquaviva/ioperf/file_write -w
|
||||
+ tee -a ./output/COUNT:1#NN:1#PPN:1#API:POSIX#T:102400.txt
|
||||
IOR-3.0.1: MPI Coordinated Test of Parallel I/O
|
||||
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
[cli_0]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 0
|
||||
|
||||
===================================================================================
|
||||
= BAD TERMINATION OF ONE OF YOUR APPLICATION PROCESSES
|
||||
= PID 24625 RUNNING AT isc17-c04
|
||||
= EXIT CODE: 255
|
||||
= CLEANING UP REMAINING PROCESSES
|
||||
= YOU CAN IGNORE THE BELOW CLEANUP MESSAGES
|
||||
===================================================================================
|
||||
+ /opt/ddn/mvapich/bin/mpiexec -ppn 1 -np 1 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c04 /esfs/jtacquaviva/git/ime-evaluation/drop_caches.sh
|
||||
+ tee -a ./output/COUNT:1#NN:1#PPN:1#API:POSIX#T:102400.txt
|
||||
+ /opt/ddn/mvapich/bin/mpiexec -ppn 1 -np 1 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c04 /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 102400 -b 141180272640 -D 120 -a POSIX -F -e -g -z -k -o /esfs/jtacquaviva/indread1/file -r
|
||||
IOR-3.0.1: MPI Coordinated Test of Parallel I/O
|
||||
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
[cli_0]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 0
|
||||
|
||||
===================================================================================
|
||||
= BAD TERMINATION OF ONE OF YOUR APPLICATION PROCESSES
|
||||
= PID 24633 RUNNING AT isc17-c04
|
||||
= EXIT CODE: 255
|
||||
= CLEANING UP REMAINING PROCESSES
|
||||
= YOU CAN IGNORE THE BELOW CLEANUP MESSAGES
|
||||
===================================================================================
|
||||
+ set +x
|
||||
/esfs/jtacquaviva/ioperf
|
||||
stripe_count: 2 stripe_size: 1048576 stripe_offset: -1
|
|
@ -1,40 +0,0 @@
|
|||
+ /opt/ddn/mvapich/bin/mpiexec -ppn 2 -np 2 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c04 /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 102400 -b 70590136320 -D 120 -a MPIIO -e -g -z -k -o /esfs/jtacquaviva/ioperf/file_write -w
|
||||
+ tee -a ./output/COUNT:1#NN:1#PPN:2#API:MPIIO#T:102400.txt
|
||||
IOR-3.0.1: MPI Coordinated Test of Parallel I/O
|
||||
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
[cli_0]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 0
|
||||
[cli_1]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 1
|
||||
|
||||
===================================================================================
|
||||
= BAD TERMINATION OF ONE OF YOUR APPLICATION PROCESSES
|
||||
= PID 24594 RUNNING AT isc17-c04
|
||||
= EXIT CODE: 255
|
||||
= CLEANING UP REMAINING PROCESSES
|
||||
= YOU CAN IGNORE THE BELOW CLEANUP MESSAGES
|
||||
===================================================================================
|
||||
+ /opt/ddn/mvapich/bin/mpiexec -ppn 2 -np 2 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c04 /esfs/jtacquaviva/git/ime-evaluation/drop_caches.sh
|
||||
+ /opt/ddn/mvapich/bin/mpiexec -ppn 2 -np 2 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c04 /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 102400 -b 70590136320 -D 120 -a MPIIO -e -g -z -k -o /esfs/jtacquaviva/file_read -r
|
||||
+ tee -a ./output/COUNT:1#NN:1#PPN:2#API:MPIIO#T:102400.txt
|
||||
IOR-3.0.1: MPI Coordinated Test of Parallel I/O
|
||||
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
[cli_0]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 0
|
||||
[cli_1]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 1
|
||||
|
||||
===================================================================================
|
||||
= BAD TERMINATION OF ONE OF YOUR APPLICATION PROCESSES
|
||||
= PID 24605 RUNNING AT isc17-c04
|
||||
= EXIT CODE: 255
|
||||
= CLEANING UP REMAINING PROCESSES
|
||||
= YOU CAN IGNORE THE BELOW CLEANUP MESSAGES
|
||||
===================================================================================
|
||||
+ set +x
|
||||
/esfs/jtacquaviva/ioperf
|
||||
stripe_count: 2 stripe_size: 1048576 stripe_offset: -1
|
|
@ -1,40 +0,0 @@
|
|||
+ /opt/ddn/mvapich/bin/mpiexec -ppn 2 -np 2 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c04 /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 102400 -b 70590136320 -D 120 -a POSIX -F -e -g -z -k -o /esfs/jtacquaviva/ioperf/file_write -w
|
||||
+ tee -a ./output/COUNT:1#NN:1#PPN:2#API:POSIX#T:102400.txt
|
||||
IOR-3.0.1: MPI Coordinated Test of Parallel I/O
|
||||
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
[cli_1]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 1
|
||||
[cli_0]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 0
|
||||
|
||||
===================================================================================
|
||||
= BAD TERMINATION OF ONE OF YOUR APPLICATION PROCESSES
|
||||
= PID 24555 RUNNING AT isc17-c04
|
||||
= EXIT CODE: 255
|
||||
= CLEANING UP REMAINING PROCESSES
|
||||
= YOU CAN IGNORE THE BELOW CLEANUP MESSAGES
|
||||
===================================================================================
|
||||
+ /opt/ddn/mvapich/bin/mpiexec -ppn 2 -np 2 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c04 /esfs/jtacquaviva/git/ime-evaluation/drop_caches.sh
|
||||
+ /opt/ddn/mvapich/bin/mpiexec -ppn 2 -np 2 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c04 /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 102400 -b 70590136320 -D 120 -a POSIX -F -e -g -z -k -o /esfs/jtacquaviva/indread1/file -r
|
||||
+ tee -a ./output/COUNT:1#NN:1#PPN:2#API:POSIX#T:102400.txt
|
||||
IOR-3.0.1: MPI Coordinated Test of Parallel I/O
|
||||
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
[cli_1]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 1
|
||||
[cli_0]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 0
|
||||
|
||||
===================================================================================
|
||||
= BAD TERMINATION OF ONE OF YOUR APPLICATION PROCESSES
|
||||
= PID 24566 RUNNING AT isc17-c04
|
||||
= EXIT CODE: 255
|
||||
= CLEANING UP REMAINING PROCESSES
|
||||
= YOU CAN IGNORE THE BELOW CLEANUP MESSAGES
|
||||
===================================================================================
|
||||
+ set +x
|
||||
/esfs/jtacquaviva/ioperf
|
||||
stripe_count: 2 stripe_size: 1048576 stripe_offset: -1
|
|
@ -1,54 +0,0 @@
|
|||
+ /opt/ddn/mvapich/bin/mpiexec -ppn 4 -np 4 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c04 /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 102400 -b 35295068160 -D 120 -a MPIIO -e -g -z -k -o /esfs/jtacquaviva/ioperf/file_write -w
|
||||
+ tee -a ./output/COUNT:1#NN:1#PPN:4#API:MPIIO#T:102400.txt
|
||||
IOR-3.0.1: MPI Coordinated Test of Parallel I/O
|
||||
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
[cli_1]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 1
|
||||
[cli_3]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 3
|
||||
[cli_0]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 0
|
||||
[cli_2]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 2
|
||||
tee: standard outputior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
: Resource temporarily unavailable
|
||||
|
||||
===================================================================================
|
||||
= BAD TERMINATION OF ONE OF YOUR APPLICATION PROCESSES
|
||||
= PID 24513 RUNNING AT isc17-c04
|
||||
= EXIT CODE: 255
|
||||
= CLEANING UP REMAINING PROCESSES
|
||||
= YOU CAN IGNORE THE BELOW CLEANUP MESSAGES
|
||||
===================================================================================
|
||||
tee: write error
|
||||
+ /opt/ddn/mvapich/bin/mpiexec -ppn 4 -np 4 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c04 /esfs/jtacquaviva/git/ime-evaluation/drop_caches.sh
|
||||
+ /opt/ddn/mvapich/bin/mpiexec -ppn 4 -np 4 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c04 /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 102400 -b 35295068160 -D 120 -a MPIIO -e -g -z -k -o /esfs/jtacquaviva/file_read -r
|
||||
+ tee -a ./output/COUNT:1#NN:1#PPN:4#API:MPIIO#T:102400.txt
|
||||
IOR-3.0.1: MPI Coordinated Test of Parallel I/O
|
||||
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
[cli_0]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 0
|
||||
[cli_1]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 1
|
||||
[cli_2]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 2
|
||||
[cli_3]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 3
|
||||
|
||||
===================================================================================
|
||||
= BAD TERMINATION OF ONE OF YOUR APPLICATION PROCESSES
|
||||
= PID 24532 RUNNING AT isc17-c04
|
||||
= EXIT CODE: 255
|
||||
= CLEANING UP REMAINING PROCESSES
|
||||
= YOU CAN IGNORE THE BELOW CLEANUP MESSAGES
|
||||
===================================================================================
|
||||
+ set +x
|
||||
/esfs/jtacquaviva/ioperf
|
||||
stripe_count: 2 stripe_size: 1048576 stripe_offset: -1
|
|
@ -1,52 +0,0 @@
|
|||
+ /opt/ddn/mvapich/bin/mpiexec -ppn 4 -np 4 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c04 /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 102400 -b 35295068160 -D 120 -a POSIX -F -e -g -z -k -o /esfs/jtacquaviva/ioperf/file_write -w
|
||||
+ tee -a ./output/COUNT:1#NN:1#PPN:4#API:POSIX#T:102400.txt
|
||||
IOR-3.0.1: MPI Coordinated Test of Parallel I/O
|
||||
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
[cli_1]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 1
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
[cli_3]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 3
|
||||
[cli_2]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 2
|
||||
[cli_0]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 0
|
||||
|
||||
===================================================================================
|
||||
= BAD TERMINATION OF ONE OF YOUR APPLICATION PROCESSES
|
||||
= PID 24469 RUNNING AT isc17-c04
|
||||
= EXIT CODE: 255
|
||||
= CLEANING UP REMAINING PROCESSES
|
||||
= YOU CAN IGNORE THE BELOW CLEANUP MESSAGES
|
||||
===================================================================================
|
||||
+ /opt/ddn/mvapich/bin/mpiexec -ppn 4 -np 4 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c04 /esfs/jtacquaviva/git/ime-evaluation/drop_caches.sh
|
||||
+ /opt/ddn/mvapich/bin/mpiexec -ppn 4 -np 4 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c04 /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 102400 -b 35295068160 -D 120 -a POSIX -F -e -g -z -k -o /esfs/jtacquaviva/indread1/file -r
|
||||
+ tee -a ./output/COUNT:1#NN:1#PPN:4#API:POSIX#T:102400.txt
|
||||
IOR-3.0.1: MPI Coordinated Test of Parallel I/O
|
||||
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
[cli_0]: [cli_2]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 2
|
||||
aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 0
|
||||
[cli_1]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 1
|
||||
[cli_3]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 3
|
||||
|
||||
===================================================================================
|
||||
= BAD TERMINATION OF ONE OF YOUR APPLICATION PROCESSES
|
||||
= PID 24487 RUNNING AT isc17-c04
|
||||
= EXIT CODE: 255
|
||||
= CLEANING UP REMAINING PROCESSES
|
||||
= YOU CAN IGNORE THE BELOW CLEANUP MESSAGES
|
||||
===================================================================================
|
||||
+ set +x
|
||||
/esfs/jtacquaviva/ioperf
|
||||
stripe_count: 2 stripe_size: 1048576 stripe_offset: -1
|
|
@ -1,64 +0,0 @@
|
|||
+ /opt/ddn/mvapich/bin/mpiexec -ppn 6 -np 6 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c04 /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 102400 -b 23530045440 -D 120 -a MPIIO -e -g -z -k -o /esfs/jtacquaviva/ioperf/file_write -w
|
||||
+ tee -a ./output/COUNT:1#NN:1#PPN:6#API:MPIIO#T:102400.txt
|
||||
IOR-3.0.1: MPI Coordinated Test of Parallel I/O
|
||||
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
[cli_1]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 1
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
[cli_3]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 3
|
||||
[cli_5]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 5
|
||||
[cli_0]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 0
|
||||
[cli_2]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 2
|
||||
[cli_4]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 4
|
||||
|
||||
===================================================================================
|
||||
= BAD TERMINATION OF ONE OF YOUR APPLICATION PROCESSES
|
||||
= PID 24411 RUNNING AT isc17-c04
|
||||
= EXIT CODE: 255
|
||||
= CLEANING UP REMAINING PROCESSES
|
||||
= YOU CAN IGNORE THE BELOW CLEANUP MESSAGES
|
||||
===================================================================================
|
||||
+ /opt/ddn/mvapich/bin/mpiexec -ppn 6 -np 6 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c04 /esfs/jtacquaviva/git/ime-evaluation/drop_caches.sh
|
||||
+ /opt/ddn/mvapich/bin/mpiexec -ppn 6 -np 6 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c04 /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 102400 -b 23530045440 -D 120 -a MPIIO -e -g -z -k -o /esfs/jtacquaviva/file_read -r
|
||||
+ tee -a ./output/COUNT:1#NN:1#PPN:6#API:MPIIO#T:102400.txt
|
||||
IOR-3.0.1: MPI Coordinated Test of Parallel I/O
|
||||
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
[cli_1]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 1
|
||||
[cli_3]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 3
|
||||
[cli_5]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 5
|
||||
[cli_2]: [cli_4]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 4
|
||||
[cli_0]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 0
|
||||
aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 2
|
||||
|
||||
===================================================================================
|
||||
= BAD TERMINATION OF ONE OF YOUR APPLICATION PROCESSES
|
||||
= PID 24440 RUNNING AT isc17-c04
|
||||
= EXIT CODE: 255
|
||||
= CLEANING UP REMAINING PROCESSES
|
||||
= YOU CAN IGNORE THE BELOW CLEANUP MESSAGES
|
||||
===================================================================================
|
||||
+ set +x
|
||||
/esfs/jtacquaviva/ioperf
|
||||
stripe_count: 2 stripe_size: 1048576 stripe_offset: -1
|
|
@ -1,66 +0,0 @@
|
|||
+ /opt/ddn/mvapich/bin/mpiexec -ppn 6 -np 6 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c04 /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 102400 -b 23530045440 -D 120 -a POSIX -F -e -g -z -k -o /esfs/jtacquaviva/ioperf/file_write -w
|
||||
+ tee -a ./output/COUNT:1#NN:1#PPN:6#API:POSIX#T:102400.txt
|
||||
IOR-3.0.1: MPI Coordinated Test of Parallel I/O
|
||||
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
[cli_1]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 1
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
[cli_3]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 3
|
||||
[cli_5]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 5
|
||||
[cli_0]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 0
|
||||
[cli_2]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 2
|
||||
[cli_4]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 4
|
||||
|
||||
===================================================================================
|
||||
= BAD TERMINATION OF ONE OF YOUR APPLICATION PROCESSES
|
||||
= PID 24325 RUNNING AT isc17-c04
|
||||
= EXIT CODE: 255
|
||||
= CLEANING UP REMAINING PROCESSES
|
||||
= YOU CAN IGNORE THE BELOW CLEANUP MESSAGES
|
||||
===================================================================================
|
||||
+ /opt/ddn/mvapich/bin/mpiexec -ppn 6 -np 6 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c04 /esfs/jtacquaviva/git/ime-evaluation/drop_caches.sh
|
||||
+ /opt/ddn/mvapich/bin/mpiexec -ppn 6 -np 6 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c04 /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 102400 -b 23530045440 -D 120 -a POSIX -F -e -g -z -k -o /esfs/jtacquaviva/indread1/file -r
|
||||
+ tee -a ./output/COUNT:1#NN:1#PPN:6#API:POSIX#T:102400.txt
|
||||
IOR-3.0.1: MPI Coordinated Test of Parallel I/O
|
||||
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
[cli_1]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 1
|
||||
[cli_3]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 3
|
||||
[cli_5]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 5
|
||||
[cli_0]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 0
|
||||
[cli_2]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 2
|
||||
[cli_4]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 4
|
||||
tee: standard outputior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
: Resource temporarily unavailable
|
||||
|
||||
===================================================================================
|
||||
= BAD TERMINATION OF ONE OF YOUR APPLICATION PROCESSES
|
||||
= PID 24351 RUNNING AT isc17-c04
|
||||
= EXIT CODE: 255
|
||||
= CLEANING UP REMAINING PROCESSES
|
||||
= YOU CAN IGNORE THE BELOW CLEANUP MESSAGES
|
||||
===================================================================================
|
||||
tee: write error
|
||||
+ set +x
|
||||
/esfs/jtacquaviva/ioperf
|
||||
stripe_count: 2 stripe_size: 1048576 stripe_offset: -1
|
|
@ -1,76 +0,0 @@
|
|||
+ /opt/ddn/mvapich/bin/mpiexec -ppn 8 -np 8 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c04 /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 102400 -b 17647534080 -D 120 -a MPIIO -e -g -z -k -o /esfs/jtacquaviva/ioperf/file_write -w
|
||||
+ tee -a ./output/COUNT:1#NN:1#PPN:8#API:MPIIO#T:102400.txt
|
||||
IOR-3.0.1: MPI Coordinated Test of Parallel I/O
|
||||
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
[cli_1]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 1
|
||||
[cli_7]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 7
|
||||
[cli_0]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 0
|
||||
[cli_2]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 2
|
||||
[cli_3]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 3
|
||||
[cli_4]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 4
|
||||
[cli_5]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 5
|
||||
[cli_6]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 6
|
||||
|
||||
===================================================================================
|
||||
= BAD TERMINATION OF ONE OF YOUR APPLICATION PROCESSES
|
||||
= PID 24253 RUNNING AT isc17-c04
|
||||
= EXIT CODE: 255
|
||||
= CLEANING UP REMAINING PROCESSES
|
||||
= YOU CAN IGNORE THE BELOW CLEANUP MESSAGES
|
||||
===================================================================================
|
||||
+ /opt/ddn/mvapich/bin/mpiexec -ppn 8 -np 8 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c04 /esfs/jtacquaviva/git/ime-evaluation/drop_caches.sh
|
||||
+ /opt/ddn/mvapich/bin/mpiexec -ppn 8 -np 8 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c04 /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 102400 -b 17647534080 -D 120 -a MPIIO -e -g -z -k -o /esfs/jtacquaviva/file_read -r
|
||||
+ tee -a ./output/COUNT:1#NN:1#PPN:8#API:MPIIO#T:102400.txt
|
||||
IOR-3.0.1: MPI Coordinated Test of Parallel I/O
|
||||
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
[cli_2]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 2
|
||||
[cli_3]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 3
|
||||
[cli_4]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 4
|
||||
[cli_5]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 5
|
||||
[cli_6]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 6
|
||||
[cli_7]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 7
|
||||
[cli_0]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 0
|
||||
[cli_1]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 1
|
||||
|
||||
===================================================================================
|
||||
= BAD TERMINATION OF ONE OF YOUR APPLICATION PROCESSES
|
||||
= PID 24290 RUNNING AT isc17-c04
|
||||
= EXIT CODE: 255
|
||||
= CLEANING UP REMAINING PROCESSES
|
||||
= YOU CAN IGNORE THE BELOW CLEANUP MESSAGES
|
||||
===================================================================================
|
||||
+ set +x
|
||||
/esfs/jtacquaviva/ioperf
|
||||
stripe_count: 2 stripe_size: 1048576 stripe_offset: -1
|
|
@ -1,76 +0,0 @@
|
|||
+ /opt/ddn/mvapich/bin/mpiexec -ppn 8 -np 8 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c04 /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 102400 -b 17647534080 -D 120 -a POSIX -F -e -g -z -k -o /esfs/jtacquaviva/ioperf/file_write -w
|
||||
+ tee -a ./output/COUNT:1#NN:1#PPN:8#API:POSIX#T:102400.txt
|
||||
IOR-3.0.1: MPI Coordinated Test of Parallel I/O
|
||||
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
[cli_1]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 1
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
[cli_5]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 5
|
||||
[cli_0]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 0
|
||||
[cli_2]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 2
|
||||
[cli_3]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 3
|
||||
[cli_4]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 4
|
||||
[cli_6]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 6
|
||||
[cli_7]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 7
|
||||
|
||||
===================================================================================
|
||||
= BAD TERMINATION OF ONE OF YOUR APPLICATION PROCESSES
|
||||
= PID 24169 RUNNING AT isc17-c04
|
||||
= EXIT CODE: 255
|
||||
= CLEANING UP REMAINING PROCESSES
|
||||
= YOU CAN IGNORE THE BELOW CLEANUP MESSAGES
|
||||
===================================================================================
|
||||
+ /opt/ddn/mvapich/bin/mpiexec -ppn 8 -np 8 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c04 /esfs/jtacquaviva/git/ime-evaluation/drop_caches.sh
|
||||
+ tee -a ./output/COUNT:1#NN:1#PPN:8#API:POSIX#T:102400.txt
|
||||
+ /opt/ddn/mvapich/bin/mpiexec -ppn 8 -np 8 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c04 /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 102400 -b 17647534080 -D 120 -a POSIX -F -e -g -z -k -o /esfs/jtacquaviva/indread1/file -r
|
||||
IOR-3.0.1: MPI Coordinated Test of Parallel I/O
|
||||
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
[cli_1]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 1
|
||||
[cli_7]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 7
|
||||
[cli_3]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 3
|
||||
[cli_0]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 0
|
||||
[cli_2]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 2
|
||||
[cli_6]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 6
|
||||
[cli_4]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 4
|
||||
[cli_5]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 5
|
||||
|
||||
===================================================================================
|
||||
= BAD TERMINATION OF ONE OF YOUR APPLICATION PROCESSES
|
||||
= PID 24220 RUNNING AT isc17-c04
|
||||
= EXIT CODE: 255
|
||||
= CLEANING UP REMAINING PROCESSES
|
||||
= YOU CAN IGNORE THE BELOW CLEANUP MESSAGES
|
||||
===================================================================================
|
||||
+ set +x
|
||||
/esfs/jtacquaviva/ioperf
|
||||
stripe_count: 2 stripe_size: 1048576 stripe_offset: -1
|
|
@ -1,46 +0,0 @@
|
|||
+ /opt/ddn/mvapich/bin/mpiexec -ppn 1 -np 2 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c04,isc17-c05 /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 102400 -b 141180272640 -D 120 -a MPIIO -e -g -z -k -o /esfs/jtacquaviva/ioperf/file_write -w
|
||||
+ tee -a ./output/COUNT:1#NN:2#PPN:1#API:MPIIO#T:102400.txt
|
||||
IOR-3.0.1: MPI Coordinated Test of Parallel I/O
|
||||
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
[cli_0]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 0
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
[cli_1]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 1
|
||||
|
||||
===================================================================================
|
||||
= BAD TERMINATION OF ONE OF YOUR APPLICATION PROCESSES
|
||||
= PID 31995 RUNNING AT isc17-c05
|
||||
= EXIT CODE: 255
|
||||
= CLEANING UP REMAINING PROCESSES
|
||||
= YOU CAN IGNORE THE BELOW CLEANUP MESSAGES
|
||||
===================================================================================
|
||||
[proxy:0:0@isc17-c04] HYDU_sock_write (utils/sock/sock.c:286): write error (Broken pipe)
|
||||
[proxy:0:0@isc17-c04] main (pm/pmiserv/pmip.c:265): unable to send EXIT_STATUS command upstream
|
||||
[mpiexec@isc17-c04] HYDT_bscu_wait_for_completion (tools/bootstrap/utils/bscu_wait.c:76): one of the processes terminated badly; aborting
|
||||
[mpiexec@isc17-c04] HYDT_bsci_wait_for_completion (tools/bootstrap/src/bsci_wait.c:23): launcher returned error waiting for completion
|
||||
[mpiexec@isc17-c04] HYD_pmci_wait_for_completion (pm/pmiserv/pmiserv_pmci.c:218): launcher returned error waiting for completion
|
||||
[mpiexec@isc17-c04] main (ui/mpich/mpiexec.c:344): process manager error waiting for completion
|
||||
+ /opt/ddn/mvapich/bin/mpiexec -ppn 1 -np 2 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c04,isc17-c05 /esfs/jtacquaviva/git/ime-evaluation/drop_caches.sh
|
||||
+ /opt/ddn/mvapich/bin/mpiexec -ppn 1 -np 2 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c04,isc17-c05 /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 102400 -b 141180272640 -D 120 -a MPIIO -e -g -z -k -o /esfs/jtacquaviva/file_read -r
|
||||
+ tee -a ./output/COUNT:1#NN:2#PPN:1#API:MPIIO#T:102400.txt
|
||||
IOR-3.0.1: MPI Coordinated Test of Parallel I/O
|
||||
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
[cli_0]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 0
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
[cli_1]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 1
|
||||
|
||||
===================================================================================
|
||||
= BAD TERMINATION OF ONE OF YOUR APPLICATION PROCESSES
|
||||
= PID 30373 RUNNING AT isc17-c04
|
||||
= EXIT CODE: 255
|
||||
= CLEANING UP REMAINING PROCESSES
|
||||
= YOU CAN IGNORE THE BELOW CLEANUP MESSAGES
|
||||
===================================================================================
|
||||
+ set +x
|
||||
/esfs/jtacquaviva/ioperf
|
||||
stripe_count: 4 stripe_size: 1048576 stripe_offset: -1
|
|
@ -1,46 +0,0 @@
|
|||
+ /opt/ddn/mvapich/bin/mpiexec -ppn 1 -np 2 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c04,isc17-c05 /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 102400 -b 141180272640 -D 120 -a POSIX -F -e -g -z -k -o /esfs/jtacquaviva/ioperf/file_write -w
|
||||
+ tee -a ./output/COUNT:1#NN:2#PPN:1#API:POSIX#T:102400.txt
|
||||
IOR-3.0.1: MPI Coordinated Test of Parallel I/O
|
||||
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
[cli_0]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 0
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
[cli_1]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 1
|
||||
|
||||
===================================================================================
|
||||
= BAD TERMINATION OF ONE OF YOUR APPLICATION PROCESSES
|
||||
= PID 30323 RUNNING AT isc17-c04
|
||||
= EXIT CODE: 255
|
||||
= CLEANING UP REMAINING PROCESSES
|
||||
= YOU CAN IGNORE THE BELOW CLEANUP MESSAGES
|
||||
===================================================================================
|
||||
+ /opt/ddn/mvapich/bin/mpiexec -ppn 1 -np 2 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c04,isc17-c05 /esfs/jtacquaviva/git/ime-evaluation/drop_caches.sh
|
||||
+ /opt/ddn/mvapich/bin/mpiexec -ppn 1 -np 2 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c04,isc17-c05 /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 102400 -b 141180272640 -D 120 -a POSIX -F -e -g -z -k -o /esfs/jtacquaviva/indread2/file -r
|
||||
+ tee -a ./output/COUNT:1#NN:2#PPN:1#API:POSIX#T:102400.txt
|
||||
IOR-3.0.1: MPI Coordinated Test of Parallel I/O
|
||||
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
[cli_0]: [cli_1]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 1
|
||||
aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 0
|
||||
|
||||
===================================================================================
|
||||
= BAD TERMINATION OF ONE OF YOUR APPLICATION PROCESSES
|
||||
= PID 31974 RUNNING AT isc17-c05
|
||||
= EXIT CODE: 255
|
||||
= CLEANING UP REMAINING PROCESSES
|
||||
= YOU CAN IGNORE THE BELOW CLEANUP MESSAGES
|
||||
===================================================================================
|
||||
[proxy:0:0@isc17-c04] HYDU_sock_write (utils/sock/sock.c:286): write error (Broken pipe)
|
||||
[proxy:0:0@isc17-c04] main (pm/pmiserv/pmip.c:265): unable to send EXIT_STATUS command upstream
|
||||
[mpiexec@isc17-c04] HYDT_bscu_wait_for_completion (tools/bootstrap/utils/bscu_wait.c:76): one of the processes terminated badly; aborting
|
||||
[mpiexec@isc17-c04] HYDT_bsci_wait_for_completion (tools/bootstrap/src/bsci_wait.c:23): launcher returned error waiting for completion
|
||||
[mpiexec@isc17-c04] HYD_pmci_wait_for_completion (pm/pmiserv/pmiserv_pmci.c:218): launcher returned error waiting for completion
|
||||
[mpiexec@isc17-c04] main (ui/mpich/mpiexec.c:344): process manager error waiting for completion
|
||||
+ set +x
|
||||
/esfs/jtacquaviva/ioperf
|
||||
stripe_count: 4 stripe_size: 1048576 stripe_offset: -1
|
|
@ -1,59 +0,0 @@
|
|||
+ /opt/ddn/mvapich/bin/mpiexec -ppn 2 -np 4 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c04,isc17-c05 /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 102400 -b 70590136320 -D 120 -a MPIIO -e -g -z -k -o /esfs/jtacquaviva/ioperf/file_write -w
|
||||
+ tee -a ./output/COUNT:1#NN:2#PPN:2#API:MPIIO#T:102400.txt
|
||||
IOR-3.0.1: MPI Coordinated Test of Parallel I/O
|
||||
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
[cli_0]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 0
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
[cli_1]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 1
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
[cli_3]: ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 3
|
||||
[cli_2]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 2
|
||||
|
||||
===================================================================================
|
||||
= BAD TERMINATION OF ONE OF YOUR APPLICATION PROCESSES
|
||||
= PID 30284 RUNNING AT isc17-c04
|
||||
= EXIT CODE: 255
|
||||
= CLEANING UP REMAINING PROCESSES
|
||||
= YOU CAN IGNORE THE BELOW CLEANUP MESSAGES
|
||||
===================================================================================
|
||||
+ /opt/ddn/mvapich/bin/mpiexec -ppn 2 -np 4 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c04,isc17-c05 /esfs/jtacquaviva/git/ime-evaluation/drop_caches.sh
|
||||
+ /opt/ddn/mvapich/bin/mpiexec -ppn 2 -np 4 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c04,isc17-c05 /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 102400 -b 70590136320 -D 120 -a MPIIO -e -g -z -k -o /esfs/jtacquaviva/file_read -r
|
||||
+ tee -a ./output/COUNT:1#NN:2#PPN:2#API:MPIIO#T:102400.txt
|
||||
IOR-3.0.1: MPI Coordinated Test of Parallel I/O
|
||||
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
[cli_0]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 0
|
||||
[cli_1]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 1
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
[cli_3]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 3
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
[cli_2]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 2
|
||||
|
||||
===================================================================================
|
||||
= BAD TERMINATION OF ONE OF YOUR APPLICATION PROCESSES
|
||||
= PID 31909 RUNNING AT isc17-c05
|
||||
= EXIT CODE: 255
|
||||
= CLEANING UP REMAINING PROCESSES
|
||||
= YOU CAN IGNORE THE BELOW CLEANUP MESSAGES
|
||||
===================================================================================
|
||||
[proxy:0:0@isc17-c04] HYD_pmcd_pmip_control_cmd_cb (pm/pmiserv/pmip_cb.c:912): assert (!closed) failed
|
||||
[proxy:0:0@isc17-c04] HYDT_dmxu_poll_wait_for_event (tools/demux/demux_poll.c:76): callback returned error status
|
||||
[proxy:0:0@isc17-c04] main (pm/pmiserv/pmip.c:256): demux engine error waiting for event
|
||||
[mpiexec@isc17-c04] HYDT_bscu_wait_for_completion (tools/bootstrap/utils/bscu_wait.c:76): one of the processes terminated badly; aborting
|
||||
[mpiexec@isc17-c04] HYDT_bsci_wait_for_completion (tools/bootstrap/src/bsci_wait.c:23): launcher returned error waiting for completion
|
||||
[mpiexec@isc17-c04] HYD_pmci_wait_for_completion (pm/pmiserv/pmiserv_pmci.c:218): launcher returned error waiting for completion
|
||||
[mpiexec@isc17-c04] main (ui/mpich/mpiexec.c:344): process manager error waiting for completion
|
||||
+ set +x
|
||||
/esfs/jtacquaviva/ioperf
|
||||
stripe_count: 4 stripe_size: 1048576 stripe_offset: -1
|
|
@ -1,59 +0,0 @@
|
|||
+ /opt/ddn/mvapich/bin/mpiexec -ppn 2 -np 4 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c04,isc17-c05 /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 102400 -b 70590136320 -D 120 -a POSIX -F -e -g -z -k -o /esfs/jtacquaviva/ioperf/file_write -w
|
||||
+ tee -a ./output/COUNT:1#NN:2#PPN:2#API:POSIX#T:102400.txt
|
||||
IOR-3.0.1: MPI Coordinated Test of Parallel I/O
|
||||
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
[cli_0]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 0
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
[cli_1]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 1
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
[cli_2]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 2
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
[cli_3]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 3
|
||||
|
||||
===================================================================================
|
||||
= BAD TERMINATION OF ONE OF YOUR APPLICATION PROCESSES
|
||||
= PID 30245 RUNNING AT isc17-c04
|
||||
= EXIT CODE: 255
|
||||
= CLEANING UP REMAINING PROCESSES
|
||||
= YOU CAN IGNORE THE BELOW CLEANUP MESSAGES
|
||||
===================================================================================
|
||||
+ /opt/ddn/mvapich/bin/mpiexec -ppn 2 -np 4 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c04,isc17-c05 /esfs/jtacquaviva/git/ime-evaluation/drop_caches.sh
|
||||
+ /opt/ddn/mvapich/bin/mpiexec -ppn 2 -np 4 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c04,isc17-c05 /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 102400 -b 70590136320 -D 120 -a POSIX -F -e -g -z -k -o /esfs/jtacquaviva/indread2/file -r
|
||||
+ tee -a ./output/COUNT:1#NN:2#PPN:2#API:POSIX#T:102400.txt
|
||||
IOR-3.0.1: MPI Coordinated Test of Parallel I/O
|
||||
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
[cli_1]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 1
[cli_3]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 3
[cli_0]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 0
|
||||
[cli_2]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 2
|
||||
|
||||
===================================================================================
|
||||
= BAD TERMINATION OF ONE OF YOUR APPLICATION PROCESSES
|
||||
= PID 30260 RUNNING AT isc17-c04
|
||||
= EXIT CODE: 255
|
||||
= CLEANING UP REMAINING PROCESSES
|
||||
= YOU CAN IGNORE THE BELOW CLEANUP MESSAGES
|
||||
===================================================================================
|
||||
[proxy:0:1@isc17-c05] HYD_pmcd_pmip_control_cmd_cb (pm/pmiserv/pmip_cb.c:912): assert (!closed) failed
|
||||
[proxy:0:1@isc17-c05] HYDT_dmxu_poll_wait_for_event (tools/demux/demux_poll.c:76): callback returned error status
|
||||
[proxy:0:1@isc17-c05] main (pm/pmiserv/pmip.c:256): demux engine error waiting for event
|
||||
[mpiexec@isc17-c04] HYDT_bscu_wait_for_completion (tools/bootstrap/utils/bscu_wait.c:76): one of the processes terminated badly; aborting
|
||||
[mpiexec@isc17-c04] HYDT_bsci_wait_for_completion (tools/bootstrap/src/bsci_wait.c:23): launcher returned error waiting for completion
|
||||
[mpiexec@isc17-c04] HYD_pmci_wait_for_completion (pm/pmiserv/pmiserv_pmci.c:218): launcher returned error waiting for completion
|
||||
[mpiexec@isc17-c04] main (ui/mpich/mpiexec.c:344): process manager error waiting for completion
|
||||
+ set +x
|
||||
/esfs/jtacquaviva/ioperf
|
||||
stripe_count: 4 stripe_size: 1048576 stripe_offset: -1
|
|
@ -1,85 +0,0 @@
|
|||
+ /opt/ddn/mvapich/bin/mpiexec -ppn 4 -np 8 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c04,isc17-c05 /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 102400 -b 35295068160 -D 120 -a MPIIO -e -g -z -k -o /esfs/jtacquaviva/ioperf/file_write -w
|
||||
+ tee -a ./output/COUNT:1#NN:2#PPN:4#API:MPIIO#T:102400.txt
|
||||
IOR-3.0.1: MPI Coordinated Test of Parallel I/O
|
||||
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
[cli_0]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 0
|
||||
[cli_1]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 1
|
||||
[cli_2]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 2
|
||||
[cli_3]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 3
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
[cli_4]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 4
|
||||
[cli_5]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 5
|
||||
[cli_6]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 6
|
||||
[cli_7]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 7
|
||||
tee: standard output: Resource temporarily unavailable
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
|
||||
===================================================================================
|
||||
= BAD TERMINATION OF ONE OF YOUR APPLICATION PROCESSES
|
||||
= PID 30192 RUNNING AT isc17-c04
|
||||
= EXIT CODE: 255
|
||||
= CLEANING UP REMAINING PROCESSES
|
||||
= YOU CAN IGNORE THE BELOW CLEANUP MESSAGES
|
||||
===================================================================================
|
||||
tee: write error
|
||||
+ /opt/ddn/mvapich/bin/mpiexec -ppn 4 -np 8 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c04,isc17-c05 /esfs/jtacquaviva/git/ime-evaluation/drop_caches.sh
|
||||
+ /opt/ddn/mvapich/bin/mpiexec -ppn 4 -np 8 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c04,isc17-c05 /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 102400 -b 35295068160 -D 120 -a MPIIO -e -g -z -k -o /esfs/jtacquaviva/file_read -r
|
||||
+ tee -a ./output/COUNT:1#NN:2#PPN:4#API:MPIIO#T:102400.txt
|
||||
IOR-3.0.1: MPI Coordinated Test of Parallel I/O
|
||||
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
[cli_5]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 5
|
||||
[cli_7]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 7
|
||||
[cli_4]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 4
|
||||
[cli_6]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 6
|
||||
[cli_0]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 0
|
||||
[cli_1]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 1
|
||||
[cli_2]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 2
|
||||
[cli_3]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 3
|
||||
|
||||
===================================================================================
|
||||
= BAD TERMINATION OF ONE OF YOUR APPLICATION PROCESSES
|
||||
= PID 30218 RUNNING AT isc17-c04
|
||||
= EXIT CODE: 255
|
||||
= CLEANING UP REMAINING PROCESSES
|
||||
= YOU CAN IGNORE THE BELOW CLEANUP MESSAGES
|
||||
===================================================================================
|
||||
[proxy:0:1@isc17-c05] HYD_pmcd_pmip_control_cmd_cb (pm/pmiserv/pmip_cb.c:912): assert (!closed) failed
|
||||
[proxy:0:1@isc17-c05] HYDT_dmxu_poll_wait_for_event (tools/demux/demux_poll.c:76): callback returned error status
|
||||
[proxy:0:1@isc17-c05] main (pm/pmiserv/pmip.c:256): demux engine error waiting for event
|
||||
[mpiexec@isc17-c04] HYDT_bscu_wait_for_completion (tools/bootstrap/utils/bscu_wait.c:76): one of the processes terminated badly; aborting
|
||||
[mpiexec@isc17-c04] HYDT_bsci_wait_for_completion (tools/bootstrap/src/bsci_wait.c:23): launcher returned error waiting for completion
|
||||
[mpiexec@isc17-c04] HYD_pmci_wait_for_completion (pm/pmiserv/pmiserv_pmci.c:218): launcher returned error waiting for completion
|
||||
[mpiexec@isc17-c04] main (ui/mpich/mpiexec.c:344): process manager error waiting for completion
|
||||
+ set +x
|
||||
/esfs/jtacquaviva/ioperf
|
||||
stripe_count: 4 stripe_size: 1048576 stripe_offset: -1
|
|
@ -1,91 +0,0 @@
|
|||
+ /opt/ddn/mvapich/bin/mpiexec -ppn 4 -np 8 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c04,isc17-c05 /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 102400 -b 35295068160 -D 120 -a POSIX -F -e -g -z -k -o /esfs/jtacquaviva/ioperf/file_write -w
|
||||
+ tee -a ./output/COUNT:1#NN:2#PPN:4#API:POSIX#T:102400.txt
|
||||
IOR-3.0.1: MPI Coordinated Test of Parallel I/O
|
||||
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
[cli_0]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 0
|
||||
[cli_1]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 1
|
||||
[cli_2]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 2
|
||||
[cli_3]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 3
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
[cli_5]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 5
|
||||
[cli_7]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 7
|
||||
[cli_4]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 4
|
||||
[cli_6]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 6
|
||||
tee: standard output: Resource temporarily unavailable
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
|
||||
===================================================================================
|
||||
= BAD TERMINATION OF ONE OF YOUR APPLICATION PROCESSES
|
||||
= PID 31610 RUNNING AT isc17-c05
|
||||
= EXIT CODE: 255
|
||||
= CLEANING UP REMAINING PROCESSES
|
||||
= YOU CAN IGNORE THE BELOW CLEANUP MESSAGES
|
||||
===================================================================================
|
||||
[proxy:0:0@isc17-c04] HYDU_sock_write (utils/sock/sock.c:286): write error (Broken pipe)
|
||||
[proxy:0:0@isc17-c04] main (pm/pmiserv/pmip.c:265): unable to send EXIT_STATUS command upstream
|
||||
[mpiexec@isc17-c04] HYDT_bscu_wait_for_completion (tools/bootstrap/utils/bscu_wait.c:76): one of the processes terminated badly; aborting
|
||||
[mpiexec@isc17-c04] HYDT_bsci_wait_for_completion (tools/bootstrap/src/bsci_wait.c:23): launcher returned error waiting for completion
|
||||
[mpiexec@isc17-c04] HYD_pmci_wait_for_completion (pm/pmiserv/pmiserv_pmci.c:218): launcher returned error waiting for completion
|
||||
[mpiexec@isc17-c04] main (ui/mpich/mpiexec.c:344): process manager error waiting for completion
|
||||
tee: write error
|
||||
+ /opt/ddn/mvapich/bin/mpiexec -ppn 4 -np 8 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c04,isc17-c05 /esfs/jtacquaviva/git/ime-evaluation/drop_caches.sh
|
||||
+ /opt/ddn/mvapich/bin/mpiexec -ppn 4 -np 8 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c04,isc17-c05 /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 102400 -b 35295068160 -D 120 -a POSIX -F -e -g -z -k -o /esfs/jtacquaviva/indread2/file -r
|
||||
+ tee -a ./output/COUNT:1#NN:2#PPN:4#API:POSIX#T:102400.txt
|
||||
IOR-3.0.1: MPI Coordinated Test of Parallel I/O
|
||||
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
[cli_5]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 5
|
||||
[cli_7]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 7
|
||||
[cli_4]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 4
|
||||
[cli_6]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 6
|
||||
[cli_0]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 0
|
||||
[cli_1]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 1
|
||||
[cli_3]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 3
|
||||
[cli_2]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 2
|
||||
|
||||
===================================================================================
|
||||
= BAD TERMINATION OF ONE OF YOUR APPLICATION PROCESSES
|
||||
= PID 31673 RUNNING AT isc17-c05
|
||||
= EXIT CODE: 255
|
||||
= CLEANING UP REMAINING PROCESSES
|
||||
= YOU CAN IGNORE THE BELOW CLEANUP MESSAGES
|
||||
===================================================================================
|
||||
[proxy:0:0@isc17-c04] HYD_pmcd_pmip_control_cmd_cb (pm/pmiserv/pmip_cb.c:912): assert (!closed) failed
|
||||
[proxy:0:0@isc17-c04] HYDT_dmxu_poll_wait_for_event (tools/demux/demux_poll.c:76): callback returned error status
|
||||
[proxy:0:0@isc17-c04] main (pm/pmiserv/pmip.c:256): demux engine error waiting for event
|
||||
[mpiexec@isc17-c04] HYDT_bscu_wait_for_completion (tools/bootstrap/utils/bscu_wait.c:76): one of the processes terminated badly; aborting
|
||||
[mpiexec@isc17-c04] HYDT_bsci_wait_for_completion (tools/bootstrap/src/bsci_wait.c:23): launcher returned error waiting for completion
|
||||
[mpiexec@isc17-c04] HYD_pmci_wait_for_completion (pm/pmiserv/pmiserv_pmci.c:218): launcher returned error waiting for completion
|
||||
[mpiexec@isc17-c04] main (ui/mpich/mpiexec.c:344): process manager error waiting for completion
|
||||
+ set +x
|
||||
/esfs/jtacquaviva/ioperf
|
||||
stripe_count: 4 stripe_size: 1048576 stripe_offset: -1
|
|
@ -1,105 +0,0 @@
|
|||
+ /opt/ddn/mvapich/bin/mpiexec -ppn 6 -np 12 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c04,isc17-c05 /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 102400 -b 23530045440 -D 120 -a MPIIO -e -g -z -k -o /esfs/jtacquaviva/ioperf/file_write -w
|
||||
+ tee -a ./output/COUNT:1#NN:2#PPN:6#API:MPIIO#T:102400.txt
|
||||
IOR-3.0.1: MPI Coordinated Test of Parallel I/O
|
||||
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
[cli_7]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 7
|
||||
[cli_9]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 9
[cli_5]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 5
[cli_11]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 11
[cli_1]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 1
[cli_6]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 6
[cli_3]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 3
|
||||
[cli_8]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 8
|
||||
[cli_2]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 2
|
||||
[cli_10]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 10
|
||||
[cli_4]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 4
|
||||
[cli_0]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 0
|
||||
|
||||
===================================================================================
|
||||
= BAD TERMINATION OF ONE OF YOUR APPLICATION PROCESSES
|
||||
= PID 31487 RUNNING AT isc17-c05
|
||||
= EXIT CODE: 255
|
||||
= CLEANING UP REMAINING PROCESSES
|
||||
= YOU CAN IGNORE THE BELOW CLEANUP MESSAGES
|
||||
===================================================================================
|
||||
[proxy:0:0@isc17-c04] HYDU_sock_write (utils/sock/sock.c:286): write error (Broken pipe)
|
||||
[proxy:0:0@isc17-c04] main (pm/pmiserv/pmip.c:265): unable to send EXIT_STATUS command upstream
|
||||
+ /opt/ddn/mvapich/bin/mpiexec -ppn 6 -np 12 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c04,isc17-c05 /esfs/jtacquaviva/git/ime-evaluation/drop_caches.sh
|
||||
+ /opt/ddn/mvapich/bin/mpiexec -ppn 6 -np 12 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c04,isc17-c05 /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 102400 -b 23530045440 -D 120 -a MPIIO -e -g -z -k -o /esfs/jtacquaviva/file_read -r
|
||||
+ tee -a ./output/COUNT:1#NN:2#PPN:6#API:MPIIO#T:102400.txt
|
||||
IOR-3.0.1: MPI Coordinated Test of Parallel I/O
|
||||
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
[cli_6]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 6
|
||||
[cli_7]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 7
|
||||
[cli_8]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 8
|
||||
[cli_10]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 10
|
||||
[cli_11]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 11
|
||||
[cli_9]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 9
|
||||
[cli_1]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 1
|
||||
[cli_5]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 5
|
||||
[cli_0]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 0
|
||||
[cli_2]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 2
|
||||
[cli_3]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 3
|
||||
[cli_4]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 4
|
||||
|
||||
===================================================================================
|
||||
= BAD TERMINATION OF ONE OF YOUR APPLICATION PROCESSES
|
||||
= PID 31572 RUNNING AT isc17-c05
|
||||
= EXIT CODE: 255
|
||||
= CLEANING UP REMAINING PROCESSES
|
||||
= YOU CAN IGNORE THE BELOW CLEANUP MESSAGES
|
||||
===================================================================================
|
||||
[proxy:0:0@isc17-c04] HYD_pmcd_pmip_control_cmd_cb (pm/pmiserv/pmip_cb.c:912): assert (!closed) failed
|
||||
[proxy:0:0@isc17-c04] HYDT_dmxu_poll_wait_for_event (tools/demux/demux_poll.c:76): callback returned error status
|
||||
[proxy:0:0@isc17-c04] main (pm/pmiserv/pmip.c:256): demux engine error waiting for event
|
||||
+ set +x
|
||||
/esfs/jtacquaviva/ioperf
|
||||
stripe_count: 4 stripe_size: 1048576 stripe_offset: -1
|
|
@ -1,107 +0,0 @@
|
|||
+ /opt/ddn/mvapich/bin/mpiexec -ppn 6 -np 12 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c04,isc17-c05 /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 102400 -b 23530045440 -D 120 -a POSIX -F -e -g -z -k -o /esfs/jtacquaviva/ioperf/file_write -w
|
||||
+ tee -a ./output/COUNT:1#NN:2#PPN:6#API:POSIX#T:102400.txt
|
||||
IOR-3.0.1: MPI Coordinated Test of Parallel I/O
|
||||
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
[cli_0]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 0
|
||||
[cli_1]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 1
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
[cli_4]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 4
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
[cli_5]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 5
|
||||
[cli_6]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 6
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
[cli_2]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 2
|
||||
[cli_7]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 7
|
||||
[cli_3]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 3
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
[cli_11]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 11
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
[cli_8]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 8
|
||||
[cli_10]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 10
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
[cli_9]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 9
|
||||
|
||||
===================================================================================
|
||||
= BAD TERMINATION OF ONE OF YOUR APPLICATION PROCESSES
|
||||
= PID 29916 RUNNING AT isc17-c04
|
||||
= EXIT CODE: 255
|
||||
= CLEANING UP REMAINING PROCESSES
|
||||
= YOU CAN IGNORE THE BELOW CLEANUP MESSAGES
|
||||
===================================================================================
|
||||
+ /opt/ddn/mvapich/bin/mpiexec -ppn 6 -np 12 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c04,isc17-c05 /esfs/jtacquaviva/git/ime-evaluation/drop_caches.sh
|
||||
+ /opt/ddn/mvapich/bin/mpiexec -ppn 6 -np 12 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c04,isc17-c05 /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 102400 -b 23530045440 -D 120 -a POSIX -F -e -g -z -k -o /esfs/jtacquaviva/indread2/file -r
|
||||
+ tee -a ./output/COUNT:1#NN:2#PPN:6#API:POSIX#T:102400.txt
|
||||
IOR-3.0.1: MPI Coordinated Test of Parallel I/O
|
||||
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
[cli_7]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 7
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
[cli_11]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 11
|
||||
[cli_9]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 9
|
||||
[cli_8]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 8
[cli_10]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 10
[cli_6]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 6
|
||||
[cli_0]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 0
|
||||
[cli_1]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 1
|
||||
[cli_2]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 2
|
||||
[cli_3]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 3
|
||||
[cli_4]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 4
|
||||
[cli_5]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 5
|
||||
|
||||
===================================================================================
|
||||
= BAD TERMINATION OF ONE OF YOUR APPLICATION PROCESSES
|
||||
= PID 31445 RUNNING AT isc17-c05
|
||||
= EXIT CODE: 255
|
||||
= CLEANING UP REMAINING PROCESSES
|
||||
= YOU CAN IGNORE THE BELOW CLEANUP MESSAGES
|
||||
===================================================================================
|
||||
[proxy:0:0@isc17-c04] HYD_pmcd_pmip_control_cmd_cb (pm/pmiserv/pmip_cb.c:912): assert (!closed) failed
|
||||
[proxy:0:0@isc17-c04] HYDT_dmxu_poll_wait_for_event (tools/demux/demux_poll.c:76): callback returned error status
|
||||
[proxy:0:0@isc17-c04] main (pm/pmiserv/pmip.c:256): demux engine error waiting for event
|
||||
[mpiexec@isc17-c04] HYDT_bscu_wait_for_completion (tools/bootstrap/utils/bscu_wait.c:76): one of the processes terminated badly; aborting
|
||||
[mpiexec@isc17-c04] HYDT_bsci_wait_for_completion (tools/bootstrap/src/bsci_wait.c:23): launcher returned error waiting for completion
|
||||
[mpiexec@isc17-c04] HYD_pmci_wait_for_completion (pm/pmiserv/pmiserv_pmci.c:218): launcher returned error waiting for completion
|
||||
[mpiexec@isc17-c04] main (ui/mpich/mpiexec.c:344): process manager error waiting for completion
|
||||
+ set +x
|
||||
/esfs/jtacquaviva/ioperf
|
||||
stripe_count: 4 stripe_size: 1048576 stripe_offset: -1
|
|
@ -1,128 +0,0 @@
|
|||
+ /opt/ddn/mvapich/bin/mpiexec -ppn 8 -np 16 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c04,isc17-c05 /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 102400 -b 17647534080 -D 120 -a MPIIO -e -g -z -k -o /esfs/jtacquaviva/ioperf/file_write -w
|
||||
+ tee -a ./output/COUNT:1#NN:2#PPN:8#API:MPIIO#T:102400.txt
|
||||
IOR-3.0.1: MPI Coordinated Test of Parallel I/O
|
||||
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
[cli_8]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 8
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
[cli_10]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 10
|
||||
[cli_12]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 12
|
||||
[cli_14]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 14
|
||||
[cli_9]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 9
|
||||
[cli_15]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 15
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
[cli_11]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 11
|
||||
[cli_13]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 13
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
[cli_2]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 2
|
||||
[cli_0]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 0
|
||||
[cli_1]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 1
|
||||
[cli_3]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 3
|
||||
[cli_4]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 4
|
||||
[cli_5]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 5
|
||||
[cli_6]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 6
|
||||
[cli_7]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 7
|
||||
|
||||
===================================================================================
|
||||
= BAD TERMINATION OF ONE OF YOUR APPLICATION PROCESSES
|
||||
= PID 31253 RUNNING AT isc17-c05
|
||||
= EXIT CODE: 255
|
||||
= CLEANING UP REMAINING PROCESSES
|
||||
= YOU CAN IGNORE THE BELOW CLEANUP MESSAGES
|
||||
===================================================================================
|
||||
[proxy:0:0@isc17-c04] HYDU_sock_write (utils/sock/sock.c:286): write error (Broken pipe)
|
||||
[proxy:0:0@isc17-c04] main (pm/pmiserv/pmip.c:265): unable to send EXIT_STATUS command upstream
|
||||
+ /opt/ddn/mvapich/bin/mpiexec -ppn 8 -np 16 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c04,isc17-c05 /esfs/jtacquaviva/git/ime-evaluation/drop_caches.sh
|
||||
+ /opt/ddn/mvapich/bin/mpiexec -ppn 8 -np 16 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c04,isc17-c05 /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 102400 -b 17647534080 -D 120 -a MPIIO -e -g -z -k -o /esfs/jtacquaviva/file_read -r
|
||||
+ tee -a ./output/COUNT:1#NN:2#PPN:8#API:MPIIO#T:102400.txt
|
||||
IOR-3.0.1: MPI Coordinated Test of Parallel I/O
|
||||
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
[cli_8]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 8
|
||||
[cli_9]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 9
|
||||
[cli_10]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 10
|
||||
[cli_11]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 11
|
||||
[cli_12]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 12
|
||||
[cli_13]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 13
|
||||
[cli_14]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 14
|
||||
[cli_15]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 15
|
||||
[cli_0]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 0
|
||||
[cli_1]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 1
|
||||
[cli_6]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 6
|
||||
[cli_7]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 7
|
||||
[cli_2]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 2
|
||||
[cli_3]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 3
|
||||
[cli_5]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 5
|
||||
[cli_4]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 4
|
||||
|
||||
===================================================================================
|
||||
= BAD TERMINATION OF ONE OF YOUR APPLICATION PROCESSES
|
||||
= PID 31334 RUNNING AT isc17-c05
|
||||
= EXIT CODE: 255
|
||||
= CLEANING UP REMAINING PROCESSES
|
||||
= YOU CAN IGNORE THE BELOW CLEANUP MESSAGES
|
||||
===================================================================================
|
||||
[proxy:0:0@isc17-c04] HYDU_sock_write (utils/sock/sock.c:286): write error (Broken pipe)
|
||||
[proxy:0:0@isc17-c04] main (pm/pmiserv/pmip.c:265): unable to send EXIT_STATUS command upstream
|
||||
+ set +x
|
||||
/esfs/jtacquaviva/ioperf
|
||||
stripe_count: 4 stripe_size: 1048576 stripe_offset: -1
|
|
@ -1,132 +0,0 @@
|
|||
+ /opt/ddn/mvapich/bin/mpiexec -ppn 8 -np 16 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c04,isc17-c05 /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 102400 -b 17647534080 -D 120 -a POSIX -F -e -g -z -k -o /esfs/jtacquaviva/ioperf/file_write -w
|
||||
+ tee -a ./output/COUNT:1#NN:2#PPN:8#API:POSIX#T:102400.txt
|
||||
IOR-3.0.1: MPI Coordinated Test of Parallel I/O
|
||||
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
[cli_1]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 1
|
||||
[cli_3]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 3
|
||||
[cli_6]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 6
|
||||
[cli_0]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 0
|
||||
[cli_2]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 2
|
||||
[cli_4]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 4
|
||||
[cli_5]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 5
|
||||
[cli_7]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 7
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
[cli_8]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 8
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
[cli_9]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 9
|
||||
[cli_10]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 10
|
||||
[cli_11]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 11
|
||||
[cli_12]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 12
|
||||
[cli_13]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 13
|
||||
[cli_14]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 14
|
||||
[cli_15]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 15
|
||||
|
||||
===================================================================================
|
||||
= BAD TERMINATION OF ONE OF YOUR APPLICATION PROCESSES
|
||||
= PID 31097 RUNNING AT isc17-c05
|
||||
= EXIT CODE: 255
|
||||
= CLEANING UP REMAINING PROCESSES
|
||||
= YOU CAN IGNORE THE BELOW CLEANUP MESSAGES
|
||||
===================================================================================
|
||||
[proxy:0:0@isc17-c04] HYDU_sock_write (utils/sock/sock.c:286): write error (Broken pipe)
|
||||
[proxy:0:0@isc17-c04] main (pm/pmiserv/pmip.c:265): unable to send EXIT_STATUS command upstream
|
||||
[mpiexec@isc17-c04] HYDT_bscu_wait_for_completion (tools/bootstrap/utils/bscu_wait.c:76): one of the processes terminated badly; aborting
|
||||
[mpiexec@isc17-c04] HYDT_bsci_wait_for_completion (tools/bootstrap/src/bsci_wait.c:23): launcher returned error waiting for completion
|
||||
[mpiexec@isc17-c04] HYD_pmci_wait_for_completion (pm/pmiserv/pmiserv_pmci.c:218): launcher returned error waiting for completion
|
||||
[mpiexec@isc17-c04] main (ui/mpich/mpiexec.c:344): process manager error waiting for completion
|
||||
+ /opt/ddn/mvapich/bin/mpiexec -ppn 8 -np 16 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c04,isc17-c05 /esfs/jtacquaviva/git/ime-evaluation/drop_caches.sh
|
||||
+ /opt/ddn/mvapich/bin/mpiexec -ppn 8 -np 16 -genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter --hosts isc17-c04,isc17-c05 /esfs/jtacquaviva/software/install/ior/git-ddn/bin/ior -i 3 -s 1 -t 102400 -b 17647534080 -D 120 -a POSIX -F -e -g -z -k -o /esfs/jtacquaviva/indread2/file -r
|
||||
+ tee -a ./output/COUNT:1#NN:2#PPN:8#API:POSIX#T:102400.txt
|
||||
IOR-3.0.1: MPI Coordinated Test of Parallel I/O
|
||||
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 2, No such file or directory (ior.c:2293)
|
||||
[cli_8]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 8
|
||||
ior ERROR: block size must be a multiple of transfer size, errno 0, Success (ior.c:2293)
|
||||
[cli_9]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 9
|
||||
[cli_10]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 10
|
||||
[cli_11]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 11
|
||||
[cli_12]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 12
|
||||
[cli_13]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 13
|
||||
[cli_14]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 14
|
||||
[cli_15]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 15
|
||||
[cli_1]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 1
|
||||
[cli_2]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 2
|
||||
[cli_3]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 3
|
||||
[cli_4]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 4
|
||||
[cli_5]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 5
|
||||
[cli_6]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 6
|
||||
[cli_7]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 7
|
||||
[cli_0]: aborting job:
|
||||
application called MPI_Abort(MPI_COMM_WORLD, -1) - process 0
|
||||
|
||||
===================================================================================
|
||||
= BAD TERMINATION OF ONE OF YOUR APPLICATION PROCESSES
|
||||
= PID 31211 RUNNING AT isc17-c05
|
||||
= EXIT CODE: 255
|
||||
= CLEANING UP REMAINING PROCESSES
|
||||
= YOU CAN IGNORE THE BELOW CLEANUP MESSAGES
|
||||
===================================================================================
|
||||
[proxy:0:0@isc17-c04] HYDU_sock_write (utils/sock/sock.c:286): write error (Broken pipe)
|
||||
[proxy:0:0@isc17-c04] main (pm/pmiserv/pmip.c:265): unable to send EXIT_STATUS command upstream
|
||||
+ set +x
|
||||
/esfs/jtacquaviva/ioperf
|
||||
stripe_count: 4 stripe_size: 1048576 stripe_offset: -1
|
run.sh
@ -41,7 +41,7 @@ API_ARR=( "POSIX" "MPIIO" )
#NN_ARR=( 4 2 1 8 10 16)
NN_ARR=( 16 )
PPN_ARR=( 8 6 4 2 1 )
T_ARR=( $((10240*1024)) $((1024*1024)) $((100*1024)) $((16*1024)) )
T_ARR=( $((10*1024*1024)) $((1*1024*1024)) $((100*1024)) $((16*1024)) )

for COUNT in $(seq 1); do
for NN in ${NN_ARR[@]}; do

@ -68,7 +68,7 @@ for API in ${API_ARR[@]}; do
fi


IOR_PARAMS="-i $ITERATIONS -s 1 -t $T -b $((132 * 1024 * 1024 * 1020 / $PPN)) -D $((120)) -a $API $IOR_API_OPTS -e -g -z -k"
IOR_PARAMS="-i $ITERATIONS -s 1 -t $T -b $((4800 * 1024 * 1024 * 32 / $PPN)) -D $((120)) -a $API $IOR_API_OPTS -e -g -z -k"
ENVVAR="-genv MV2_NUM_HCAS 1 -genv MV2_CPU_BINDING_LEVEL core -genv MV2_CPU_BINDING_POLICY scatter"
MPIEXEC_PARAMS=" -ppn $PPN -np $(($NN * $PPN)) $ENVVAR --hosts $(hosts $NN) "
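This IOR_PARAMS change is the substantive fix for the failures captured in the deleted outputs: the new per-process block size 4800*1024*1024*32/PPN (161061273600/PPN bytes) is an exact multiple of every transfer size in T_ARR for every PPN in PPN_ARR, whereas the old 132*1024*1024*1020/PPN value was not (70590136320 % 102400 = 81920 for PPN=2, for example). A minimal spot check in plain bash, using only the values from the arrays above:

for PPN in 8 6 4 2 1; do
  for T in $((10*1024*1024)) $((1*1024*1024)) $((100*1024)) $((16*1024)); do
    echo "PPN=$PPN T=$T remainder=$(( 4800*1024*1024*32 / PPN % T ))"   # every remainder is 0
  done
done

The new helper script below performs the same divisibility check over the full NN/PPN/T matrix.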
@ -0,0 +1,29 @@
#!/bin/bash


NN_ARR=( 4 2 1 8 10 12 14 16)
PPN_ARR=( 8 6 4 2 1 )
T_ARR=( $((10*1024*1024)) $((1*1024*1024)) $((100*1024)) $((16*1024)) )


res="OK"

for COUNT in $(seq 1); do
  for NN in ${NN_ARR[@]}; do
    for T in ${T_ARR[@]}; do
      for PPN in ${PPN_ARR[@]}; do

        #datasize=$((130 * 1024 * 1024 * 1020 / $PPN))
        datasize=$((4800 * 1024 * 1024 * 32 / $PPN))
        remain=$(( $datasize - ($datasize / $T * $T) ))
        if [ 0 -ne $remain ]; then
          echo "Bad IOR parameters: NN=$NN, PPN=$PPN, T=$T, DS=$datasize"
          res="FAILED"
        fi

      done
    done
  done
done

echo $res
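Usage sketch for this checker (the new file's name is not visible in this diff, so check_params.sh is only a placeholder): running bash check_params.sh with the arrays as committed prints OK; if a datasize/T pair does not divide evenly, one "Bad IOR parameters: ..." line is printed per offending NN/PPN/T combination and the final line is FAILED, which is what the commented-out 130*1024*1024*1020 formula would trigger for some PPN values.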
@ -0,0 +1,17 @@
#!/bin/bash

force_delete=$1

find "./output" -type f -name "*.txt" -print0 |
while IFS= read -r -d $'\0' fn; do
  status="$(grep -i "error" "$fn")"
  if [[ "" != ${status} ]]; then
    if [[ "delete" == $force_delete ]]; then
      set -x
      rm "$fn"
      set +x
    else
      echo "Bad output: $fn"
    fi
  fi
done
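The second helper scrubs the results directory: it greps every .txt file under ./output for "error" (case-insensitive) and either reports the file or, when the literal argument delete is passed, removes it with rm traced via set -x. Assuming it is saved as clean_output.sh (again, the file name is not shown in this diff):

bash clean_output.sh          # report only: "Bad output: ./output/..."
bash clean_output.sh delete   # remove the offending output files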