description = [testbake]

[MPI get: ompi-nightly-trunk]
module = OMPI_Snapshot
ompi_snapshot_url = http://www.open-mpi.org/nightly/trunk

[MPI get: ompi-nightly-v1.2]
module = OMPI_Snapshot
ompi_snapshot_url = http://www.open-mpi.org/nightly/v1.2

[MPI get: ompi-released-v1.2]
module = OMPI_Snapshot
ompi_snapshot_url = http://www.open-mpi.org/software/ompi/v1.2/downloads
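
#
# The OMPI_Snapshot module reads a "latest_snapshot.txt" file under each URL
# above to learn the newest tarball name.  A quick manual sanity check of a
# URL (the command line is an illustration, not part of this config):
#   wget http://www.open-mpi.org/nightly/trunk/latest_snapshot.txt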

[MPI get: MPICH1]
download_url = http://www.open-mpi.org/~jsquyres/ompi-coll-bakeoff/mpich-1.2.7p1-patched-for-slurm.tar.gz
download_version = 1.2.7p1

[MPI get: MPICH-MX]
mpi_details = MPICH-MX
download_url = http://www.myri.com/ftp/pub/MPICH-MX/mpich-mx_1.2.7..5.tar.gz
download_username = <OBTAIN THIS FROM MYRICOM>
download_password = <OBTAIN THIS FROM MYRICOM>

[MPI get: MPICH2]
download_url = http://www-unix.mcs.anl.gov/mpi/mpich2/downloads/mpich2-1.0.5p4.tar.gz

[MPI get: MVAPICH1]
mpi_details = MVAPICH1
download_url = http://mvapich.cse.ohio-state.edu/download/mvapich/mvapich-0.9.9.tar.gz

[MPI get: MVAPICH2]
mpi_details = MVAPICH2
download_url = http://mvapich.cse.ohio-state.edu/download/mvapich2/mvapich2-0.9.8p3.tar.gz

[MPI get: HP MPI]
module = AlreadyInstalled
alreadyinstalled_version = 2.2.5.1b1

[MPI get: Intel MPI]
mpi_details = Intel MPI
module = AlreadyInstalled
alreadyinstalled_version = 3.0

[SKIP MPI get: Scali MPI]
mpi_details = Scali MPI
module = AlreadyInstalled
alreadyinstalled_version = ???

[SKIP MPI get: Cray MPI]
mpi_details = Cray MPI
module = AlreadyInstalled
alreadyinstalled_version = ???
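
#
# All of the "MPI get" stanzas above are consumed by the MTT client's
# MPI-get phase.  A sketch of a download-only pass (the ini file name and
# scratch path are assumptions):
#   perl ./client/mtt --file ompi-core-perf-testing.ini \
#       --scratch /home/mpiteam/mtt-scratch --mpi-get --verbose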

[MPI install: OMPI/GNU-standard]
mpi_get = ompi-nightly-trunk, ompi-nightly-v1.2, ompi-released-v1.2
save_stdout_on_success = 1
merge_stdout_stderr = 0

ompi_make_all_arguments = -j 8
ompi_compiler_name = gnu
ompi_compiler_version = &get_gcc_version()
ompi_configure_arguments = CFLAGS=-O3 --with-openib --enable-mpirun-prefix-by-default --enable-branch-probabilities --disable-heterogeneous --without-mpi-param-check
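
#
# The install phase effectively runs the usual autotools sequence with the
# arguments above; a rough by-hand equivalent (the install prefix here is an
# assumption, MTT normally manages it inside the scratch tree):
#   ./configure CFLAGS=-O3 --with-openib --enable-mpirun-prefix-by-default \
#       --enable-branch-probabilities --disable-heterogeneous \
#       --without-mpi-param-check --prefix=$SCRATCH/installs/ompi
#   make -j 8 all && make install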

[MPI install: MPICH1]
save_stdout_on_success = 1
merge_stdout_stderr = 0
setenv = P4_GLOBMEMSIZE 67108864

mpich2_use_all_target = 0
mpich2_apply_slurm_patch = 1
mpich2_compiler_name = gnu
mpich2_compiler_version = &get_gcc_version()
mpich2_configure_arguments = -cflags=-O3 -rsh=ssh --with-device=ch_p4 --with-comm=shared

[MPI install: MPICH2]
save_stdout_on_success = 1
merge_stdout_stderr = 0
prepend_path = LD_LIBRARY_PATH /opt/slurm/current/lib

mpich2_compiler_name = gnu
mpich2_compiler_version = &get_gcc_version()
mpich2_configure_arguments = --disable-f90 CFLAGS=-O3 --enable-fast --with-device=ch3:nemesis
mpich2_additional_wrapper_ldflags = -L/opt/slurm/current/lib
mpich2_additional_wrapper_libs = -lpmi
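
#
# The two wrapper settings above fold SLURM's PMI library into the MPICH2
# compiler wrappers so srun-launched jobs can bootstrap.  As a quick check
# on the installed wrapper:
#   mpicc -show    # expect "... -L/opt/slurm/current/lib -lpmi" at the end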

[MPI install: MVAPICH1]
save_stdout_on_success = 1
merge_stdout_stderr = 0
setenv = VIADEV_USE_COMPAT_MODE 0

mvapich2_setenv = IBHOME /usr
mvapich2_setenv = COMPAT AUTO_DETECT
mvapich2_build_script = make.mvapich.gen2
mvapich2_compiler_name = gnu
mvapich2_compiler_version = &get_gcc_version()

[MPI install: MVAPICH2]
save_stdout_on_success = 1
merge_stdout_stderr = 0
prepend_path = LD_LIBRARY_PATH /opt/slurm/current/lib

mvapich2_setenv = OPEN_IB_HOME /usr
mvapich2_build_script = make.mvapich2.ofa
mvapich2_compiler_name = gnu
mvapich2_compiler_version = &get_gcc_version()
mvapich2_additional_wrapper_ldflags = -L/opt/slurm/current/lib
mvapich2_additional_wrapper_libs = -lpmi

[MPI install: Intel MPI]
setenv = MPD_CON_EXT mtt-unique-mpd.&getenv("SLURM_JOBID")
module = Analyze::IntelMPI

[MPI install: HP MPI]
module = Analyze::HPMPI

[MPI Details: Open MPI]
exec = mpirun @alloc@ -np &test_np() @mca@ &test_executable() &test_argv()
parameters = &MPI::OMPI::find_mpirun_params(&test_command_line(), \
                                            &test_executable())
network = &MPI::OMPI::find_network(&test_command_line(), &test_executable())

alloc = &if(&eq(&test_alloc(), "node"), "--bynode", "--byslot")

mca = &enumerate( \
    "--mca btl sm,tcp,self " . @common_params@, \
    "--mca btl tcp,self " . @common_params@, \
    "--mca btl sm,openib,self " . @common_params@, \
    "--mca btl sm,openib,self --mca mpi_leave_pinned 1 " . @common_params@, \
    "--mca btl openib,self " . @common_params@, \
    "--mca btl openib,self --mca mpi_leave_pinned 1 " . @common_params@, \
    "--mca btl openib,self --mca mpi_leave_pinned_pipeline 1 " . @common_params@, \
    "--mca btl openib,self --mca btl_openib_use_srq 1 " . @common_params@)

common_params = "--mca btl_tcp_if_include ib0 --mca oob_tcp_if_include ib0 --mca btl_openib_if_include mthca0 --mca mpi_paffinity_alone 1 " . \
    &if(&or(&eq(&mpi_get_name(), "ompi-nightly-v1.2"), \
            &eq(&mpi_get_name(), "ompi-released-v1.2")), \
        "--mca btl_openib_max_btls 1", "")

after_each_exec = &if(&ne("", &getenv("SLURM_NNODES")), "srun -N " . &getenv("SLURM_NNODES")) /home/mpiteam/svn/ompi-tests/cisco/mtt/after_each_exec.pl

[MPI Details: MPICH1]
exec = srun @alloc@ -n &test_np() --mpi=mpich1_p4 &test_executable() &test_argv()
network = loopback,ethernet
alloc = &if(&eq(&test_alloc(), "node"), "-m cyclic", "-m block")

[MPI Details: MPICH2]
exec = srun @alloc@ -n &test_np() &test_executable() &test_argv()
network = loopback,ethernet,shmem
alloc = &if(&eq(&test_alloc(), "node"), "-m cyclic", "-m block")

[MPI Details: MVAPICH1]
exec = srun @alloc@ -n &test_np() --mpi=mvapich &test_executable() &test_argv()
network = loopback,verbs,shmem
alloc = &if(&eq(&test_alloc(), "node"), "-m cyclic", "-m block")

[MPI Details: MVAPICH2]
exec = srun @alloc@ -n &test_np() &test_executable() &test_argv()
network = loopback,verbs,shmem
alloc = &if(&eq(&test_alloc(), "node"), "-m cyclic", "-m block")

[MPI Details: MPICH-MX]
exec = srun @alloc@ -n &test_np() --mpi=mpichgm &test_executable() &test_argv()
alloc = &if(&eq(&test_alloc(), "node"), "-m cyclic", "-m block")

[MPI Details: Intel MPI]

before_any_exec = <<EOF
# NOTE: the next two setup lines are reconstructed (the hostfile path is an
# assumption); they define the variables used below.
file=/tmp/mtt-hostfile.$SLURM_JOBID
h=`hostname`
srun hostname | uniq > $file
# Pass -nolocal to mpiexec if this host is not part of the allocation
local=`grep $h $file`
touch /tmp/mpiexec-options.$SLURM_JOBID
if test "$local" = ""; then
    echo -nolocal > /tmp/mpiexec-options.$SLURM_JOBID
fi
# Boot one MPD daemon per allocated node
num=`wc -l $file | awk '{ print $1 }'`
mpdboot -n $num -r ssh --verbose --file=$file
EOF

exec = mpiexec @options@ -n &test_np() ./&test_executable() &test_argv()
network = loopback,verbs,shmem

options = &stringify(&cat("/tmp/mpiexec-options." . &getenv("SLURM_JOBID"))) \
          &enumerate("-genv I_MPI_DEVICE rdssm", \
                     "-genv I_MPI_DEVICE rdssm -genv I_MPI_FAST_COLLECTIVES 1")

after_all_exec = <<EOT
rm -f /tmp/mpiexec-options.$SLURM_JOBID
EOT
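
#
# Sketch of one expansion of the exec template above, assuming the options
# file from before_any_exec contains "-nolocal" (np and test illustrative):
#   mpiexec -nolocal -genv I_MPI_DEVICE rdssm -n 4 ./osu_bw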

[MPI Details: HP MPI]
exec = mpirun -IBV -e MPI_IBV_NO_FORK_SAFE=1 -e MPI_IB_CARD_ORDER=0:0 -srun -n&test_np() ./&test_executable() &test_argv()
network = loopback,verbs,shmem

[Test get: netpipe]
download_url = http://www.scl.ameslab.gov/netpipe/code/NetPIPE_3.6.2.tar.gz

[Test get: osu]
svn_url = https://svn.open-mpi.org/svn/ompi-tests/trunk/osu

[Test get: imb]
svn_url = https://svn.open-mpi.org/svn/ompi-tests/trunk/IMB_2.3

[Test get: skampi]
svn_url = https://svn.open-mpi.org/svn/ompi-tests/trunk/skampi-5.0.1

[Test get: nbcbench]
svn_url = https://svn.open-mpi.org/svn/ompi-tests/trunk/nbcbench

[Test build: netpipe]
save_stdout_on_success = 1
merge_stdout_stderr = 1
stderr_save_lines = 100

shell_build_command = <<EOT
# NetPIPE's "mpi" target builds the NPmpi executable run below
make mpi
EOT

[Test build: osu]
save_stdout_on_success = 1
merge_stdout_stderr = 1
stderr_save_lines = 100

shell_build_command = <<EOT
make osu_latency osu_bw osu_bibw
EOT

[Test build: imb]
save_stdout_on_success = 1
merge_stdout_stderr = 1

shell_build_command = <<EOT
# IMB's MPI1 suite lives in src/ (build command reconstructed)
cd src
make clean IMB-MPI1
EOT

[Test build: skampi]
save_stdout_on_success = 1
merge_stdout_stderr = 1
stderr_save_lines = 100

shell_build_command = <<EOT
make CFLAGS="-O2 -DPRODUCE_SPARSE_OUTPUT -DEVERYONE_CAN_MPI_IO"
EOT

[Test build: nbcbench]
save_stdout_on_success = 1
merge_stdout_stderr = 1
stderr_save_lines = 100

shell_build_command = <<EOT
# (build command reconstructed)
make
EOT

[Test run: netpipe]
pass = &and(&cmd_wifexited(), &eq(&cmd_wexitstatus(), 0))
save_stdout_on_pass = 1
stdout_save_lines = -1
merge_stdout_stderr = 1

specify_module = Simple
analyze_module = NetPipe
simple_pass:tests = NPmpi

[Test run: osu]
pass = &and(&cmd_wifexited(), &eq(&cmd_wexitstatus(), 0))
save_stdout_on_pass = 1
stdout_save_lines = -1
merge_stdout_stderr = 1

specify_module = Simple
simple_pass:tests = osu_bw osu_latency osu_bibw

[Test run: imb]
pass = &and(&cmd_wifexited(), &eq(&cmd_wexitstatus(), 0))
save_stdout_on_pass = 1
stdout_save_lines = -1
merge_stdout_stderr = 1
np = &env_max_procs()

argv = -npmin &test_np() &enumerate("PingPong", "PingPing", "Sendrecv", \
    "Exchange", "Allreduce", "Reduce", "Reduce_scatter", "Allgather", \
    "Allgatherv", "Alltoall", "Bcast", "Barrier")

specify_module = Simple
simple_pass:tests = src/IMB-MPI1

[Test run: skampi]
pass = &and(&cmd_wifexited(), &eq(&cmd_wexitstatus(), 0))
save_stdout_on_pass = 1
stdout_save_lines = -1
merge_stdout_stderr = 1
np = &env_max_procs()

argv = -i &find("mtt_.+.ski", "input_files_bakeoff")

specify_module = Simple
analyze_module = SKaMPI
simple_pass:tests = skampi
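
#
# &find() globs for matching SKaMPI input decks, so &test_argv() expands to
# something like (file name illustrative):
#   -i input_files_bakeoff/mtt_coll.ski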

[Test run: nbcbench]
test_build = nbcbench
pass = &and(&test_wifexited(), &eq(&test_wexitstatus(), 0))
save_stdout_on_pass = 1
stdout_save_lines = -1
merge_stdout_stderr = 1

specify_module = Simple
analyze_module = NBCBench
simple_pass:tests = nbcbench

argv = -p &test_np()-&test_np() -s 1-1048576 -v -t \
    &enumerate("MPI_Allgatherv", "MPI_Allgather", "MPI_Allreduce", \
               "MPI_Alltoall", "MPI_Alltoallv", "MPI_Barrier", "MPI_Bcast", \
               "MPI_Gather", "MPI_Gatherv", "MPI_Reduce", "MPI_Reduce_scatter", \
               "MPI_Scan", "MPI_Scatter", "MPI_Scatterv")

[Reporter: IU database]
mttdatabase_realm = OMPI
mttdatabase_url = https://www.open-mpi.org/mtt/submit/
mttdatabase_username = you must set this value
mttdatabase_password = you must set this value
mttdatabase_platform = you must set this value
mttdatabase_debug_filename = mttdb_debug_file_perf
mttdatabase_keep_debug_files = 1

[Reporter: text file backup]
textfile_filename = $phase-$section-$mpi_name-$mpi_version.txt

textfile_summary_header = <<EOT
Hostname: &shell("hostname")
uname: &shell("uname -a")
Username: &shell("who am i")
EOT

textfile_summary_footer =
textfile_detail_header =
textfile_detail_footer =

textfile_textwrap = 78
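
#
# Each phase/section pair writes its own backup file; with the template
# above a file name has the shape (all values illustrative):
#   Test_Run-netpipe-ompi-nightly-trunk-1.3a1r12345.txt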