pymtt
 All Classes Namespaces Files Functions Variables Groups
mpich2-template.ini
Go to the documentation of this file.
1 #
2 # Copyright (c) 2006-2009 Cisco Systems, Inc. All rights reserved.
3 # Copyright (c) 2006-2007 Sun Microsystems, Inc. All rights reserved.
4 #
5 
6 # Note that there are many items in this file that, while they are
7 # good for examples, may not work for random MTT users. For example,
8 # the "ompi-tests" SVN repository that is used in the examples below
9 # is *not* a public repository (there's nothing really secret in this
10 # repository; it contains many publicly-available MPI tests and
11 # benchmarks, but we have never looked into the redistribution rights
12 # of these codes, so we keep the SVN repository "closed" to the
13 # general public and only use it internally in the Open MPI project).
14 
15 #======================================================================
16 # Overall configuration
17 #======================================================================
18 
19 [MTT]
20 # See ompi-core-template.ini for more information on this section.
21 
22 #----------------------------------------------------------------------
23 
24 [Lock]
25 # See ompi-core-template.ini for more information on this section.
26 
27 #======================================================================
28 # MPI get phase
29 #======================================================================
30 
31 [MPI get: mpich2]
32 mpi_details = MPICH2
33 
34 module = Tarball
35 tarball_filename = /home/jsquyres/mpich2-1.0.5p4.tar.gz
36 tarball_version = 1.0.5p4
37 
38 #======================================================================
39 # Install MPI phase
40 #======================================================================
41 
42 [MPI install: MPICH2/GNU]
43 mpi_get = mpich2
44 save_stdout_on_success = 1
45 merge_stdout_stderr = 0
46 # Needed to launch in SLURM; adding this to LD_LIBRARY_PATH here
47 # propagates this all the way through the test run phases that use
48 # this MPI install, where the test executables will need to have this
49 # set.
50 prepend_path = LD_LIBRARY_PATH /opt/slurm/current/lib
51 
52 module = MPICH2
53 mpich2_vpath_mode = none
54 mpich2_make_all_arguments =
55 mpich2_compiler_name = gnu
56 mpich2_compiler_version = &get_gcc_version()
57 mpich2_configure_arguments = "CFLAGS=-g -pipe"
58 # These are needed to launch through SLURM; adjust as appropriate.
59 mpich2_additional_wrapper_ldflags = -L/opt/slurm/current/lib
60 mpich2_additional_wrapper_libs = -lpmi
61 
62 #======================================================================
63 # MPI run details
64 #======================================================================
65 
66 [MPI Details: MPICH2]
67 
68 # Launching through SLURM. If you use mpdboot instead, you need to
69 # ensure that multiple mpd's on the same node don't conflict (or never
70 # happen).
71 exec = srun -n &test_np() &test_executable() &test_argv()
72 
73 #======================================================================
74 # Test get phase
75 #======================================================================
76 
77 [Test get: trivial]
78 module = Trivial
79 
80 #----------------------------------------------------------------------
81 
82 [Test get: intel]
83 module = SVN
84 svn_url = https://svn.open-mpi.org/svn/ompi-tests/trunk/intel_tests
85 
86 #----------------------------------------------------------------------
87 
88 [Test get: ibm]
89 module = SVN
90 svn_url = https://svn.open-mpi.org/svn/ompi-tests/trunk/ibm
91 svn_post_export = <<EOT
92 ./autogen.sh
93 EOT
94 
95 #----------------------------------------------------------------------
96 
97 [Test get: onesided]
98 module = SVN
99 svn_url = https://svn.open-mpi.org/svn/ompi-tests/trunk/onesided
100 svn_post_export = <<EOT
101 ./autogen.sh
102 EOT
103 
104 #----------------------------------------------------------------------
105 
106 [Test get: mpicxx]
107 module = SVN
108 svn_url = https://svn.open-mpi.org/svn/ompi-tests/trunk/cxx-test-suite
109 svn_post_export = <<EOT
110 ./autogen.sh
111 EOT
112 
113 #----------------------------------------------------------------------
114 
115 [Test get: imb]
116 module = SVN
117 svn_url = https://svn.open-mpi.org/svn/ompi-tests/trunk/IMB_2.3
118 
119 #----------------------------------------------------------------------
120 
121 [Test get: netpipe]
122 module = SVN
123 svn_url = https://svn.open-mpi.org/svn/ompi-tests/trunk/NetPIPE_3.6.2
124 
125 #======================================================================
126 # Test build phase
127 #======================================================================
128 
129 [Test build: trivial]
130 test_get = trivial
131 save_stdout_on_success = 1
132 merge_stdout_stderr = 1
133 
134 module = Trivial
135 
136 #----------------------------------------------------------------------
137 
138 [Test build: intel]
139 test_get = intel
140 save_stdout_on_success = 1
141 merge_stdout_stderr = 1
142 
143 module = Intel_OMPI_Tests
144 intel_ompi_tests_buildfile = all_tests_no_perf
145 
146 #----------------------------------------------------------------------
147 
148 [Test build: ibm]
149 test_get = ibm
150 save_stdout_on_success = 1
151 merge_stdout_stderr = 1
152 
153 module = Shell
154 shell_build_command = <<EOT
155 ./configure CC=mpicc CXX=mpic++ F77=mpif77
156 make
157 EOT
158 
159 #----------------------------------------------------------------------
160 
161 [Test build: onesided]
162 test_get = onesided
163 save_stdout_on_success = 1
164 merge_stdout_stderr = 1
165 stderr_save_lines = 100
166 
167 module = Shell
168 shell_build_command = <<EOT
169 ./configure
170 make
171 EOT
172 
173 #----------------------------------------------------------------------
174 
175 [Test build: mpicxx]
176 test_get = mpicxx
177 save_stdout_on_success = 1
178 merge_stdout_stderr = 1
179 
180 module = Shell
181 shell_build_command = <<EOT
182 ./configure CC=mpicc CXX=mpic++
183 make
184 EOT
185 
186 #----------------------------------------------------------------------
187 
188 [Test build: imb]
189 test_get = imb
190 save_stdout_on_success = 1
191 merge_stdout_stderr = 1
192 
193 module = Shell
194 shell_build_command = <<EOT
195 cd src
196 make clean IMB-MPI1
197 EOT
198 
199 #----------------------------------------------------------------------
200 
201 [Test build: netpipe]
202 test_get = netpipe
203 save_stdout_on_success = 1
204 merge_stdout_stderr = 1
205 stderr_save_lines = 100
206 
207 module = Shell
208 shell_build_command = <<EOT
209 make mpi
210 EOT
211 
212 #======================================================================
213 # Test Run phase
214 #======================================================================
215 
216 [Test run: trivial]
217 test_build = trivial
218 pass = &and(&test_wifexited(), &eq(&test_wexitstatus(), 0))
219 skipped = 0
220 timeout = &max(10, &test_np())
221 save_stdout_on_pass = 1
222 merge_stdout_stderr = 1
223 stdout_save_lines = 100
224 stderr_save_lines = 100
225 np = &env_max_procs()
226 
227 specify_module = Simple
228 simple_first:tests = &find_executables(".")
229 
230 #----------------------------------------------------------------------
231 
232 [Test run: intel]
233 test_build = intel
234 pass = &and(&test_wifexited(), &eq(&test_wexitstatus(), 0))
235 skipped = &and(&test_wifexited(), &eq(&test_wexitstatus(), 77))
236 timeout = &max(30, &multiply(10, &test_np()))
237 save_stdout_on_pass = 1
238 merge_stdout_stderr = 1
239 np = &min("60", &env_max_procs())
240 
241 specify_module = Simple
242 simple_successful:tests = &find_executables("src")
243 
244 simple_failures:tests = &find_executables("src/" . "&cat("supposed_to_fail")")
245 simple_failures:pass = &and(&test_wifexited(), &ne(&test_wexitstatus(), 0))
246 simple_failures:exclusive = 1
247 simple_failures:timeout = &env_max_procs()
248 
249 # These tests sleep for 90 seconds (!) before attempting to process
250 # any messages
251 simple_really_slow:tests = src/MPI_Isend_flood_c src/MPI_Send_flood_c
252 simple_really_slow:pass = &and(&test_wifexited(), &eq(&test_wexitstatus(), 0))
253 simple_really_slow:exclusive = 1
254 simple_really_slow:timeout = &sum(120, &multiply(5, &test_np()))
255 
256 # This test collectively sleeps for 26 seconds *per MCW rank*
257 simple_coll_slow:tests = src/MPI_collective_overlap_c
258 simple_coll_slow:pass = &and(&test_wifexited(), &eq(&test_wexitstatus(), 0))
259 simple_coll_slow:exclusive = 1
260 simple_coll_slow:timeout = &multiply(35, &test_np())
261 
262 #------------------------------------------------------------------------
263 
264 [Test run: ibm]
265 test_build = ibm
266 pass = &and(&test_wifexited(), &eq(&test_wexitstatus(), 0))
267 skipped = &and(&test_wifexited(), &eq(&test_wexitstatus(), 77))
268 timeout = &max(30, &multiply(10, &test_np()))
269 save_stdout_on_pass = 1
270 merge_stdout_stderr = 1
271 np = &env_max_procs()
272 
273 specify_module = Simple
274 simple_first:tests = &find_executables("collective", "communicator", \
275  "datatype", "dynamic", "environment", \
276  "group", "info", "pt2pt", "topology", \
277  "onesided" )
278 
279 simple_fail:tests = environment/abort environment/final
280 simple_fail:pass = &and(&test_wifexited(), &ne(&test_wexitstatus(), 0))
281 simple_fail:exclusive = 1
282 simple_fail:timeout = &env_max_procs()
283 
284 spawns:tests = dynamic/spawn dynamic/spawn_multiple
285 spawns:np = 2
286 spawns:pass = &and(&test_wifexited(), &ne(&test_wexitstatus(),0))
287 spawns:exclusive = 1
288 spawns:timeout = &multiply(5,&env_max_procs())
289 
290 #----------------------------------------------------------------------
291 
292 [Test run: onesided]
293 test_build = onesided
294 pass = &and(&test_wifexited(), &eq(&test_wexitstatus(), 0))
295 timeout = &max(30, &multiply(10, &test_np()))
296 save_stdout_on_pass = 1
297 merge_stdout_stderr = 1
298 stdout_save_lines = 100
299 np = &env_max_procs()
300 
301 specify_module = Simple
302 simple_pass:tests = &cat("run_list")
303 
304 #----------------------------------------------------------------------
305 
306 [Test run: mpicxx]
307 test_build = mpicxx
308 pass = &and(&test_wifexited(), &eq(&test_wexitstatus(), 0))
309 timeout = &max(30, &multiply(10, &test_np()))
310 save_stdout_on_pass = 1
311 merge_stdout_stderr = 1
312 np = &env_max_procs()
313 
314 specify_module = Simple
315 simple_pass:tests = src/mpi2c++_test
316 
317 #----------------------------------------------------------------------
318 
319 [Test run: imb correctness]
320 test_build = imb
321 pass = &eq(&test_wexitstatus(), 0)
322 timeout = &max(2800, &multiply(50, &test_np()))
323 save_stdout_on_pass = 1
324 merge_stdout_stderr = 1
325 np = &min("32", &env_max_procs())
326 
327 specify_module = Simple
328 simple_first:tests = src/IMB-MPI1
329 
330 #----------------------------------------------------------------------
331 
332 [Test run: imb performance]
333 test_build = imb
334 pass = &eq(&test_wexitstatus(), 0)
335 timeout = -1
336 save_stdout_on_pass = 1
337 # Be sure to leave this value as "-1", or performance results could be lost!
338 stdout_save_lines = -1
339 merge_stdout_stderr = 1
340 np = &env_max_procs()
341 
342 argv = -npmin &test_np() &enumerate("PingPong", "PingPing", "Sendrecv", "Exchange", "Allreduce", "Reduce", "Reduce_scatter", "Allgather", "Allgatherv", "Alltoall", "Bcast", "Barrier")
343 
344 specify_module = Simple
345 analyze_module = IMB
346 simple_pass:tests = IMB-MPI1
347 
348 #----------------------------------------------------------------------
349 
350 [Test run: netpipe]
351 test_build = netpipe
352 pass = &eq(&test_wexitstatus(), 0)
353 timeout = -1
354 save_stdout_on_pass = 1
355 # Be sure to leave this value as "-1", or performance results could be lost!
356 stdout_save_lines = -1
357 merge_stdout_stderr = 1
358 np = 2
359 
360 specify_module = Simple
361 analyze_module = NetPipe
362 simple_pass:tests = NPmpi
363 
364 #======================================================================
365 # Reporter phase
366 #======================================================================
367 
368 [Reporter: text output]
369 module = TextFile
370 
371 textfile_filename = $phase-$section-$mpi_name-$mpi_version.txt
372 
373 textfile_summary_header = <<EOT
374 hostname: &shell("hostname")
375 uname: &shell("uname -a")
376 who am i: &shell("who am i")
377 EOT
378 
379 textfile_summary_footer =
380 textfile_detail_header =
381 textfile_detail_footer =
382 
383 textfile_textwrap = 78