root/group/trunk/mdtools/mpi_implementation/mpiSimulation_module.F90
Revision: 282
Committed: Mon Feb 24 21:26:54 2003 UTC by chuckv
File size: 21091 byte(s)
Log Message:
Changes...

File Contents

1 !! MPI support for long range forces using force decomposition
2 !! on a square grid of processors.
3 !! Corresponds to mpiSimulation.cpp on the C++ side.
4 !! This module also contains a private interface for MPI F90 routines.
5 !!
6 !! @author Charles F. Vardeman II
7 !! @author Matthew Meineke
8 !! @version $Id: mpiSimulation_module.F90,v 1.8 2003-02-24 21:26:54 chuckv Exp $, $Date: 2003-02-24 21:26:54 $, $Name: not supported by cvs2svn $, $Revision: 1.8 $
9
10
11
12
13 module mpiSimulation
14 use definitions
15 use mpi
16 implicit none
17 PRIVATE
18
19
20 !! PUBLIC Subroutines contained in this module
21 !! gather and scatter are generic interfaces
22 !! to the gather and scatter routines.
23 public :: gather, scatter
24 public :: setupSimParallel
25 public :: replanSimParallel
26 public :: getNcol
27 public :: getNrow
28 public :: isMPISimSet
29 public :: printComponentPlan
30 public :: getMyNode
31
32 !! PUBLIC Subroutines contained in MPI module
33 public :: mpi_bcast
34 public :: mpi_allreduce
35 public :: mpi_reduce
36 public :: mpi_send
37 public :: mpi_recv
38 public :: mpi_get_processor_name
39 public :: mpi_finalize
40
41 !! PUBLIC mpi variables
42 public :: mpi_comm_world
43 public :: mpi_character
44 public :: mpi_integer
45 public :: mpi_double_precision
46 public :: mpi_sum
47 public :: mpi_max
48 public :: mpi_status_size
49 public :: mpi_any_source
50
51 !! Safety logical to prevent access to the ComponentPlan until
52 !! it has been set by C++.
53 logical :: ComponentPlanSet = .false.
54
55
56 !! generic mpi error declaration.
57 integer,public :: mpi_err
58
59
60
61 !! Include the mpiComponentPlan type. mpiComponentPlan.h is a
62 !! dual header file shared by both C and Fortran.
63 #define __FORTRAN90
64 #include "mpiComponentPlan.h"
65
66
67
68 !! Tags used during force loop for parallel simulation
69 integer, allocatable, dimension(:) :: tagLocal
70 integer, public, allocatable, dimension(:) :: tagRow
71 integer ,public, allocatable, dimension(:) :: tagColumn
72
73 !! Logical set true if mpiSimulation has been initialized
74 logical :: isSimSet = .false.
75
76
77 type (mpiComponentPlan) :: mpiSim
78
79 !! gs_plan contains plans for gather and scatter routines
80 type, public :: gs_plan
81 private
82 type (mpiComponentPlan), pointer :: gsComponentPlan => NULL()
83 integer :: gsPlanSize !! size of this plan (nDim*nComponents)
84 integer :: globalPlanSize !! size of all components in plan
85 integer, dimension(:), pointer :: displs !! Displacements array for mpi indexed from 0.
86 integer, dimension(:), pointer :: counts !! Counts array for mpi indexed from 0.
87 integer :: myPlanComm !! My communicator for this plan
88 integer :: myPlanRank !! My rank in this plan
89 integer :: planNprocs !! Number of processors in this plan
90 end type gs_plan
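!! Editorial sketch of how these fields relate (illustrative values only,
!! not taken from the original source): for a three-processor plan built
!! with nDim = 3 and local component counts of 2, 4, and 3, each rank
!! would end up with
!!   gsPlanSize     = 3*nComponents        -> 6, 12, 9 on ranks 0, 1, 2
!!   counts(0:2)    = (/ 6, 12, 9 /)       -> identical on every rank
!!   displs(0:2)    = (/ 0, 6, 18 /)       -> exclusive prefix sum of counts
!!   globalPlanSize = sum(counts) = 27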
91
92 ! plans for different decompositions
93 type (gs_plan), public :: plan_row
94 type (gs_plan), public :: plan_row3d
95 type (gs_plan), public :: plan_col
96 type (gs_plan), public :: plan_col3d
97 type(gs_plan), public :: plan_row_Rotation
98 type(gs_plan), public :: plan_col_Rotation
99
100 type (mpiComponentPlan), pointer :: simComponentPlan
101
102 ! interface for gather scatter routines
103 !! Generic interface for gather.
104 !! Gathers a local array into a row or column array.
105 !! Interfaces are provided for integer, double, and double
106 !! rank-2 arrays.
107 interface gather
108 module procedure gather_integer
109 module procedure gather_double
110 module procedure gather_double_2d
111 end interface
112
113 !! Generic interface for scatter.
114 !! Scatters a row or column array, adding components
115 !! and reducing them to a local nComponents-long array.
116 !! Interfaces are provided for double and double rank-2 arrays.
117 interface scatter
118 module procedure scatter_double
119 module procedure scatter_double_2d
120 end interface
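!! Usage sketch (editorial illustration; the caller-side arrays q, q_Row,
!! q_Col, f_Row, f_Col, f and f_local are hypothetical, but plan_row3d and
!! plan_col3d are the plans declared above):
!!
!!   call gather(q, q_Row, plan_row3d)         ! replicate positions across my row
!!   call gather(q, q_Col, plan_col3d)         ! replicate positions across my column
!!   ! ... force loop accumulates into f_Row and f_Col ...
!!   call scatter(f_Row, f_local, plan_row3d)  ! reduce row contributions to local atoms
!!   f = f + f_local
!!   call scatter(f_Col, f_local, plan_col3d)  ! add the column contributions
!!   f = f + f_local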
121
122
123
124 contains
125
126 !! Sets up mpiComponentPlan with structure passed from C++.
127 subroutine setupSimParallel(thisComponentPlan,ntags,tags,status)
128 ! Passed Arguments
129 ! integer, intent(inout) :: nDim !! Number of dimensions
130 !! mpiComponentPlan struct from C
131 type (mpiComponentPlan), intent(inout) :: thisComponentPlan
132 !! Number of tags passed, nlocal
133 integer, intent(in) :: ntags
134 !! Result status, 0 = normal, -1 = error
135 integer, intent(out) :: status
136 integer :: localStatus
137 !! Global reference tag for local particles
138 integer, dimension(ntags),intent(inout) :: tags
139
140
141 status = 0
142 if (componentPlanSet) then
143 return
144 endif
145 componentPlanSet = .true.
146
147 !! copy c component plan to fortran
148 mpiSim = thisComponentPlan
149 write(*,*) "Setting up simParallel"
150
151 call make_Force_Grid(mpiSim,localStatus)
152 if (localStatus /= 0) then
153 write(default_error,*) "Error creating force grid"
154 status = -1
155 return
156 endif
157
158 call updateGridComponents(mpiSim,localStatus)
159 if (localStatus /= 0) then
160 write(default_error,*) "Error updating grid components"
161 status = -1
162 return
163 endif
164
165
166 !! initialize gather and scatter plans used in this simulation
167 call plan_gather_scatter(1,mpiSim%myNlocal,&
168 mpiSim,mpiSim%rowComm,plan_row)
169 call plan_gather_scatter(nDim,mpiSim%myNlocal,&
170 mpiSim,mpiSim%rowComm,plan_row3d)
171 call plan_gather_scatter(9,mpiSim%myNlocal,&
172 mpiSim,mpiSim%rowComm,plan_row_Rotation)
173 call plan_gather_scatter(1,mpiSim%myNlocal,&
174 mpiSim,mpiSim%columnComm,plan_col)
175 call plan_gather_scatter(nDim,mpiSim%myNlocal,&
176 mpiSim,mpiSim%columnComm,plan_col3d)
177 call plan_gather_scatter(9,mpiSim%myNlocal,&
178 mpiSim,mpiSim%columnComm,plan_col_Rotation)
179
180
181
182 ! Initialize tags
183 call setTags(tags,localStatus)
184 if (localStatus /= 0) then
185 status = -1
186 return
187 endif
188 isSimSet = .true.
189
190 ! call printComponentPlan(mpiSim,0)
191 end subroutine setupSimParallel
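!! Call sketch (editorial illustration; in the real code this routine is
!! driven from the C++ side, and the plan, nlocal and tags values shown
!! here are hypothetical):
!!
!!   type(mpiComponentPlan) :: plan      ! filled in by the C++ simulation setup
!!   integer                :: nlocal, status
!!   integer, allocatable   :: tags(:)   ! global indices of the local atoms
!!   ...
!!   call setupSimParallel(plan, nlocal, tags, status)
!!   if (status /= 0) write(*,*) "setupSimParallel failed"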
192
193 subroutine replanSimParallel(thisComponentPlan,status)
194 ! Passed Arguments
195 !! mpiComponentPlan struct from C
196 type (mpiComponentPlan), intent(inout) :: thisComponentPlan
197 integer, intent(out) :: status
198 integer :: localStatus
199 integer :: mpierror
200 status = 0
201
202 call updateGridComponents(thisComponentPlan,localStatus)
203 if (localStatus /= 0) then
204 status = -1
205 return
206 endif
207
208 !! Unplan Gather Scatter plans
209 call unplan_gather_scatter(plan_row)
210 call unplan_gather_scatter(plan_row3d)
211 call unplan_gather_scatter(plan_row_Rotation)
212 call unplan_gather_scatter(plan_col)
213 call unplan_gather_scatter(plan_col3d)
214 call unplan_gather_scatter(plan_col_Rotation)
215
216 !! initialize gather and scatter plans used in this simulation
217 call plan_gather_scatter(1,thisComponentPlan%myNlocal,&
218 thisComponentPlan,thisComponentPlan%rowComm,plan_row)
219 call plan_gather_scatter(nDim,thisComponentPlan%myNlocal,&
220 thisComponentPlan,thisComponentPlan%rowComm,plan_row3d)
221 call plan_gather_scatter(9,thisComponentPlan%myNlocal,&
222 thisComponentPlan,thisComponentPlan%rowComm,plan_row_Rotation)
223 call plan_gather_scatter(1,thisComponentPlan%myNlocal,&
224 thisComponentPlan,thisComponentPlan%columnComm,plan_col)
225 call plan_gather_scatter(nDim,thisComponentPlan%myNlocal,&
226 thisComponentPlan,thisComponentPlan%columnComm,plan_col3d)
227 call plan_gather_scatter(9,thisComponentPlan%myNlocal,&
228 thisComponentPlan,thisComponentPlan%columnComm,plan_col_Rotation)
229
230
231
232 end subroutine replanSimParallel
233
234 !! Updates number of row and column components for long range forces.
235 subroutine updateGridComponents(thisComponentPlan,status)
236 type (mpiComponentPlan) :: thisComponentPlan !! mpiComponentPlan
237
238 !! Status return
239 !! - 0 Success
240 !! - -1 Failure
241 integer, intent(out) :: status
242 integer :: nComponentsLocal
243 integer :: nComponentsRow = 0
244 integer :: nComponentsColumn = 0
245 integer :: mpiErrors
246
247 status = 0
248 if (.not. componentPlanSet) return
249
250 if (thisComponentPlan%myNlocal == 0 ) then
251 status = -1
252 return
253 endif
254
255 nComponentsLocal = thisComponentPlan%myNlocal
256
257 call mpi_allreduce(nComponentsLocal,nComponentsRow,1,mpi_integer,&
258 mpi_sum,thisComponentPlan%rowComm,mpiErrors)
259 if (mpiErrors /= 0) then
260 status = -1
261 return
262 endif
263
264 call mpi_allreduce(nComponentsLocal,nComponentsColumn,1,mpi_integer, &
265 mpi_sum,thisComponentPlan%columnComm,mpiErrors)
266 if (mpiErrors /= 0) then
267 status = -1
268 return
269 endif
270
271 thisComponentPlan%nComponentsRow = nComponentsRow
272 thisComponentPlan%nComponentsColumn = nComponentsColumn
273
274
275 end subroutine updateGridComponents
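!! Worked example (editorial): on a 2 x 2 processor grid where every rank
!! owns myNlocal = 10 atoms, the two allreduce calls above give each rank
!!   nComponentsRow    = 10 + 10 = 20   (sum over its row communicator)
!!   nComponentsColumn = 10 + 10 = 20   (sum over its column communicator)
!! so the row/column buffers used by the gather plans are sized for 20 atoms.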
276
277
278 !! Creates a square force decomposition of processors into row and column
279 !! communicators.
280 subroutine make_Force_Grid(thisComponentPlan,status)
281 type (mpiComponentPlan) :: thisComponentPlan
282 integer, intent(out) :: status !! status returns -1 if error
283 integer :: nColumnsMax !! Maximum number of columns
284 integer :: nWorldProcessors !! Total number of processors in World comm.
285 integer :: rowIndex !! Row for this processor.
286 integer :: columnIndex !! Column for this processor.
287 integer :: nRows !! Total number of rows.
288 integer :: nColumns !! Total number of columns.
289 integer :: mpiErrors !! MPI errors.
290 integer :: rowCommunicator !! MPI row communicator.
291 integer :: columnCommunicator
292 integer :: myWorldRank
293 integer :: i
294
295
296 if (.not. ComponentPlanSet) return
297 status = 0
298
299 !! We make a dangerous assumption here that if numberProcessors is
300 !! zero, then we need to get the information from MPI.
301 if (thisComponentPlan%numberProcessors == 0 ) then
302 call mpi_comm_size( MPI_COMM_WORLD, nWorldProcessors,mpiErrors)
303 if ( mpiErrors /= 0 ) then
304 status = -1
305 return
306 endif
307 call mpi_comm_rank( MPI_COMM_WORLD,myWorldRank,mpiErrors)
308 if ( mpiErrors /= 0 ) then
309 status = -1
310 return
311 endif
312
313 else
314 nWorldProcessors = thisComponentPlan%numberProcessors
315 myWorldRank = thisComponentPlan%myNode
316 endif
317
318
319
320
321 nColumnsMax = nint(sqrt(real(nWorldProcessors,kind=dp)))
322
323 do i = 1, nColumnsMax
324 if (mod(nWorldProcessors,i) == 0) nColumns = i
325 end do
326
327 nRows = nWorldProcessors/nColumns
328
329 rowIndex = myWorldRank/nColumns
330
331
332
333 call mpi_comm_split(mpi_comm_world,rowIndex,0,rowCommunicator,mpiErrors)
334 if ( mpiErrors /= 0 ) then
335 write(default_error,*) "MPI comm split failed at row communicator"
336 status = -1
337 return
338 endif
339
340 columnIndex = mod(myWorldRank,nColumns)
341 call mpi_comm_split(mpi_comm_world,columnIndex,0,columnCommunicator,mpiErrors)
342 if ( mpiErrors /= 0 ) then
343 write(default_error,*) "MPI comm split failed at column communicator"
344 status = -1
345 return
346 endif
347
348 ! Set appropriate components of thisComponentPlan
349 thisComponentPlan%rowComm = rowCommunicator
350 thisComponentPlan%columnComm = columnCommunicator
351 thisComponentPlan%rowIndex = rowIndex
352 thisComponentPlan%columnIndex = columnIndex
353 thisComponentPlan%numberRows = nRows
354 thisComponentPlan%numberColumns = nColumns
355
356
357 end subroutine make_Force_Grid
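!! Worked example (editorial) of the grid construction above for
!! nWorldProcessors = 6:
!!   nColumnsMax = nint(sqrt(6.0)) = 2
!!   largest divisor of 6 that is <= 2  -> nColumns = 2, nRows = 3
!!   rank 5: rowIndex = 5/2 = 2, columnIndex = mod(5,2) = 1
!! so rank 5 joins row communicator 2 and column communicator 1.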
358
359
360 !! Initializes a gather/scatter plan.
361 subroutine plan_gather_scatter( nDim,nComponents,thisComponentPlan, &
362 thisComm, this_plan,status)
363 integer, intent(in) :: nDim !! Number of dimensions for gather scatter plan
364 integer, intent(in) :: nComponents
365 type (mpiComponentPlan), intent(in), target :: thisComponentPlan
366 type (gs_plan), intent(out) :: this_plan !! gather/scatter plan to initialize
367 integer, intent(in) :: thisComm !! MPI communicator for this plan
368
369 integer :: arraySize !! size to allocate plan for
370 integer, intent(out), optional :: status
371 integer :: ierror
372 integer :: i,junk
373
374 if (present(status)) status = 0
375
376
377
378 !! Set the gsComponentPlan pointer
379 !! to the component plan we want to use for this gather/scatter plan.
380 !! WARNING: this could be dangerous, since thisComponentPlan was originally
381 !! allocated in C++ and there is a significant difference between C and
382 !! F95 pointers....
383 this_plan%gsComponentPlan => thisComponentPlan
384
385 ! Set this plan size for displs array.
386 this_plan%gsPlanSize = nDim * nComponents
387
388 ! Duplicate communicator for this plan
389 call mpi_comm_dup(thisComm,this_plan%myPlanComm,mpi_err)
390 if (mpi_err /= 0) then
391 if (present(status)) status = -1
392 return
393 end if
394 call mpi_comm_rank(this_plan%myPlanComm,this_plan%myPlanRank,mpi_err)
395 if (mpi_err /= 0) then
396 if (present(status)) status = -1
397 return
398 end if
399
400 call mpi_comm_size(this_plan%myPlanComm,this_plan%planNprocs,mpi_err)
401
402 if (mpi_err /= 0) then
403 if (present(status)) status = -1
404 return
405 end if
406
407 !! counts and displacements arrays are indexed from 0 to be compatible
408 !! with MPI arrays.
409 allocate (this_plan%counts(0:this_plan%planNprocs-1),STAT=ierror)
410 if (ierror /= 0) then
411 if (present(status)) status = -1
412 return
413 end if
414
415 allocate (this_plan%displs(0:this_plan%planNprocs-1),STAT=ierror)
416 if (ierror /= 0) then
417 if (present(status)) status = -1
418 return
419 end if
420
421 !! Gather all of the local plan sizes into the counts array (one entry per processor).
422 call mpi_allgather(this_plan%gsPlanSize,1,mpi_integer,this_plan%counts, &
423 1,mpi_integer,thisComm,mpi_err)
424
425 if (mpi_err /= 0) then
426 if (present(status)) status = -1
427 return
428 end if
429
430
431 !! Figure out the total size of this plan (nDim * nComponents summed over all processors).
432 this_plan%globalPlanSize = sum(this_plan%counts)
433
434
435 !! initialize plan displacements.
436 this_plan%displs(0) = 0
437 do i = 1, this_plan%planNprocs - 1,1
438 this_plan%displs(i) = this_plan%displs(i-1) + this_plan%counts(i-1)
439 end do
440
441
442 end subroutine plan_gather_scatter
443
444
445 subroutine unplan_gather_scatter(this_plan)
446 type (gs_plan), intent(inout) :: this_plan
447
448
449 this_plan%gsComponentPlan => null()
450 call mpi_comm_free(this_plan%myPlanComm,mpi_err)
451
452 deallocate(this_plan%counts)
453 deallocate(this_plan%displs)
454
455 end subroutine unplan_gather_scatter
456
457 subroutine gather_integer( sbuffer, rbuffer, this_plan, status)
458
459 type (gs_plan), intent(in) :: this_plan
460 integer, dimension(:), intent(in) :: sbuffer
461 integer, dimension(:), intent(inout) :: rbuffer
462 integer :: noffset
463 integer, intent(out), optional :: status
464 integer :: i
465
466
467
468 if (present(status)) status = 0
469 noffset = this_plan%displs(this_plan%myPlanRank)
470
471 ! if (getmyNode() == 1) then
472 ! write(*,*) "Node 0 printing allgatherv vars"
473 ! write(*,*) "Noffset: ", noffset
474 ! write(*,*) "PlanSize: ", this_plan%gsPlanSize
475 ! write(*,*) "PlanComm: ", this_plan%myPlanComm
476 ! end if
477
478 call mpi_allgatherv(sbuffer,this_plan%gsPlanSize, mpi_integer, &
479 rbuffer,this_plan%counts,this_plan%displs,mpi_integer, &
480 this_plan%myPlanComm, mpi_err)
481
482 if (mpi_err /= 0) then
483 if (present(status)) status = -1
484 endif
485
486 end subroutine gather_integer
487
488 subroutine gather_double( sbuffer, rbuffer, this_plan, status)
489
490 type (gs_plan), intent(in) :: this_plan
491 real( kind = DP ), dimension(:), intent(in) :: sbuffer
492 real( kind = DP ), dimension(:), intent(inout) :: rbuffer
493 integer :: noffset
494 integer, intent(out), optional :: status
495
496
497 if (present(status)) status = 0
498 noffset = this_plan%displs(this_plan%myPlanRank)
499
500 call mpi_allgatherv(sbuffer,this_plan%gsPlanSize, mpi_double_precision, &
501 rbuffer,this_plan%counts,this_plan%displs,mpi_double_precision, &
502 this_plan%myPlanComm, mpi_err)
503
504 if (mpi_err /= 0) then
505 if (present(status)) status = -1
506 endif
507
508 end subroutine gather_double
509
510 subroutine gather_double_2d( sbuffer, rbuffer, this_plan, status)
511
512 type (gs_plan), intent(in) :: this_plan
513 real( kind = DP ), dimension(:,:), intent(in) :: sbuffer
514 real( kind = DP ), dimension(:,:), intent(inout) :: rbuffer
515 integer :: noffset,i,ierror
516 integer, intent(out), optional :: status
517
518 external mpi_allgatherv
519
520 if (present(status)) status = 0
521
522 ! noffset = this_plan%displs(this_plan%me)
523
524 call mpi_allgatherv(sbuffer,this_plan%gsPlanSize, mpi_double_precision, &
525 rbuffer,this_plan%counts,this_plan%displs,mpi_double_precision, &
526 this_plan%myPlanComm, mpi_err)
527
528 if (mpi_err /= 0) then
529 if (present(status)) status = -1
530 endif
531
532 end subroutine gather_double_2d
533
534 subroutine scatter_double( sbuffer, rbuffer, this_plan, status)
535
536 type (gs_plan), intent(in) :: this_plan
537 real( kind = DP ), dimension(:), intent(in) :: sbuffer
538 real( kind = DP ), dimension(:), intent(inout) :: rbuffer
539 integer, intent(out), optional :: status
540 external mpi_reduce_scatter
541
542 if (present(status)) status = 0
543
544 call mpi_reduce_scatter(sbuffer,rbuffer, this_plan%counts, &
545 mpi_double_precision, MPI_SUM, this_plan%myPlanComm, mpi_err)
546
547 if (mpi_err /= 0) then
548 if (present(status)) status = -1
549 endif
550
551 end subroutine scatter_double
552
553 subroutine scatter_double_2d( sbuffer, rbuffer, this_plan, status)
554
555 type (gs_plan), intent(in) :: this_plan
556 real( kind = DP ), dimension(:,:), intent(in) :: sbuffer
557 real( kind = DP ), dimension(:,:), intent(inout) :: rbuffer
558 integer, intent(out), optional :: status
559 external mpi_reduce_scatter
560
561 if (present(status)) status = 0
562 call mpi_reduce_scatter(sbuffer,rbuffer, this_plan%counts, &
563 mpi_double_precision, MPI_SUM, this_plan%myPlanComm, mpi_err)
564
565 if (mpi_err /= 0) then
566 if (present(status)) status = -1
567 endif
568
569 end subroutine scatter_double_2d
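!! Note (editorial): in both scatter routines, mpi_reduce_scatter sums the
!! row or column buffer element-wise over the plan communicator and returns
!! counts(myPlanRank) elements of that sum to each rank, so rbuffer comes
!! back holding the fully reduced contributions for the local components.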
570
571
572 subroutine setTags(tags,status)
573 integer, dimension(:), intent(in) :: tags
574 integer, intent(out) :: status
575
576 integer :: alloc_stat
577
578 integer :: ncol
579 integer :: nrow
580
581 status = 0
582 ! allocate row arrays
583 nrow = getNrow(plan_row)
584 ncol = getNcol(plan_col)
585
586 if (.not. allocated(tagRow)) then
587 allocate(tagRow(nrow),STAT=alloc_stat)
588 if (alloc_stat /= 0 ) then
589 status = -1
590 return
591 endif
592 else
593 deallocate(tagRow)
594 allocate(tagRow(nrow),STAT=alloc_stat)
595 if (alloc_stat /= 0 ) then
596 status = -1
597 return
598 endif
599
600 endif
601 ! allocate column arrays
602 if (.not. allocated(tagColumn)) then
603 allocate(tagColumn(ncol),STAT=alloc_stat)
604 if (alloc_stat /= 0 ) then
605 status = -1
606 return
607 endif
608 else
609 deallocate(tagColumn)
610 allocate(tagColumn(ncol),STAT=alloc_stat)
611 if (alloc_stat /= 0 ) then
612 status = -1
613 return
614 endif
615 endif
616
617 call gather(tags,tagRow,plan_row)
618 call gather(tags,tagColumn,plan_col)
619
620
621 end subroutine setTags
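!! Note (editorial): after the two gathers above, tagRow(1:nrow) and
!! tagColumn(1:ncol) hold the global reference tags of every atom in this
!! processor's row and column blocks, in plan (rank-by-rank) order.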
622
623 pure function getNcol(thisplan) result(ncol)
624 type (gs_plan), intent(in) :: thisplan
625 integer :: ncol
626 ncol = thisplan%gsComponentPlan%nComponentsColumn
627 end function getNcol
628
629 pure function getNrow(thisplan) result(nrow)
630 type (gs_plan), intent(in) :: thisplan
631 integer :: nrow
632 nrow = thisplan%gsComponentPlan%nComponentsRow
633 end function getNrow
634
635 function isMPISimSet() result(isthisSimSet)
636 logical :: isthisSimSet
637 if (isSimSet) then
638 isthisSimSet = .true.
639 else
640 isthisSimSet = .false.
641 endif
642 end function isMPISimSet
643
644
645
646 subroutine printComponentPlan(this_plan,printNode)
647
648 type (mpiComponentPlan), intent(in) :: this_plan
649 integer, optional :: printNode
650 logical :: print_me
651 print_me = .false. ! initialize at run time; declaration-time init would imply SAVE
652 if (present(printNode)) then
653 if (printNode == mpiSim%myNode) print_me = .true.
654 else
655 print_me = .true.
656 endif
657
658 if (print_me) then
659 write(default_error,*) "SetupSimParallel: writing component plan"
660
661 write(default_error,*) "nMolGlobal: ", mpiSim%nMolGlobal
662 write(default_error,*) "nAtomsGlobal: ", mpiSim%nAtomsGlobal
663 write(default_error,*) "nBondGlobal: ", mpiSim%nBondsGlobal
664 write(default_error,*) "nTorsionsGlobal: ", mpiSim%nTorsionsGlobal
665 write(default_error,*) "nSRIGlobal: ", mpiSim%nSRIGlobal
666 write(default_error,*) "myMolStart: ", mpiSim%myMolStart
667 write(default_error,*) "myMolEnd: ", mpiSim%myMolEnd
668 write(default_error,*) "myMol: ", mpiSim%myMol
669 write(default_error,*) "myNlocal: ", mpiSim%myNlocal
670 write(default_error,*) "myNode: ", mpiSim%myNode
671 write(default_error,*) "numberProcessors: ", mpiSim%numberProcessors
672 write(default_error,*) "rowComm: ", mpiSim%rowComm
673 write(default_error,*) "columnComm: ", mpiSim%columnComm
674 write(default_error,*) "numberRows: ", mpiSim%numberRows
675 write(default_error,*) "numberColumns: ", mpiSim%numberColumns
676 write(default_error,*) "nComponentsRow: ", mpiSim%nComponentsRow
677 write(default_error,*) "nComponentsColumn: ", mpiSim%nComponentsColumn
678 write(default_error,*) "rowIndex: ", mpiSim%rowIndex
679 write(default_error,*) "columnIndex: ", mpiSim%columnIndex
680 endif
681 end subroutine printComponentPlan
682
683 function getMyNode() result(myNode)
684 integer :: myNode
685 myNode = mpiSim%myNode
686 end function getMyNode
687
688
689 end module mpiSimulation
690