root/group/trunk/OOPSE_old/src/mdtools/libmdCode/mpiSimulation_module.F90
Revision: 332
Committed: Thu Mar 13 15:28:43 2003 UTC by gezelter
File size: 21089 byte(s)
Log Message:
initialization routines, bug fixes, exclude lists

File Contents

1 !! MPI support for long range forces using force decomposition
2 !! on a square grid of processors.
3 !! Corresponds to mpiSimulation.cpp for C++
4 !! This module also contains a private interface for MPI F90 routines.
5 !!
6 !! @author Charles F. Vardeman II
7 !! @author Matthew Meineke
8 !! @version $Id: mpiSimulation_module.F90,v 1.4 2003-03-13 15:28:43 gezelter Exp $, $Date: 2003-03-13 15:28:43 $, $Name: not supported by cvs2svn $, $Revision: 1.4 $
9
10 module mpiSimulation
11 use definitions
12 use mpi
13 implicit none
14 PRIVATE
15
16
17 !! PUBLIC Subroutines contained in this module
18 !! gather and scatter are a generic interface
19 !! to gather and scatter routines
20 public :: gather, scatter
21 public :: setupSimParallel
22 public :: replanSimParallel
23 public :: getNcol
24 public :: getNrow
25 public :: isMPISimSet
26 public :: printComponentPlan
27 public :: getMyNode
28
29 !! PUBLIC Subroutines contained in MPI module
30 public :: mpi_bcast
31 public :: mpi_allreduce
32 public :: mpi_reduce
33 public :: mpi_send
34 public :: mpi_recv
35 public :: mpi_get_processor_name
36 public :: mpi_finalize
37
38 !! PUBLIC mpi variables
39 public :: mpi_comm_world
40 public :: mpi_character
41 public :: mpi_integer
42 public :: mpi_double_precision
43 public :: mpi_sum
44 public :: mpi_max
45 public :: mpi_status_size
46 public :: mpi_any_source
47
48 !! Safety logical to prevent access to ComponentPlan until
49 !! set by C++.
50 logical :: ComponentPlanSet = .false.
51
52
53 !! generic mpi error declaration.
54 integer,public :: mpi_err
55
56
57
58 !! Include the mpiComponentPlan type. mpiComponentPlan.h is a
59 !! dual header file shared between C and Fortran.
60 #define __FORTRAN90
61 #include "mpiComponentPlan.h"
62
63
64
65 !! Tags used during force loop for parallel simulation
66 integer, allocatable, dimension(:) :: tagLocal
67 integer, public, allocatable, dimension(:) :: tagRow
68 integer, public, allocatable, dimension(:) :: tagColumn
69
70 !! Logical set true if mpiSimulation has been initialized
71 logical :: isSimSet = .false.
72
73
74 type (mpiComponentPlan) :: mpiSim
75
76 !! gs_plan contains plans for gather and scatter routines
77 type, public :: gs_plan
78 private
79 type (mpiComponentPlan), pointer :: gsComponentPlan => NULL()
80 integer :: gsPlanSize !! size of this plan (nDim*nComponents)
81 integer :: globalPlanSize !! size of all components in plan
82 integer, dimension(:), pointer :: displs !! Displacements array for mpi indexed from 0.
83 integer, dimension(:), pointer :: counts !! Counts array for mpi indexed from 0.
84 integer :: myPlanComm !! My communicator for this plan
85 integer :: myPlanRank !! My rank in this plan
86 integer :: planNprocs !! Number of processors in this plan
87 end type gs_plan
88
89 ! plans for different decompositions
90 type (gs_plan), public :: plan_row
91 type (gs_plan), public :: plan_row3d
92 type (gs_plan), public :: plan_col
93 type (gs_plan), public :: plan_col3d
94 type(gs_plan), public :: plan_row_Rotation
95 type(gs_plan), public :: plan_col_Rotation
96
97 type (mpiComponentPlan), pointer :: simComponentPlan
98
99 ! interface for gather scatter routines
100 !! Generic interface for gather.
101 !! Gathers a local array into a row or column array.
102 !! Interfaces are provided for integer, double, and rank-2
103 !! double arrays (a usage sketch follows the scatter interface below).
104 interface gather
105 module procedure gather_integer
106 module procedure gather_double
107 module procedure gather_double_2d
108 end interface
109
110 !! Generic interface for scatter.
111 !! Scatters a row or column array, adding components
112 !! and reducing them to a local array of nComponents entries.
113 !! Interfaces are provided for double and rank-2 double arrays.
114 interface scatter
115 module procedure scatter_double
116 module procedure scatter_double_2d
117 end interface
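!! Usage sketch (editorial illustration, not part of the original module):
!! the array names q, qRow, f, and fRow below are hypothetical placeholders,
!! assuming nlocal local particles and the 3d plans built in setupSimParallel.
!!
!!   real( kind = DP ), dimension(3,nlocal)            :: q, f
!!   real( kind = DP ), dimension(3,getNrow(plan_row)) :: qRow, fRow
!!
!!   call gather(q, qRow, plan_row3d)    ! replicate local positions across the row
!!   ! ... accumulate partial forces into fRow ...
!!   call scatter(fRow, f, plan_row3d)   ! sum the row contributions back into f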
118
119
120
121 contains
122
123 !! Sets up mpiComponentPlan with structure passed from C++.
124 subroutine setupSimParallel(thisComponentPlan,ntags,tags,status)
125 ! Passed Arguments
126 ! integer, intent(inout) :: nDim !! Number of dimensions
127 !! mpiComponentPlan struct from C
128 type (mpiComponentPlan), intent(inout) :: thisComponentPlan
129 !! Number of tags passed, nlocal
130 integer, intent(in) :: ntags
131 !! Result status, 0 = normal, -1 = error
132 integer, intent(out) :: status
133 integer :: localStatus
134 !! Global reference tag for local particles
135 integer, dimension(ntags),intent(inout) :: tags
136
137
138 status = 0
139 if (componentPlanSet) then
140 return
141 endif
142 componentPlanSet = .true.
143
144 !! Copy the C component plan to Fortran.
145 mpiSim = thisComponentPlan
146 write(*,*) "Setting up simParallel"
147
148 call make_Force_Grid(mpiSim,localStatus)
149 if (localStatus /= 0) then
150 write(default_error,*) "Error creating force grid"
151 status = -1
152 return
153 endif
154
155 call updateGridComponents(mpiSim,localStatus)
156 if (localStatus /= 0) then
157 write(default_error,*) "Error updating grid components"
158 status = -1
159 return
160 endif
161
162
163 !! initialize gather and scatter plans used in this simulation
164 call plan_gather_scatter(1,mpiSim%myNlocal,&
165 mpiSim,mpiSim%rowComm,plan_row)
166 call plan_gather_scatter(nDim,mpiSim%myNlocal,&
167 mpiSim,mpiSim%rowComm,plan_row3d)
168 call plan_gather_scatter(9,mpiSim%myNlocal,&
169 mpiSim,mpiSim%rowComm,plan_row_Rotation)
170 call plan_gather_scatter(1,mpiSim%myNlocal,&
171 mpiSim,mpiSim%columnComm,plan_col)
172 call plan_gather_scatter(nDim,mpiSim%myNlocal,&
173 mpiSim,mpiSim%columnComm,plan_col3d)
174 call plan_gather_scatter(9,mpiSim%myNlocal,&
175 mpiSim,mpiSim%columnComm,plan_col_Rotation)
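!! Note (editorial): the first argument to plan_gather_scatter is the
!! per-particle block size of each plan: 1 for scalar quantities, nDim
!! for vectors, and 9 for the 3x3 rotation matrices handled by the
!! *_Rotation plans.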
176
177
178
179 ! Initialize tags
180 call setTags(tags,localStatus)
181 if (localStatus /= 0) then
182 status = -1
183 return
184 endif
185 isSimSet = .true.
186
187 ! call printComponentPlan(mpiSim,0)
188 end subroutine setupSimParallel
189
190 subroutine replanSimParallel(thisComponentPlan,status)
191 ! Passed Arguments
192 !! mpiComponentPlan struct from C
193 type (mpiComponentPlan), intent(inout) :: thisComponentPlan
194 integer, intent(out) :: status
195 integer :: localStatus
196 integer :: mpierror
197 status = 0
198
199 call updateGridComponents(thisComponentPlan,localStatus)
200 if (localStatus /= 0) then
201 status = -1
202 return
203 endif
204
205 !! Unplan Gather Scatter plans
206 call unplan_gather_scatter(plan_row)
207 call unplan_gather_scatter(plan_row3d)
208 call unplan_gather_scatter(plan_row_Rotation)
209 call unplan_gather_scatter(plan_col)
210 call unplan_gather_scatter(plan_col3d)
211 call unplan_gather_scatter(plan_col_Rotation)
212
213 !! initialize gather and scatter plans used in this simulation
214 call plan_gather_scatter(1,thisComponentPlan%myNlocal,&
215 thisComponentPlan,thisComponentPlan%rowComm,plan_row)
216 call plan_gather_scatter(nDim,thisComponentPlan%myNlocal,&
217 thisComponentPlan,thisComponentPlan%rowComm,plan_row3d)
218 call plan_gather_scatter(9,thisComponentPlan%myNlocal,&
219 thisComponentPlan,thisComponentPlan%rowComm,plan_row_Rotation)
220 call plan_gather_scatter(1,thisComponentPlan%myNlocal,&
221 thisComponentPlan,thisComponentPlan%columnComm,plan_col)
222 call plan_gather_scatter(nDim,thisComponentPlan%myNlocal,&
223 thisComponentPlan,thisComponentPlan%columnComm,plan_col3d)
224 call plan_gather_scatter(9,thisComponentPlan%myNlocal,&
225 thisComponentPlan,thisComponentPlan%columnComm,plan_col_Rotation)
226
227
228
229 end subroutine replanSimParallel
230
231 !! Updates number of row and column components for long range forces.
232 subroutine updateGridComponents(thisComponentPlan,status)
233 type (mpiComponentPlan) :: thisComponentPlan !! mpiComponentPlan
234
235 !! Status return
236 !! - 0 Success
237 !! - -1 Failure
238 integer, intent(out) :: status
239 integer :: nComponentsLocal
240 integer :: nComponentsRow = 0
241 integer :: nComponentsColumn = 0
242 integer :: mpiErrors
243
244 status = 0
245 if (.not. componentPlanSet) return
246
247 if (thisComponentPlan%myNlocal == 0 ) then
248 status = -1
249 return
250 endif
251
252 nComponentsLocal = thisComponentPlan%myNlocal
253
254 call mpi_allreduce(nComponentsLocal,nComponentsRow,1,mpi_integer,&
255 mpi_sum,thisComponentPlan%rowComm,mpiErrors)
256 if (mpiErrors /= 0) then
257 status = -1
258 return
259 endif
260
261 call mpi_allreduce(nComponentsLocal,nComponentsColumn,1,mpi_integer, &
262 mpi_sum,thisComponentPlan%columnComm,mpiErrors)
263 if (mpiErrors /= 0) then
264 status = -1
265 return
266 endif
267
268 thisComponentPlan%nComponentsRow = nComponentsRow
269 thisComponentPlan%nComponentsColumn = nComponentsColumn
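!! e.g. (hypothetical numbers): on a 2x2 processor grid where every
!! processor owns 100 local atoms, each allreduce above sums 100 + 100,
!! so nComponentsRow = nComponentsColumn = 200.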
270
271
272 end subroutine updateGridComponents
273
274
275 !! Creates a square force decomposition of processors into row and column
276 !! communicators.
277 subroutine make_Force_Grid(thisComponentPlan,status)
278 type (mpiComponentPlan) :: thisComponentPlan
279 integer, intent(out) :: status !! status returns -1 if error
280 integer :: nColumnsMax !! Maximum number of columns
281 integer :: nWorldProcessors !! Total number of processors in World comm.
282 integer :: rowIndex !! Row for this processor.
283 integer :: columnIndex !! Column for this processor.
284 integer :: nRows !! Total number of rows.
285 integer :: nColumns !! Total number of columns.
286 integer :: mpiErrors !! MPI errors.
287 integer :: rowCommunicator !! MPI row communicator.
288 integer :: columnCommunicator
289 integer :: myWorldRank
290 integer :: i
291
292
293 if (.not. ComponentPlanSet) return
294 status = 0
295
296 !! We make a dangerous assumption here that if numberProcessors is
297 !! zero, then we need to get the information from MPI.
298 if (thisComponentPlan%numberProcessors == 0 ) then
299 call mpi_comm_size( MPI_COMM_WORLD, nWorldProcessors,mpiErrors)
300 if ( mpiErrors /= 0 ) then
301 status = -1
302 return
303 endif
304 call mpi_comm_rank( MPI_COMM_WORLD,myWorldRank,mpiErrors)
305 if ( mpiErrors /= 0 ) then
306 status = -1
307 return
308 endif
309
310 else
311 nWorldProcessors = thisComponentPlan%numberProcessors
312 myWorldRank = thisComponentPlan%myNode
313 endif
314
315
316
317
318 nColumnsMax = nint(sqrt(real(nWorldProcessors,kind=dp)))
319
320 do i = 1, nColumnsMax
321 if (mod(nWorldProcessors,i) == 0) nColumns = i
322 end do
323
324 nRows = nWorldProcessors/nColumns
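!! Worked example (hypothetical run): with nWorldProcessors = 12,
!! nColumnsMax = nint(sqrt(12.0)) = 3 and the divisor loop leaves
!! nColumns = 3, so nRows = 12/3 = 4.  Rank 7 then gets rowIndex = 7/3 = 2
!! and columnIndex = mod(7,3) = 1 below.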
325
326 rowIndex = myWorldRank/nColumns
327
328
329
330 call mpi_comm_split(mpi_comm_world,rowIndex,0,rowCommunicator,mpiErrors)
331 if ( mpiErrors /= 0 ) then
332 write(default_error,*) "MPI comm split failed at row communicator"
333 status = -1
334 return
335 endif
336
337 columnIndex = mod(myWorldRank,nColumns)
338 call mpi_comm_split(mpi_comm_world,columnIndex,0,columnCommunicator,mpiErrors)
339 if ( mpiErrors /= 0 ) then
340 write(default_error,*) "MPI comm split failed at columnCommunicator"
341 status = -1
342 return
343 endif
344
345 ! Set appropriate components of thisComponentPlan
346 thisComponentPlan%rowComm = rowCommunicator
347 thisComponentPlan%columnComm = columnCommunicator
348 thisComponentPlan%rowIndex = rowIndex
349 thisComponentPlan%columnIndex = columnIndex
350 thisComponentPlan%numberRows = nRows
351 thisComponentPlan%numberColumns = nColumns
352
353
354 end subroutine make_Force_Grid
355
356
357 !! Initializes a gather/scatter plan.
358 subroutine plan_gather_scatter( nDim,nComponents,thisComponentPlan, &
359 thisComm, this_plan,status)
360 integer, intent(in) :: nDim !! Number of dimensions for gather scatter plan
361 integer, intent(in) :: nComponents
362 type (mpiComponentPlan), intent(in), target :: thisComponentPlan
363 type (gs_plan), intent(out) :: this_plan !! MPI Component Plan
364 integer, intent(in) :: thisComm !! MPI communicator for this plan
365
366 integer :: arraySize !! size to allocate plan for
367 integer, intent(out), optional :: status
368 integer :: ierror
369 integer :: i,junk
370
371 if (present(status)) status = 0
372
373
374
375 !! Set the gsComponentPlan pointer
376 !! to the component plan we want to use for this gather/scatter plan.
377 !! WARNING: this could be dangerous since thisComponentPlan was originally
378 !! allocated in C++ and there is a significant difference between C and
379 !! F95 pointers....
380 this_plan%gsComponentPlan => thisComponentPlan
381
382 ! Set this plan size for displs array.
383 this_plan%gsPlanSize = nDim * nComponents
384
385 ! Duplicate communicator for this plan
386 call mpi_comm_dup(thisComm,this_plan%myPlanComm,mpi_err)
387 if (mpi_err /= 0) then
388 if (present(status)) status = -1
389 return
390 end if
391 call mpi_comm_rank(this_plan%myPlanComm,this_plan%myPlanRank,mpi_err)
392 if (mpi_err /= 0) then
393 if (present(status)) status = -1
394 return
395 end if
396
397 call mpi_comm_size(this_plan%myPlanComm,this_plan%planNprocs,mpi_err)
398
399 if (mpi_err /= 0) then
400 if (present(status)) status = -1
401 return
402 end if
403
404 !! The counts and displacements arrays are indexed from 0 to be compatible
405 !! with MPI arrays.
406 allocate (this_plan%counts(0:this_plan%planNprocs-1),STAT=ierror)
407 if (ierror /= 0) then
408 if (present(status)) status = -1
409 return
410 end if
411
412 allocate (this_plan%displs(0:this_plan%planNprocs-1),STAT=ierror)
413 if (ierror /= 0) then
414 if (present(status)) status = -1
415 return
416 end if
417
418 !! Gather every processor's local plan size into the counts array (one entry per processor).
419 call mpi_allgather(this_plan%gsPlanSize,1,mpi_integer,this_plan%counts, &
420 1,mpi_integer,thisComm,mpi_err)
421
422 if (mpi_err /= 0) then
423 if (present(status)) status = -1
424 return
425 end if
426
427
428 !! Figure out the total number of elements in this plan.
429 this_plan%globalPlanSize = sum(this_plan%counts)
430
431
432 !! initialize plan displacements.
433 this_plan%displs(0) = 0
434 do i = 1, this_plan%planNprocs - 1,1
435 this_plan%displs(i) = this_plan%displs(i-1) + this_plan%counts(i-1)
436 end do
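!! Worked example (hypothetical counts): if the three processors in a plan
!! report gsPlanSize values of 6, 9, and 3, then counts = (/ 6, 9, 3 /),
!! globalPlanSize = 18, and the loop above produces displs = (/ 0, 6, 15 /).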
437
438
439 end subroutine plan_gather_scatter
440
441
442 subroutine unplan_gather_scatter(this_plan)
443 type (gs_plan), intent(inout) :: this_plan
444
445
446 this_plan%gsComponentPlan => null()
447 call mpi_comm_free(this_plan%myPlanComm,mpi_err)
448
449 deallocate(this_plan%counts)
450 deallocate(this_plan%displs)
451
452 end subroutine unplan_gather_scatter
453
454 subroutine gather_integer( sbuffer, rbuffer, this_plan, status)
455
456 type (gs_plan), intent(in) :: this_plan
457 integer, dimension(:), intent(in) :: sbuffer
458 integer, dimension(:), intent(inout) :: rbuffer
459 integer :: noffset
460 integer, intent(out), optional :: status
461 integer :: i
462
463
464
465 if (present(status)) status = 0
466 noffset = this_plan%displs(this_plan%myPlanRank)
467
468 ! if (getmyNode() == 1) then
469 ! write(*,*) "Node 0 printing allgatherv vars"
470 ! write(*,*) "Noffset: ", noffset
471 ! write(*,*) "PlanSize: ", this_plan%gsPlanSize
472 ! write(*,*) "PlanComm: ", this_plan%myPlanComm
473 ! end if
474
475 call mpi_allgatherv(sbuffer,this_plan%gsPlanSize, mpi_integer, &
476 rbuffer,this_plan%counts,this_plan%displs,mpi_integer, &
477 this_plan%myPlanComm, mpi_err)
478
479 if (mpi_err /= 0) then
480 if (present(status)) status = -1
481 endif
482
483 end subroutine gather_integer
484
485 subroutine gather_double( sbuffer, rbuffer, this_plan, status)
486
487 type (gs_plan), intent(in) :: this_plan
488 real( kind = DP ), dimension(:), intent(in) :: sbuffer
489 real( kind = DP ), dimension(:), intent(inout) :: rbuffer
490 integer :: noffset
491 integer, intent(out), optional :: status
492
493
494 if (present(status)) status = 0
495 noffset = this_plan%displs(this_plan%myPlanRank)
496
497 call mpi_allgatherv(sbuffer,this_plan%gsPlanSize, mpi_double_precision, &
498 rbuffer,this_plan%counts,this_plan%displs,mpi_double_precision, &
499 this_plan%myPlanComm, mpi_err)
500
501 if (mpi_err /= 0) then
502 if (present(status)) status = -1
503 endif
504
505 end subroutine gather_double
506
507 subroutine gather_double_2d( sbuffer, rbuffer, this_plan, status)
508
509 type (gs_plan), intent(in) :: this_plan
510 real( kind = DP ), dimension(:,:), intent(in) :: sbuffer
511 real( kind = DP ), dimension(:,:), intent(inout) :: rbuffer
512 integer :: noffset,i,ierror
513 integer, intent(out), optional :: status
514
515 external mpi_allgatherv
516
517 if (present(status)) status = 0
518
519 ! noffset = this_plan%displs(this_plan%me)
520
521 call mpi_allgatherv(sbuffer,this_plan%gsPlanSize, mpi_double_precision, &
522 rbuffer,this_plan%counts,this_plan%displs,mpi_double_precision, &
523 this_plan%myPlanComm, mpi_err)
524
525 if (mpi_err /= 0) then
526 if (present(status)) status = -1
527 endif
528
529 end subroutine gather_double_2d
530
531 subroutine scatter_double( sbuffer, rbuffer, this_plan, status)
532
533 type (gs_plan), intent(in) :: this_plan
534 real( kind = DP ), dimension(:), intent(in) :: sbuffer
535 real( kind = DP ), dimension(:), intent(inout) :: rbuffer
536 integer, intent(out), optional :: status
537 external mpi_reduce_scatter
538
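!! Editorial note: mpi_reduce_scatter sums sbuffer element-wise across all
!! processors in this plan's communicator and then hands each rank the
!! segment of length counts(rank) corresponding to its own local particles.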
539 if (present(status)) status = 0
540
541 call mpi_reduce_scatter(sbuffer,rbuffer, this_plan%counts, &
542 mpi_double_precision, MPI_SUM, this_plan%myPlanComm, mpi_err)
543
544 if (mpi_err /= 0) then
545 if (present(status)) status = -1
546 endif
547
548 end subroutine scatter_double
549
550 subroutine scatter_double_2d( sbuffer, rbuffer, this_plan, status)
551
552 type (gs_plan), intent(in) :: this_plan
553 real( kind = DP ), dimension(:,:), intent(in) :: sbuffer
554 real( kind = DP ), dimension(:,:), intent(inout) :: rbuffer
555 integer, intent(out), optional :: status
556 external mpi_reduce_scatter
557
558 if (present(status)) status = 0
559 call mpi_reduce_scatter(sbuffer,rbuffer, this_plan%counts, &
560 mpi_double_precision, MPI_SUM, this_plan%myPlanComm, mpi_err)
561
562 if (mpi_err /= 0) then
563 if (present(status)) status = -1
564 endif
565
566 end subroutine scatter_double_2d
567
568
569 subroutine setTags(tags,status)
570 integer, dimension(:) :: tags
571 integer :: status
572
573 integer :: alloc_stat
574
575 integer :: ncol
576 integer :: nrow
577
578 status = 0
579 ! allocate row arrays
580 nrow = getNrow(plan_row)
581 ncol = getNcol(plan_col)
582
583 if (.not. allocated(tagRow)) then
584 allocate(tagRow(nrow),STAT=alloc_stat)
585 if (alloc_stat /= 0 ) then
586 status = -1
587 return
588 endif
589 else
590 deallocate(tagRow)
591 allocate(tagRow(nrow),STAT=alloc_stat)
592 if (alloc_stat /= 0 ) then
593 status = -1
594 return
595 endif
596
597 endif
598 ! allocate column arrays
599 if (.not. allocated(tagColumn)) then
600 allocate(tagColumn(ncol),STAT=alloc_stat)
601 if (alloc_stat /= 0 ) then
602 status = -1
603 return
604 endif
605 else
606 deallocate(tagColumn)
607 allocate(tagColumn(ncol),STAT=alloc_stat)
608 if (alloc_stat /= 0 ) then
609 status = -1
610 return
611 endif
612 endif
613
614 call gather(tags,tagRow,plan_row)
615 call gather(tags,tagColumn,plan_col)
616
617 end subroutine setTags
618
619 pure function getNcol(thisplan) result(ncol)
620 type (gs_plan), intent(in) :: thisplan
621 integer :: ncol
622 ncol = thisplan%gsComponentPlan%nComponentsColumn
623 end function getNcol
624
625 pure function getNrow(thisplan) result(nrow)
626 type (gs_plan), intent(in) :: thisplan
627 integer :: nrow
628 nrow = thisplan%gsComponentPlan%nComponentsRow
629 end function getNrow
630
631 function isMPISimSet() result(isthisSimSet)
632 logical :: isthisSimSet
633 if (isSimSet) then
634 isthisSimSet = .true.
635 else
636 isthisSimSet = .false.
637 endif
638 end function isMPISimSet
639
640
641
642 subroutine printComponentPlan(this_plan,printNode)
643
644 type (mpiComponentPlan), intent(in) :: this_plan
645 integer, optional :: printNode
646 logical :: print_me
647 print_me = .false. !! assign at run time; an initializer here would imply SAVE
648 if (present(printNode)) then
649 if (printNode == mpiSim%myNode) print_me = .true.
650 else
651 print_me = .true.
652 endif
653
654 if (print_me) then
655 write(default_error,*) "printComponentPlan: writing component plan"
656
657 write(default_error,*) "nMolGlobal: ", this_plan%nMolGlobal
658 write(default_error,*) "nAtomsGlobal: ", this_plan%nAtomsGlobal
659 write(default_error,*) "nBondsGlobal: ", this_plan%nBondsGlobal
660 write(default_error,*) "nTorsionsGlobal: ", this_plan%nTorsionsGlobal
661 write(default_error,*) "nSRIGlobal: ", this_plan%nSRIGlobal
662 write(default_error,*) "myMolStart: ", this_plan%myMolStart
663 write(default_error,*) "myMolEnd: ", this_plan%myMolEnd
664 write(default_error,*) "myMol: ", this_plan%myMol
665 write(default_error,*) "myNlocal: ", this_plan%myNlocal
666 write(default_error,*) "myNode: ", this_plan%myNode
667 write(default_error,*) "numberProcessors: ", this_plan%numberProcessors
668 write(default_error,*) "rowComm: ", this_plan%rowComm
669 write(default_error,*) "columnComm: ", this_plan%columnComm
670 write(default_error,*) "numberRows: ", this_plan%numberRows
671 write(default_error,*) "numberColumns: ", this_plan%numberColumns
672 write(default_error,*) "nComponentsRow: ", this_plan%nComponentsRow
673 write(default_error,*) "nComponentsColumn: ", this_plan%nComponentsColumn
674 write(default_error,*) "rowIndex: ", this_plan%rowIndex
675 write(default_error,*) "columnIndex: ", this_plan%columnIndex
676 endif
677 end subroutine printComponentPlan
678
679 function getMyNode() result(myNode)
680 integer :: myNode
681 myNode = mpiSim%myNode
682 end function getMyNode
683
684
685 end module mpiSimulation
686