root/group/trunk/OOPSE/libmdtools/mpiSimulation_module.F90
Revision: 432
Committed: Thu Mar 27 23:33:40 2003 UTC by chuckv
File size: 21196 byte(s)
Log Message:
MPI buggy, fixed mpiRefresh issue.

#ifdef IS_MPI

!! MPI support for long range forces using force decomposition
!! on a square grid of processors.
!! Corresponds to mpiSimulation.cpp for C++
!! mpi_module also contains a private interface for mpi f90 routines.
!!
!! @author Charles F. Vardeman II
!! @author Matthew Meineke
!! @version $Id: mpiSimulation_module.F90,v 1.3 2003-03-27 23:33:40 chuckv Exp $, $Date: 2003-03-27 23:33:40 $, $Name: not supported by cvs2svn $, $Revision: 1.3 $

module mpiSimulation
  use definitions
  use mpi
  implicit none
  PRIVATE


  !! PUBLIC Subroutines contained in this module
  !! gather and scatter are a generic interface
  !! to gather and scatter routines
  public :: gather, scatter
  public :: setupSimParallel
  public :: replanSimParallel
  public :: getNcol
  public :: getNrow
  public :: isMPISimSet
  public :: printComponentPlan
  public :: getMyNode

  !! PUBLIC Subroutines contained in MPI module
  public :: mpi_bcast
  public :: mpi_allreduce
  public :: mpi_reduce
  public :: mpi_send
  public :: mpi_recv
  public :: mpi_get_processor_name
  public :: mpi_finalize

  !! PUBLIC mpi variables
  public :: mpi_comm_world
  public :: mpi_character
  public :: mpi_integer
  public :: mpi_double_precision
  public :: mpi_sum
  public :: mpi_max
  public :: mpi_status_size
  public :: mpi_any_source

  !! Safety logical to prevent access to ComponentPlan until
  !! set by C++.
  logical :: ComponentPlanSet = .false.


  !! generic mpi error declaration.
  integer, public :: mpi_err


  !! Include mpiComponentPlan type. mpiComponentPlan is a
  !! dual header file for both c and fortran.
#define __FORTRAN90
#include "mpiComponentPlan.h"


  !! Tags used during force loop for parallel simulation
  integer, allocatable, dimension(:) :: tagLocal
  integer, public, allocatable, dimension(:) :: tagRow
  integer, public, allocatable, dimension(:) :: tagColumn

  !! Logical set true if mpiSimulation has been initialized
  logical :: isSimSet = .false.


  type (mpiComponentPlan) :: mpiSim

  !! gs_plan contains plans for gather and scatter routines
  type, public :: gs_plan
     private
     type (mpiComponentPlan), pointer :: gsComponentPlan => NULL()
     integer :: gsPlanSize     !! size of this plan (nDim*nComponents)
     integer :: globalPlanSize !! size of all components in plan
     integer, dimension(:), pointer :: displs !! Displacements array for mpi indexed from 0.
     integer, dimension(:), pointer :: counts !! Counts array for mpi indexed from 0.
     integer :: myPlanComm     !! My communicator for this plan
     integer :: myPlanRank     !! My rank in this plan
     integer :: planNprocs     !! Number of processors in this plan
  end type gs_plan
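
  !! Example (illustrative, not from the original source): a 3d plan for
  !! 100 local components has gsPlanSize = 3 * 100 = 300, and
  !! globalPlanSize sums these sizes over every processor in the plan's
  !! communicator.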

  ! plans for different decompositions
  type (gs_plan), public :: plan_row
  type (gs_plan), public :: plan_row3d
  type (gs_plan), public :: plan_col
  type (gs_plan), public :: plan_col3d
  type (gs_plan), public :: plan_row_Rotation
  type (gs_plan), public :: plan_col_Rotation

  type (mpiComponentPlan), pointer :: simComponentPlan

  ! interface for gather scatter routines
  !! Generic interface for gather.
  !! Gathers a local array into a row or column array.
  !! Interface provided for integer, double and double
  !! rank 2 arrays.
  interface gather
     module procedure gather_integer
     module procedure gather_double
     module procedure gather_double_2d
  end interface

  !! Generic interface for scatter.
  !! Scatters a row or column array, adding components
  !! and reducing them to a local nComponent array.
  !! Interface provided for double and double rank=2 arrays.
  interface scatter
     module procedure scatter_double
     module procedure scatter_double_2d
  end interface
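
  !! A hedged usage sketch (the array names are illustrative, not from
  !! this module): once the plans are built, a row/column gather and the
  !! matching scatter for rank-2 position and force arrays look like
  !!
  !!   call gather(q, q_Row, plan_row3d)          ! local -> row array
  !!   call gather(q, q_Col, plan_col3d)          ! local -> column array
  !!   ! ... compute partial forces into f_Row and f_Col ...
  !!   call scatter(f_Row, f_local, plan_row3d)   ! sum row pieces back locally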


contains

  !! Sets up mpiComponentPlan with structure passed from C++.
  subroutine setupSimParallel(thisComponentPlan, ntags, tags, status)
    ! Passed Arguments
    ! integer, intent(inout) :: nDim !! Number of dimensions
    !! mpiComponentPlan struct from C
    type (mpiComponentPlan), intent(inout) :: thisComponentPlan
    !! Number of tags passed, nlocal
    integer, intent(in) :: ntags
    !! Result status, 0 = normal, -1 = error
    integer, intent(out) :: status
    integer :: localStatus
    !! Global reference tag for local particles
    integer, dimension(ntags), intent(inout) :: tags


    status = 0
    if (componentPlanSet) then
       return
    endif
    componentPlanSet = .true.

    !! copy c component plan to fortran
    mpiSim = thisComponentPlan
    write(*,*) "Setting up simParallel"

    call make_Force_Grid(mpiSim, localStatus)
    if (localStatus /= 0) then
       write(default_error,*) "Error creating force grid"
       status = -1
       return
    endif

    call updateGridComponents(mpiSim, localStatus)
    if (localStatus /= 0) then
       write(default_error,*) "Error updating grid components"
       status = -1
       return
    endif


    !! initialize gather and scatter plans used in this simulation
    call plan_gather_scatter(1, mpiSim%myNlocal, &
         mpiSim, mpiSim%rowComm, plan_row)
    call plan_gather_scatter(nDim, mpiSim%myNlocal, &
         mpiSim, mpiSim%rowComm, plan_row3d)
    call plan_gather_scatter(9, mpiSim%myNlocal, &
         mpiSim, mpiSim%rowComm, plan_row_Rotation)
    call plan_gather_scatter(1, mpiSim%myNlocal, &
         mpiSim, mpiSim%columnComm, plan_col)
    call plan_gather_scatter(nDim, mpiSim%myNlocal, &
         mpiSim, mpiSim%columnComm, plan_col3d)
    call plan_gather_scatter(9, mpiSim%myNlocal, &
         mpiSim, mpiSim%columnComm, plan_col_Rotation)


    ! Initialize tags
    call setTags(tags, localStatus)
    if (localStatus /= 0) then
       status = -1
       return
    endif
    isSimSet = .true.

    ! call printComponentPlan(mpiSim,0)
  end subroutine setupSimParallel
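
  !! A minimal calling sketch (assumed; the real caller lives in
  !! mpiSimulation.cpp, so the names below are hypothetical):
  !!
  !!   call setupSimParallel(plan, nlocal, globalTags, status)
  !!   if (status /= 0) write(default_error,*) "parallel setup failed"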

  subroutine replanSimParallel(thisComponentPlan, status)
    ! Passed Arguments
    !! mpiComponentPlan struct from C
    type (mpiComponentPlan), intent(inout) :: thisComponentPlan
    integer, intent(out) :: status
    integer :: localStatus
    integer :: mpierror
    status = 0

    call updateGridComponents(thisComponentPlan, localStatus)
    if (localStatus /= 0) then
       status = -1
       return
    endif

    !! Unplan Gather Scatter plans
    call unplan_gather_scatter(plan_row)
    call unplan_gather_scatter(plan_row3d)
    call unplan_gather_scatter(plan_row_Rotation)
    call unplan_gather_scatter(plan_col)
    call unplan_gather_scatter(plan_col3d)
    call unplan_gather_scatter(plan_col_Rotation)

    !! initialize gather and scatter plans used in this simulation;
    !! row plans use the row communicator, column plans the column one.
    call plan_gather_scatter(1, thisComponentPlan%myNlocal, &
         thisComponentPlan, thisComponentPlan%rowComm, plan_row)
    call plan_gather_scatter(nDim, thisComponentPlan%myNlocal, &
         thisComponentPlan, thisComponentPlan%rowComm, plan_row3d)
    call plan_gather_scatter(9, thisComponentPlan%myNlocal, &
         thisComponentPlan, thisComponentPlan%rowComm, plan_row_Rotation)
    call plan_gather_scatter(1, thisComponentPlan%myNlocal, &
         thisComponentPlan, thisComponentPlan%columnComm, plan_col)
    call plan_gather_scatter(nDim, thisComponentPlan%myNlocal, &
         thisComponentPlan, thisComponentPlan%columnComm, plan_col3d)
    call plan_gather_scatter(9, thisComponentPlan%myNlocal, &
         thisComponentPlan, thisComponentPlan%columnComm, plan_col_Rotation)


  end subroutine replanSimParallel

  !! Updates number of row and column components for long range forces.
  subroutine updateGridComponents(thisComponentPlan, status)
    type (mpiComponentPlan) :: thisComponentPlan !! mpiComponentPlan

    !! Status return
    !! - 0 Success
    !! - -1 Failure
    integer, intent(out) :: status
    integer :: nComponentsLocal
    integer :: nComponentsRow = 0
    integer :: nComponentsColumn = 0
    integer :: mpiErrors

    status = 0
    if (.not. componentPlanSet) return

    if (thisComponentPlan%myNlocal == 0) then
       status = -1
       return
    endif

    nComponentsLocal = thisComponentPlan%myNlocal

    write(*,*) "UpdateGridComponents: myNlocal ", nComponentsLocal
    call mpi_allreduce(nComponentsLocal, nComponentsRow, 1, mpi_integer, &
         mpi_sum, thisComponentPlan%rowComm, mpiErrors)
    if (mpiErrors /= 0) then
       status = -1
       return
    endif

    call mpi_allreduce(nComponentsLocal, nComponentsColumn, 1, mpi_integer, &
         mpi_sum, thisComponentPlan%columnComm, mpiErrors)
    if (mpiErrors /= 0) then
       status = -1
       return
    endif

    thisComponentPlan%nComponentsRow = nComponentsRow
    thisComponentPlan%nComponentsColumn = nComponentsColumn
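
    ! Worked example (illustrative): on a 2 x 2 grid with myNlocal = 10,
    ! 12, 11, 9 on ranks 0..3, the row allreduce gives ranks 0 and 1
    ! nComponentsRow = 10 + 12 = 22, while the column allreduce gives
    ! ranks 0 and 2 nComponentsColumn = 10 + 11 = 21.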
    write(*,*) "UpdateGridComponents: myNRow ", &
         thisComponentPlan%nComponentsRow
    write(*,*) "UpdateGridComponents: myNColumn ", &
         thisComponentPlan%nComponentsColumn

  end subroutine updateGridComponents


  !! Creates a square force decomposition of processors into row and column
  !! communicators.
  subroutine make_Force_Grid(thisComponentPlan, status)
    type (mpiComponentPlan) :: thisComponentPlan
    integer, intent(out) :: status !! status returns -1 if error
    integer :: nColumnsMax      !! Maximum number of columns
    integer :: nWorldProcessors !! Total number of processors in World comm.
    integer :: rowIndex         !! Row for this processor.
    integer :: columnIndex      !! Column for this processor.
    integer :: nRows            !! Total number of rows.
    integer :: nColumns         !! Total number of columns.
    integer :: mpiErrors        !! MPI errors.
    integer :: rowCommunicator  !! MPI row communicator.
    integer :: columnCommunicator
    integer :: myWorldRank
    integer :: i


    if (.not. ComponentPlanSet) return
    status = 0

    !! We make a dangerous assumption here that if numberProcessors is
    !! zero, then we need to get the information from MPI.
    if (thisComponentPlan%numberProcessors == 0) then
       call mpi_comm_size(MPI_COMM_WORLD, nWorldProcessors, mpiErrors)
       if (mpiErrors /= 0) then
          status = -1
          return
       endif
       call mpi_comm_rank(MPI_COMM_WORLD, myWorldRank, mpiErrors)
       if (mpiErrors /= 0) then
          status = -1
          return
       endif
    else
       nWorldProcessors = thisComponentPlan%numberProcessors
       myWorldRank = thisComponentPlan%myNode
    endif


    nColumnsMax = nint(sqrt(real(nWorldProcessors, kind=dp)))

    do i = 1, nColumnsMax
       if (mod(nWorldProcessors, i) == 0) nColumns = i
    end do

    nRows = nWorldProcessors/nColumns
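
    ! Worked example (illustrative): nWorldProcessors = 12 gives
    ! nColumnsMax = nint(sqrt(12.)) = 3; the largest divisor of 12 not
    ! exceeding 3 is nColumns = 3, so nRows = 4. A prime count such as 7
    ! falls back to a degenerate 7 x 1 grid.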

    rowIndex = myWorldRank/nColumns

    call mpi_comm_split(mpi_comm_world, rowIndex, 0, rowCommunicator, mpiErrors)
    if (mpiErrors /= 0) then
       write(default_error,*) "MPI comm split failed at row communicator"
       status = -1
       return
    endif

    columnIndex = mod(myWorldRank, nColumns)
    call mpi_comm_split(mpi_comm_world, columnIndex, 0, columnCommunicator, mpiErrors)
    if (mpiErrors /= 0) then
       write(default_error,*) "MPI comm split failed at columnCommunicator"
       status = -1
       return
    endif

    ! Set appropriate components of thisComponentPlan
    thisComponentPlan%rowComm = rowCommunicator
    thisComponentPlan%columnComm = columnCommunicator
    thisComponentPlan%rowIndex = rowIndex
    thisComponentPlan%columnIndex = columnIndex
    thisComponentPlan%numberRows = nRows
    thisComponentPlan%numberColumns = nColumns


  end subroutine make_Force_Grid


  !! initializes a gather scatter plan
  subroutine plan_gather_scatter(nDim, nComponents, thisComponentPlan, &
       thisComm, this_plan, status)
    integer, intent(in) :: nDim !! Number of dimensions for gather scatter plan
    integer, intent(in) :: nComponents
    type (mpiComponentPlan), intent(in), target :: thisComponentPlan
    type (gs_plan), intent(out) :: this_plan !! MPI Component Plan
    integer, intent(in) :: thisComm !! MPI communicator for this plan

    integer :: arraySize !! size to allocate plan for
    integer, intent(out), optional :: status
    integer :: ierror
    integer :: i, junk

    if (present(status)) status = 0


    !! Set the gsComponentPlan pointer
    !! to the component plan we want to use for this gather scatter plan.
    !! WARNING this could be dangerous since thisComponentPlan was originally
    !! allocated in C++ and there is a significant difference between c and
    !! f95 pointers....
    this_plan%gsComponentPlan => thisComponentPlan

    ! Set this plan size for displs array.
    this_plan%gsPlanSize = nDim * nComponents

    ! Duplicate communicator for this plan
    call mpi_comm_dup(thisComm, this_plan%myPlanComm, mpi_err)
    if (mpi_err /= 0) then
       if (present(status)) status = -1
       return
    end if
    call mpi_comm_rank(this_plan%myPlanComm, this_plan%myPlanRank, mpi_err)
    if (mpi_err /= 0) then
       if (present(status)) status = -1
       return
    end if

    call mpi_comm_size(this_plan%myPlanComm, this_plan%planNprocs, mpi_err)
    if (mpi_err /= 0) then
       if (present(status)) status = -1
       return
    end if

    !! counts and displacements arrays are indexed from 0 to be compatible
    !! with MPI arrays.
    allocate (this_plan%counts(0:this_plan%planNprocs-1), STAT=ierror)
    if (ierror /= 0) then
       if (present(status)) status = -1
       return
    end if

    allocate (this_plan%displs(0:this_plan%planNprocs-1), STAT=ierror)
    if (ierror /= 0) then
       if (present(status)) status = -1
       return
    end if

    !! gather all the local sizes into an array of length planNprocs.
    call mpi_allgather(this_plan%gsPlanSize, 1, mpi_integer, this_plan%counts, &
         1, mpi_integer, thisComm, mpi_err)
    if (mpi_err /= 0) then
       if (present(status)) status = -1
       return
    end if


    !! figure out the total number of particles in this plan
    this_plan%globalPlanSize = sum(this_plan%counts)


    !! initialize plan displacements as a running prefix sum of counts.
    this_plan%displs(0) = 0
    do i = 1, this_plan%planNprocs - 1, 1
       this_plan%displs(i) = this_plan%displs(i-1) + this_plan%counts(i-1)
    end do
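
    ! Worked example (illustrative): counts = (/ 3, 5, 4 /) yields
    ! displs = (/ 0, 3, 8 /) and globalPlanSize = 12, exactly the offsets
    ! the mpi_allgatherv calls below expect.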

  end subroutine plan_gather_scatter


  subroutine unplan_gather_scatter(this_plan)
    type (gs_plan), intent(inout) :: this_plan

    this_plan%gsComponentPlan => null()
    call mpi_comm_free(this_plan%myPlanComm, mpi_err)

    deallocate(this_plan%counts)
    deallocate(this_plan%displs)

  end subroutine unplan_gather_scatter

  subroutine gather_integer(sbuffer, rbuffer, this_plan, status)

    type (gs_plan), intent(in) :: this_plan
    integer, dimension(:), intent(in) :: sbuffer
    integer, dimension(:), intent(inout) :: rbuffer
    integer :: noffset
    integer, intent(out), optional :: status
    integer :: i


    if (present(status)) status = 0
    noffset = this_plan%displs(this_plan%myPlanRank)

    ! if (getmyNode() == 1) then
    !    write(*,*) "Node 0 printing allgatherv vars"
    !    write(*,*) "Noffset: ", noffset
    !    write(*,*) "PlanSize: ", this_plan%gsPlanSize
    !    write(*,*) "PlanComm: ", this_plan%myPlanComm
    ! end if

    call mpi_allgatherv(sbuffer, this_plan%gsPlanSize, mpi_integer, &
         rbuffer, this_plan%counts, this_plan%displs, mpi_integer, &
         this_plan%myPlanComm, mpi_err)

    if (mpi_err /= 0) then
       if (present(status)) status = -1
    endif

  end subroutine gather_integer

  subroutine gather_double(sbuffer, rbuffer, this_plan, status)

    type (gs_plan), intent(in) :: this_plan
    real( kind = DP ), dimension(:), intent(in) :: sbuffer
    real( kind = DP ), dimension(:), intent(inout) :: rbuffer
    integer :: noffset
    integer, intent(out), optional :: status


    if (present(status)) status = 0
    noffset = this_plan%displs(this_plan%myPlanRank)

    call mpi_allgatherv(sbuffer, this_plan%gsPlanSize, mpi_double_precision, &
         rbuffer, this_plan%counts, this_plan%displs, mpi_double_precision, &
         this_plan%myPlanComm, mpi_err)

    if (mpi_err /= 0) then
       if (present(status)) status = -1
    endif

  end subroutine gather_double

  subroutine gather_double_2d(sbuffer, rbuffer, this_plan, status)

    type (gs_plan), intent(in) :: this_plan
    real( kind = DP ), dimension(:,:), intent(in) :: sbuffer
    real( kind = DP ), dimension(:,:), intent(inout) :: rbuffer
    integer :: noffset, i, ierror
    integer, intent(out), optional :: status

    if (present(status)) status = 0

    ! noffset = this_plan%displs(this_plan%me)

    call mpi_allgatherv(sbuffer, this_plan%gsPlanSize, mpi_double_precision, &
         rbuffer, this_plan%counts, this_plan%displs, mpi_double_precision, &
         this_plan%myPlanComm, mpi_err)

    if (mpi_err /= 0) then
       if (present(status)) status = -1
    endif

  end subroutine gather_double_2d

  subroutine scatter_double(sbuffer, rbuffer, this_plan, status)

    type (gs_plan), intent(in) :: this_plan
    real( kind = DP ), dimension(:), intent(in) :: sbuffer
    real( kind = DP ), dimension(:), intent(inout) :: rbuffer
    integer, intent(out), optional :: status

    if (present(status)) status = 0

    call mpi_reduce_scatter(sbuffer, rbuffer, this_plan%counts, &
         mpi_double_precision, MPI_SUM, this_plan%myPlanComm, mpi_err)

    if (mpi_err /= 0) then
       if (present(status)) status = -1
    endif

  end subroutine scatter_double

  subroutine scatter_double_2d(sbuffer, rbuffer, this_plan, status)

    type (gs_plan), intent(in) :: this_plan
    real( kind = DP ), dimension(:,:), intent(in) :: sbuffer
    real( kind = DP ), dimension(:,:), intent(inout) :: rbuffer
    integer, intent(out), optional :: status

    if (present(status)) status = 0
    call mpi_reduce_scatter(sbuffer, rbuffer, this_plan%counts, &
         mpi_double_precision, MPI_SUM, this_plan%myPlanComm, mpi_err)

    if (mpi_err /= 0) then
       if (present(status)) status = -1
    endif

  end subroutine scatter_double_2d


  subroutine setTags(tags, status)
    integer, dimension(:) :: tags
    integer :: status

    integer :: alloc_stat

    integer :: ncol
    integer :: nrow

    status = 0

    nrow = getNrow(plan_row)
    ncol = getNcol(plan_col)

    ! allocate row arrays
    if (allocated(tagRow)) deallocate(tagRow)
    allocate(tagRow(nrow), STAT=alloc_stat)
    if (alloc_stat /= 0) then
       status = -1
       return
    endif

    ! allocate column arrays
    if (allocated(tagColumn)) deallocate(tagColumn)
    allocate(tagColumn(ncol), STAT=alloc_stat)
    if (alloc_stat /= 0) then
       status = -1
       return
    endif

    call gather(tags, tagRow, plan_row)
    call gather(tags, tagColumn, plan_col)

  end subroutine setTags
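
  !! Illustrative note: after setTags, every processor in a given row holds
  !! an identical tagRow, the concatenation of the global tags of all
  !! processors in that row laid out at the displacements computed in
  !! plan_gather_scatter; tagColumn is the column analogue.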

  pure function getNcol(thisplan) result(ncol)
    type (gs_plan), intent(in) :: thisplan
    integer :: ncol
    ncol = thisplan%gsComponentPlan%nComponentsColumn
  end function getNcol

  pure function getNrow(thisplan) result(nrow)
    type (gs_plan), intent(in) :: thisplan
    integer :: nrow
    nrow = thisplan%gsComponentPlan%nComponentsRow
  end function getNrow

  function isMPISimSet() result(isthisSimSet)
    logical :: isthisSimSet
    isthisSimSet = isSimSet
  end function isMPISimSet


  subroutine printComponentPlan(this_plan, printNode)

    type (mpiComponentPlan), intent(in) :: this_plan
    integer, optional :: printNode
    logical :: print_me

    ! assigned at run time; an initializer here would imply the SAVE
    ! attribute and carry a stale value into later calls.
    print_me = .false.

    if (present(printNode)) then
       if (printNode == mpiSim%myNode) print_me = .true.
    else
       print_me = .true.
    endif

    if (print_me) then
       write(default_error,*) "SetupSimParallel: writing component plan"

       write(default_error,*) "nMolGlobal: ", mpiSim%nMolGlobal
       write(default_error,*) "nAtomsGlobal: ", mpiSim%nAtomsGlobal
       write(default_error,*) "nBondsGlobal: ", mpiSim%nBondsGlobal
       write(default_error,*) "nTorsionsGlobal: ", mpiSim%nTorsionsGlobal
       write(default_error,*) "nSRIGlobal: ", mpiSim%nSRIGlobal
       write(default_error,*) "myNlocal: ", mpiSim%myNlocal
       write(default_error,*) "myNode: ", mpiSim%myNode
       write(default_error,*) "numberProcessors: ", mpiSim%numberProcessors
       write(default_error,*) "rowComm: ", mpiSim%rowComm
       write(default_error,*) "columnComm: ", mpiSim%columnComm
       write(default_error,*) "numberRows: ", mpiSim%numberRows
       write(default_error,*) "numberColumns: ", mpiSim%numberColumns
       write(default_error,*) "nComponentsRow: ", mpiSim%nComponentsRow
       write(default_error,*) "nComponentsColumn: ", mpiSim%nComponentsColumn
       write(default_error,*) "rowIndex: ", mpiSim%rowIndex
       write(default_error,*) "columnIndex: ", mpiSim%columnIndex
    endif
  end subroutine printComponentPlan

  function getMyNode() result(myNode)
    integer :: myNode
    myNode = mpiSim%myNode
  end function getMyNode


end module mpiSimulation

#endif // is_mpi