root/group/trunk/OOPSE/libmdtools/mpiSimulation_module.F90
Revision: 441
Committed: Tue Apr 1 16:50:14 2003 UTC by chuckv
File size: 21292 byte(s)
Log Message:
more bug fixes....

#ifdef IS_MPI

!! MPI support for long range forces using force decomposition
!! on a square grid of processors.
!! Corresponds to mpiSimulation.cpp for C++
!! mpi_module also contains a private interface for mpi f90 routines.
!!
!! @author Charles F. Vardeman II
!! @author Matthew Meineke
!! @version $Id: mpiSimulation_module.F90,v 1.4 2003-04-01 16:50:14 chuckv Exp $, $Date: 2003-04-01 16:50:14 $, $Name: not supported by cvs2svn $, $Revision: 1.4 $

module mpiSimulation
  use definitions
  use mpi
  implicit none
  PRIVATE


!! PUBLIC Subroutines contained in this module
!! gather and scatter are a generic interface
!! to gather and scatter routines
  public :: gather, scatter
  public :: setupSimParallel
  public :: replanSimParallel
  public :: getNcol
  public :: getNrow
  public :: isMPISimSet
  public :: printComponentPlan
  public :: getMyNode

!! PUBLIC Subroutines contained in MPI module
  public :: mpi_bcast
  public :: mpi_allreduce
  public :: mpi_reduce
  public :: mpi_send
  public :: mpi_recv
  public :: mpi_get_processor_name
  public :: mpi_finalize

!! PUBLIC mpi variables
  public :: mpi_comm_world
  public :: mpi_character
  public :: mpi_integer
  public :: mpi_double_precision
  public :: mpi_sum
  public :: mpi_max
  public :: mpi_status_size
  public :: mpi_any_source

!! Safety logical to prevent access to ComponentPlan until
!! set by C++.
  logical :: ComponentPlanSet = .false.

!! generic mpi error declaration.
  integer, public :: mpi_err

!! Include the mpiComponentPlan type. mpiComponentPlan is a
!! dual header file for both C and Fortran.
#define __FORTRAN90
#include "mpiComponentPlan.h"
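
!! A sketch of how such a dual C/Fortran header can be laid out (an
!! assumption for illustration only; the real mpiComponentPlan.h may
!! differ).  The __FORTRAN90 guard selects the Fortran view of the
!! same structure:
!!
!!   #ifdef __FORTRAN90
!!     type :: mpiComponentPlan
!!       sequence
!!       integer :: nMolGlobal, nAtomsGlobal, myNode, myNlocal
!!       ! ... remaining fields used below ...
!!     end type mpiComponentPlan
!!   #else
!!     typedef struct { int nMolGlobal; int nAtomsGlobal; /* ... */ }
!!       mpiComponentPlan;
!!   #endif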

!! Tags used during the force loop for parallel simulation
  integer, allocatable, dimension(:) :: tagLocal
  integer, public, allocatable, dimension(:) :: tagRow
  integer, public, allocatable, dimension(:) :: tagColumn

!! Logical set true if mpiSimulation has been initialized
  logical :: isSimSet = .false.

  type (mpiComponentPlan) :: mpiSim

!! gs_plan contains plans for gather and scatter routines
  type, public :: gs_plan
    private
    type (mpiComponentPlan), pointer :: gsComponentPlan => NULL()
    integer :: gsPlanSize      !! size of this plan (nDim*nComponents)
    integer :: globalPlanSize  !! size of all components in plan
    integer, dimension(:), pointer :: displs  !! Displacements array for mpi indexed from 0.
    integer, dimension(:), pointer :: counts  !! Counts array for mpi indexed from 0.
    integer :: myPlanComm   !! My communicator for this plan
    integer :: myPlanRank   !! My rank in this plan
    integer :: planNprocs   !! Number of processors in this plan
  end type gs_plan

! plans for different decompositions
  type (gs_plan), public :: plan_row
  type (gs_plan), public :: plan_row3d
  type (gs_plan), public :: plan_col
  type (gs_plan), public :: plan_col3d
  type (gs_plan), public :: plan_row_Rotation
  type (gs_plan), public :: plan_col_Rotation

  type (mpiComponentPlan), pointer :: simComponentPlan

! interface for gather scatter routines
!! Generic interface for gather.
!! Gathers a local array into a row or column array.
!! Interfaces provided for integer, double, and double
!! rank 2 arrays.
  interface gather
    module procedure gather_integer
    module procedure gather_double
    module procedure gather_double_2d
  end interface

!! Generic interface for scatter.
!! Scatters a row or column array, adding components
!! and reducing them to a local nComponent array.
!! Interfaces provided for double and double rank 2 arrays.
  interface scatter
    module procedure scatter_double
    module procedure scatter_double_2d
  end interface
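
!! A minimal usage sketch (illustrative only, not part of the original
!! module): once setupSimParallel has built the plans above, a force
!! loop gathers local coordinates into a row array and scatters the
!! accumulated row forces back to their owners.  The array names q,
!! q_row, f_row, and f_local and the sizes nlocal and nrow are
!! hypothetical.
!!
!!   real( kind = dp ), dimension(3,nlocal) :: q, f_local
!!   real( kind = dp ), dimension(3,nrow)   :: q_row, f_row
!!   integer :: status
!!
!!   call gather(q, q_row, plan_row3d, status)
!!   ! ... compute f_row from the gathered coordinates ...
!!   call scatter(f_row, f_local, plan_row3d, status)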


contains

!! Sets up mpiComponentPlan with the structure passed from C++.
  subroutine setupSimParallel(thisComponentPlan, ntags, tags, status)
    ! Passed Arguments
    !  integer, intent(inout) :: nDim !! Number of dimensions
    !! mpiComponentPlan struct from C
    type (mpiComponentPlan), intent(inout) :: thisComponentPlan
    !! Number of tags passed, nlocal
    integer, intent(in) :: ntags
    !! Result status, 0 = normal, -1 = error
    integer, intent(out) :: status
    integer :: localStatus
    !! Global reference tags for local particles
    integer, dimension(ntags), intent(inout) :: tags

    write(*,*) 'mpiSim_mod thinks node', thisComponentPlan%myNode, &
         ' has tags(1) = ', tags(1)

    status = 0
    if (componentPlanSet) then
       return
    endif
    componentPlanSet = .true.

    !! copy the C component plan to Fortran
    mpiSim = thisComponentPlan
    write(*,*) "Setting up simParallel"

    call make_Force_Grid(mpiSim, localStatus)
    if (localStatus /= 0) then
       write(default_error,*) "Error creating force grid"
       status = -1
       return
    endif

    call updateGridComponents(mpiSim, localStatus)
    if (localStatus /= 0) then
       write(default_error,*) "Error updating grid components"
       status = -1
       return
    endif

    !! initialize the gather and scatter plans used in this simulation
    call plan_gather_scatter(1, mpiSim%myNlocal, &
         mpiSim, mpiSim%rowComm, plan_row)
    call plan_gather_scatter(nDim, mpiSim%myNlocal, &
         mpiSim, mpiSim%rowComm, plan_row3d)
    call plan_gather_scatter(9, mpiSim%myNlocal, &
         mpiSim, mpiSim%rowComm, plan_row_Rotation)
    call plan_gather_scatter(1, mpiSim%myNlocal, &
         mpiSim, mpiSim%columnComm, plan_col)
    call plan_gather_scatter(nDim, mpiSim%myNlocal, &
         mpiSim, mpiSim%columnComm, plan_col3d)
    call plan_gather_scatter(9, mpiSim%myNlocal, &
         mpiSim, mpiSim%columnComm, plan_col_Rotation)

    ! Initialize tags
    call setTags(tags, localStatus)
    if (localStatus /= 0) then
       status = -1
       return
    endif
    isSimSet = .true.

    ! call printComponentPlan(mpiSim,0)
  end subroutine setupSimParallel
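
  !! Illustrative call sequence (an assumption, not in the original
  !! source): a hypothetical Fortran-side driver once C++ has filled in
  !! a mpiComponentPlan.  The names "plan", "nlocal", and "globalTags"
  !! are made up for the example.
  !!
  !!   type (mpiComponentPlan) :: plan    ! populated on the C++ side
  !!   integer, dimension(nlocal) :: globalTags
  !!   integer :: status
  !!
  !!   call setupSimParallel(plan, nlocal, globalTags, status)
  !!   if (status /= 0) then
  !!      write(default_error,*) "setupSimParallel failed"
  !!      call mpi_finalize(mpi_err)
  !!   endif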

  subroutine replanSimParallel(thisComponentPlan, status)
    ! Passed Arguments
    !! mpiComponentPlan struct from C
    type (mpiComponentPlan), intent(inout) :: thisComponentPlan
    integer, intent(out) :: status
    integer :: localStatus
    status = 0

    call updateGridComponents(thisComponentPlan, localStatus)
    if (localStatus /= 0) then
       status = -1
       return
    endif

    !! Unplan Gather Scatter plans
    call unplan_gather_scatter(plan_row)
    call unplan_gather_scatter(plan_row3d)
    call unplan_gather_scatter(plan_row_Rotation)
    call unplan_gather_scatter(plan_col)
    call unplan_gather_scatter(plan_col3d)
    call unplan_gather_scatter(plan_col_Rotation)

    !! re-initialize the gather and scatter plans used in this simulation;
    !! column plans are built on the column communicator, matching
    !! setupSimParallel.
    call plan_gather_scatter(1, thisComponentPlan%myNlocal, &
         thisComponentPlan, thisComponentPlan%rowComm, plan_row)
    call plan_gather_scatter(nDim, thisComponentPlan%myNlocal, &
         thisComponentPlan, thisComponentPlan%rowComm, plan_row3d)
    call plan_gather_scatter(9, thisComponentPlan%myNlocal, &
         thisComponentPlan, thisComponentPlan%rowComm, plan_row_Rotation)
    call plan_gather_scatter(1, thisComponentPlan%myNlocal, &
         thisComponentPlan, thisComponentPlan%columnComm, plan_col)
    call plan_gather_scatter(nDim, thisComponentPlan%myNlocal, &
         thisComponentPlan, thisComponentPlan%columnComm, plan_col3d)
    call plan_gather_scatter(9, thisComponentPlan%myNlocal, &
         thisComponentPlan, thisComponentPlan%columnComm, plan_col_Rotation)

  end subroutine replanSimParallel

!! Updates the number of row and column components for long range forces.
  subroutine updateGridComponents(thisComponentPlan, status)
    type (mpiComponentPlan) :: thisComponentPlan !! mpiComponentPlan

    !! Status return
    !! -  0 Success
    !! - -1 Failure
    integer, intent(out) :: status
    integer :: nComponentsLocal
    integer :: nComponentsRow = 0
    integer :: nComponentsColumn = 0
    integer :: mpiErrors

    status = 0
    if (.not. componentPlanSet) return

    if (thisComponentPlan%myNlocal == 0) then
       status = -1
       return
    endif

    nComponentsLocal = thisComponentPlan%myNlocal

    write(*,*) "UpdateGridComponents: myNlocal ", nComponentsLocal
    call mpi_allreduce(nComponentsLocal, nComponentsRow, 1, mpi_integer, &
         mpi_sum, thisComponentPlan%rowComm, mpiErrors)
    if (mpiErrors /= 0) then
       status = -1
       return
    endif

    call mpi_allreduce(nComponentsLocal, nComponentsColumn, 1, mpi_integer, &
         mpi_sum, thisComponentPlan%columnComm, mpiErrors)
    if (mpiErrors /= 0) then
       status = -1
       return
    endif

    thisComponentPlan%nComponentsRow = nComponentsRow
    thisComponentPlan%nComponentsColumn = nComponentsColumn
    write(*,*) "UpdateGridComponents: myNRow ", &
         thisComponentPlan%nComponentsRow
    write(*,*) "UpdateGridComponents: myNColumn ", &
         thisComponentPlan%nComponentsColumn

  end subroutine updateGridComponents
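
  !! Worked example (assumed numbers, for illustration only): on a
  !! 2 x 3 grid of 6 processors where every processor owns 100 atoms,
  !! each row communicator spans 3 processors and each column
  !! communicator spans 2, so the allreduce calls above give
  !! nComponentsRow = 300 and nComponentsColumn = 200 on every node.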


!! Creates a square force decomposition of processors into row and column
!! communicators.
  subroutine make_Force_Grid(thisComponentPlan, status)
    type (mpiComponentPlan) :: thisComponentPlan
    integer, intent(out) :: status  !! status returns -1 if error
    integer :: nColumnsMax  !! Maximum number of columns
    integer :: nWorldProcessors  !! Total number of processors in World comm.
    integer :: rowIndex     !! Row for this processor.
    integer :: columnIndex  !! Column for this processor.
    integer :: nRows        !! Total number of rows.
    integer :: nColumns     !! Total number of columns.
    integer :: mpiErrors    !! MPI errors.
    integer :: rowCommunicator     !! MPI row communicator.
    integer :: columnCommunicator  !! MPI column communicator.
    integer :: myWorldRank
    integer :: i

    if (.not. ComponentPlanSet) return
    status = 0

    !! We make a dangerous assumption here that if numberProcessors is
    !! zero, then we need to get the information from MPI.
    if (thisComponentPlan%numberProcessors == 0) then
       call mpi_comm_size(MPI_COMM_WORLD, nWorldProcessors, mpiErrors)
       if (mpiErrors /= 0) then
          status = -1
          return
       endif
       call mpi_comm_rank(MPI_COMM_WORLD, myWorldRank, mpiErrors)
       if (mpiErrors /= 0) then
          status = -1
          return
       endif
    else
       nWorldProcessors = thisComponentPlan%numberProcessors
       myWorldRank = thisComponentPlan%myNode
    endif

    !! Pick the largest divisor of nWorldProcessors that does not exceed
    !! nint(sqrt(nWorldProcessors)), so the grid is as close to square
    !! as possible.
    nColumnsMax = nint(sqrt(real(nWorldProcessors, kind=dp)))

    do i = 1, nColumnsMax
       if (mod(nWorldProcessors, i) == 0) nColumns = i
    end do

    nRows = nWorldProcessors/nColumns

    rowIndex = myWorldRank/nColumns

    call mpi_comm_split(mpi_comm_world, rowIndex, 0, rowCommunicator, mpiErrors)
    if (mpiErrors /= 0) then
       write(default_error,*) "MPI comm split failed at row communicator"
       status = -1
       return
    endif

    columnIndex = mod(myWorldRank, nColumns)
    call mpi_comm_split(mpi_comm_world, columnIndex, 0, columnCommunicator, mpiErrors)
    if (mpiErrors /= 0) then
       write(default_error,*) "MPI comm split failed at columnCommunicator"
       status = -1
       return
    endif

    ! Set appropriate components of thisComponentPlan
    thisComponentPlan%rowComm = rowCommunicator
    thisComponentPlan%columnComm = columnCommunicator
    thisComponentPlan%rowIndex = rowIndex
    thisComponentPlan%columnIndex = columnIndex
    thisComponentPlan%numberRows = nRows
    thisComponentPlan%numberColumns = nColumns

  end subroutine make_Force_Grid
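
  !! Worked example (illustrative): with nWorldProcessors = 12 the loop
  !! above picks nColumns = 3 (the largest divisor not exceeding
  !! nint(sqrt(12.0)) = 3), giving nRows = 4.  World rank 7 then lands
  !! at rowIndex = 7/3 = 2 and columnIndex = mod(7,3) = 1, and the two
  !! mpi_comm_split calls place it in row communicator 2 and column
  !! communicator 1.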


!! Initializes a gather scatter plan.
  subroutine plan_gather_scatter(nDim, nComponents, thisComponentPlan, &
       thisComm, this_plan, status)
    integer, intent(in) :: nDim  !! Number of dimensions for gather scatter plan
    integer, intent(in) :: nComponents
    type (mpiComponentPlan), intent(in), target :: thisComponentPlan
    type (gs_plan), intent(out) :: this_plan  !! MPI Component Plan
    integer, intent(in) :: thisComm  !! MPI communicator for this plan

    integer, intent(out), optional :: status
    integer :: ierror
    integer :: i

    if (present(status)) status = 0

    !! Set the gsComponentPlan pointer
    !! to the component plan we want to use for this gather scatter plan.
    !! WARNING: this could be dangerous since thisComponentPlan was originally
    !! allocated in C++ and there is a significant difference between C and
    !! F95 pointers....
    this_plan%gsComponentPlan => thisComponentPlan

    ! Set this plan size for the displs array.
    this_plan%gsPlanSize = nDim * nComponents

    ! Duplicate the communicator for this plan
    call mpi_comm_dup(thisComm, this_plan%myPlanComm, mpi_err)
    if (mpi_err /= 0) then
       if (present(status)) status = -1
       return
    end if
    call mpi_comm_rank(this_plan%myPlanComm, this_plan%myPlanRank, mpi_err)
    if (mpi_err /= 0) then
       if (present(status)) status = -1
       return
    end if

    call mpi_comm_size(this_plan%myPlanComm, this_plan%planNprocs, mpi_err)
    if (mpi_err /= 0) then
       if (present(status)) status = -1
       return
    end if

    !! counts and displacements arrays are indexed from 0 to be compatible
    !! with MPI arrays.
    allocate (this_plan%counts(0:this_plan%planNprocs-1), STAT=ierror)
    if (ierror /= 0) then
       if (present(status)) status = -1
       return
    end if

    allocate (this_plan%displs(0:this_plan%planNprocs-1), STAT=ierror)
    if (ierror /= 0) then
       if (present(status)) status = -1
       return
    end if

    !! gather all the local sizes into a size-nProcessors array.
    call mpi_allgather(this_plan%gsPlanSize, 1, mpi_integer, this_plan%counts, &
         1, mpi_integer, thisComm, mpi_err)
    if (mpi_err /= 0) then
       if (present(status)) status = -1
       return
    end if

    !! figure out the total number of particles in this plan
    this_plan%globalPlanSize = sum(this_plan%counts)

    !! initialize plan displacements.
    this_plan%displs(0) = 0
    do i = 1, this_plan%planNprocs - 1
       this_plan%displs(i) = this_plan%displs(i-1) + this_plan%counts(i-1)
    end do

  end subroutine plan_gather_scatter
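
  !! Worked example (assumed sizes): for a 3-processor plan with
  !! nDim = 3 and local component counts 2, 3, and 2, the allgather
  !! above yields counts = (6, 9, 6), so globalPlanSize = 21 and the
  !! displacement loop produces displs = (0, 6, 15).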


  subroutine unplan_gather_scatter(this_plan)
    type (gs_plan), intent(inout) :: this_plan

    this_plan%gsComponentPlan => null()
    call mpi_comm_free(this_plan%myPlanComm, mpi_err)

    deallocate(this_plan%counts)
    deallocate(this_plan%displs)

  end subroutine unplan_gather_scatter

  subroutine gather_integer(sbuffer, rbuffer, this_plan, status)

    type (gs_plan), intent(in) :: this_plan
    integer, dimension(:), intent(in) :: sbuffer
    integer, dimension(:), intent(inout) :: rbuffer  !! receive buffer, written by allgatherv
    integer :: noffset
    integer, intent(out), optional :: status

    if (present(status)) status = 0
    noffset = this_plan%displs(this_plan%myPlanRank)

!    if (getmyNode() == 1) then
!       write(*,*) "Node 0 printing allgatherv vars"
!       write(*,*) "Noffset: ", noffset
!       write(*,*) "PlanSize: ", this_plan%gsPlanSize
!       write(*,*) "PlanComm: ", this_plan%myPlanComm
!    end if

    call mpi_allgatherv(sbuffer, this_plan%gsPlanSize, mpi_integer, &
         rbuffer, this_plan%counts, this_plan%displs, mpi_integer, &
         this_plan%myPlanComm, mpi_err)

    if (mpi_err /= 0) then
       if (present(status)) status = -1
    endif

  end subroutine gather_integer

  subroutine gather_double(sbuffer, rbuffer, this_plan, status)

    type (gs_plan), intent(in) :: this_plan
    real( kind = DP ), dimension(:), intent(in) :: sbuffer
    real( kind = DP ), dimension(:), intent(inout) :: rbuffer  !! receive buffer, written by allgatherv
    integer :: noffset
    integer, intent(out), optional :: status

    if (present(status)) status = 0
    noffset = this_plan%displs(this_plan%myPlanRank)

    call mpi_allgatherv(sbuffer, this_plan%gsPlanSize, mpi_double_precision, &
         rbuffer, this_plan%counts, this_plan%displs, mpi_double_precision, &
         this_plan%myPlanComm, mpi_err)

    if (mpi_err /= 0) then
       if (present(status)) status = -1
    endif

  end subroutine gather_double
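
  !! Sketch of the gather semantics (hypothetical data): with the plan
  !! from the example above, rank 0 passes a 6-element sbuffer, rank 1
  !! a 9-element one, and rank 2 a 6-element one; after mpi_allgatherv
  !! every rank's 21-element rbuffer holds rank 0's values in positions
  !! 1:6, rank 1's in 7:15, and rank 2's in 16:21.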

  subroutine gather_double_2d(sbuffer, rbuffer, this_plan, status)

    type (gs_plan), intent(in) :: this_plan
    real( kind = DP ), dimension(:,:), intent(in) :: sbuffer
    real( kind = DP ), dimension(:,:), intent(inout) :: rbuffer  !! receive buffer, written by allgatherv
    integer :: noffset
    integer, intent(out), optional :: status

    if (present(status)) status = 0

!    noffset = this_plan%displs(this_plan%myPlanRank)

    call mpi_allgatherv(sbuffer, this_plan%gsPlanSize, mpi_double_precision, &
         rbuffer, this_plan%counts, this_plan%displs, mpi_double_precision, &
         this_plan%myPlanComm, mpi_err)

    if (mpi_err /= 0) then
       if (present(status)) status = -1
    endif

  end subroutine gather_double_2d

  subroutine scatter_double(sbuffer, rbuffer, this_plan, status)

    type (gs_plan), intent(in) :: this_plan
    real( kind = DP ), dimension(:), intent(in) :: sbuffer
    real( kind = DP ), dimension(:), intent(inout) :: rbuffer  !! receive buffer, written by reduce_scatter
    integer, intent(out), optional :: status

    if (present(status)) status = 0

    call mpi_reduce_scatter(sbuffer, rbuffer, this_plan%counts, &
         mpi_double_precision, MPI_SUM, this_plan%myPlanComm, mpi_err)

    if (mpi_err /= 0) then
       if (present(status)) status = -1
    endif

  end subroutine scatter_double
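
  !! Sketch of the scatter semantics (hypothetical data): scatter is
  !! the reduction inverse of gather.  Continuing the example, each
  !! rank passes a 21-element sbuffer of partial contributions;
  !! mpi_reduce_scatter sums the three sbuffers elementwise and hands
  !! rank 0 the summed elements 1:6, rank 1 elements 7:15, and rank 2
  !! elements 16:21 in its local rbuffer.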

  subroutine scatter_double_2d(sbuffer, rbuffer, this_plan, status)

    type (gs_plan), intent(in) :: this_plan
    real( kind = DP ), dimension(:,:), intent(in) :: sbuffer
    real( kind = DP ), dimension(:,:), intent(inout) :: rbuffer  !! receive buffer, written by reduce_scatter
    integer, intent(out), optional :: status

    if (present(status)) status = 0
    call mpi_reduce_scatter(sbuffer, rbuffer, this_plan%counts, &
         mpi_double_precision, MPI_SUM, this_plan%myPlanComm, mpi_err)

    if (mpi_err /= 0) then
       if (present(status)) status = -1
    endif

  end subroutine scatter_double_2d


  subroutine setTags(tags, status)
    integer, dimension(:) :: tags
    integer :: status

    integer :: alloc_stat
    integer :: ncol
    integer :: nrow

    status = 0

    ! (re)allocate row and column tag arrays to the current plan sizes
    nrow = getNrow(plan_row)
    ncol = getNcol(plan_col)

    if (allocated(tagRow)) deallocate(tagRow)
    allocate(tagRow(nrow), STAT=alloc_stat)
    if (alloc_stat /= 0) then
       status = -1
       return
    endif

    if (allocated(tagColumn)) deallocate(tagColumn)
    allocate(tagColumn(ncol), STAT=alloc_stat)
    if (alloc_stat /= 0) then
       status = -1
       return
    endif

    call gather(tags, tagRow, plan_row)
    call gather(tags, tagColumn, plan_col)

  end subroutine setTags

  pure function getNcol(thisplan) result(ncol)
    type (gs_plan), intent(in) :: thisplan
    integer :: ncol
    ncol = thisplan%gsComponentPlan%nComponentsColumn
  end function getNcol

  pure function getNrow(thisplan) result(nrow)
    type (gs_plan), intent(in) :: thisplan
    integer :: nrow
    nrow = thisplan%gsComponentPlan%nComponentsRow
  end function getNrow

  function isMPISimSet() result(isthisSimSet)
    logical :: isthisSimSet
    isthisSimSet = isSimSet
  end function isMPISimSet


  subroutine printComponentPlan(this_plan, printNode)

    type (mpiComponentPlan), intent(in) :: this_plan
    integer, optional :: printNode
    logical :: print_me

    !! initialize on every call; an initializer in the declaration would
    !! carry the SAVE attribute and keep its value between calls.
    print_me = .false.

    if (present(printNode)) then
       if (printNode == mpiSim%myNode) print_me = .true.
    else
       print_me = .true.
    endif

    if (print_me) then
       write(default_error,*) "printComponentPlan: writing component plan"

       write(default_error,*) "nMolGlobal: ", this_plan%nMolGlobal
       write(default_error,*) "nAtomsGlobal: ", this_plan%nAtomsGlobal
       write(default_error,*) "nBondsGlobal: ", this_plan%nBondsGlobal
       write(default_error,*) "nTorsionsGlobal: ", this_plan%nTorsionsGlobal
       write(default_error,*) "nSRIGlobal: ", this_plan%nSRIGlobal
       write(default_error,*) "myNlocal: ", this_plan%myNlocal
       write(default_error,*) "myNode: ", this_plan%myNode
       write(default_error,*) "numberProcessors: ", this_plan%numberProcessors
       write(default_error,*) "rowComm: ", this_plan%rowComm
       write(default_error,*) "columnComm: ", this_plan%columnComm
       write(default_error,*) "numberRows: ", this_plan%numberRows
       write(default_error,*) "numberColumns: ", this_plan%numberColumns
       write(default_error,*) "nComponentsRow: ", this_plan%nComponentsRow
       write(default_error,*) "nComponentsColumn: ", this_plan%nComponentsColumn
       write(default_error,*) "rowIndex: ", this_plan%rowIndex
       write(default_error,*) "columnIndex: ", this_plan%columnIndex
    endif
  end subroutine printComponentPlan

  function getMyNode() result(myNode)
    integer :: myNode
    myNode = mpiSim%myNode
  end function getMyNode

end module mpiSimulation

#endif // is_mpi