root/group/trunk/OOPSE/libmdtools/mpiSimulation_module.F90
Revision: 747
Committed: Fri Sep 5 21:28:52 2003 UTC by gezelter
File size: 22303 byte(s)
Log Message:
Changes to autoconf / configure method of configuring OOPSE

File Contents

!! MPI support for long range forces using force decomposition
!! on a square grid of processors.
!! Corresponds to mpiSimulation.cpp for C++
!! mpi_module also contains a private interface for mpi f90 routines.
!!
!! @author Charles F. Vardeman II
!! @author Matthew Meineke
!! @version $Id: mpiSimulation_module.F90,v 1.7 2003-09-05 21:28:19 gezelter Exp $, $Date: 2003-09-05 21:28:19 $, $Name: not supported by cvs2svn $, $Revision: 1.7 $

module mpiSimulation
  use definitions
#ifdef IS_MPI
  use mpi
  implicit none
  PRIVATE

  !! PUBLIC Subroutines contained in this module
  !! gather and scatter are a generic interface
  !! to gather and scatter routines
  public :: gather, scatter
  public :: setupSimParallel
  public :: replanSimParallel
  public :: getNcol
  public :: getNrow
  public :: isMPISimSet
  public :: printComponentPlan
  public :: getMyNode

  !! PUBLIC Subroutines contained in MPI module
  public :: mpi_bcast
  public :: mpi_allreduce
  public :: mpi_reduce
  public :: mpi_send
  public :: mpi_recv
  public :: mpi_get_processor_name
  public :: mpi_finalize

  !! PUBLIC mpi variables
  public :: mpi_comm_world
  public :: mpi_character
  public :: mpi_integer
  public :: mpi_double_precision
  public :: mpi_sum
  public :: mpi_max
  public :: mpi_status_size
  public :: mpi_any_source

  !! Safety logical to prevent access to ComponentPlan until
  !! set by C++.
  logical, save :: ComponentPlanSet = .false.

  !! generic mpi error declaration.
  integer, public :: mpi_err

#ifdef PROFILE
  public :: printCommTime

  real( kind = dp ), save :: commTime = 0.0_dp
  real( kind = dp ) :: commTimeInitial, commTimeFinal
#endif

  !! Include mpiComponentPlan type. mpiComponentPlan is a
  !! dual header file for both C and Fortran.
#define __FORTRAN90
#include "mpiComponentPlan.h"

  !! Tags used during force loop for parallel simulation
  integer, allocatable, dimension(:) :: tagLocal
  integer, public, allocatable, dimension(:) :: tagRow
  integer, public, allocatable, dimension(:) :: tagColumn

  !! Logical set true if mpiSimulation has been initialized
  logical, save :: isSimSet = .false.

  type (mpiComponentPlan), save :: mpiSim

  !! gs_plan contains plans for gather and scatter routines
  type, public :: gs_plan
     private
     type (mpiComponentPlan), pointer :: gsComponentPlan => NULL()
     integer :: gsPlanSize      !! size of this plan (nDim*nComponents)
     integer :: globalPlanSize  !! size of all components in plan
     integer, dimension(:), pointer :: displs  !! Displacements array for mpi indexed from 0.
     integer, dimension(:), pointer :: counts  !! Counts array for mpi indexed from 0.
     integer :: myPlanComm   !! My communicator for this plan
     integer :: myPlanRank   !! My rank in this plan
     integer :: planNprocs   !! Number of processors in this plan
  end type gs_plan

  ! plans for different decompositions
  type (gs_plan), public, save :: plan_row
  type (gs_plan), public, save :: plan_row3d
  type (gs_plan), public, save :: plan_col
  type (gs_plan), public, save :: plan_col3d
  type (gs_plan), public, save :: plan_row_Rotation
  type (gs_plan), public, save :: plan_col_Rotation

  type (mpiComponentPlan), pointer :: simComponentPlan

  ! interface for gather scatter routines
  !! Generic interface for gather.
  !! Gathers a local array into a row or column array.
  !! Interface provided for integer, double and double
  !! rank 2 arrays.
  interface gather
     module procedure gather_integer
     module procedure gather_double
     module procedure gather_double_2d
  end interface

  !! Generic interface for scatter.
  !! Scatters a row or column array, adding components
  !! and reducing them to a local nComponent array.
  !! Interface provided for double and double rank=2 arrays.
  interface scatter
     module procedure scatter_double
     module procedure scatter_double_2d
  end interface
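
  !! Usage sketch (hypothetical caller-side names q, q_Row, q_Col, f_Row, f):
  !! with a local array q(nDim,nlocal) and row/column buffers sized from
  !! getNrow()/getNcol(), a force-decomposition step might look like
  !!    call gather(q, q_Row, plan_row3d)
  !!    call gather(q, q_Col, plan_col3d)
  !!    ! ... compute f_Row and f_Col in the pairwise force loop ...
  !!    call scatter(f_Row, f, plan_row3d)
  !! with the column contributions scattered and summed into f the same way,
  !! leaving each processor with the forces on its own atoms.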


contains

  !! Sets up mpiComponentPlan with structure passed from C++.
  subroutine setupSimParallel(thisComponentPlan, ntags, tags, status)
    ! Passed Arguments
    ! integer, intent(inout) :: nDim !! Number of dimensions
    !! mpiComponentPlan struct from C
    type (mpiComponentPlan), intent(inout) :: thisComponentPlan
    !! Number of tags passed, nlocal
    integer, intent(in) :: ntags
    !! Result status, 0 = normal, -1 = error
    integer, intent(out) :: status
    integer :: localStatus
    !! Global reference tag for local particles
    integer, dimension(ntags), intent(inout) :: tags

    write(*,*) 'mpiSim_mod thinks node', thisComponentPlan%myNode, ' has tags(1) = ', tags(1)

    status = 0
    if (componentPlanSet) then
       return
    endif
    componentPlanSet = .true.

    !! copy C component plan to Fortran
    mpiSim = thisComponentPlan
    write(*,*) "Setting up simParallel"

    call make_Force_Grid(mpiSim, localStatus)
    if (localStatus /= 0) then
       write(default_error,*) "Error creating force grid"
       status = -1
       return
    endif

    call updateGridComponents(mpiSim, localStatus)
    if (localStatus /= 0) then
       write(default_error,*) "Error updating grid components"
       status = -1
       return
    endif

    !! initialize gather and scatter plans used in this simulation
    call plan_gather_scatter(1, mpiSim%myNlocal, &
         mpiSim, mpiSim%rowComm, plan_row)
    call plan_gather_scatter(nDim, mpiSim%myNlocal, &
         mpiSim, mpiSim%rowComm, plan_row3d)
    call plan_gather_scatter(9, mpiSim%myNlocal, &
         mpiSim, mpiSim%rowComm, plan_row_Rotation)
    call plan_gather_scatter(1, mpiSim%myNlocal, &
         mpiSim, mpiSim%columnComm, plan_col)
    call plan_gather_scatter(nDim, mpiSim%myNlocal, &
         mpiSim, mpiSim%columnComm, plan_col3d)
    call plan_gather_scatter(9, mpiSim%myNlocal, &
         mpiSim, mpiSim%columnComm, plan_col_Rotation)
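
    !! Note on the plan sizes above: the first argument to
    !! plan_gather_scatter is the number of values carried per atom, so 1
    !! is used for scalar quantities, nDim for vectors such as positions
    !! and forces, and 9 (presumably a flattened 3x3 rotation matrix) for
    !! the plan_*_Rotation plans.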


    ! Initialize tags
    call setTags(tags, localStatus)
    if (localStatus /= 0) then
       status = -1
       return
    endif
    isSimSet = .true.

    ! call printComponentPlan(mpiSim,0)
  end subroutine setupSimParallel

  subroutine replanSimParallel(thisComponentPlan, status)
    ! Passed Arguments
    !! mpiComponentPlan struct from C
    type (mpiComponentPlan), intent(inout) :: thisComponentPlan
    integer, intent(out) :: status
    integer :: localStatus
    integer :: mpierror
    status = 0

    call updateGridComponents(thisComponentPlan, localStatus)
    if (localStatus /= 0) then
       status = -1
       return
    endif

    !! Unplan Gather Scatter plans
    call unplan_gather_scatter(plan_row)
    call unplan_gather_scatter(plan_row3d)
    call unplan_gather_scatter(plan_row_Rotation)
    call unplan_gather_scatter(plan_col)
    call unplan_gather_scatter(plan_col3d)
    call unplan_gather_scatter(plan_col_Rotation)

    !! initialize gather and scatter plans used in this simulation
    call plan_gather_scatter(1, thisComponentPlan%myNlocal, &
         thisComponentPlan, thisComponentPlan%rowComm, plan_row)
    call plan_gather_scatter(nDim, thisComponentPlan%myNlocal, &
         thisComponentPlan, thisComponentPlan%rowComm, plan_row3d)
    call plan_gather_scatter(9, thisComponentPlan%myNlocal, &
         thisComponentPlan, thisComponentPlan%rowComm, plan_row_Rotation)
    call plan_gather_scatter(1, thisComponentPlan%myNlocal, &
         thisComponentPlan, thisComponentPlan%columnComm, plan_col)
    call plan_gather_scatter(nDim, thisComponentPlan%myNlocal, &
         thisComponentPlan, thisComponentPlan%columnComm, plan_col3d)
    call plan_gather_scatter(9, thisComponentPlan%myNlocal, &
         thisComponentPlan, thisComponentPlan%columnComm, plan_col_Rotation)

  end subroutine replanSimParallel

  !! Updates number of row and column components for long range forces.
  subroutine updateGridComponents(thisComponentPlan, status)
    type (mpiComponentPlan) :: thisComponentPlan !! mpiComponentPlan

    !! Status return
    !! -  0 Success
    !! - -1 Failure
    integer, intent(out) :: status
    integer :: nComponentsLocal
    integer :: nComponentsRow = 0
    integer :: nComponentsColumn = 0
    integer :: mpiErrors

    status = 0
    if (.not. componentPlanSet) return

    if (thisComponentPlan%myNlocal == 0) then
       status = -1
       return
    endif

    nComponentsLocal = thisComponentPlan%myNlocal

    write(*,*) "UpdateGridComponents: myNlocal ", nComponentsLocal
    call mpi_allreduce(nComponentsLocal, nComponentsRow, 1, mpi_integer, &
         mpi_sum, thisComponentPlan%rowComm, mpiErrors)
    if (mpiErrors /= 0) then
       status = -1
       return
    endif

    call mpi_allreduce(nComponentsLocal, nComponentsColumn, 1, mpi_integer, &
         mpi_sum, thisComponentPlan%columnComm, mpiErrors)
    if (mpiErrors /= 0) then
       status = -1
       return
    endif

    thisComponentPlan%nComponentsRow = nComponentsRow
    thisComponentPlan%nComponentsColumn = nComponentsColumn
    write(*,*) "UpdateGridComponents: myNRow ", &
         thisComponentPlan%nComponentsRow
    write(*,*) "UpdateGridComponents: myNColumn ", &
         thisComponentPlan%nComponentsColumn

  end subroutine updateGridComponents


  !! Creates a square force decomposition of processors into row and column
  !! communicators.
  subroutine make_Force_Grid(thisComponentPlan, status)
    type (mpiComponentPlan) :: thisComponentPlan
    integer, intent(out) :: status !! status returns -1 if error
    integer :: nColumnsMax !! Maximum number of columns
    integer :: nWorldProcessors !! Total number of processors in World comm.
    integer :: rowIndex !! Row for this processor.
    integer :: columnIndex !! Column for this processor.
    integer :: nRows !! Total number of rows.
    integer :: nColumns !! Total number of columns.
    integer :: mpiErrors !! MPI errors.
    integer :: rowCommunicator !! MPI row communicator.
    integer :: columnCommunicator
    integer :: myWorldRank
    integer :: i

    status = 0
    if (.not. ComponentPlanSet) return

    !! We make a dangerous assumption here that if numberProcessors is
    !! zero, then we need to get the information from MPI.
    if (thisComponentPlan%numberProcessors == 0) then
       call mpi_comm_size(MPI_COMM_WORLD, nWorldProcessors, mpiErrors)
       if (mpiErrors /= 0) then
          status = -1
          return
       endif
       call mpi_comm_rank(MPI_COMM_WORLD, myWorldRank, mpiErrors)
       if (mpiErrors /= 0) then
          status = -1
          return
       endif
    else
       nWorldProcessors = thisComponentPlan%numberProcessors
       myWorldRank = thisComponentPlan%myNode
    endif

    nColumnsMax = nint(sqrt(real(nWorldProcessors, kind=dp)))

    do i = 1, nColumnsMax
       if (mod(nWorldProcessors, i) == 0) nColumns = i
    end do

    nRows = nWorldProcessors/nColumns
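
    !! nColumns ends up as the largest divisor of nWorldProcessors that is
    !! no greater than nint(sqrt(nWorldProcessors)), keeping the grid as
    !! close to square as possible. For example, 12 processors give
    !! nColumns = 3 and nRows = 4, and world rank 7 ends up below with
    !! rowIndex = 2 and columnIndex = 1.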

    rowIndex = myWorldRank/nColumns

    call mpi_comm_split(mpi_comm_world, rowIndex, 0, rowCommunicator, mpiErrors)
    if (mpiErrors /= 0) then
       write(default_error,*) "MPI comm split failed at row communicator"
       status = -1
       return
    endif

    columnIndex = mod(myWorldRank, nColumns)
    call mpi_comm_split(mpi_comm_world, columnIndex, 0, columnCommunicator, mpiErrors)
    if (mpiErrors /= 0) then
       write(default_error,*) "MPI comm split failed at columnCommunicator"
       status = -1
       return
    endif

    ! Set appropriate components of thisComponentPlan
    thisComponentPlan%rowComm = rowCommunicator
    thisComponentPlan%columnComm = columnCommunicator
    thisComponentPlan%rowIndex = rowIndex
    thisComponentPlan%columnIndex = columnIndex
    thisComponentPlan%numberRows = nRows
    thisComponentPlan%numberColumns = nColumns

  end subroutine make_Force_Grid

  !! Initializes a gather scatter plan.
  subroutine plan_gather_scatter(nDim, nComponents, thisComponentPlan, &
       thisComm, this_plan, status)
    integer, intent(in) :: nDim !! Number of dimensions for gather scatter plan
    integer, intent(in) :: nComponents
    type (mpiComponentPlan), intent(in), target :: thisComponentPlan
    type (gs_plan), intent(out) :: this_plan !! MPI Component Plan
    integer, intent(in) :: thisComm !! MPI communicator for this plan

    integer :: arraySize !! size to allocate plan for
    integer, intent(out), optional :: status
    integer :: ierror
    integer :: i, junk

    if (present(status)) status = 0

    !! Set the gsComponentPlan pointer
    !! to the component plan we want to use for this gather scatter plan.
    !! WARNING this could be dangerous since thisComponentPlan was originally
    !! allocated in C++ and there is a significant difference between C and
    !! f95 pointers....
    this_plan%gsComponentPlan => thisComponentPlan

    ! Set this plan size for displs array.
    this_plan%gsPlanSize = nDim * nComponents

    ! Duplicate communicator for this plan
    call mpi_comm_dup(thisComm, this_plan%myPlanComm, mpi_err)
    if (mpi_err /= 0) then
       if (present(status)) status = -1
       return
    end if
    call mpi_comm_rank(this_plan%myPlanComm, this_plan%myPlanRank, mpi_err)
    if (mpi_err /= 0) then
       if (present(status)) status = -1
       return
    end if

    call mpi_comm_size(this_plan%myPlanComm, this_plan%planNprocs, mpi_err)

    if (mpi_err /= 0) then
       if (present(status)) status = -1
       return
    end if

    !! counts and displacements arrays are indexed from 0 to be compatible
    !! with MPI arrays.
    allocate(this_plan%counts(0:this_plan%planNprocs-1), STAT=ierror)
    if (ierror /= 0) then
       if (present(status)) status = -1
       return
    end if

    allocate(this_plan%displs(0:this_plan%planNprocs-1), STAT=ierror)
    if (ierror /= 0) then
       if (present(status)) status = -1
       return
    end if

    !! gather all the local plan sizes into the counts array, one entry
    !! per processor in this plan.
    call mpi_allgather(this_plan%gsPlanSize, 1, mpi_integer, this_plan%counts, &
         1, mpi_integer, thisComm, mpi_err)

    if (mpi_err /= 0) then
       if (present(status)) status = -1
       return
    end if

    !! figure out the total number of particles in this plan
    this_plan%globalPlanSize = sum(this_plan%counts)

    !! initialize plan displacements.
    this_plan%displs(0) = 0
    do i = 1, this_plan%planNprocs - 1, 1
       this_plan%displs(i) = this_plan%displs(i-1) + this_plan%counts(i-1)
    end do
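
    !! displs is the exclusive prefix sum of counts, i.e. each processor's
    !! starting offset into the gathered array; counts = (/ 4, 6, 2 /)
    !! would give displs = (/ 0, 4, 10 /).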

  end subroutine plan_gather_scatter

  subroutine unplan_gather_scatter(this_plan)
    type (gs_plan), intent(inout) :: this_plan

    this_plan%gsComponentPlan => null()
    call mpi_comm_free(this_plan%myPlanComm, mpi_err)

    deallocate(this_plan%counts)
    deallocate(this_plan%displs)

  end subroutine unplan_gather_scatter

  subroutine gather_integer(sbuffer, rbuffer, this_plan, status)

    type (gs_plan), intent(in) :: this_plan
    integer, dimension(:), intent(in) :: sbuffer
    integer, dimension(:), intent(inout) :: rbuffer
    integer :: noffset
    integer, intent(out), optional :: status
    integer :: i

    if (present(status)) status = 0
    noffset = this_plan%displs(this_plan%myPlanRank)

    ! if (getmyNode() == 1) then
    !    write(*,*) "Node 0 printing allgatherv vars"
    !    write(*,*) "Noffset: ", noffset
    !    write(*,*) "PlanSize: ", this_plan%gsPlanSize
    !    write(*,*) "PlanComm: ", this_plan%myPlanComm
    ! end if

    call mpi_allgatherv(sbuffer, this_plan%gsPlanSize, mpi_integer, &
         rbuffer, this_plan%counts, this_plan%displs, mpi_integer, &
         this_plan%myPlanComm, mpi_err)

    if (mpi_err /= 0) then
       if (present(status)) status = -1
    endif

  end subroutine gather_integer

  subroutine gather_double(sbuffer, rbuffer, this_plan, status)

    type (gs_plan), intent(in) :: this_plan
    real( kind = DP ), dimension(:), intent(in) :: sbuffer
    real( kind = DP ), dimension(:), intent(inout) :: rbuffer
    integer :: noffset
    integer, intent(out), optional :: status

    if (present(status)) status = 0
    noffset = this_plan%displs(this_plan%myPlanRank)
#ifdef PROFILE
    commTimeInitial = mpi_wtime()
#endif
    call mpi_allgatherv(sbuffer, this_plan%gsPlanSize, mpi_double_precision, &
         rbuffer, this_plan%counts, this_plan%displs, mpi_double_precision, &
         this_plan%myPlanComm, mpi_err)
#ifdef PROFILE
    commTimeFinal = mpi_wtime()
    commTime = commTime + commTimeFinal - commTimeInitial
#endif

    if (mpi_err /= 0) then
       if (present(status)) status = -1
    endif

  end subroutine gather_double

  subroutine gather_double_2d(sbuffer, rbuffer, this_plan, status)

    type (gs_plan), intent(in) :: this_plan
    real( kind = DP ), dimension(:,:), intent(in) :: sbuffer
    real( kind = DP ), dimension(:,:), intent(inout) :: rbuffer
    integer :: noffset, i, ierror
    integer, intent(out), optional :: status

    external mpi_allgatherv

    if (present(status)) status = 0

    ! noffset = this_plan%displs(this_plan%me)
#ifdef PROFILE
    commTimeInitial = mpi_wtime()
#endif

    call mpi_allgatherv(sbuffer, this_plan%gsPlanSize, mpi_double_precision, &
         rbuffer, this_plan%counts, this_plan%displs, mpi_double_precision, &
         this_plan%myPlanComm, mpi_err)

#ifdef PROFILE
    commTimeFinal = mpi_wtime()
    commTime = commTime + commTimeFinal - commTimeInitial
#endif

    if (mpi_err /= 0) then
       if (present(status)) status = -1
    endif

  end subroutine gather_double_2d

  subroutine scatter_double(sbuffer, rbuffer, this_plan, status)

    type (gs_plan), intent(in) :: this_plan
    real( kind = DP ), dimension(:), intent(in) :: sbuffer
    real( kind = DP ), dimension(:), intent(inout) :: rbuffer
    integer, intent(out), optional :: status
    external mpi_reduce_scatter

    if (present(status)) status = 0

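    !! mpi_reduce_scatter sums sbuffer element-wise across every processor
    !! in this plan's communicator and then hands each processor only its
    !! own counts(myPlanRank) slice of the result, so a full row or column
    !! force array is reduced straight back to local contributions.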
#ifdef PROFILE
    commTimeInitial = mpi_wtime()
#endif
    call mpi_reduce_scatter(sbuffer, rbuffer, this_plan%counts, &
         mpi_double_precision, MPI_SUM, this_plan%myPlanComm, mpi_err)
#ifdef PROFILE
    commTimeFinal = mpi_wtime()
    commTime = commTime + commTimeFinal - commTimeInitial
#endif

    if (mpi_err /= 0) then
       if (present(status)) status = -1
    endif

  end subroutine scatter_double

  subroutine scatter_double_2d(sbuffer, rbuffer, this_plan, status)

    type (gs_plan), intent(in) :: this_plan
    real( kind = DP ), dimension(:,:), intent(in) :: sbuffer
    real( kind = DP ), dimension(:,:), intent(inout) :: rbuffer
    integer, intent(out), optional :: status
    external mpi_reduce_scatter

    if (present(status)) status = 0
#ifdef PROFILE
    commTimeInitial = mpi_wtime()
#endif

    call mpi_reduce_scatter(sbuffer, rbuffer, this_plan%counts, &
         mpi_double_precision, MPI_SUM, this_plan%myPlanComm, mpi_err)
#ifdef PROFILE
    commTimeFinal = mpi_wtime()
    commTime = commTime + commTimeFinal - commTimeInitial
#endif

    if (mpi_err /= 0) then
       if (present(status)) status = -1
    endif

  end subroutine scatter_double_2d


  subroutine setTags(tags, status)
    integer, dimension(:) :: tags
    integer :: status

    integer :: alloc_stat

    integer :: ncol
    integer :: nrow

    status = 0
    ! allocate row arrays
    nrow = getNrow(plan_row)
    ncol = getNcol(plan_col)

    if (.not. allocated(tagRow)) then
       allocate(tagRow(nrow), STAT=alloc_stat)
       if (alloc_stat /= 0) then
          status = -1
          return
       endif
    else
       deallocate(tagRow)
       allocate(tagRow(nrow), STAT=alloc_stat)
       if (alloc_stat /= 0) then
          status = -1
          return
       endif
    endif

    ! allocate column arrays
    if (.not. allocated(tagColumn)) then
       allocate(tagColumn(ncol), STAT=alloc_stat)
       if (alloc_stat /= 0) then
          status = -1
          return
       endif
    else
       deallocate(tagColumn)
       allocate(tagColumn(ncol), STAT=alloc_stat)
       if (alloc_stat /= 0) then
          status = -1
          return
       endif
    endif

    call gather(tags, tagRow, plan_row)
    call gather(tags, tagColumn, plan_col)
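
    !! After these gathers tagRow holds the global tags of every atom owned
    !! by the processors in this row, and tagColumn those for this column,
    !! presumably so the force loop can map row/column indices back to
    !! global atom tags.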

  end subroutine setTags

  pure function getNcol(thisplan) result(ncol)
    type (gs_plan), intent(in) :: thisplan
    integer :: ncol
    ncol = thisplan%gsComponentPlan%nComponentsColumn
  end function getNcol

  pure function getNrow(thisplan) result(nrow)
    type (gs_plan), intent(in) :: thisplan
    integer :: nrow
    nrow = thisplan%gsComponentPlan%nComponentsRow
  end function getNrow

  function isMPISimSet() result(isthisSimSet)
    logical :: isthisSimSet
    isthisSimSet = isSimSet
  end function isMPISimSet


  subroutine printComponentPlan(this_plan, printNode)

    type (mpiComponentPlan), intent(in) :: this_plan
    integer, optional :: printNode
    logical :: print_me

    print_me = .false.
    if (present(printNode)) then
       if (printNode == mpiSim%myNode) print_me = .true.
    else
       print_me = .true.
    endif

    if (print_me) then
       write(default_error,*) "SetupSimParallel: writing component plan"

       write(default_error,*) "nMolGlobal: ", mpiSim%nMolGlobal
       write(default_error,*) "nAtomsGlobal: ", mpiSim%nAtomsGlobal
       write(default_error,*) "nBondGlobal: ", mpiSim%nBondsGlobal
       write(default_error,*) "nTorsionsGlobal: ", mpiSim%nTorsionsGlobal
       write(default_error,*) "nSRIGlobal: ", mpiSim%nSRIGlobal
       write(default_error,*) "myNlocal: ", mpiSim%myNlocal
       write(default_error,*) "myNode: ", mpiSim%myNode
       write(default_error,*) "numberProcessors: ", mpiSim%numberProcessors
       write(default_error,*) "rowComm: ", mpiSim%rowComm
       write(default_error,*) "columnComm: ", mpiSim%columnComm
       write(default_error,*) "numberRows: ", mpiSim%numberRows
       write(default_error,*) "numberColumns: ", mpiSim%numberColumns
       write(default_error,*) "nComponentsRow: ", mpiSim%nComponentsRow
       write(default_error,*) "nComponentsColumn: ", mpiSim%nComponentsColumn
       write(default_error,*) "rowIndex: ", mpiSim%rowIndex
       write(default_error,*) "columnIndex: ", mpiSim%columnIndex
    endif
  end subroutine printComponentPlan

  function getMyNode() result(myNode)
    integer :: myNode
    myNode = mpiSim%myNode
  end function getMyNode

#ifdef PROFILE
  subroutine printCommTime()

    write(*,*) "MPI communication time is: ", commTime

  end subroutine printCommTime
#endif

#endif // is_mpi
end module mpiSimulation