root/group/trunk/OOPSE/libmdtools/mpiSimulation_module.F90
Revision: 1214
Committed: Tue Jun 1 18:42:58 2004 UTC (20 years, 1 month ago) by gezelter
File size: 27071 byte(s)
Log Message:
Cutoff Groups for MPI

File Contents

!! MPI support for long range forces using force decomposition
!! on a square grid of processors.
!! Corresponds to mpiSimulation.cpp for C++
!! mpi_module also contains a private interface for mpi f90 routines.
!!
!! @author Charles F. Vardeman II
!! @author Matthew Meineke
!! @version $Id: mpiSimulation_module.F90,v 1.15 2004-06-01 18:42:58 gezelter Exp $, $Date: 2004-06-01 18:42:58 $, $Name: not supported by cvs2svn $, $Revision: 1.15 $
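
!! Overview of the decomposition: the processors are arranged as an
!! nRows x nColumns grid, and (roughly speaking) each processor handles the
!! block of the pairwise interaction matrix defined by its row atoms and its
!! column atoms.  Atomic data are replicated along each row and column with
!! the gather plans defined below, and the partial forces computed in each
!! block are summed back onto the owning processor with the matching
!! scatter (reduce) plans.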
module mpiSimulation
  use definitions
#ifdef IS_MPI
  use oopseMPI
  implicit none
  PRIVATE

  !! PUBLIC Subroutines contained in this module
  !! gather and scatter are a generic interface
  !! to gather and scatter routines
  public :: gather, scatter
  public :: setupSimParallel
  public :: replanSimParallel
  public :: getNatomsInCol
  public :: getNatomsInRow
  public :: getNgroupsInCol
  public :: getNgroupsInRow
  public :: isMPISimSet
  public :: printComponentPlan
  public :: getMyNode

  !! PUBLIC Subroutines contained in MPI module
  public :: mpi_bcast
  public :: mpi_allreduce
  ! public :: mpi_reduce
  public :: mpi_send
  public :: mpi_recv
  public :: mpi_get_processor_name
  public :: mpi_finalize

  !! PUBLIC mpi variables
  public :: mpi_comm_world
  public :: mpi_character
  public :: mpi_integer
  public :: mpi_double_precision
  public :: mpi_sum
  public :: mpi_max
  public :: mpi_status_size
  public :: mpi_any_source

  !! Safety logical to prevent access to ComponentPlan until
  !! set by C++.
  logical, save :: ComponentPlanSet = .false.

  !! generic mpi error declaration.
  integer, public :: mpi_err

#ifdef PROFILE
  public :: printCommTime
  public :: getCommTime
  real, save :: commTime = 0.0
  real :: commTimeInitial, commTimeFinal
#endif

  !! Include mpiComponentPlan type. mpiComponentPlan is a
  !! dual header file for both c and fortran.
#define __FORTRAN90
#include "mpiComponentPlan.h"

  !! Tags used during force loop for parallel simulation
  integer, public, allocatable, dimension(:) :: AtomLocalToGlobal
  integer, public, allocatable, dimension(:) :: AtomRowToGlobal
  integer, public, allocatable, dimension(:) :: AtomColToGlobal
  integer, public, allocatable, dimension(:) :: GroupLocalToGlobal
  integer, public, allocatable, dimension(:) :: GroupRowToGlobal
  integer, public, allocatable, dimension(:) :: GroupColToGlobal

  !! Logical set true if mpiSimulation has been initialized
  logical, save :: isSimSet = .false.

  type (mpiComponentPlan), save :: mpiSim

  !! gs_plan contains plans for gather and scatter routines
  type, public :: gs_plan
     private
     type (mpiComponentPlan), pointer :: gsComponentPlan => NULL()
     integer :: gsPlanSize     !! size of this plan (nDim*nComponents)
     integer :: globalPlanSize !! size of all components in plan
     integer, dimension(:), pointer :: displs !! Displacements array for mpi indexed from 0.
     integer, dimension(:), pointer :: counts !! Counts array for mpi indexed from 0.
     integer :: myPlanComm  !! My communicator for this plan
     integer :: myPlanRank  !! My rank in this plan
     integer :: planNprocs  !! Number of processors in this plan
  end type gs_plan
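
  !! For a plan built over nObjects local objects with nDim values per object,
  !! gsPlanSize = nDim*nObjects on this processor, counts(:) collects the
  !! gsPlanSize of every processor in the plan's communicator, displs(:) holds
  !! the running offsets into the gathered array, and globalPlanSize is the
  !! sum of counts (the length of the full row or column array).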

  ! plans for different decompositions
  type (gs_plan), public, save :: plan_atom_row
  type (gs_plan), public, save :: plan_atom_row_3d
  type (gs_plan), public, save :: plan_atom_col
  type (gs_plan), public, save :: plan_atom_col_3d
  type (gs_plan), public, save :: plan_atom_row_Rotation
  type (gs_plan), public, save :: plan_atom_col_Rotation
  type (gs_plan), public, save :: plan_group_row
  type (gs_plan), public, save :: plan_group_col
  type (gs_plan), public, save :: plan_group_row_3d
  type (gs_plan), public, save :: plan_group_col_3d

  type (mpiComponentPlan), pointer :: simComponentPlan

  ! interface for gather scatter routines
  !! Generic interface for gather.
  !! Gathers a local array into a row or column array.
  !! Interface provided for integer, double and double
  !! rank 2 arrays.
  interface gather
     module procedure gather_integer
     module procedure gather_double
     module procedure gather_double_2d
  end interface

  !! Generic interface for scatter.
  !! Scatters a row or column array, adding components
  !! and reducing them to a local nComponent array.
  !! Interface provided for double and double rank=2 arrays.
  interface scatter
     module procedure scatter_double
     module procedure scatter_double_2d
  end interface
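
  !! Illustrative use of the generic interfaces (q, q_Row, q_Col, f_Row,
  !! f_Col, f_local and f_temp are placeholder array names, not variables
  !! owned by this module):
  !!
  !!    call gather(q, q_Row, plan_atom_row_3d)        ! replicate positions along a row
  !!    call gather(q, q_Col, plan_atom_col_3d)        ! ... and along a column
  !!    ! ... compute partial forces into f_Row and f_Col ...
  !!    call scatter(f_Row, f_local, plan_atom_row_3d) ! sum row contributions back
  !!    call scatter(f_Col, f_temp,  plan_atom_col_3d) ! sum column contributions back
  !!    ! the two scattered pieces are then added into the local force array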

contains

  !! Sets up mpiComponentPlan with structure passed from C++.
  subroutine setupSimParallel(thisComponentPlan, nAtomTags, atomTags, &
       nGroupTags, groupTags, status)
    !! Passed Arguments
    !! mpiComponentPlan struct from C
    type (mpiComponentPlan), intent(inout) :: thisComponentPlan
    !! Number of tags passed
    integer, intent(in) :: nAtomTags, nGroupTags
    !! Result status, 0 = normal, -1 = error
    integer, intent(out) :: status
    integer :: localStatus
    !! Global reference tag for local particles
    integer, dimension(nAtomTags), intent(inout) :: atomTags
    integer, dimension(nGroupTags), intent(inout) :: groupTags

    !write(*,*) 'mpiSim_mod thinks node', thisComponentPlan%myNode, &
    !     ' has atomTags(1) = ', atomTags(1)

    status = 0
    if (componentPlanSet) then
       return
    endif
    componentPlanSet = .true.

    !! copy c component plan to fortran
    mpiSim = thisComponentPlan
    !write(*,*) "Setting up simParallel"

    call make_Force_Grid(mpiSim, localStatus)
    if (localStatus /= 0) then
       write(default_error,*) "Error creating force grid"
       status = -1
       return
    endif

    call updateGridComponents(mpiSim, localStatus)
    if (localStatus /= 0) then
       write(default_error,*) "Error updating grid components"
       status = -1
       return
    endif

    !! initialize gather and scatter plans used in this simulation
    call plan_gather_scatter(1, mpiSim%nAtomsLocal, &
         mpiSim, mpiSim%rowComm, plan_atom_row)
    call plan_gather_scatter(nDim, mpiSim%nAtomsLocal, &
         mpiSim, mpiSim%rowComm, plan_atom_row_3d)
    call plan_gather_scatter(9, mpiSim%nAtomsLocal, &
         mpiSim, mpiSim%rowComm, plan_atom_row_Rotation)
    call plan_gather_scatter(1, mpiSim%nGroupsLocal, &
         mpiSim, mpiSim%rowComm, plan_group_row)
    call plan_gather_scatter(nDim, mpiSim%nGroupsLocal, &
         mpiSim, mpiSim%rowComm, plan_group_row_3d)

    call plan_gather_scatter(1, mpiSim%nAtomsLocal, &
         mpiSim, mpiSim%columnComm, plan_atom_col)
    call plan_gather_scatter(nDim, mpiSim%nAtomsLocal, &
         mpiSim, mpiSim%columnComm, plan_atom_col_3d)
    call plan_gather_scatter(9, mpiSim%nAtomsLocal, &
         mpiSim, mpiSim%columnComm, plan_atom_col_Rotation)
    call plan_gather_scatter(1, mpiSim%nGroupsLocal, &
         mpiSim, mpiSim%columnComm, plan_group_col)
    call plan_gather_scatter(nDim, mpiSim%nGroupsLocal, &
         mpiSim, mpiSim%columnComm, plan_group_col_3d)

    ! Initialize tags

    call setAtomTags(atomTags, localStatus)
    if (localStatus /= 0) then
       status = -1
       return
    endif

    call setGroupTags(groupTags, localStatus)
    if (localStatus /= 0) then
       status = -1
       return
    endif

    isSimSet = .true.

    ! call printComponentPlan(mpiSim,0)
  end subroutine setupSimParallel
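
  !! setupSimParallel is intended to be called once (later calls return
  !! immediately once ComponentPlanSet is .true.); it builds the processor
  !! grid (make_Force_Grid), totals the row and column atom and group counts
  !! (updateGridComponents), creates the ten gather/scatter plans listed
  !! above, and finally records the local-to-global atom and group tag maps
  !! (setAtomTags / setGroupTags).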

  subroutine replanSimParallel(thisComponentPlan, status)
    ! Passed Arguments
    !! mpiComponentPlan struct from C
    type (mpiComponentPlan), intent(inout) :: thisComponentPlan
    integer, intent(out) :: status
    integer :: localStatus
    integer :: mpierror
    status = 0

    call updateGridComponents(thisComponentPlan, localStatus)
    if (localStatus /= 0) then
       status = -1
       return
    endif

    !! Unplan Gather Scatter plans
    call unplan_gather_scatter(plan_atom_row)
    call unplan_gather_scatter(plan_atom_row_3d)
    call unplan_gather_scatter(plan_atom_row_Rotation)
    call unplan_gather_scatter(plan_group_row)
    call unplan_gather_scatter(plan_group_row_3d)

    call unplan_gather_scatter(plan_atom_col)
    call unplan_gather_scatter(plan_atom_col_3d)
    call unplan_gather_scatter(plan_atom_col_Rotation)
    call unplan_gather_scatter(plan_group_col)
    call unplan_gather_scatter(plan_group_col_3d)

    !! initialize gather and scatter plans used in this simulation
    call plan_gather_scatter(1, mpiSim%nAtomsLocal, &
         mpiSim, mpiSim%rowComm, plan_atom_row)
    call plan_gather_scatter(nDim, mpiSim%nAtomsLocal, &
         mpiSim, mpiSim%rowComm, plan_atom_row_3d)
    call plan_gather_scatter(9, mpiSim%nAtomsLocal, &
         mpiSim, mpiSim%rowComm, plan_atom_row_Rotation)
    call plan_gather_scatter(1, mpiSim%nGroupsLocal, &
         mpiSim, mpiSim%rowComm, plan_group_row)
    call plan_gather_scatter(nDim, mpiSim%nGroupsLocal, &
         mpiSim, mpiSim%rowComm, plan_group_row_3d)

    call plan_gather_scatter(1, mpiSim%nAtomsLocal, &
         mpiSim, mpiSim%columnComm, plan_atom_col)
    call plan_gather_scatter(nDim, mpiSim%nAtomsLocal, &
         mpiSim, mpiSim%columnComm, plan_atom_col_3d)
    call plan_gather_scatter(9, mpiSim%nAtomsLocal, &
         mpiSim, mpiSim%columnComm, plan_atom_col_Rotation)
    call plan_gather_scatter(1, mpiSim%nGroupsLocal, &
         mpiSim, mpiSim%columnComm, plan_group_col)
    call plan_gather_scatter(nDim, mpiSim%nGroupsLocal, &
         mpiSim, mpiSim%columnComm, plan_group_col_3d)

  end subroutine replanSimParallel

  !! Updates number of row and column components for long range forces.
  subroutine updateGridComponents(thisComponentPlan, status)
    type (mpiComponentPlan) :: thisComponentPlan !! mpiComponentPlan

    !! Status return
    !! - 0 Success
    !! - -1 Failure
    integer, intent(out) :: status
    integer :: nAtomsLocal
    integer :: nAtomsInRow = 0
    integer :: nAtomsInColumn = 0
    integer :: nGroupsLocal
    integer :: nGroupsInRow = 0
    integer :: nGroupsInColumn = 0
    integer :: mpiErrors

    status = 0
    if (.not. componentPlanSet) return

    if (thisComponentPlan%nAtomsLocal == 0) then
       status = -1
       return
    endif
    if (thisComponentPlan%nGroupsLocal == 0) then
       write(*,*) 'tcp%ngl = ', thisComponentPlan%nGroupsLocal
       status = -1
       return
    endif

    nAtomsLocal = thisComponentPlan%nAtomsLocal
    nGroupsLocal = thisComponentPlan%nGroupsLocal

    call mpi_allreduce(nAtomsLocal, nAtomsInRow, 1, mpi_integer, &
         mpi_sum, thisComponentPlan%rowComm, mpiErrors)
    if (mpiErrors /= 0) then
       status = -1
       return
    endif

    call mpi_allreduce(nAtomsLocal, nAtomsInColumn, 1, mpi_integer, &
         mpi_sum, thisComponentPlan%columnComm, mpiErrors)
    if (mpiErrors /= 0) then
       status = -1
       return
    endif

    call mpi_allreduce(nGroupsLocal, nGroupsInRow, 1, mpi_integer, &
         mpi_sum, thisComponentPlan%rowComm, mpiErrors)
    if (mpiErrors /= 0) then
       status = -1
       return
    endif

    call mpi_allreduce(nGroupsLocal, nGroupsInColumn, 1, mpi_integer, &
         mpi_sum, thisComponentPlan%columnComm, mpiErrors)
    if (mpiErrors /= 0) then
       status = -1
       return
    endif

    thisComponentPlan%nAtomsInRow = nAtomsInRow
    thisComponentPlan%nAtomsInColumn = nAtomsInColumn
    thisComponentPlan%nGroupsInRow = nGroupsInRow
    thisComponentPlan%nGroupsInColumn = nGroupsInColumn

  end subroutine updateGridComponents
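
  !! The row and column totals are simple mpi_allreduce sums of the local
  !! counts over the row and column communicators.  For example, on a grid
  !! with nRows = 2 and nColumns = 3 and 100 atoms on every processor,
  !! nAtomsInRow = 300 and nAtomsInColumn = 200.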

  !! Creates a square force decomposition of processors into row and column
  !! communicators.
  subroutine make_Force_Grid(thisComponentPlan, status)
    type (mpiComponentPlan) :: thisComponentPlan
    integer, intent(out) :: status !! status returns -1 if error
    integer :: nColumnsMax !! Maximum number of columns
    integer :: nWorldProcessors !! Total number of processors in World comm.
    integer :: rowIndex !! Row for this processor.
    integer :: columnIndex !! Column for this processor.
    integer :: nRows !! Total number of rows.
    integer :: nColumns !! Total number of columns.
    integer :: mpiErrors !! MPI errors.
    integer :: rowCommunicator !! MPI row communicator.
    integer :: columnCommunicator
    integer :: myWorldRank
    integer :: i

    if (.not. ComponentPlanSet) return
    status = 0

    !! We make a dangerous assumption here that if nProcessors is
    !! zero, then we need to get the information from MPI.
    if (thisComponentPlan%nProcessors == 0) then
       call mpi_comm_size(MPI_COMM_WORLD, nWorldProcessors, mpiErrors)
       if (mpiErrors /= 0) then
          status = -1
          return
       endif
       call mpi_comm_rank(MPI_COMM_WORLD, myWorldRank, mpiErrors)
       if (mpiErrors /= 0) then
          status = -1
          return
       endif
    else
       nWorldProcessors = thisComponentPlan%nProcessors
       myWorldRank = thisComponentPlan%myNode
    endif

    nColumnsMax = nint(sqrt(real(nWorldProcessors, kind=dp)))

    do i = 1, nColumnsMax
       if (mod(nWorldProcessors, i) == 0) nColumns = i
    end do

    nRows = nWorldProcessors/nColumns

    rowIndex = myWorldRank/nColumns

    call mpi_comm_split(mpi_comm_world, rowIndex, 0, rowCommunicator, mpiErrors)
    if (mpiErrors /= 0) then
       write(default_error,*) "MPI comm split failed at row communicator"
       status = -1
       return
    endif

    columnIndex = mod(myWorldRank, nColumns)
    call mpi_comm_split(mpi_comm_world, columnIndex, 0, columnCommunicator, mpiErrors)
    if (mpiErrors /= 0) then
       write(default_error,*) "MPI comm split failed at columnCommunicator"
       status = -1
       return
    endif

    ! Set appropriate components of thisComponentPlan
    thisComponentPlan%rowComm = rowCommunicator
    thisComponentPlan%columnComm = columnCommunicator
    thisComponentPlan%rowIndex = rowIndex
    thisComponentPlan%columnIndex = columnIndex
    thisComponentPlan%nRows = nRows
    thisComponentPlan%nColumns = nColumns

  end subroutine make_Force_Grid
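
  !! Example of the grid construction: with nWorldProcessors = 6,
  !! nColumnsMax = nint(sqrt(6.0)) = 2, and the largest divisor of 6 that is
  !! <= 2 is nColumns = 2, so nRows = 3.  World rank 4 then gets
  !! rowIndex = 4/2 = 2 and columnIndex = mod(4,2) = 0, and the two
  !! mpi_comm_split calls place it in the communicators for row 2 and
  !! column 0.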

  !! initializes a gather scatter plan
  subroutine plan_gather_scatter(nDim, nObjects, thisComponentPlan, &
       thisComm, this_plan, status)
    integer, intent(in) :: nDim !! Number of dimensions for gather scatter plan
    integer, intent(in) :: nObjects
    type (mpiComponentPlan), intent(in), target :: thisComponentPlan
    type (gs_plan), intent(out) :: this_plan !! MPI Component Plan
    integer, intent(in) :: thisComm !! MPI communicator for this plan

    integer :: arraySize !! size to allocate plan for
    integer, intent(out), optional :: status
    integer :: ierror
    integer :: i, junk

    if (present(status)) status = 0

    !! Set gsComponentPlan pointer
    !! to the component plan we want to use for this gather scatter plan.
    !! WARNING this could be dangerous since thisComponentPlan was originally
    !! allocated in C++ and there is a significant difference between c and
    !! f95 pointers....
    this_plan%gsComponentPlan => thisComponentPlan

    ! Set this plan size for displs array.
    this_plan%gsPlanSize = nDim * nObjects

    ! Duplicate communicator for this plan
    call mpi_comm_dup(thisComm, this_plan%myPlanComm, mpi_err)
    if (mpi_err /= 0) then
       if (present(status)) status = -1
       return
    end if
    call mpi_comm_rank(this_plan%myPlanComm, this_plan%myPlanRank, mpi_err)
    if (mpi_err /= 0) then
       if (present(status)) status = -1
       return
    end if

    call mpi_comm_size(this_plan%myPlanComm, this_plan%planNprocs, mpi_err)

    if (mpi_err /= 0) then
       if (present(status)) status = -1
       return
    end if

    !! counts and displacements arrays are indexed from 0 to be compatible
    !! with MPI arrays.
    allocate(this_plan%counts(0:this_plan%planNprocs-1), STAT=ierror)
    if (ierror /= 0) then
       if (present(status)) status = -1
       return
    end if

    allocate(this_plan%displs(0:this_plan%planNprocs-1), STAT=ierror)
    if (ierror /= 0) then
       if (present(status)) status = -1
       return
    end if

    !! gather all the local sizes into an array of length planNprocs.
    call mpi_allgather(this_plan%gsPlanSize, 1, mpi_integer, this_plan%counts, &
         1, mpi_integer, thisComm, mpi_err)

    if (mpi_err /= 0) then
       if (present(status)) status = -1
       return
    end if

    !! figure out the total number of particles in this plan
    this_plan%globalPlanSize = sum(this_plan%counts)

    !! initialize plan displacements.
    this_plan%displs(0) = 0
    do i = 1, this_plan%planNprocs - 1, 1
       this_plan%displs(i) = this_plan%displs(i-1) + this_plan%counts(i-1)
    end do
  end subroutine plan_gather_scatter
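
  !! Example: a 3-d atom plan over a row communicator whose two processors
  !! hold 3 and 2 local atoms gives gsPlanSize = 9 and 6 respectively, so
  !! after the allgather counts = (/ 9, 6 /), displs = (/ 0, 9 /), and
  !! globalPlanSize = 15 on both processors.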

  subroutine unplan_gather_scatter(this_plan)
    type (gs_plan), intent(inout) :: this_plan

    this_plan%gsComponentPlan => null()
    call mpi_comm_free(this_plan%myPlanComm, mpi_err)

    deallocate(this_plan%counts)
    deallocate(this_plan%displs)

  end subroutine unplan_gather_scatter

  subroutine gather_integer(sbuffer, rbuffer, this_plan, status)

    type (gs_plan), intent(inout) :: this_plan
    integer, dimension(:), intent(inout) :: sbuffer
    integer, dimension(:), intent(inout) :: rbuffer
    integer :: noffset
    integer, intent(out), optional :: status
    integer :: i

    if (present(status)) status = 0
    noffset = this_plan%displs(this_plan%myPlanRank)

    ! if (getmyNode() == 1) then
    !    write(*,*) "Node 0 printing allgatherv vars"
    !    write(*,*) "Noffset: ", noffset
    !    write(*,*) "PlanSize: ", this_plan%gsPlanSize
    !    write(*,*) "PlanComm: ", this_plan%myPlanComm
    ! end if

    call mpi_allgatherv(sbuffer, this_plan%gsPlanSize, mpi_integer, &
         rbuffer, this_plan%counts, this_plan%displs, mpi_integer, &
         this_plan%myPlanComm, mpi_err)

    if (mpi_err /= 0) then
       if (present(status)) status = -1
    endif

  end subroutine gather_integer

  subroutine gather_double(sbuffer, rbuffer, this_plan, status)

    type (gs_plan), intent(in) :: this_plan
    real( kind = DP ), dimension(:), intent(inout) :: sbuffer
    real( kind = DP ), dimension(:), intent(inout) :: rbuffer
    integer :: noffset
    integer, intent(out), optional :: status

    if (present(status)) status = 0
    noffset = this_plan%displs(this_plan%myPlanRank)
#ifdef PROFILE
    call cpu_time(commTimeInitial)
#endif
    call mpi_allgatherv(sbuffer, this_plan%gsPlanSize, mpi_double_precision, &
         rbuffer, this_plan%counts, this_plan%displs, mpi_double_precision, &
         this_plan%myPlanComm, mpi_err)
#ifdef PROFILE
    call cpu_time(commTimeFinal)
    commTime = commTime + commTimeFinal - commTimeInitial
#endif

    if (mpi_err /= 0) then
       if (present(status)) status = -1
    endif

  end subroutine gather_double

  subroutine gather_double_2d(sbuffer, rbuffer, this_plan, status)

    type (gs_plan), intent(in) :: this_plan
    real( kind = DP ), dimension(:,:), intent(inout) :: sbuffer
    real( kind = DP ), dimension(:,:), intent(inout) :: rbuffer
    integer :: noffset, i, ierror
    integer, intent(out), optional :: status

    external mpi_allgatherv

    if (present(status)) status = 0

    ! noffset = this_plan%displs(this_plan%me)
#ifdef PROFILE
    call cpu_time(commTimeInitial)
#endif

    call mpi_allgatherv(sbuffer, this_plan%gsPlanSize, mpi_double_precision, &
         rbuffer, this_plan%counts, this_plan%displs, mpi_double_precision, &
         this_plan%myPlanComm, mpi_err)

#ifdef PROFILE
    call cpu_time(commTimeFinal)
    commTime = commTime + commTimeFinal - commTimeInitial
#endif

    if (mpi_err /= 0) then
       if (present(status)) status = -1
    endif

  end subroutine gather_double_2d

  subroutine scatter_double(sbuffer, rbuffer, this_plan, status)

    type (gs_plan), intent(in) :: this_plan
    real( kind = DP ), dimension(:), intent(inout) :: sbuffer
    real( kind = DP ), dimension(:), intent(inout) :: rbuffer
    integer, intent(out), optional :: status
    external mpi_reduce_scatter

    if (present(status)) status = 0

#ifdef PROFILE
    call cpu_time(commTimeInitial)
#endif
    call mpi_reduce_scatter(sbuffer, rbuffer, this_plan%counts, &
         mpi_double_precision, MPI_SUM, this_plan%myPlanComm, mpi_err)
#ifdef PROFILE
    call cpu_time(commTimeFinal)
    commTime = commTime + commTimeFinal - commTimeInitial
#endif

    if (mpi_err /= 0) then
       if (present(status)) status = -1
    endif

  end subroutine scatter_double
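
  !! Because the scatter routines use mpi_reduce_scatter with MPI_SUM, the
  !! overlapping contributions held by every processor in the plan (for
  !! example, partial forces on the same atoms) are summed as they are
  !! returned to the processor that owns them.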

  subroutine scatter_double_2d(sbuffer, rbuffer, this_plan, status)

    type (gs_plan), intent(in) :: this_plan
    real( kind = DP ), dimension(:,:), intent(inout) :: sbuffer
    real( kind = DP ), dimension(:,:), intent(inout) :: rbuffer
    integer, intent(out), optional :: status
    external mpi_reduce_scatter

    if (present(status)) status = 0
#ifdef PROFILE
    call cpu_time(commTimeInitial)
#endif

    call mpi_reduce_scatter(sbuffer, rbuffer, this_plan%counts, &
         mpi_double_precision, MPI_SUM, this_plan%myPlanComm, mpi_err)
#ifdef PROFILE
    call cpu_time(commTimeFinal)
    commTime = commTime + commTimeFinal - commTimeInitial
#endif

    if (mpi_err /= 0) then
       if (present(status)) status = -1
    endif

  end subroutine scatter_double_2d

  subroutine setAtomTags(tags, status)
    integer, dimension(:) :: tags
    integer :: status

    integer :: alloc_stat

    integer :: nAtomsInCol
    integer :: nAtomsInRow

    status = 0
    ! allocate row arrays
    nAtomsInRow = getNatomsInRow(plan_atom_row)
    nAtomsInCol = getNatomsInCol(plan_atom_col)

    if (.not. allocated(AtomLocalToGlobal)) then
       allocate(AtomLocalToGlobal(size(tags)), STAT=alloc_stat)
       if (alloc_stat /= 0) then
          status = -1
          return
       endif
    else
       deallocate(AtomLocalToGlobal)
       allocate(AtomLocalToGlobal(size(tags)), STAT=alloc_stat)
       if (alloc_stat /= 0) then
          status = -1
          return
       endif

    endif

    AtomLocalToGlobal = tags

    if (.not. allocated(AtomRowToGlobal)) then
       allocate(AtomRowToGlobal(nAtomsInRow), STAT=alloc_stat)
       if (alloc_stat /= 0) then
          status = -1
          return
       endif
    else
       deallocate(AtomRowToGlobal)
       allocate(AtomRowToGlobal(nAtomsInRow), STAT=alloc_stat)
       if (alloc_stat /= 0) then
          status = -1
          return
       endif

    endif
    ! allocate column arrays
    if (.not. allocated(AtomColToGlobal)) then
       allocate(AtomColToGlobal(nAtomsInCol), STAT=alloc_stat)
       if (alloc_stat /= 0) then
          status = -1
          return
       endif
    else
       deallocate(AtomColToGlobal)
       allocate(AtomColToGlobal(nAtomsInCol), STAT=alloc_stat)
       if (alloc_stat /= 0) then
          status = -1
          return
       endif
    endif

    call gather(tags, AtomRowToGlobal, plan_atom_row)
    call gather(tags, AtomColToGlobal, plan_atom_col)

  end subroutine setAtomTags

  subroutine setGroupTags(tags, status)
    integer, dimension(:) :: tags
    integer :: status

    integer :: alloc_stat

    integer :: nGroupsInCol
    integer :: nGroupsInRow

    status = 0

    nGroupsInRow = getNgroupsInRow(plan_group_row)
    nGroupsInCol = getNgroupsInCol(plan_group_col)

    if (allocated(GroupLocalToGlobal)) then
       deallocate(GroupLocalToGlobal)
    endif
    allocate(GroupLocalToGlobal(size(tags)), STAT=alloc_stat)
    if (alloc_stat /= 0) then
       status = -1
       return
    endif

    GroupLocalToGlobal = tags

    if (allocated(GroupRowToGlobal)) then
       deallocate(GroupRowToGlobal)
    endif
    allocate(GroupRowToGlobal(nGroupsInRow), STAT=alloc_stat)
    if (alloc_stat /= 0) then
       status = -1
       return
    endif

    if (allocated(GroupColToGlobal)) then
       deallocate(GroupColToGlobal)
    endif
    allocate(GroupColToGlobal(nGroupsInCol), STAT=alloc_stat)
    if (alloc_stat /= 0) then
       status = -1
       return
    endif

    call gather(tags, GroupRowToGlobal, plan_group_row)
    call gather(tags, GroupColToGlobal, plan_group_col)

  end subroutine setGroupTags

  function getNatomsInCol(thisplan) result(nInCol)
    type (gs_plan), intent(in) :: thisplan
    integer :: nInCol
    nInCol = thisplan%gsComponentPlan%nAtomsInColumn
  end function getNatomsInCol

  function getNatomsInRow(thisplan) result(nInRow)
    type (gs_plan), intent(in) :: thisplan
    integer :: nInRow
    nInRow = thisplan%gsComponentPlan%nAtomsInRow
  end function getNatomsInRow

  function getNgroupsInCol(thisplan) result(nInCol)
    type (gs_plan), intent(in) :: thisplan
    integer :: nInCol
    nInCol = thisplan%gsComponentPlan%nGroupsInColumn
  end function getNgroupsInCol

  function getNgroupsInRow(thisplan) result(nInRow)
    type (gs_plan), intent(in) :: thisplan
    integer :: nInRow
    nInRow = thisplan%gsComponentPlan%nGroupsInRow
  end function getNgroupsInRow

  function isMPISimSet() result(isthisSimSet)
    logical :: isthisSimSet
    if (isSimSet) then
       isthisSimSet = .true.
    else
       isthisSimSet = .false.
    endif
  end function isMPISimSet

  subroutine printComponentPlan(this_plan, printNode)

    type (mpiComponentPlan), intent(in) :: this_plan
    integer, optional :: printNode
    logical :: print_me = .false.

    if (present(printNode)) then
       if (printNode == mpiSim%myNode) print_me = .true.
    else
       print_me = .true.
    endif

    if (print_me) then
       write(default_error,*) "SetupSimParallel: writing component plan"

       write(default_error,*) "nMolGlobal: ", mpiSim%nMolGlobal
       write(default_error,*) "nAtomsGlobal: ", mpiSim%nAtomsGlobal
       write(default_error,*) "nBondsGlobal: ", mpiSim%nBondsGlobal
       write(default_error,*) "nTorsionsGlobal: ", mpiSim%nTorsionsGlobal
       write(default_error,*) "nSRIGlobal: ", mpiSim%nSRIGlobal
       write(default_error,*) "nAtomsLocal: ", mpiSim%nAtomsLocal
       write(default_error,*) "myNode: ", mpiSim%myNode
       write(default_error,*) "nProcessors: ", mpiSim%nProcessors
       write(default_error,*) "rowComm: ", mpiSim%rowComm
       write(default_error,*) "columnComm: ", mpiSim%columnComm
       write(default_error,*) "nRows: ", mpiSim%nRows
       write(default_error,*) "nColumns: ", mpiSim%nColumns
       write(default_error,*) "nAtomsInRow: ", mpiSim%nAtomsInRow
       write(default_error,*) "nAtomsInColumn: ", mpiSim%nAtomsInColumn
       write(default_error,*) "rowIndex: ", mpiSim%rowIndex
       write(default_error,*) "columnIndex: ", mpiSim%columnIndex
    endif
  end subroutine printComponentPlan

  function getMyNode() result(myNode)
    integer :: myNode
    myNode = mpiSim%myNode
  end function getMyNode

#ifdef PROFILE
  subroutine printCommTime()
    write(*,*) "MPI communication time is: ", commTime
  end subroutine printCommTime

  function getCommTime() result(comm_time)
    real :: comm_time
    comm_time = commTime
  end function getCommTime

#endif

#endif // is_mpi
end module mpiSimulation