root/group/trunk/OOPSE/libmdtools/mpiSimulation_module.F90
Revision: 1203
Committed: Thu May 27 18:59:17 2004 UTC by gezelter
File size: 25680 byte(s)
Log Message:
Cutoff group changes under MPI

!! MPI support for long range forces using force decomposition
!! on a square grid of processors.
!! Corresponds to mpiSimulation.cpp for C++
!! mpi_module also contains a private interface for mpi f90 routines.
!!
!! @author Charles F. Vardeman II
!! @author Matthew Meineke
!! @version $Id: mpiSimulation_module.F90,v 1.14 2004-05-27 18:59:15 gezelter Exp $, $Date: 2004-05-27 18:59:15 $, $Name: not supported by cvs2svn $, $Revision: 1.14 $

module mpiSimulation
  use definitions
#ifdef IS_MPI
  use oopseMPI
  implicit none
  PRIVATE

  !! PUBLIC Subroutines contained in this module
  !! gather and scatter are a generic interface
  !! to gather and scatter routines
  public :: gather, scatter
  public :: setupSimParallel
  public :: replanSimParallel
  public :: getNatomsInCol
  public :: getNatomsInRow
  public :: getNgroupsInCol
  public :: getNgroupsInRow
  public :: isMPISimSet
  public :: printComponentPlan
  public :: getMyNode

  !! PUBLIC Subroutines contained in MPI module
  public :: mpi_bcast
  public :: mpi_allreduce
  ! public :: mpi_reduce
  public :: mpi_send
  public :: mpi_recv
  public :: mpi_get_processor_name
  public :: mpi_finalize

  !! PUBLIC mpi variables
  public :: mpi_comm_world
  public :: mpi_character
  public :: mpi_integer
  public :: mpi_double_precision
  public :: mpi_sum
  public :: mpi_max
  public :: mpi_status_size
  public :: mpi_any_source

  !! Safety logical to prevent access to ComponentPlan until
  !! set by C++.
  logical, save :: ComponentPlanSet = .false.

  !! generic mpi error declaration.
  integer, public :: mpi_err

#ifdef PROFILE
  public :: printCommTime
  public :: getCommTime
  real, save :: commTime = 0.0
  real :: commTimeInitial, commTimeFinal
#endif

  !! Include mpiComponentPlan type. mpiComponentPlan is a
  !! dual header file for both C and Fortran.
#define __FORTRAN90
#include "mpiComponentPlan.h"

  !! Tags used during force loop for parallel simulation
  integer, public, allocatable, dimension(:) :: AtomLocalToGlobal
  integer, public, allocatable, dimension(:) :: AtomRowToGlobal
  integer, public, allocatable, dimension(:) :: AtomColToGlobal
  integer, public, allocatable, dimension(:) :: GroupLocalToGlobal
  integer, public, allocatable, dimension(:) :: GroupRowToGlobal
  integer, public, allocatable, dimension(:) :: GroupColToGlobal

  !! Logical set true if mpiSimulation has been initialized
  logical, save :: isSimSet = .false.

  type (mpiComponentPlan), save :: mpiSim

  !! gs_plan contains plans for gather and scatter routines
  type, public :: gs_plan
    private
    type (mpiComponentPlan), pointer :: gsComponentPlan => NULL()
    integer :: gsPlanSize     !! size of this plan (nDim*nComponents)
    integer :: globalPlanSize !! size of all components in plan
    integer, dimension(:), pointer :: displs !! Displacements array for mpi indexed from 0.
    integer, dimension(:), pointer :: counts !! Counts array for mpi indexed from 0.
    integer :: myPlanComm  !! My communicator for this plan
    integer :: myPlanRank  !! My rank in this plan
    integer :: planNprocs  !! Number of processors in this plan
  end type gs_plan

  ! plans for different decompositions
  type (gs_plan), public, save :: plan_atom_row
  type (gs_plan), public, save :: plan_atom_row_3d
  type (gs_plan), public, save :: plan_atom_col
  type (gs_plan), public, save :: plan_atom_col_3d
  type (gs_plan), public, save :: plan_atom_row_Rotation
  type (gs_plan), public, save :: plan_atom_col_Rotation
  type (gs_plan), public, save :: plan_group_row
  type (gs_plan), public, save :: plan_group_col
  type (gs_plan), public, save :: plan_group_row_3d
  type (gs_plan), public, save :: plan_group_col_3d

  type (mpiComponentPlan), pointer :: simComponentPlan

  ! interface for gather scatter routines
  !! Generic interface for gather.
  !! Gathers a local array into a row or column array.
  !! Interface provided for integer, double, and double
  !! rank 2 arrays.
  interface gather
    module procedure gather_integer
    module procedure gather_double
    module procedure gather_double_2d
  end interface

  !! Generic interface for scatter.
  !! Scatters a row or column array, adding components
  !! and reducing them to a local nComponent array.
  !! Interface provided for double and double rank=2 arrays.
  interface scatter
    module procedure scatter_double
    module procedure scatter_double_2d
  end interface
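
  !! Usage sketch (comments only, not part of the build): a minimal,
  !! hypothetical force-loop fragment showing how the generic gather/scatter
  !! pair is meant to be used with the row/column plans declared above. The
  !! names q, q_Row, q_Col, f_Row, f_Col, and f are illustrative and are
  !! assumed to be declared with the sizes implied by the plans.
  !!
  !!   call gather(q, q_Row, plan_atom_row_3d)  ! replicate positions along my row
  !!   call gather(q, q_Col, plan_atom_col_3d)  ! replicate positions along my column
  !!   ! ... compute partial forces into f_Row and f_Col ...
  !!   call scatter(f_Row, f, plan_atom_row_3d) ! sum row contributions back locally
  !!   call scatter(f_Col, f, plan_atom_col_3d) ! careful: this overwrites f, so a
  !!                                            ! real caller scatters into a
  !!                                            ! temporary and accumulates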

contains

  !! Sets up mpiComponentPlan with structure passed from C++.
  subroutine setupSimParallel(thisComponentPlan, nAtomTags, atomTags, &
       status)
    !! Passed Arguments
    !! mpiComponentPlan struct from C
    type (mpiComponentPlan), intent(inout) :: thisComponentPlan
    !! Number of tags passed
    integer, intent(in) :: nAtomTags
    !! Result status, 0 = normal, -1 = error
    integer, intent(out) :: status
    integer :: localStatus
    !! Global reference tag for local particles
    integer, dimension(nAtomTags), intent(inout) :: atomTags

    !write(*,*) 'mpiSim_mod thinks node', thisComponentPlan%myNode, &
    !     ' has atomTags(1) = ', atomTags(1)

    status = 0
    if (componentPlanSet) then
       return
    endif
    componentPlanSet = .true.

    !! copy C component plan to Fortran
    mpiSim = thisComponentPlan
    !write(*,*) "Setting up simParallel"

    call make_Force_Grid(mpiSim, localStatus)
    if (localStatus /= 0) then
       write(default_error,*) "Error creating force grid"
       status = -1
       return
    endif

    call updateGridComponents(mpiSim, localStatus)
    if (localStatus /= 0) then
       write(default_error,*) "Error updating grid components"
       status = -1
       return
    endif

    !! initialize gather and scatter plans used in this simulation
    call plan_gather_scatter(1, mpiSim%nAtomsLocal, &
         mpiSim, mpiSim%rowComm, plan_atom_row)
    call plan_gather_scatter(nDim, mpiSim%nAtomsLocal, &
         mpiSim, mpiSim%rowComm, plan_atom_row_3d)
    call plan_gather_scatter(9, mpiSim%nAtomsLocal, &
         mpiSim, mpiSim%rowComm, plan_atom_row_Rotation)
    call plan_gather_scatter(1, mpiSim%nGroupsLocal, &
         mpiSim, mpiSim%rowComm, plan_group_row)
    call plan_gather_scatter(nDim, mpiSim%nGroupsLocal, &
         mpiSim, mpiSim%rowComm, plan_group_row_3d)

    call plan_gather_scatter(1, mpiSim%nAtomsLocal, &
         mpiSim, mpiSim%columnComm, plan_atom_col)
    call plan_gather_scatter(nDim, mpiSim%nAtomsLocal, &
         mpiSim, mpiSim%columnComm, plan_atom_col_3d)
    call plan_gather_scatter(9, mpiSim%nAtomsLocal, &
         mpiSim, mpiSim%columnComm, plan_atom_col_Rotation)
    call plan_gather_scatter(1, mpiSim%nGroupsLocal, &
         mpiSim, mpiSim%columnComm, plan_group_col)
    call plan_gather_scatter(nDim, mpiSim%nGroupsLocal, &
         mpiSim, mpiSim%columnComm, plan_group_col_3d)

    ! Initialize tags
    call setAtomTags(atomTags, localStatus)
    if (localStatus /= 0) then
       status = -1
       return
    endif
    isSimSet = .true.

    ! call printComponentPlan(mpiSim,0)
  end subroutine setupSimParallel
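
  !! Caller sketch (hypothetical, not part of the build): setupSimParallel is
  !! normally driven from the C++ side through mpiSimulation.cpp, but a pure
  !! Fortran driver would look roughly like this, assuming the plan struct
  !! and tag array have already been filled in:
  !!
  !!   type (mpiComponentPlan) :: plan           ! populated by the caller
  !!   integer, allocatable :: tags(:)           ! global indices of my atoms
  !!   integer :: status
  !!   call setupSimParallel(plan, size(tags), tags, status)
  !!   if (status /= 0) stop "setupSimParallel failed"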

  subroutine replanSimParallel(thisComponentPlan, status)
    ! Passed Arguments
    !! mpiComponentPlan struct from C
    type (mpiComponentPlan), intent(inout) :: thisComponentPlan
    integer, intent(out) :: status
    integer :: localStatus
    integer :: mpierror
    status = 0

    call updateGridComponents(thisComponentPlan, localStatus)
    if (localStatus /= 0) then
       status = -1
       return
    endif

    !! Unplan Gather Scatter plans
    call unplan_gather_scatter(plan_atom_row)
    call unplan_gather_scatter(plan_atom_row_3d)
    call unplan_gather_scatter(plan_atom_row_Rotation)
    call unplan_gather_scatter(plan_group_row)
    call unplan_gather_scatter(plan_group_row_3d)

    call unplan_gather_scatter(plan_atom_col)
    call unplan_gather_scatter(plan_atom_col_3d)
    call unplan_gather_scatter(plan_atom_col_Rotation)
    call unplan_gather_scatter(plan_group_col)
    call unplan_gather_scatter(plan_group_col_3d)

    !! initialize gather and scatter plans used in this simulation
    call plan_gather_scatter(1, mpiSim%nAtomsLocal, &
         mpiSim, mpiSim%rowComm, plan_atom_row)
    call plan_gather_scatter(nDim, mpiSim%nAtomsLocal, &
         mpiSim, mpiSim%rowComm, plan_atom_row_3d)
    call plan_gather_scatter(9, mpiSim%nAtomsLocal, &
         mpiSim, mpiSim%rowComm, plan_atom_row_Rotation)
    call plan_gather_scatter(1, mpiSim%nGroupsLocal, &
         mpiSim, mpiSim%rowComm, plan_group_row)
    call plan_gather_scatter(nDim, mpiSim%nGroupsLocal, &
         mpiSim, mpiSim%rowComm, plan_group_row_3d)

    call plan_gather_scatter(1, mpiSim%nAtomsLocal, &
         mpiSim, mpiSim%columnComm, plan_atom_col)
    call plan_gather_scatter(nDim, mpiSim%nAtomsLocal, &
         mpiSim, mpiSim%columnComm, plan_atom_col_3d)
    call plan_gather_scatter(9, mpiSim%nAtomsLocal, &
         mpiSim, mpiSim%columnComm, plan_atom_col_Rotation)
    call plan_gather_scatter(1, mpiSim%nGroupsLocal, &
         mpiSim, mpiSim%columnComm, plan_group_col)
    call plan_gather_scatter(nDim, mpiSim%nGroupsLocal, &
         mpiSim, mpiSim%columnComm, plan_group_col_3d)

  end subroutine replanSimParallel

  !! Updates number of row and column components for long range forces.
  subroutine updateGridComponents(thisComponentPlan, status)
    type (mpiComponentPlan) :: thisComponentPlan !! mpiComponentPlan

    !! Status return
    !!  -  0 Success
    !!  - -1 Failure
    integer, intent(out) :: status
    integer :: nAtomsLocal
    integer :: nAtomsInRow = 0
    integer :: nAtomsInColumn = 0
    integer :: nGroupsLocal
    integer :: nGroupsInRow = 0
    integer :: nGroupsInColumn = 0
    integer :: mpiErrors

    status = 0
    if (.not. componentPlanSet) return

    if (thisComponentPlan%nAtomsLocal == 0) then
       status = -1
       return
    endif
    if (thisComponentPlan%nGroupsLocal == 0) then
       write(*,*) 'tcp%ngl = ', thisComponentPlan%nGroupsLocal
       status = -1
       return
    endif

    nAtomsLocal = thisComponentPlan%nAtomsLocal
    nGroupsLocal = thisComponentPlan%nGroupsLocal

    call mpi_allreduce(nAtomsLocal, nAtomsInRow, 1, mpi_integer, &
         mpi_sum, thisComponentPlan%rowComm, mpiErrors)
    if (mpiErrors /= 0) then
       status = -1
       return
    endif

    call mpi_allreduce(nAtomsLocal, nAtomsInColumn, 1, mpi_integer, &
         mpi_sum, thisComponentPlan%columnComm, mpiErrors)
    if (mpiErrors /= 0) then
       status = -1
       return
    endif

    call mpi_allreduce(nGroupsLocal, nGroupsInRow, 1, mpi_integer, &
         mpi_sum, thisComponentPlan%rowComm, mpiErrors)
    if (mpiErrors /= 0) then
       status = -1
       return
    endif

    call mpi_allreduce(nGroupsLocal, nGroupsInColumn, 1, mpi_integer, &
         mpi_sum, thisComponentPlan%columnComm, mpiErrors)
    if (mpiErrors /= 0) then
       status = -1
       return
    endif

    thisComponentPlan%nAtomsInRow = nAtomsInRow
    thisComponentPlan%nAtomsInColumn = nAtomsInColumn
    thisComponentPlan%nGroupsInRow = nGroupsInRow
    thisComponentPlan%nGroupsInColumn = nGroupsInColumn

  end subroutine updateGridComponents
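
  !! Worked example (illustrative numbers only): on a 2x3 grid where the
  !! three row-mates own 10, 12, and 11 atoms, the mpi_allreduce over rowComm
  !! gives nAtomsInRow = 33 on every member of that row; the column reduction
  !! works the same way over columnComm. These totals size the row/column
  !! arrays used by the gather/scatter plans.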


  !! Creates a square force decomposition of processors into row and column
  !! communicators.
  subroutine make_Force_Grid(thisComponentPlan, status)
    type (mpiComponentPlan) :: thisComponentPlan
    integer, intent(out) :: status !! status returns -1 if error
    integer :: nColumnsMax !! Maximum number of columns
    integer :: nWorldProcessors !! Total number of processors in World comm.
    integer :: rowIndex !! Row for this processor.
    integer :: columnIndex !! Column for this processor.
    integer :: nRows !! Total number of rows.
    integer :: nColumns !! Total number of columns.
    integer :: mpiErrors !! MPI errors.
    integer :: rowCommunicator !! MPI row communicator.
    integer :: columnCommunicator
    integer :: myWorldRank
    integer :: i

    if (.not. ComponentPlanSet) return
    status = 0

    !! We make a dangerous assumption here that if numberProcessors is
    !! zero, then we need to get the information from MPI.
    if (thisComponentPlan%nProcessors == 0) then
       call mpi_comm_size(MPI_COMM_WORLD, nWorldProcessors, mpiErrors)
       if (mpiErrors /= 0) then
          status = -1
          return
       endif
       call mpi_comm_rank(MPI_COMM_WORLD, myWorldRank, mpiErrors)
       if (mpiErrors /= 0) then
          status = -1
          return
       endif
    else
       nWorldProcessors = thisComponentPlan%nProcessors
       myWorldRank = thisComponentPlan%myNode
    endif

    nColumnsMax = nint(sqrt(real(nWorldProcessors, kind=dp)))

    do i = 1, nColumnsMax
       if (mod(nWorldProcessors, i) == 0) nColumns = i
    end do

    nRows = nWorldProcessors/nColumns

    rowIndex = myWorldRank/nColumns

    call mpi_comm_split(mpi_comm_world, rowIndex, 0, rowCommunicator, mpiErrors)
    if (mpiErrors /= 0) then
       write(default_error,*) "MPI comm split failed at row communicator"
       status = -1
       return
    endif

    columnIndex = mod(myWorldRank, nColumns)
    call mpi_comm_split(mpi_comm_world, columnIndex, 0, columnCommunicator, mpiErrors)
    if (mpiErrors /= 0) then
       write(default_error,*) "MPI comm split failed at columnCommunicator"
       status = -1
       return
    endif

    ! Set appropriate components of thisComponentPlan
    thisComponentPlan%rowComm = rowCommunicator
    thisComponentPlan%columnComm = columnCommunicator
    thisComponentPlan%rowIndex = rowIndex
    thisComponentPlan%columnIndex = columnIndex
    thisComponentPlan%nRows = nRows
    thisComponentPlan%nColumns = nColumns

  end subroutine make_Force_Grid
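
  !! Worked example (illustrative): with nWorldProcessors = 6,
  !! nColumnsMax = nint(sqrt(6.0)) = 2, and the divisor loop leaves
  !! nColumns = 2, so nRows = 3. Rank 5 then lands at rowIndex = 5/2 = 2 and
  !! columnIndex = mod(5,2) = 1, and the two mpi_comm_split calls place it in
  !! the third row communicator and the second column communicator. The grid
  !! is only truly square when the processor count is a perfect square.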

  !! Initializes a gather scatter plan
  subroutine plan_gather_scatter(nDim, nObjects, thisComponentPlan, &
       thisComm, this_plan, status)
    integer, intent(in) :: nDim !! Number of dimensions for gather scatter plan
    integer, intent(in) :: nObjects
    type (mpiComponentPlan), intent(in), target :: thisComponentPlan
    type (gs_plan), intent(out) :: this_plan !! MPI Component Plan
    integer, intent(in) :: thisComm !! MPI communicator for this plan

    integer :: arraySize !! size to allocate plan for
    integer, intent(out), optional :: status
    integer :: ierror
    integer :: i, junk

    if (present(status)) status = 0

    !! Set gsComponentPlan pointer
    !! to the component plan we want to use for this gather scatter plan.
    !! WARNING this could be dangerous since thisComponentPlan was originally
    !! allocated in C++ and there is a significant difference between C and
    !! f95 pointers....
    this_plan%gsComponentPlan => thisComponentPlan

    ! Set this plan size for displs array.
    this_plan%gsPlanSize = nDim * nObjects

    ! Duplicate communicator for this plan
    call mpi_comm_dup(thisComm, this_plan%myPlanComm, mpi_err)
    if (mpi_err /= 0) then
       if (present(status)) status = -1
       return
    end if
    call mpi_comm_rank(this_plan%myPlanComm, this_plan%myPlanRank, mpi_err)
    if (mpi_err /= 0) then
       if (present(status)) status = -1
       return
    end if

    call mpi_comm_size(this_plan%myPlanComm, this_plan%planNprocs, mpi_err)
    if (mpi_err /= 0) then
       if (present(status)) status = -1
       return
    end if

    !! counts and displacements arrays are indexed from 0 to be compatible
    !! with MPI arrays.
    allocate (this_plan%counts(0:this_plan%planNprocs-1), STAT=ierror)
    if (ierror /= 0) then
       if (present(status)) status = -1
       return
    end if

    allocate (this_plan%displs(0:this_plan%planNprocs-1), STAT=ierror)
    if (ierror /= 0) then
       if (present(status)) status = -1
       return
    end if

    !! gather all the local sizes into an array of length planNprocs.
    call mpi_allgather(this_plan%gsPlanSize, 1, mpi_integer, this_plan%counts, &
         1, mpi_integer, thisComm, mpi_err)
    if (mpi_err /= 0) then
       if (present(status)) status = -1
       return
    end if

    !! figure out the total number of particles in this plan
    this_plan%globalPlanSize = sum(this_plan%counts)

    !! initialize plan displacements.
    this_plan%displs(0) = 0
    do i = 1, this_plan%planNprocs - 1, 1
       this_plan%displs(i) = this_plan%displs(i-1) + this_plan%counts(i-1)
    end do
  end subroutine plan_gather_scatter
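
  !! Worked example (illustrative): for a 3-process plan with nDim = 3 and
  !! local object counts of 2, 4, and 3, gsPlanSize is 6, 12, and 9 on the
  !! three ranks, so after the allgather every rank holds
  !!   counts = (/ 6, 12, 9 /),  displs = (/ 0, 6, 18 /)
  !! and globalPlanSize = 27; these are exactly the arguments that
  !! mpi_allgatherv and mpi_reduce_scatter need in the routines below.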

  subroutine unplan_gather_scatter(this_plan)
    type (gs_plan), intent(inout) :: this_plan

    this_plan%gsComponentPlan => null()
    call mpi_comm_free(this_plan%myPlanComm, mpi_err)

    deallocate(this_plan%counts)
    deallocate(this_plan%displs)

  end subroutine unplan_gather_scatter

  subroutine gather_integer( sbuffer, rbuffer, this_plan, status)

    type (gs_plan), intent(inout) :: this_plan
    integer, dimension(:), intent(inout) :: sbuffer
    integer, dimension(:), intent(inout) :: rbuffer
    integer :: noffset
    integer, intent(out), optional :: status
    integer :: i

    if (present(status)) status = 0
    noffset = this_plan%displs(this_plan%myPlanRank)

    ! if (getmyNode() == 1) then
    !    write(*,*) "Node 0 printing allgatherv vars"
    !    write(*,*) "Noffset: ", noffset
    !    write(*,*) "PlanSize: ", this_plan%gsPlanSize
    !    write(*,*) "PlanComm: ", this_plan%myPlanComm
    ! end if

    call mpi_allgatherv(sbuffer, this_plan%gsPlanSize, mpi_integer, &
         rbuffer, this_plan%counts, this_plan%displs, mpi_integer, &
         this_plan%myPlanComm, mpi_err)

    if (mpi_err /= 0) then
       if (present(status)) status = -1
    endif

  end subroutine gather_integer

  subroutine gather_double( sbuffer, rbuffer, this_plan, status)

    type (gs_plan), intent(in) :: this_plan
    real( kind = DP ), dimension(:), intent(inout) :: sbuffer
    real( kind = DP ), dimension(:), intent(inout) :: rbuffer
    integer :: noffset
    integer, intent(out), optional :: status

    if (present(status)) status = 0
    noffset = this_plan%displs(this_plan%myPlanRank)
#ifdef PROFILE
    call cpu_time(commTimeInitial)
#endif
    call mpi_allgatherv(sbuffer, this_plan%gsPlanSize, mpi_double_precision, &
         rbuffer, this_plan%counts, this_plan%displs, mpi_double_precision, &
         this_plan%myPlanComm, mpi_err)
#ifdef PROFILE
    call cpu_time(commTimeFinal)
    commTime = commTime + commTimeFinal - commTimeInitial
#endif

    if (mpi_err /= 0) then
       if (present(status)) status = -1
    endif

  end subroutine gather_double

  subroutine gather_double_2d( sbuffer, rbuffer, this_plan, status)

    type (gs_plan), intent(in) :: this_plan
    real( kind = DP ), dimension(:,:), intent(inout) :: sbuffer
    real( kind = DP ), dimension(:,:), intent(inout) :: rbuffer
    integer :: noffset, i, ierror
    integer, intent(out), optional :: status

    external mpi_allgatherv

    if (present(status)) status = 0

    ! noffset = this_plan%displs(this_plan%me)
#ifdef PROFILE
    call cpu_time(commTimeInitial)
#endif

    call mpi_allgatherv(sbuffer, this_plan%gsPlanSize, mpi_double_precision, &
         rbuffer, this_plan%counts, this_plan%displs, mpi_double_precision, &
         this_plan%myPlanComm, mpi_err)

#ifdef PROFILE
    call cpu_time(commTimeFinal)
    commTime = commTime + commTimeFinal - commTimeInitial
#endif

    if (mpi_err /= 0) then
       if (present(status)) status = -1
    endif

  end subroutine gather_double_2d

  subroutine scatter_double( sbuffer, rbuffer, this_plan, status)

    type (gs_plan), intent(in) :: this_plan
    real( kind = DP ), dimension(:), intent(inout) :: sbuffer
    real( kind = DP ), dimension(:), intent(inout) :: rbuffer
    integer, intent(out), optional :: status
    external mpi_reduce_scatter

    if (present(status)) status = 0

#ifdef PROFILE
    call cpu_time(commTimeInitial)
#endif
    call mpi_reduce_scatter(sbuffer, rbuffer, this_plan%counts, &
         mpi_double_precision, MPI_SUM, this_plan%myPlanComm, mpi_err)
#ifdef PROFILE
    call cpu_time(commTimeFinal)
    commTime = commTime + commTimeFinal - commTimeInitial
#endif

    if (mpi_err /= 0) then
       if (present(status)) status = -1
    endif

  end subroutine scatter_double

  subroutine scatter_double_2d( sbuffer, rbuffer, this_plan, status)

    type (gs_plan), intent(in) :: this_plan
    real( kind = DP ), dimension(:,:), intent(inout) :: sbuffer
    real( kind = DP ), dimension(:,:), intent(inout) :: rbuffer
    integer, intent(out), optional :: status
    external mpi_reduce_scatter

    if (present(status)) status = 0
#ifdef PROFILE
    call cpu_time(commTimeInitial)
#endif

    call mpi_reduce_scatter(sbuffer, rbuffer, this_plan%counts, &
         mpi_double_precision, MPI_SUM, this_plan%myPlanComm, mpi_err)
#ifdef PROFILE
    call cpu_time(commTimeFinal)
    commTime = commTime + commTimeFinal - commTimeInitial
#endif

    if (mpi_err /= 0) then
       if (present(status)) status = -1
    endif

  end subroutine scatter_double_2d
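
  !! Note on the scatter routines above: mpi_reduce_scatter is the inverse of
  !! the allgatherv used in gather. Each rank contributes its full row (or
  !! column) array, corresponding entries are summed across the communicator,
  !! and each rank receives only its own counts-sized slice. A hypothetical
  !! round trip, assuming conforming arrays, makes the additive semantics
  !! visible:
  !!
  !!   call gather(f_local, f_row, plan_atom_row_3d)  ! f_row replicated on row
  !!   call scatter(f_row, f_back, plan_atom_row_3d)  ! f_back = planNprocs * f_local,
  !!                                                  ! since every rank's copy
  !!                                                  ! of the slice is summed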

  subroutine setAtomTags(tags, status)
    integer, dimension(:) :: tags
    integer :: status

    integer :: alloc_stat

    integer :: nAtomsInCol
    integer :: nAtomsInRow

    status = 0
    ! allocate row arrays
    nAtomsInRow = getNatomsInRow(plan_atom_row)
    nAtomsInCol = getNatomsInCol(plan_atom_col)

    if (.not. allocated(AtomLocalToGlobal)) then
       allocate(AtomLocalToGlobal(size(tags)), STAT=alloc_stat)
       if (alloc_stat /= 0 ) then
          status = -1
          return
       endif
    else
       deallocate(AtomLocalToGlobal)
       allocate(AtomLocalToGlobal(size(tags)), STAT=alloc_stat)
       if (alloc_stat /= 0 ) then
          status = -1
          return
       endif
    endif

    AtomLocalToGlobal = tags

    if (.not. allocated(AtomRowToGlobal)) then
       allocate(AtomRowToGlobal(nAtomsInRow), STAT=alloc_stat)
       if (alloc_stat /= 0 ) then
          status = -1
          return
       endif
    else
       deallocate(AtomRowToGlobal)
       allocate(AtomRowToGlobal(nAtomsInRow), STAT=alloc_stat)
       if (alloc_stat /= 0 ) then
          status = -1
          return
       endif
    endif

    ! allocate column arrays
    if (.not. allocated(AtomColToGlobal)) then
       allocate(AtomColToGlobal(nAtomsInCol), STAT=alloc_stat)
       if (alloc_stat /= 0 ) then
          status = -1
          return
       endif
    else
       deallocate(AtomColToGlobal)
       allocate(AtomColToGlobal(nAtomsInCol), STAT=alloc_stat)
       if (alloc_stat /= 0 ) then
          status = -1
          return
       endif
    endif

    call gather(tags, AtomRowToGlobal, plan_atom_row)
    call gather(tags, AtomColToGlobal, plan_atom_col)

  end subroutine setAtomTags
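
  !! Index-translation sketch (hypothetical loop, not part of the build):
  !! after setAtomTags, the gathered lookup tables map row/column storage
  !! indices back to global atom indices, e.g. inside a force loop:
  !!
  !!   do i = 1, getNatomsInRow(plan_atom_row)
  !!      do j = 1, getNatomsInCol(plan_atom_col)
  !!         ! skip self-interactions by comparing global identities
  !!         if (AtomRowToGlobal(i) == AtomColToGlobal(j)) cycle
  !!         ! ... accumulate the pair interaction ...
  !!      end do
  !!   end do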

  function getNatomsInCol(thisplan) result(nInCol)
    type (gs_plan), intent(in) :: thisplan
    integer :: nInCol
    nInCol = thisplan%gsComponentPlan%nAtomsInColumn
  end function getNatomsInCol

  function getNatomsInRow(thisplan) result(nInRow)
    type (gs_plan), intent(in) :: thisplan
    integer :: nInRow
    nInRow = thisplan%gsComponentPlan%nAtomsInRow
  end function getNatomsInRow

  function getNgroupsInCol(thisplan) result(nInCol)
    type (gs_plan), intent(in) :: thisplan
    integer :: nInCol
    nInCol = thisplan%gsComponentPlan%nGroupsInColumn
  end function getNgroupsInCol

  function getNgroupsInRow(thisplan) result(nInRow)
    type (gs_plan), intent(in) :: thisplan
    integer :: nInRow
    nInRow = thisplan%gsComponentPlan%nGroupsInRow
  end function getNgroupsInRow

  function isMPISimSet() result(isthisSimSet)
    logical :: isthisSimSet
    isthisSimSet = isSimSet
  end function isMPISimSet

  subroutine printComponentPlan(this_plan, printNode)

    type (mpiComponentPlan), intent(in) :: this_plan
    integer, optional :: printNode
    ! Note: initializing a local in its declaration would give it the SAVE
    ! attribute (so print_me would stick at .true. after the first call);
    ! reset it explicitly on every entry instead.
    logical :: print_me

    print_me = .false.
    if (present(printNode)) then
       if (printNode == mpiSim%myNode) print_me = .true.
    else
       print_me = .true.
    endif

    if (print_me) then
       write(default_error,*) "SetupSimParallel: writing component plan"

       write(default_error,*) "nMolGlobal: ", mpiSim%nMolGlobal
       write(default_error,*) "nAtomsGlobal: ", mpiSim%nAtomsGlobal
       write(default_error,*) "nBondsGlobal: ", mpiSim%nBondsGlobal
       write(default_error,*) "nTorsionsGlobal: ", mpiSim%nTorsionsGlobal
       write(default_error,*) "nSRIGlobal: ", mpiSim%nSRIGlobal
       write(default_error,*) "nAtomsLocal: ", mpiSim%nAtomsLocal
       write(default_error,*) "myNode: ", mpiSim%myNode
       write(default_error,*) "nProcessors: ", mpiSim%nProcessors
       write(default_error,*) "rowComm: ", mpiSim%rowComm
       write(default_error,*) "columnComm: ", mpiSim%columnComm
       write(default_error,*) "nRows: ", mpiSim%nRows
       write(default_error,*) "nColumns: ", mpiSim%nColumns
       write(default_error,*) "nAtomsInRow: ", mpiSim%nAtomsInRow
       write(default_error,*) "nAtomsInColumn: ", mpiSim%nAtomsInColumn
       write(default_error,*) "rowIndex: ", mpiSim%rowIndex
       write(default_error,*) "columnIndex: ", mpiSim%columnIndex
    endif
  end subroutine printComponentPlan

  function getMyNode() result(myNode)
    integer :: myNode
    myNode = mpiSim%myNode
  end function getMyNode

#ifdef PROFILE
  subroutine printCommTime()
    write(*,*) "MPI communication time is: ", commTime
  end subroutine printCommTime

  function getCommTime() result(comm_time)
    real :: comm_time
    comm_time = commTime
  end function getCommTime

#endif

#endif // is_mpi
end module mpiSimulation