root/group/trunk/OOPSE-4/src/UseTheForce/DarkSide/simParallel.F90
Revision: 2682
Committed: Mon Apr 3 15:37:43 2006 UTC (18 years, 5 months ago) by chuckv
File size: 29348 byte(s)
Log Message:
Added status module to simParallel for error reporting.

File Contents

# User Rev Content
1 gezelter 1930 !!
2     !! Copyright (c) 2005 The University of Notre Dame. All Rights Reserved.
3     !!
4     !! The University of Notre Dame grants you ("Licensee") a
5     !! non-exclusive, royalty free, license to use, modify and
6     !! redistribute this software in source and binary code form, provided
7     !! that the following conditions are met:
8     !!
9     !! 1. Acknowledgement of the program authors must be made in any
10     !! publication of scientific results based in part on use of the
11     !! program. An acceptable form of acknowledgement is citation of
12     !! the article in which the program was described (Matthew
13     !! A. Meineke, Charles F. Vardeman II, Teng Lin, Christopher
14     !! J. Fennell and J. Daniel Gezelter, "OOPSE: An Object-Oriented
15     !! Parallel Simulation Engine for Molecular Dynamics,"
16     !! J. Comput. Chem. 26, pp. 252-271 (2005))
17     !!
18     !! 2. Redistributions of source code must retain the above copyright
19     !! notice, this list of conditions and the following disclaimer.
20     !!
21     !! 3. Redistributions in binary form must reproduce the above copyright
22     !! notice, this list of conditions and the following disclaimer in the
23     !! documentation and/or other materials provided with the
24     !! distribution.
25     !!
26     !! This software is provided "AS IS," without a warranty of any
27     !! kind. All express or implied conditions, representations and
28     !! warranties, including any implied warranty of merchantability,
29     !! fitness for a particular purpose or non-infringement, are hereby
30     !! excluded. The University of Notre Dame and its licensors shall not
31     !! be liable for any damages suffered by licensee as a result of
32     !! using, modifying or distributing the software or its
33     !! derivatives. In no event will the University of Notre Dame or its
34     !! licensors be liable for any lost revenue, profit or data, or for
35     !! direct, indirect, special, consequential, incidental or punitive
36     !! damages, however caused and regardless of the theory of liability,
37     !! arising out of the use of or inability to use software, even if the
38     !! University of Notre Dame has been advised of the possibility of
39     !! such damages.
40     !!
41 chuckv 1619
42 gezelter 1930
43 chuckv 1619 !! MPI support for long range forces using force decomposition
44     !! on a square grid of processors.
45     !! Corresponds to mpiSimulation.cpp for C++.
46     !! This module also contains a private interface for MPI f90 routines.
47     !!
48     !! @author Charles F. Vardeman II
49     !! @author Matthew Meineke
50 chuckv 2682 !! @version $Id: simParallel.F90,v 1.6 2006-04-03 15:37:43 chuckv Exp $, $Date: 2006-04-03 15:37:43 $, $Name: not supported by cvs2svn $, $Revision: 1.6 $
51 chuckv 1619
52     module mpiSimulation
53     use definitions
54 chuckv 2682 use status
55 chuckv 1619 #ifdef IS_MPI
56     use oopseMPI
57     implicit none
58     PRIVATE
59    
60    
61 gezelter 2204 !! PUBLIC Subroutines contained in this module
62     !! gather and scatter are a generic interface
63     !! to gather and scatter routines
64 chuckv 1619 public :: gather, scatter
65     public :: setupSimParallel
66     public :: replanSimParallel
67     public :: getNatomsInCol
68     public :: getNatomsInRow
69     public :: getNgroupsInCol
70     public :: getNgroupsInRow
71     public :: isMPISimSet
72     public :: printComponentPlan
73     public :: getMyNode
74    
75 gezelter 2204 !! PUBLIC Subroutines contained in MPI module
76 chuckv 1619 public :: mpi_bcast
77     public :: mpi_allreduce
78 gezelter 2204 ! public :: mpi_reduce
79 chuckv 1619 public :: mpi_send
80     public :: mpi_recv
81     public :: mpi_get_processor_name
82     public :: mpi_finalize
83    
84 gezelter 2204 !! PUBLIC mpi variables
85 chuckv 1619 public :: mpi_comm_world
86     public :: mpi_character
87     public :: mpi_integer
88 chuckv 2287 public :: mpi_lor
89     public :: mpi_logical
90 chuckv 1619 public :: mpi_double_precision
91     public :: mpi_sum
92     public :: mpi_max
93     public :: mpi_status_size
94     public :: mpi_any_source
95    
96    
97    
98 gezelter 2204 !! Safety logical to prevent access to ComponentPlan until
99     !! it is set by C++.
100 chuckv 1619 logical, save :: ComponentPlanSet = .false.
101    
102    
103 gezelter 2204 !! generic mpi error declaration.
104 chuckv 1619 integer, public :: mpi_err
105 chuckv 2682 character(len = statusMsgSize) :: errMsg
106 chuckv 1619
107     #ifdef PROFILE
108     public :: printCommTime
109     public :: getCommTime
110     real,save :: commTime = 0.0
111     real :: commTimeInitial,commTimeFinal
112     #endif
113    
114 gezelter 2204 !! Include mpiComponentPlan type. mpiComponentPlan is a
115     !! dual header file for both c and fortran.
116 chuckv 1619 #define __FORTRAN90
117     #include "UseTheForce/mpiComponentPlan.h"
118    
119    
120 gezelter 2204 !! Tags used during force loop for parallel simulation
121 chuckv 1619 integer, public, allocatable, dimension(:) :: AtomLocalToGlobal
122     integer, public, allocatable, dimension(:) :: AtomRowToGlobal
123     integer, public, allocatable, dimension(:) :: AtomColToGlobal
124     integer, public, allocatable, dimension(:) :: GroupLocalToGlobal
125     integer, public, allocatable, dimension(:) :: GroupRowToGlobal
126     integer, public, allocatable, dimension(:) :: GroupColToGlobal
127    
128 gezelter 2204 !! Logical set true if mpiSimulation has been initialized
129 chuckv 1619 logical, save :: isSimSet = .false.
130    
131    
132     type (mpiComponentPlan), save :: mpiSim
133    
134 gezelter 2204 !! gs_plan contains plans for gather and scatter routines
135 chuckv 1619 type, public :: gs_plan
136     private
137     type (mpiComponentPlan), pointer :: gsComponentPlan => NULL()
138     integer :: gsPlanSize !! size of this plan (nDim*nComponents)
139     integer :: globalPlanSize !! size of all components in plan
140     integer, dimension(:), pointer :: displs !! Displacements array for mpi indexed from 0.
141     integer, dimension(:), pointer :: counts !! Counts array for mpi indexed from 0.
142     integer :: myPlanComm !! My communicator for this plan
143     integer :: myPlanRank !! My rank in this plan
144     integer :: planNprocs !! Number of processors in this plan
145     end type gs_plan
146    
147 gezelter 2204 ! plans for different decompositions
148 chuckv 1619 type (gs_plan), public, save :: plan_atom_row
149     type (gs_plan), public, save :: plan_atom_row_3d
150     type (gs_plan), public, save :: plan_atom_col
151     type (gs_plan), public, save :: plan_atom_col_3d
152     type (gs_plan), public, save :: plan_atom_row_Rotation
153     type (gs_plan), public, save :: plan_atom_col_Rotation
154     type (gs_plan), public, save :: plan_group_row
155     type (gs_plan), public, save :: plan_group_col
156     type (gs_plan), public, save :: plan_group_row_3d
157     type (gs_plan), public, save :: plan_group_col_3d
158    
159     type (mpiComponentPlan), pointer :: simComponentPlan
160    
161 gezelter 2204 ! interface for gather scatter routines
162     !! Generic interface for gather.
163     !! Gathers a local array into a row or column array.
164     !! Interface provided for integer, double, and double
165     !! rank 2 arrays.
166 chuckv 1619 interface gather
167     module procedure gather_integer
168     module procedure gather_double
169     module procedure gather_double_2d
170     end interface
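        !! Usage sketch (illustrative only; the array names q and q_Row are
        !! hypothetical): once the plans are built, a rank-2 local array such
        !! as positions q(nDim, nAtomsLocal) can be gathered into the row
        !! array q_Row(nDim, nAtomsInRow) with
        !!   call gather(q, q_Row, plan_atom_row_3d)
        !! after which every processor in the row holds the assembled row data.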
171    
172 gezelter 2204 !! Generic interface for scatter.
173     !! Scatters a row or column array, adding components
174     !! and reducing them to a local nComponent array.
175     !! Interface provided for double and double rank=2 arrays.
176 chuckv 1619
177     interface scatter
178     module procedure scatter_double
179     module procedure scatter_double_2d
180     end interface
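        !! Usage sketch (illustrative only; f_Row and f_local are hypothetical
        !! names): forces accumulated in a row array can be reduced back onto
        !! the local decomposition with
        !!   call scatter(f_Row, f_local, plan_atom_row_3d)
        !! which sums the contributions from every processor in the row into
        !! each local component.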
181    
182    
183    
184     contains
185    
186     !! Sets up mpiComponentPlan with structure passed from C++.
187     subroutine setupSimParallel(thisComponentPlan, nAtomTags, atomTags, &
188     nGroupTags, groupTags, status)
189     !! Passed Arguments
190     !! mpiComponentPlan struct from C
191     type (mpiComponentPlan), intent(inout) :: thisComponentPlan
192     !! Number of tags passed
193     integer, intent(in) :: nAtomTags, nGroupTags
194     !! Result status, 0 = normal, -1 = error
195     integer, intent(out) :: status
196     integer :: localStatus
197     !! Global reference tag for local particles
198     integer, dimension(nAtomTags), intent(inout) :: atomTags
199     integer, dimension(nGroupTags), intent(inout) :: groupTags
200    
201     !write(*,*) 'mpiSim_mod thinks node', thisComponentPlan%myNode, &
202     ! ' has atomTags(1) = ', atomTags(1)
203 gezelter 2204
204 chuckv 1619 status = 0
205     if (componentPlanSet) then
206     return
207     endif
208     componentPlanSet = .true.
209 gezelter 2204
210 chuckv 1619 !! copy c component plan to fortran
211     mpiSim = thisComponentPlan
212 chuckv 2682 write(*,*) "Setting up simParallel"
213 gezelter 2204
214 chuckv 1619 call make_Force_Grid(mpiSim, localStatus)
215     if (localStatus /= 0) then
216 chuckv 2682 write(errMsg, *) 'An error in making the force grid has occurred'
217     call handleError("setupSimParallel", errMsg)
218 chuckv 1619 status = -1
219     return
220     endif
221 gezelter 2204
222 chuckv 1619 call updateGridComponents(mpiSim, localStatus)
223     if (localStatus /= 0) then
224 chuckv 2682 write(errMsg,*) "Error updating grid components"
225     call handleError("setupSimParallel", errMsg)
226 chuckv 1619 status = -1
227     return
228     endif
229    
230     !! initialize gather and scatter plans used in this simulation
231     call plan_gather_scatter(1, mpiSim%nAtomsLocal, &
232     mpiSim, mpiSim%rowComm, plan_atom_row)
233     call plan_gather_scatter(nDim, mpiSim%nAtomsLocal, &
234     mpiSim, mpiSim%rowComm, plan_atom_row_3d)
235     call plan_gather_scatter(9, mpiSim%nAtomsLocal, &
236     mpiSim, mpiSim%rowComm, plan_atom_row_Rotation)
237     call plan_gather_scatter(1, mpiSim%nGroupsLocal, &
238     mpiSim, mpiSim%rowComm, plan_group_row)
239     call plan_gather_scatter(nDim, mpiSim%nGroupsLocal, &
240     mpiSim, mpiSim%rowComm, plan_group_row_3d)
241 gezelter 2204
242 chuckv 1619 call plan_gather_scatter(1, mpiSim%nAtomsLocal, &
243     mpiSim, mpiSim%columnComm, plan_atom_col)
244     call plan_gather_scatter(nDim, mpiSim%nAtomsLocal, &
245     mpiSim, mpiSim%columnComm, plan_atom_col_3d)
246     call plan_gather_scatter(9, mpiSim%nAtomsLocal, &
247     mpiSim, mpiSim%columnComm, plan_atom_col_Rotation)
248     call plan_gather_scatter(1, mpiSim%nGroupsLocal, &
249     mpiSim, mpiSim%columnComm, plan_group_col)
250     call plan_gather_scatter(nDim, mpiSim%nGroupsLocal, &
251     mpiSim, mpiSim%columnComm, plan_group_col_3d)
252    
253 gezelter 2204 ! Initialize tags
254    
255 chuckv 1619 call setAtomTags(atomTags,localStatus)
256     if (localStatus /= 0) then
257 chuckv 2682 write(errMsg, *) 'An error in setting Atom Tags has occurred'
258     call handleError("setupSimParallel", errMsg)
259 chuckv 1619 status = -1
260     return
261     endif
262    
263    
264     call setGroupTags(groupTags,localStatus)
265     if (localStatus /= 0) then
266 chuckv 2682 write(errMsg, *) 'An error in setting Group Tags has occurred'
267     call handleError("setupSimParallel", errMsg)
268 chuckv 1619 status = -1
269     return
270     endif
271    
272     isSimSet = .true.
273    
274 gezelter 2204 ! call printComponentPlan(mpiSim,0)
275 chuckv 1619 end subroutine setupSimParallel
276    
277     subroutine replanSimParallel(thisComponentPlan,status)
278 gezelter 2204 ! Passed Arguments
279 chuckv 1619 !! mpiComponentPlan struct from C
280     type (mpiComponentPlan), intent(inout) :: thisComponentPlan
281     integer, intent(out) :: status
282     integer :: localStatus
283     integer :: mpierror
284     status = 0
285    
286     call updateGridComponents(thisComponentPlan,localStatus)
287     if (localStatus /= 0) then
288     status = -1
289     return
290     endif
291 gezelter 2204
292 chuckv 1619 !! Unplan Gather Scatter plans
293     call unplan_gather_scatter(plan_atom_row)
294     call unplan_gather_scatter(plan_atom_row_3d)
295     call unplan_gather_scatter(plan_atom_row_Rotation)
296     call unplan_gather_scatter(plan_group_row)
297     call unplan_gather_scatter(plan_group_row_3d)
298    
299     call unplan_gather_scatter(plan_atom_col)
300     call unplan_gather_scatter(plan_atom_col_3d)
301     call unplan_gather_scatter(plan_atom_col_Rotation)
302     call unplan_gather_scatter(plan_group_col)
303     call unplan_gather_scatter(plan_group_col_3d)
304    
305     !! initialize gather and scatter plans used in this simulation
306     call plan_gather_scatter(1, mpiSim%nAtomsLocal, &
307     mpiSim, mpiSim%rowComm, plan_atom_row)
308     call plan_gather_scatter(nDim, mpiSim%nAtomsLocal, &
309     mpiSim, mpiSim%rowComm, plan_atom_row_3d)
310     call plan_gather_scatter(9, mpiSim%nAtomsLocal, &
311     mpiSim, mpiSim%rowComm, plan_atom_row_Rotation)
312     call plan_gather_scatter(1, mpiSim%nGroupsLocal, &
313     mpiSim, mpiSim%rowComm, plan_group_row)
314     call plan_gather_scatter(nDim, mpiSim%nGroupsLocal, &
315     mpiSim, mpiSim%rowComm, plan_group_row_3d)
316 gezelter 2204
317 chuckv 1619 call plan_gather_scatter(1, mpiSim%nAtomsLocal, &
318     mpiSim, mpiSim%columnComm, plan_atom_col)
319     call plan_gather_scatter(nDim, mpiSim%nAtomsLocal, &
320     mpiSim, mpiSim%columnComm, plan_atom_col_3d)
321     call plan_gather_scatter(9, mpiSim%nAtomsLocal, &
322     mpiSim, mpiSim%columnComm, plan_atom_col_Rotation)
323     call plan_gather_scatter(1, mpiSim%nGroupsLocal, &
324     mpiSim, mpiSim%columnComm, plan_group_col)
325     call plan_gather_scatter(nDim, mpiSim%nGroupsLocal, &
326     mpiSim, mpiSim%columnComm, plan_group_col_3d)
327 gezelter 2204
328 chuckv 1619 end subroutine replanSimParallel
329    
330     !! Updates number of row and column components for long range forces.
331     subroutine updateGridComponents(thisComponentPlan, status)
332     type (mpiComponentPlan) :: thisComponentPlan !! mpiComponentPlan
333 gezelter 2204
334 chuckv 1619 !! Status return
335     !! - 0 Success
336     !! - -1 Failure
337     integer, intent(out) :: status
338     integer :: nAtomsLocal
339     integer :: nAtomsInRow = 0
340     integer :: nAtomsInColumn = 0
341     integer :: nGroupsLocal
342     integer :: nGroupsInRow = 0
343     integer :: nGroupsInColumn = 0
344     integer :: mpiErrors
345    
346     status = 0
347     if (.not. componentPlanSet) return
348    
349     if (thisComponentPlan%nAtomsLocal == 0) then
350     status = -1
351     return
352 gezelter 2204 endif
353 chuckv 1619 if (thisComponentPlan%nGroupsLocal == 0) then
354     write(*,*) 'tcp%ngl = ', thisComponentPlan%nGroupsLocal
355     status = -1
356     return
357     endif
358 gezelter 2204
359 chuckv 1619 nAtomsLocal = thisComponentPlan%nAtomsLocal
360     nGroupsLocal = thisComponentPlan%nGroupsLocal
361    
362     call mpi_allreduce(nAtomsLocal, nAtomsInRow, 1, mpi_integer, &
363     mpi_sum, thisComponentPlan%rowComm, mpiErrors)
364     if (mpiErrors /= 0) then
365     status = -1
366     return
367     endif
368    
369     call mpi_allreduce(nAtomsLocal, nAtomsInColumn, 1, mpi_integer, &
370     mpi_sum, thisComponentPlan%columnComm, mpiErrors)
371     if (mpiErrors /= 0) then
372     status = -1
373     return
374     endif
375 gezelter 2204
376 chuckv 1619 call mpi_allreduce(nGroupsLocal, nGroupsInRow, 1, mpi_integer, &
377     mpi_sum, thisComponentPlan%rowComm, mpiErrors)
378     if (mpiErrors /= 0) then
379     status = -1
380     return
381     endif
382    
383     call mpi_allreduce(nGroupsLocal, nGroupsInColumn, 1, mpi_integer, &
384     mpi_sum, thisComponentPlan%columnComm, mpiErrors)
385     if (mpiErrors /= 0) then
386     status = -1
387     return
388     endif
389    
390     thisComponentPlan%nAtomsInRow = nAtomsInRow
391     thisComponentPlan%nAtomsInColumn = nAtomsInColumn
392     thisComponentPlan%nGroupsInRow = nGroupsInRow
393     thisComponentPlan%nGroupsInColumn = nGroupsInColumn
394    
395     end subroutine updateGridComponents
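        !! Sketch of the reductions above (hypothetical values): on a 2 x 2
        !! grid where the two members of a row hold 100 and 120 local atoms,
        !! the mpi_allreduce with mpi_sum over rowComm leaves
        !! nAtomsInRow = 220 on both members of that row; the column and
        !! group counts are formed the same way.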
396    
397    
398     !! Creates a square force decomposition of processors into row and column
399     !! communicators.
400     subroutine make_Force_Grid(thisComponentPlan,status)
401     type (mpiComponentPlan) :: thisComponentPlan
402     integer, intent(out) :: status !! status returns -1 if error
403     integer :: nColumnsMax !! Maximum number of columns
404     integer :: nWorldProcessors !! Total number of processors in World comm.
405     integer :: rowIndex !! Row for this processor.
406     integer :: columnIndex !! Column for this processor.
407     integer :: nRows !! Total number of rows.
408     integer :: nColumns !! Total number of columns.
409     integer :: mpiErrors !! MPI errors.
410     integer :: rowCommunicator !! MPI row communicator.
411     integer :: columnCommunicator
412     integer :: myWorldRank
413     integer :: i
414    
415 gezelter 2204
416 chuckv 1619 status = 0
417     if (.not. ComponentPlanSet) return
418 gezelter 2204
419 chuckv 1619 !! We make a dangerous assumption here that if numberProcessors is
420     !! zero, then we need to get the information from MPI.
421     if (thisComponentPlan%nProcessors == 0 ) then
422     call mpi_comm_size( MPI_COMM_WORLD, nWorldProcessors,mpiErrors)
423     if ( mpiErrors /= 0 ) then
424     status = -1
425     return
426     endif
427     call mpi_comm_rank( MPI_COMM_WORLD,myWorldRank,mpiErrors)
428     if ( mpiErrors /= 0 ) then
429     status = -1
430     return
431     endif
432 gezelter 2204
433 chuckv 1619 else
434     nWorldProcessors = thisComponentPlan%nProcessors
435     myWorldRank = thisComponentPlan%myNode
436     endif
437 gezelter 2204
438 chuckv 1619 nColumnsMax = nint(sqrt(real(nWorldProcessors,kind=dp)))
439 gezelter 2204
440 chuckv 1619 do i = 1, nColumnsMax
441     if (mod(nWorldProcessors,i) == 0) nColumns = i
442     end do
443 gezelter 2204
444 chuckv 1619 nRows = nWorldProcessors/nColumns
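        !! Worked example: for nWorldProcessors = 12, nColumnsMax =
        !! nint(sqrt(12.0)) = 3, and the largest divisor of 12 not exceeding
        !! 3 is 3, so nColumns = 3 and nRows = 4, i.e. a 4 x 3 grid; a
        !! perfect square such as 16 yields the square 4 x 4 grid.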
445 gezelter 2204
446 chuckv 1619 rowIndex = myWorldRank/nColumns
447 gezelter 2204
448 chuckv 1619 call mpi_comm_split(mpi_comm_world,rowIndex,0,rowCommunicator,mpiErrors)
449     if ( mpiErrors /= 0 ) then
450     write(default_error,*) "MPI comm split failed at row communicator"
451     status = -1
452     return
453     endif
454 gezelter 2204
455 chuckv 1619 columnIndex = mod(myWorldRank,nColumns)
456     call mpi_comm_split(mpi_comm_world,columnIndex,0,columnCommunicator,mpiErrors)
457     if ( mpiErrors /= 0 ) then
458     write(default_error,*) "MPI comm split failed at columnCommunicator"
459     status = -1
460     return
461     endif
462 gezelter 2204
463 chuckv 1619 ! Set appropriate components of thisComponentPlan
464     thisComponentPlan%rowComm = rowCommunicator
465     thisComponentPlan%columnComm = columnCommunicator
466     thisComponentPlan%rowIndex = rowIndex
467     thisComponentPlan%columnIndex = columnIndex
468     thisComponentPlan%nRows = nRows
469     thisComponentPlan%nColumns = nColumns
470    
471     end subroutine make_Force_Grid
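        !! Rank-to-grid mapping sketch (continuing the hypothetical 4 x 3
        !! grid): world rank 7 lands at rowIndex = 7/3 = 2 and
        !! columnIndex = mod(7,3) = 1, so mpi_comm_split places it in the
        !! communicator shared by ranks 6-8 (row 2) and the one shared by
        !! ranks 1, 4, 7 and 10 (column 1).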
472 gezelter 2204
473 chuckv 1619 !! initializes a gather scatter plan
474     subroutine plan_gather_scatter( nDim, nObjects, thisComponentPlan, &
475     thisComm, this_plan, status)
476     integer, intent(in) :: nDim !! Number of dimensions for gather scatter plan
477     integer, intent(in) :: nObjects
478     type (mpiComponentPlan), intent(in), target :: thisComponentPlan
479     type (gs_plan), intent(out) :: this_plan !! gather/scatter plan to be built
480     integer, intent(in) :: thisComm !! MPI communicator for this plan
481    
482     integer :: arraySize !! size to allocate plan for
483     integer, intent(out), optional :: status
484     integer :: ierror
485     integer :: i,junk
486    
487     if (present(status)) status = 0
488    
489 gezelter 2204 !! Set gsComponentPlan pointer
490     !! to the component plan we want to use for this gather scatter plan.
491     !! WARNING: this could be dangerous, since thisComponentPlan was originally
492     !! allocated in C++ and there is a significant difference between C and
493     !! f95 pointers....
494 chuckv 1619 this_plan%gsComponentPlan => thisComponentPlan
495    
496 gezelter 2204 ! Set this plan size for displs array.
497 chuckv 1619 this_plan%gsPlanSize = nDim * nObjects
498    
499 gezelter 2204 ! Duplicate communicator for this plan
500 chuckv 1619 call mpi_comm_dup(thisComm, this_plan%myPlanComm, mpi_err)
501     if (mpi_err /= 0) then
502     if (present(status)) status = -1
503     return
504     end if
505     call mpi_comm_rank(this_plan%myPlanComm, this_plan%myPlanRank, mpi_err)
506     if (mpi_err /= 0) then
507     if (present(status)) status = -1
508     return
509     end if
510    
511     call mpi_comm_size(this_plan%myPlanComm, this_plan%planNprocs, mpi_err)
512    
513     if (mpi_err /= 0) then
514     if (present(status)) status = -1
515     return
516     end if
517    
518     !! counts and displacements arrays are indexed from 0 to be compatible
519     !! with MPI arrays.
520     allocate (this_plan%counts(0:this_plan%planNprocs-1),STAT=ierror)
521     if (ierror /= 0) then
522     if (present(status)) status = -1
523     return
524     end if
525    
526     allocate (this_plan%displs(0:this_plan%planNprocs-1),STAT=ierror)
527     if (ierror /= 0) then
528     if (present(status)) status = -1
529     return
530     end if
531    
532 gezelter 2204 !! gather all the local plan sizes into an array of length planNprocs.
533 chuckv 1619 call mpi_allgather(this_plan%gsPlanSize,1,mpi_integer,this_plan%counts, &
534     1,mpi_integer,thisComm,mpi_err)
535    
536     if (mpi_err /= 0) then
537     if (present(status)) status = -1
538     return
539     end if
540 gezelter 2204
541 chuckv 1619 !! figure out the total number of particles in this plan
542     this_plan%globalPlanSize = sum(this_plan%counts)
543 gezelter 2204
544 chuckv 1619 !! initialize plan displacements.
545     this_plan%displs(0) = 0
546     do i = 1, this_plan%planNprocs - 1,1
547     this_plan%displs(i) = this_plan%displs(i-1) + this_plan%counts(i-1)
548     end do
549     end subroutine plan_gather_scatter
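        !! Counts/displacements sketch (hypothetical sizes): if the three
        !! members of a plan report gsPlanSize values 9, 6 and 12, the
        !! allgather above yields counts = (/ 9, 6, 12 /), the loop produces
        !! displs = (/ 0, 9, 15 /), and globalPlanSize = 27, exactly the
        !! layout mpi_allgatherv and mpi_reduce_scatter expect.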
550    
551     subroutine unplan_gather_scatter(this_plan)
552     type (gs_plan), intent(inout) :: this_plan
553 gezelter 2204
554 chuckv 1619 this_plan%gsComponentPlan => null()
555     call mpi_comm_free(this_plan%myPlanComm,mpi_err)
556    
557     deallocate(this_plan%counts)
558     deallocate(this_plan%displs)
559    
560     end subroutine unplan_gather_scatter
561    
562     subroutine gather_integer( sbuffer, rbuffer, this_plan, status)
563    
564     type (gs_plan), intent(inout) :: this_plan
565     integer, dimension(:), intent(inout) :: sbuffer
566     integer, dimension(:), intent(inout) :: rbuffer
567     integer :: noffset
568     integer, intent(out), optional :: status
569     integer :: i
570    
571     if (present(status)) status = 0
572     noffset = this_plan%displs(this_plan%myPlanRank)
573    
574 gezelter 2204 ! if (getmyNode() == 1) then
575     ! write(*,*) "Node 0 printing allgatherv vars"
576     ! write(*,*) "Noffset: ", noffset
577     ! write(*,*) "PlanSize: ", this_plan%gsPlanSize
578     ! write(*,*) "PlanComm: ", this_plan%myPlanComm
579     ! end if
580 chuckv 1619
581     call mpi_allgatherv(sbuffer,this_plan%gsPlanSize, mpi_integer, &
582     rbuffer,this_plan%counts,this_plan%displs,mpi_integer, &
583     this_plan%myPlanComm, mpi_err)
584    
585     if (mpi_err /= 0) then
586 gezelter 2204 if (present(status)) status = -1
587 chuckv 1619 endif
588    
589     end subroutine gather_integer
590    
591     subroutine gather_double( sbuffer, rbuffer, this_plan, status)
592    
593     type (gs_plan), intent(in) :: this_plan
594     real( kind = DP ), dimension(:), intent(inout) :: sbuffer
595     real( kind = DP ), dimension(:), intent(inout) :: rbuffer
596     integer :: noffset
597     integer, intent(out), optional :: status
598    
599    
600     if (present(status)) status = 0
601     noffset = this_plan%displs(this_plan%myPlanRank)
602     #ifdef PROFILE
603     call cpu_time(commTimeInitial)
604     #endif
605     call mpi_allgatherv(sbuffer,this_plan%gsPlanSize, mpi_double_precision, &
606     rbuffer,this_plan%counts,this_plan%displs,mpi_double_precision, &
607     this_plan%myPlanComm, mpi_err)
608     #ifdef PROFILE
609     call cpu_time(commTimeFinal)
610     commTime = commTime + commTimeFinal - commTimeInitial
611     #endif
612    
613     if (mpi_err /= 0) then
614 gezelter 2204 if (present(status)) status = -1
615 chuckv 1619 endif
616    
617     end subroutine gather_double
618    
619     subroutine gather_double_2d( sbuffer, rbuffer, this_plan, status)
620    
621     type (gs_plan), intent(in) :: this_plan
622     real( kind = DP ), dimension(:,:), intent(inout) :: sbuffer
623     real( kind = DP ), dimension(:,:), intent(inout) :: rbuffer
624     integer :: noffset,i,ierror
625     integer, intent(out), optional :: status
626 gezelter 2204
627 chuckv 1619 external mpi_allgatherv
628    
629 gezelter 2204 if (present(status)) status = 0
630    
631     ! noffset = this_plan%displs(this_plan%me)
632 chuckv 1619 #ifdef PROFILE
633 gezelter 2204 call cpu_time(commTimeInitial)
634 chuckv 1619 #endif
635    
636     call mpi_allgatherv(sbuffer,this_plan%gsPlanSize, mpi_double_precision, &
637 gezelter 2204 rbuffer,this_plan%counts,this_plan%displs,mpi_double_precision, &
638     this_plan%myPlanComm, mpi_err)
639 chuckv 1619
640     #ifdef PROFILE
641     call cpu_time(commTimeFinal)
642     commTime = commTime + commTimeFinal - commTimeInitial
643     #endif
644    
645     if (mpi_err /= 0) then
646 gezelter 2204 if (present(status)) status = -1
647 chuckv 1619 endif
648    
649 gezelter 2204 end subroutine gather_double_2d
650 chuckv 1619
651     subroutine scatter_double( sbuffer, rbuffer, this_plan, status)
652    
653     type (gs_plan), intent(in) :: this_plan
654     real( kind = DP ), dimension(:), intent(inout) :: sbuffer
655     real( kind = DP ), dimension(:), intent(inout) :: rbuffer
656     integer, intent(out), optional :: status
657     external mpi_reduce_scatter
658    
659 gezelter 2204 if (present(status)) status = 0
660 chuckv 1619
661     #ifdef PROFILE
662 gezelter 2204 call cpu_time(commTimeInitial)
663 chuckv 1619 #endif
664     call mpi_reduce_scatter(sbuffer,rbuffer, this_plan%counts, &
665     mpi_double_precision, MPI_SUM, this_plan%myPlanComm, mpi_err)
666     #ifdef PROFILE
667     call cpu_time(commTimeFinal)
668     commTime = commTime + commTimeFinal - commTimeInitial
669     #endif
670    
671     if (mpi_err /= 0) then
672 gezelter 2204 if (present(status)) status = -1
673 chuckv 1619 endif
674    
675     end subroutine scatter_double
676    
677     subroutine scatter_double_2d( sbuffer, rbuffer, this_plan, status)
678    
679     type (gs_plan), intent(in) :: this_plan
680     real( kind = DP ), dimension(:,:), intent(inout) :: sbuffer
681     real( kind = DP ), dimension(:,:), intent(inout) :: rbuffer
682     integer, intent(out), optional :: status
683     external mpi_reduce_scatter
684 gezelter 2204
685     if (present(status)) status = 0
686 chuckv 1619 #ifdef PROFILE
687 gezelter 2204 call cpu_time(commTimeInitial)
688 chuckv 1619 #endif
689    
690     call mpi_reduce_scatter(sbuffer,rbuffer, this_plan%counts, &
691     mpi_double_precision, MPI_SUM, this_plan%myPlanComm, mpi_err)
692     #ifdef PROFILE
693     call cpu_time(commTimeFinal)
694     commTime = commTime + commTimeFinal - commTimeInitial
695     #endif
696    
697     if (mpi_err /= 0) then
698 gezelter 2204 if (present(status)) status = -1
699 chuckv 1619 endif
700    
701     end subroutine scatter_double_2d
702 gezelter 2204
703 chuckv 1619 subroutine setAtomTags(tags, status)
704     integer, dimension(:) :: tags
705     integer :: status
706    
707     integer :: alloc_stat
708 gezelter 2204
709 chuckv 1619 integer :: nAtomsInCol
710     integer :: nAtomsInRow
711    
712     status = 0
713 gezelter 2204 ! allocate row arrays
714 chuckv 1619 nAtomsInRow = getNatomsInRow(plan_atom_row)
715     nAtomsInCol = getNatomsInCol(plan_atom_col)
716 gezelter 2204
717 chuckv 1619 if(.not. allocated(AtomLocalToGlobal)) then
718     allocate(AtomLocalToGlobal(size(tags)),STAT=alloc_stat)
719 gezelter 2204 if (alloc_stat /= 0 ) then
720 chuckv 1619 status = -1
721     return
722     endif
723     else
724     deallocate(AtomLocalToGlobal)
725     allocate(AtomLocalToGlobal(size(tags)),STAT=alloc_stat)
726     if (alloc_stat /= 0 ) then
727     status = -1
728     return
729     endif
730    
731     endif
732    
733     AtomLocalToGlobal = tags
734    
735     if (.not. allocated(AtomRowToGlobal)) then
736     allocate(AtomRowToGlobal(nAtomsInRow),STAT=alloc_stat)
737     if (alloc_stat /= 0 ) then
738     status = -1
739     return
740     endif
741     else
742     deallocate(AtomRowToGlobal)
743     allocate(AtomRowToGlobal(nAtomsInRow),STAT=alloc_stat)
744     if (alloc_stat /= 0 ) then
745     status = -1
746     return
747     endif
748    
749     endif
750 gezelter 2204 ! allocate column arrays
751 chuckv 1619 if (.not. allocated(AtomColToGlobal)) then
752     allocate(AtomColToGlobal(nAtomsInCol),STAT=alloc_stat)
753     if (alloc_stat /= 0 ) then
754     status = -1
755     return
756     endif
757     else
758     deallocate(AtomColToGlobal)
759     allocate(AtomColToGlobal(nAtomsInCol),STAT=alloc_stat)
760     if (alloc_stat /= 0 ) then
761     status = -1
762     return
763     endif
764     endif
765 gezelter 2204
766 chuckv 1619 call gather(tags, AtomRowToGlobal, plan_atom_row)
767     call gather(tags, AtomColToGlobal, plan_atom_col)
768 gezelter 2204
769 chuckv 1619 end subroutine setAtomTags
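        !! After setAtomTags returns, AtomRowToGlobal(i) holds the global
        !! index of the i-th atom in this processor's row and
        !! AtomColToGlobal(j) that of the j-th column atom. For example
        !! (hypothetically), a two-processor row contributing tags
        !! (/ 4, 9 /) and (/ 1, 7 /) gathers to
        !! AtomRowToGlobal = (/ 4, 9, 1, 7 /).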
770    
771     subroutine setGroupTags(tags, status)
772     integer, dimension(:) :: tags
773     integer :: status
774    
775     integer :: alloc_stat
776 gezelter 2204
777 chuckv 1619 integer :: nGroupsInCol
778     integer :: nGroupsInRow
779    
780     status = 0
781    
782     nGroupsInRow = getNgroupsInRow(plan_group_row)
783     nGroupsInCol = getNgroupsInCol(plan_group_col)
784 gezelter 2204
785 chuckv 1619 if(allocated(GroupLocalToGlobal)) then
786     deallocate(GroupLocalToGlobal)
787     endif
788     allocate(GroupLocalToGlobal(size(tags)),STAT=alloc_stat)
789     if (alloc_stat /= 0 ) then
790     status = -1
791     return
792     endif
793 gezelter 2204
794 chuckv 1619 GroupLocalToGlobal = tags
795    
796     if(allocated(GroupRowToGlobal)) then
797     deallocate(GroupRowToGlobal)
798     endif
799     allocate(GroupRowToGlobal(nGroupsInRow),STAT=alloc_stat)
800     if (alloc_stat /= 0 ) then
801     status = -1
802     return
803     endif
804    
805     if(allocated(GroupColToGlobal)) then
806     deallocate(GroupColToGlobal)
807     endif
808     allocate(GroupColToGlobal(nGroupsInCol),STAT=alloc_stat)
809     if (alloc_stat /= 0 ) then
810     status = -1
811     return
812     endif
813 gezelter 2204
814 chuckv 1619 call gather(tags, GroupRowToGlobal, plan_group_row)
815     call gather(tags, GroupColToGlobal, plan_group_col)
816 gezelter 2204
817 chuckv 1619 end subroutine setGroupTags
818 gezelter 2204
819 chuckv 1619 function getNatomsInCol(thisplan) result(nInCol)
820     type (gs_plan), intent(in) :: thisplan
821     integer :: nInCol
822     nInCol = thisplan%gsComponentPlan%nAtomsInColumn
823     end function getNatomsInCol
824    
825     function getNatomsInRow(thisplan) result(nInRow)
826     type (gs_plan), intent(in) :: thisplan
827     integer :: nInRow
828     nInRow = thisplan%gsComponentPlan%nAtomsInRow
829     end function getNatomsInRow
830 gezelter 2204
831 chuckv 1619 function getNgroupsInCol(thisplan) result(nInCol)
832     type (gs_plan), intent(in) :: thisplan
833     integer :: nInCol
834     nInCol = thisplan%gsComponentPlan%nGroupsInColumn
835     end function getNgroupsInCol
836    
837     function getNgroupsInRow(thisplan) result(nInRow)
838     type (gs_plan), intent(in) :: thisplan
839     integer :: nInRow
840     nInRow = thisplan%gsComponentPlan%nGroupsInRow
841     end function getNgroupsInRow
842 gezelter 2204
843 chuckv 1619 function isMPISimSet() result(isthisSimSet)
844     logical :: isthisSimSet
845     if (isSimSet) then
846     isthisSimSet = .true.
847     else
848     isthisSimSet = .false.
849     endif
850     end function isMPISimSet
851 gezelter 2204
852 chuckv 1619 subroutine printComponentPlan(this_plan,printNode)
853    
854     type (mpiComponentPlan), intent(in) :: this_plan
855     integer, optional :: printNode
856     logical :: print_me
857     print_me = .false.
858     if (present(printNode)) then
859     if (printNode == mpiSim%myNode) print_me = .true.
860     else
861     print_me = .true.
862     endif
863    
864     if (print_me) then
865     write(default_error,*) "SetupSimParallel: writing component plan"
866 gezelter 2204
867 chuckv 1619 write(default_error,*) "nMolGlobal: ", mpiSim%nMolGlobal
868     write(default_error,*) "nAtomsGlobal: ", mpiSim%nAtomsGlobal
869     write(default_error,*) "nAtomsLocal: ", mpiSim%nAtomsLocal
870     write(default_error,*) "myNode: ", mpiSim%myNode
871     write(default_error,*) "nProcessors: ", mpiSim%nProcessors
872     write(default_error,*) "rowComm: ", mpiSim%rowComm
873     write(default_error,*) "columnComm: ", mpiSim%columnComm
874     write(default_error,*) "nRows: ", mpiSim%nRows
875     write(default_error,*) "nColumns: ", mpiSim%nColumns
876     write(default_error,*) "nAtomsInRow: ", mpiSim%nAtomsInRow
877     write(default_error,*) "nAtomsInColumn: ", mpiSim%nAtomsInColumn
878     write(default_error,*) "rowIndex: ", mpiSim%rowIndex
879     write(default_error,*) "columnIndex: ", mpiSim%columnIndex
880     endif
881     end subroutine printComponentPlan
882    
883     function getMyNode() result(myNode)
884     integer :: myNode
885     myNode = mpiSim%myNode
886     end function getMyNode
887    
888     #ifdef PROFILE
889     subroutine printCommTime()
890     write(*,*) "MPI communication time is: ", commTime
891     end subroutine printCommTime
892    
893     function getCommTime() result(comm_time)
894     real :: comm_time
895     comm_time = commTime
896     end function getCommTime
897    
898     #endif
899    
900     #endif // is_mpi
901     end module mpiSimulation
902