root/group/trunk/OOPSE-4/src/UseTheForce/DarkSide/simParallel.F90
Revision: 2758
Committed: Wed May 17 19:54:27 2006 UTC (18 years, 3 months ago) by gezelter
File size: 30104 byte(s)
Log Message:
Adding single precision capabilities to the fortran side

File Contents

# User Rev Content
1 gezelter 1930 !!
2     !! Copyright (c) 2005 The University of Notre Dame. All Rights Reserved.
3     !!
4     !! The University of Notre Dame grants you ("Licensee") a
5     !! non-exclusive, royalty free, license to use, modify and
6     !! redistribute this software in source and binary code form, provided
7     !! that the following conditions are met:
8     !!
9     !! 1. Acknowledgement of the program authors must be made in any
10     !! publication of scientific results based in part on use of the
11     !! program. An acceptable form of acknowledgement is citation of
12     !! the article in which the program was described (Matthew
13     !! A. Meineke, Charles F. Vardeman II, Teng Lin, Christopher
14     !! J. Fennell and J. Daniel Gezelter, "OOPSE: An Object-Oriented
15     !! Parallel Simulation Engine for Molecular Dynamics,"
16     !! J. Comput. Chem. 26, pp. 252-271 (2005))
17     !!
18     !! 2. Redistributions of source code must retain the above copyright
19     !! notice, this list of conditions and the following disclaimer.
20     !!
21     !! 3. Redistributions in binary form must reproduce the above copyright
22     !! notice, this list of conditions and the following disclaimer in the
23     !! documentation and/or other materials provided with the
24     !! distribution.
25     !!
26     !! This software is provided "AS IS," without a warranty of any
27     !! kind. All express or implied conditions, representations and
28     !! warranties, including any implied warranty of merchantability,
29     !! fitness for a particular purpose or non-infringement, are hereby
30     !! excluded. The University of Notre Dame and its licensors shall not
31     !! be liable for any damages suffered by licensee as a result of
32     !! using, modifying or distributing the software or its
33     !! derivatives. In no event will the University of Notre Dame or its
34     !! licensors be liable for any lost revenue, profit or data, or for
35     !! direct, indirect, special, consequential, incidental or punitive
36     !! damages, however caused and regardless of the theory of liability,
37     !! arising out of the use of or inability to use software, even if the
38     !! University of Notre Dame has been advised of the possibility of
39     !! such damages.
40     !!
41 chuckv 1619
42 gezelter 1930
43 chuckv 1619 !! MPI support for long range forces using force decomposition
44     !! on a square grid of processors.
45     !! Corresponds to mpiSimulation.cpp for C++.
46     !! This module also contains a private interface for MPI f90 routines.
47     !!
48     !! @author Charles F. Vardeman II
49     !! @author Matthew Meineke
50 gezelter 2758 !! @version $Id: simParallel.F90,v 1.7 2006-05-17 19:54:27 gezelter Exp $, $Date: 2006-05-17 19:54:27 $, $Name: not supported by cvs2svn $, $Revision: 1.7 $
51 chuckv 1619
52     module mpiSimulation
53     use definitions
54 chuckv 2682 use status
55 chuckv 1619 #ifdef IS_MPI
56     use oopseMPI
57     implicit none
58     PRIVATE
59    
60    
61 gezelter 2204 !! PUBLIC Subroutines contained in this module
62     !! gather and scatter are generic interfaces
63     !! to the gather and scatter routines
64 chuckv 1619 public :: gather, scatter
65     public :: setupSimParallel
66     public :: replanSimParallel
67     public :: getNatomsInCol
68     public :: getNatomsInRow
69     public :: getNgroupsInCol
70     public :: getNgroupsInRow
71     public :: isMPISimSet
72     public :: printComponentPlan
73     public :: getMyNode
74    
75 gezelter 2204 !! PUBLIC Subroutines contained in MPI module
76 chuckv 1619 public :: mpi_bcast
77     public :: mpi_allreduce
78 gezelter 2204 ! public :: mpi_reduce
79 chuckv 1619 public :: mpi_send
80     public :: mpi_recv
81     public :: mpi_get_processor_name
82     public :: mpi_finalize
83    
84 gezelter 2204 !! PUBLIC mpi variables
85 chuckv 1619 public :: mpi_comm_world
86     public :: mpi_character
87     public :: mpi_integer
88 chuckv 2287 public :: mpi_lor
89     public :: mpi_logical
90 gezelter 2758 public :: mpi_real
91 chuckv 1619 public :: mpi_double_precision
92     public :: mpi_sum
93     public :: mpi_max
94     public :: mpi_status_size
95     public :: mpi_any_source
96    
97    
98    
99 gezelter 2204 !! Safety logical to prevent access to ComponentPlan until
100     !! set by C++.
101 chuckv 1619 logical, save :: ComponentPlanSet = .false.
102    
103    
104 gezelter 2204 !! generic mpi error declaration.
105 chuckv 1619 integer, public :: mpi_err
106 chuckv 2682 character(len = statusMsgSize) :: errMsg
107 chuckv 1619
108     #ifdef PROFILE
109     public :: printCommTime
110     public :: getCommTime
111     real,save :: commTime = 0.0
112     real :: commTimeInitial,commTimeFinal
113     #endif
114    
115 gezelter 2204 !! Include mpiComponentPlan type. mpiComponentPlan is a
116     !! dual header file for both c and fortran.
117 chuckv 1619 #define __FORTRAN90
118     #include "UseTheForce/mpiComponentPlan.h"
119    
120    
121 gezelter 2204 !! Tags used during force loop for parallel simulation
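                   !! AtomLocalToGlobal(i) holds the global index of local atom i;
                   !! AtomRowToGlobal / AtomColToGlobal map row / column indices back
                   !! to global atom indices (likewise for the Group arrays).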
122 chuckv 1619 integer, public, allocatable, dimension(:) :: AtomLocalToGlobal
123     integer, public, allocatable, dimension(:) :: AtomRowToGlobal
124     integer, public, allocatable, dimension(:) :: AtomColToGlobal
125     integer, public, allocatable, dimension(:) :: GroupLocalToGlobal
126     integer, public, allocatable, dimension(:) :: GroupRowToGlobal
127     integer, public, allocatable, dimension(:) :: GroupColToGlobal
128    
129 gezelter 2204 !! Logical set true if mpiSimulation has been initialized
130 chuckv 1619 logical, save :: isSimSet = .false.
131    
132    
133     type (mpiComponentPlan), save :: mpiSim
134    
135 gezelter 2204 !! gs_plan contains plans for gather and scatter routines
136 chuckv 1619 type, public :: gs_plan
137     private
138     type (mpiComponentPlan), pointer :: gsComponentPlan => NULL()
139     integer :: gsPlanSize !! size of this plan (nDim*nComponents)
140     integer :: globalPlanSize !! size of all components in plan
141     integer, dimension(:), pointer :: displs !! Displacements array for mpi indexed from 0.
142     integer, dimension(:), pointer :: counts !! Counts array for mpi indexed from 0.
143     integer :: myPlanComm !! My communicator for this plan
144     integer :: myPlanRank !! My rank in this plan
145     integer :: planNprocs !! Number of processors in this plan
146     end type gs_plan
147    
148 gezelter 2204 ! plans for different decompositions
149 chuckv 1619 type (gs_plan), public, save :: plan_atom_row
150     type (gs_plan), public, save :: plan_atom_row_3d
151     type (gs_plan), public, save :: plan_atom_col
152     type (gs_plan), public, save :: plan_atom_col_3d
153     type (gs_plan), public, save :: plan_atom_row_Rotation
154     type (gs_plan), public, save :: plan_atom_col_Rotation
155     type (gs_plan), public, save :: plan_group_row
156     type (gs_plan), public, save :: plan_group_col
157     type (gs_plan), public, save :: plan_group_row_3d
158     type (gs_plan), public, save :: plan_group_col_3d
159    
160     type (mpiComponentPlan), pointer :: simComponentPlan
161    
162 gezelter 2204 ! interface for gather scatter routines
163     !! Generic interface for gather.
164     !! Gathers a local array into a row or column array.
165     !! Interfaces are provided for integer, double, and
166     !! rank-2 double arrays.
167 chuckv 1619 interface gather
168     module procedure gather_integer
169     module procedure gather_double
170     module procedure gather_double_2d
171     end interface
172    
173 gezelter 2204 !! Generic interface for scatter.
174     !! Scatters a row or column array, adding components
175     !! and reducing them to a local nComponent array.
176     !! Interfaces are provided for double and rank-2 double arrays.
177 chuckv 1619
178     interface scatter
179     module procedure scatter_double
180     module procedure scatter_double_2d
181     end interface
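                   !! Illustrative force-loop use of these interfaces (sketch only;
                   !! q, q_Row, q_Col, f_Row, f_Col, f_local and f_tmp are
                   !! hypothetical caller arrays sized to the row/column plans):
                   !!   call gather(q, q_Row, plan_atom_row_3d)         ! local -> row
                   !!   call gather(q, q_Col, plan_atom_col_3d)         ! local -> column
                   !!   ... compute forces between row and column atoms ...
                   !!   call scatter(f_Row, f_local, plan_atom_row_3d)  ! reduce row -> local
                   !!   call scatter(f_Col, f_tmp,   plan_atom_col_3d)  ! reduce column -> local
                   !!   f_local = f_local + f_tmp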
182    
183    
184    
185     contains
186    
187     !! Sets up mpiComponentPlan with structure passed from C++.
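                   !! Note: the ComponentPlanSet guard below makes repeat calls no-ops;
                   !! use replanSimParallel to rebuild the plans once the local atom or
                   !! group counts change.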
188     subroutine setupSimParallel(thisComponentPlan, nAtomTags, atomTags, &
189     nGroupTags, groupTags, status)
190     !! Passed Arguments
191     !! mpiComponentPlan struct from C
192     type (mpiComponentPlan), intent(inout) :: thisComponentPlan
193     !! Number of tags passed
194     integer, intent(in) :: nAtomTags, nGroupTags
195     !! Result status, 0 = normal, -1 = error
196     integer, intent(out) :: status
197     integer :: localStatus
198     !! Global reference tag for local particles
199     integer, dimension(nAtomTags), intent(inout) :: atomTags
200     integer, dimension(nGroupTags), intent(inout) :: groupTags
201    
202     !write(*,*) 'mpiSim_mod thinks node', thisComponentPlan%myNode, &
203     ! ' has atomTags(1) = ', atomTags(1)
204 gezelter 2204
205 chuckv 1619 status = 0
206     if (componentPlanSet) then
207     return
208     endif
209     componentPlanSet = .true.
210 gezelter 2204
211 chuckv 1619 !! copy c component plan to fortran
212     mpiSim = thisComponentPlan
213 chuckv 2682 write(*,*) "Setting up simParallel"
214 gezelter 2204
215 chuckv 1619 call make_Force_Grid(mpiSim, localStatus)
216     if (localStatus /= 0) then
217 chuckv 2682 write(errMsg, *) 'An error in making the force grid has occurred'
218     call handleError("setupSimParallel", errMsg)
219 chuckv 1619 status = -1
220     return
221     endif
222 gezelter 2204
223 chuckv 1619 call updateGridComponents(mpiSim, localStatus)
224     if (localStatus /= 0) then
225 chuckv 2682 write(errMsg,*) "Error updating grid components"
226     call handleError("setupSimParallel", errMsg)
227 chuckv 1619 status = -1
228     return
229     endif
230    
231     !! initialize gather and scatter plans used in this simulation
232     call plan_gather_scatter(1, mpiSim%nAtomsLocal, &
233     mpiSim, mpiSim%rowComm, plan_atom_row)
234     call plan_gather_scatter(nDim, mpiSim%nAtomsLocal, &
235     mpiSim, mpiSim%rowComm, plan_atom_row_3d)
236     call plan_gather_scatter(9, mpiSim%nAtomsLocal, &
237     mpiSim, mpiSim%rowComm, plan_atom_row_Rotation)
238     call plan_gather_scatter(1, mpiSim%nGroupsLocal, &
239     mpiSim, mpiSim%rowComm, plan_group_row)
240     call plan_gather_scatter(nDim, mpiSim%nGroupsLocal, &
241     mpiSim, mpiSim%rowComm, plan_group_row_3d)
242 gezelter 2204
243 chuckv 1619 call plan_gather_scatter(1, mpiSim%nAtomsLocal, &
244     mpiSim, mpiSim%columnComm, plan_atom_col)
245     call plan_gather_scatter(nDim, mpiSim%nAtomsLocal, &
246     mpiSim, mpiSim%columnComm, plan_atom_col_3d)
247     call plan_gather_scatter(9, mpiSim%nAtomsLocal, &
248     mpiSim, mpiSim%columnComm, plan_atom_col_Rotation)
249     call plan_gather_scatter(1, mpiSim%nGroupsLocal, &
250     mpiSim, mpiSim%columnComm, plan_group_col)
251     call plan_gather_scatter(nDim, mpiSim%nGroupsLocal, &
252     mpiSim, mpiSim%columnComm, plan_group_col_3d)
253    
254 gezelter 2204 ! Initialize tags
255    
256 chuckv 1619 call setAtomTags(atomTags,localStatus)
257     if (localStatus /= 0) then
258 chuckv 2682 write(errMsg, *) 'An error in setting Atom Tags has occurred'
259     call handleError("setupSimParallel", errMsg)
260 chuckv 1619 status = -1
261     return
262     endif
263    
264    
265     call setGroupTags(groupTags,localStatus)
266     if (localStatus /= 0) then
267 chuckv 2682 write(errMsg, *) 'An error in setting Group Tags has occurred'
268     call handleError("setupSimParallel", errMsg)
269 chuckv 1619 status = -1
270     return
271     endif
272    
273     isSimSet = .true.
274    
275 gezelter 2204 ! call printComponentPlan(mpiSim,0)
276 chuckv 1619 end subroutine setupSimParallel
277    
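                   !! Rebuilds all of the gather/scatter plans from the current
                   !! nAtomsLocal / nGroupsLocal stored in the component plan.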
278     subroutine replanSimParallel(thisComponentPlan,status)
279 gezelter 2204 ! Passed Arguments
280 chuckv 1619 !! mpiComponentPlan struct from C
281     type (mpiComponentPlan), intent(inout) :: thisComponentPlan
282     integer, intent(out) :: status
283     integer :: localStatus
284     integer :: mpierror
285     status = 0
286    
287     call updateGridComponents(thisComponentPlan,localStatus)
288     if (localStatus /= 0) then
289     status = -1
290     return
291     endif
292 gezelter 2204
293 chuckv 1619 !! Unplan Gather Scatter plans
294     call unplan_gather_scatter(plan_atom_row)
295     call unplan_gather_scatter(plan_atom_row_3d)
296     call unplan_gather_scatter(plan_atom_row_Rotation)
297     call unplan_gather_scatter(plan_group_row)
298     call unplan_gather_scatter(plan_group_row_3d)
299    
300     call unplan_gather_scatter(plan_atom_col)
301     call unplan_gather_scatter(plan_atom_col_3d)
302     call unplan_gather_scatter(plan_atom_col_Rotation)
303     call unplan_gather_scatter(plan_group_col)
304     call unplan_gather_scatter(plan_group_col_3d)
305    
306     !! initialize gather and scatter plans used in this simulation
307     call plan_gather_scatter(1, mpiSim%nAtomsLocal, &
308     mpiSim, mpiSim%rowComm, plan_atom_row)
309     call plan_gather_scatter(nDim, mpiSim%nAtomsLocal, &
310     mpiSim, mpiSim%rowComm, plan_atom_row_3d)
311     call plan_gather_scatter(9, mpiSim%nAtomsLocal, &
312     mpiSim, mpiSim%rowComm, plan_atom_row_Rotation)
313     call plan_gather_scatter(1, mpiSim%nGroupsLocal, &
314     mpiSim, mpiSim%rowComm, plan_group_row)
315     call plan_gather_scatter(nDim, mpiSim%nGroupsLocal, &
316     mpiSim, mpiSim%rowComm, plan_group_row_3d)
317 gezelter 2204
318 chuckv 1619 call plan_gather_scatter(1, mpiSim%nAtomsLocal, &
319     mpiSim, mpiSim%columnComm, plan_atom_col)
320     call plan_gather_scatter(nDim, mpiSim%nAtomsLocal, &
321     mpiSim, mpiSim%columnComm, plan_atom_col_3d)
322     call plan_gather_scatter(9, mpiSim%nAtomsLocal, &
323     mpiSim, mpiSim%columnComm, plan_atom_col_Rotation)
324     call plan_gather_scatter(1, mpiSim%nGroupsLocal, &
325     mpiSim, mpiSim%columnComm, plan_group_col)
326     call plan_gather_scatter(nDim, mpiSim%nGroupsLocal, &
327     mpiSim, mpiSim%columnComm, plan_group_col_3d)
328 gezelter 2204
329 chuckv 1619 end subroutine replanSimParallel
330    
331     !! Updates number of row and column components for long range forces.
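                   !! The local atom and group counts are summed (mpi_allreduce) over the
                   !! row and column communicators, so every processor learns how many
                   !! atoms and groups share its row and its column.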
332     subroutine updateGridComponents(thisComponentPlan, status)
333     type (mpiComponentPlan) :: thisComponentPlan !! mpiComponentPlan
334 gezelter 2204
335 chuckv 1619 !! Status return
336     !! - 0 Success
337     !! - -1 Failure
338     integer, intent(out) :: status
339     integer :: nAtomsLocal
340     integer :: nAtomsInRow = 0
341     integer :: nAtomsInColumn = 0
342     integer :: nGroupsLocal
343     integer :: nGroupsInRow = 0
344     integer :: nGroupsInColumn = 0
345     integer :: mpiErrors
346    
347     status = 0
348     if (.not. componentPlanSet) return
349    
350     if (thisComponentPlan%nAtomsLocal == 0) then
351     status = -1
352     return
353 gezelter 2204 endif
354 chuckv 1619 if (thisComponentPlan%nGroupsLocal == 0) then
355     write(*,*) 'tcp%ngl = ', thisComponentPlan%nGroupsLocal
356     status = -1
357     return
358     endif
359 gezelter 2204
360 chuckv 1619 nAtomsLocal = thisComponentPlan%nAtomsLocal
361     nGroupsLocal = thisComponentPlan%nGroupsLocal
362    
363     call mpi_allreduce(nAtomsLocal, nAtomsInRow, 1, mpi_integer, &
364     mpi_sum, thisComponentPlan%rowComm, mpiErrors)
365     if (mpiErrors /= 0) then
366     status = -1
367     return
368     endif
369    
370     call mpi_allreduce(nAtomsLocal, nAtomsInColumn, 1, mpi_integer, &
371     mpi_sum, thisComponentPlan%columnComm, mpiErrors)
372     if (mpiErrors /= 0) then
373     status = -1
374     return
375     endif
376 gezelter 2204
377 chuckv 1619 call mpi_allreduce(nGroupsLocal, nGroupsInRow, 1, mpi_integer, &
378     mpi_sum, thisComponentPlan%rowComm, mpiErrors)
379     if (mpiErrors /= 0) then
380     status = -1
381     return
382     endif
383    
384     call mpi_allreduce(nGroupsLocal, nGroupsInColumn, 1, mpi_integer, &
385     mpi_sum, thisComponentPlan%columnComm, mpiErrors)
386     if (mpiErrors /= 0) then
387     status = -1
388     return
389     endif
390    
391     thisComponentPlan%nAtomsInRow = nAtomsInRow
392     thisComponentPlan%nAtomsInColumn = nAtomsInColumn
393     thisComponentPlan%nGroupsInRow = nGroupsInRow
394     thisComponentPlan%nGroupsInColumn = nGroupsInColumn
395    
396     end subroutine updateGridComponents
397    
398    
399     !! Creates a square force decomposition of processors into row and column
400     !! communicators.
401     subroutine make_Force_Grid(thisComponentPlan,status)
402     type (mpiComponentPlan) :: thisComponentPlan
403     integer, intent(out) :: status !! status returns -1 if error
404     integer :: nColumnsMax !! Maximum number of columns
405     integer :: nWorldProcessors !! Total number of processors in World comm.
406     integer :: rowIndex !! Row for this processor.
407     integer :: columnIndex !! Column for this processor.
408     integer :: nRows !! Total number of rows.
409     integer :: nColumns !! Total number of columns.
410     integer :: mpiErrors !! MPI errors.
411     integer :: rowCommunicator !! MPI row communicator.
412     integer :: columnCommunicator
413     integer :: myWorldRank
414     integer :: i
415    
416 gezelter 2204
417 chuckv 1619 if (.not. ComponentPlanSet) return
418     status = 0
419 gezelter 2204
420 chuckv 1619 !! We make a dangerous assumption here that if nProcessors is
421     !! zero, then we need to get the information from MPI.
422     if (thisComponentPlan%nProcessors == 0 ) then
423     call mpi_comm_size( MPI_COMM_WORLD, nWorldProcessors,mpiErrors)
424     if ( mpiErrors /= 0 ) then
425     status = -1
426     return
427     endif
428     call mpi_comm_rank( MPI_COMM_WORLD,myWorldRank,mpiErrors)
429     if ( mpiErrors /= 0 ) then
430     status = -1
431     return
432     endif
433 gezelter 2204
434 chuckv 1619 else
435     nWorldProcessors = thisComponentPlan%nProcessors
436     myWorldRank = thisComponentPlan%myNode
437     endif
438 gezelter 2204
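                   !! The loop below picks nColumns as the largest divisor of
                   !! nWorldProcessors not exceeding nColumnsMax (~ sqrt of the
                   !! processor count), giving an nRows x nColumns grid that is as
                   !! close to square as possible (e.g. 12 -> 4 x 3, 7 -> 7 x 1).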
439 chuckv 1619 nColumnsMax = nint(sqrt(real(nWorldProcessors,kind=dp)))
440 gezelter 2204
441 chuckv 1619 do i = 1, nColumnsMax
442     if (mod(nWorldProcessors,i) == 0) nColumns = i
443     end do
444 gezelter 2204
445 chuckv 1619 nRows = nWorldProcessors/nColumns
446 gezelter 2204
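                   !! Ranks map onto the grid in row-major order: rowIndex = rank/nColumns,
                   !! columnIndex = mod(rank, nColumns).  mpi_comm_split then collects all
                   !! processors sharing a rowIndex (or a columnIndex) into a single
                   !! row (or column) communicator.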
447 chuckv 1619 rowIndex = myWorldRank/nColumns
448 gezelter 2204
449 chuckv 1619 call mpi_comm_split(mpi_comm_world,rowIndex,0,rowCommunicator,mpiErrors)
450     if ( mpiErrors /= 0 ) then
451     write(default_error,*) "MPI comm split failed at row communicator"
452     status = -1
453     return
454     endif
455 gezelter 2204
456 chuckv 1619 columnIndex = mod(myWorldRank,nColumns)
457     call mpi_comm_split(mpi_comm_world,columnIndex,0,columnCommunicator,mpiErrors)
458     if ( mpiErrors /= 0 ) then
459     write(default_error,*) "MPI comm split failed at columnCommunicator"
460     status = -1
461     return
462     endif
463 gezelter 2204
464 chuckv 1619 ! Set appropriate components of thisComponentPlan
465     thisComponentPlan%rowComm = rowCommunicator
466     thisComponentPlan%columnComm = columnCommunicator
467     thisComponentPlan%rowIndex = rowIndex
468     thisComponentPlan%columnIndex = columnIndex
469     thisComponentPlan%nRows = nRows
470     thisComponentPlan%nColumns = nColumns
471    
472     end subroutine make_Force_Grid
473 gezelter 2204
474 chuckv 1619 !! initializes a gather/scatter plan
475     subroutine plan_gather_scatter( nDim, nObjects, thisComponentPlan, &
476     thisComm, this_plan, status)
477     integer, intent(in) :: nDim !! Number of dimensions for gather scatter plan
478     integer, intent(in) :: nObjects
479     type (mpiComponentPlan), intent(in), target :: thisComponentPlan
480     type (gs_plan), intent(out) :: this_plan !! MPI Component Plan
481     integer, intent(in) :: thisComm !! MPI communicator for this plan
482    
483     integer :: arraySize !! size to allocate plan for
484     integer, intent(out), optional :: status
485     integer :: ierror
486     integer :: i,junk
487    
488     if (present(status)) status = 0
489    
490 gezelter 2204 !! Set gsComponentPlan pointer
491     !! to the component plan we want to use for this gather/scatter plan.
492     !! WARNING: this could be dangerous since thisComponentPlan was originally
493     !! allocated in C++ and there is a significant difference between C and
494     !! f95 pointers....
495 chuckv 1619 this_plan%gsComponentPlan => thisComponentPlan
496    
497 gezelter 2204 ! Set this plan size for displs array.
498 chuckv 1619 this_plan%gsPlanSize = nDim * nObjects
499    
500 gezelter 2204 ! Duplicate communicator for this plan
501 chuckv 1619 call mpi_comm_dup(thisComm, this_plan%myPlanComm, mpi_err)
502     if (mpi_err /= 0) then
503     if (present(status)) status = -1
504     return
505     end if
506     call mpi_comm_rank(this_plan%myPlanComm, this_plan%myPlanRank, mpi_err)
507     if (mpi_err /= 0) then
508     if (present(status)) status = -1
509     return
510     end if
511    
512     call mpi_comm_size(this_plan%myPlanComm, this_plan%planNprocs, mpi_err)
513    
514     if (mpi_err /= 0) then
515     if (present(status)) status = -1
516     return
517     end if
518    
519     !! counts and displacements arrays are indexed from 0 to be compatible
520     !! with MPI arrays.
521     allocate (this_plan%counts(0:this_plan%planNprocs-1),STAT=ierror)
522     if (ierror /= 0) then
523     if (present(status)) status = -1
524     return
525     end if
526    
527     allocate (this_plan%displs(0:this_plan%planNprocs-1),STAT=ierror)
528     if (ierror /= 0) then
529     if (present(status)) status = -1
530     return
531     end if
532    
533 gezelter 2204 !! gather all the local plan sizes into an array of length planNprocs.
534 chuckv 1619 call mpi_allgather(this_plan%gsPlanSize,1,mpi_integer,this_plan%counts, &
535     1,mpi_integer,thisComm,mpi_err)
536    
537     if (mpi_err /= 0) then
538     if (present(status)) status = -1
539     return
540     end if
541 gezelter 2204
542 chuckv 1619 !! figure out the total plan size (the sum of all the local plan sizes)
543     this_plan%globalPlanSize = sum(this_plan%counts)
544 gezelter 2204
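                   !! The displacements are an exclusive prefix sum of the counts;
                   !! e.g. counts = (/ 6, 9, 3 /) gives displs = (/ 0, 6, 15 /).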
545 chuckv 1619 !! initialize plan displacements.
546     this_plan%displs(0) = 0
547     do i = 1, this_plan%planNprocs - 1,1
548     this_plan%displs(i) = this_plan%displs(i-1) + this_plan%counts(i-1)
549     end do
550     end subroutine plan_gather_scatter
551    
552     subroutine unplan_gather_scatter(this_plan)
553     type (gs_plan), intent(inout) :: this_plan
554 gezelter 2204
555 chuckv 1619 this_plan%gsComponentPlan => null()
556     call mpi_comm_free(this_plan%myPlanComm,mpi_err)
557    
558     deallocate(this_plan%counts)
559     deallocate(this_plan%displs)
560    
561     end subroutine unplan_gather_scatter
562    
563     subroutine gather_integer( sbuffer, rbuffer, this_plan, status)
564    
565     type (gs_plan), intent(inout) :: this_plan
566     integer, dimension(:), intent(inout) :: sbuffer
567     integer, dimension(:), intent(inout) :: rbuffer
568     integer :: noffset
569     integer, intent(out), optional :: status
570     integer :: i
571    
572     if (present(status)) status = 0
573     noffset = this_plan%displs(this_plan%myPlanRank)
574    
575 gezelter 2204 ! if (getmyNode() == 1) then
576     ! write(*,*) "Node 0 printing allgatherv vars"
577     ! write(*,*) "Noffset: ", noffset
578     ! write(*,*) "PlanSize: ", this_plan%gsPlanSize
579     ! write(*,*) "PlanComm: ", this_plan%myPlanComm
580     ! end if
581 chuckv 1619
582     call mpi_allgatherv(sbuffer,this_plan%gsPlanSize, mpi_integer, &
583     rbuffer,this_plan%counts,this_plan%displs,mpi_integer, &
584     this_plan%myPlanComm, mpi_err)
585    
586     if (mpi_err /= 0) then
587 gezelter 2204 if (present(status)) status = -1
588 chuckv 1619 endif
589    
590     end subroutine gather_integer
591    
592     subroutine gather_double( sbuffer, rbuffer, this_plan, status)
593    
594     type (gs_plan), intent(in) :: this_plan
595     real( kind = DP ), dimension(:), intent(inout) :: sbuffer
596     real( kind = DP ), dimension(:), intent(inout) :: rbuffer
597     integer :: noffset
598     integer, intent(out), optional :: status
599    
600    
601     if (present(status)) status = 0
602     noffset = this_plan%displs(this_plan%myPlanRank)
603     #ifdef PROFILE
604     call cpu_time(commTimeInitial)
605     #endif
606 gezelter 2758 #ifdef SINGLE_PRECISION
607     call mpi_allgatherv(sbuffer,this_plan%gsPlanSize, mpi_real, &
608     rbuffer,this_plan%counts,this_plan%displs,mpi_real, &
609     this_plan%myPlanComm, mpi_err)
610     #else
611 chuckv 1619 call mpi_allgatherv(sbuffer,this_plan%gsPlanSize, mpi_double_precision, &
612     rbuffer,this_plan%counts,this_plan%displs,mpi_double_precision, &
613     this_plan%myPlanComm, mpi_err)
614 gezelter 2758 #endif
615 chuckv 1619 #ifdef PROFILE
616     call cpu_time(commTimeFinal)
617     commTime = commTime + commTimeFinal - commTimeInitial
618     #endif
619    
620     if (mpi_err /= 0) then
621 gezelter 2204 if (present(status)) status = -1
622 chuckv 1619 endif
623    
624     end subroutine gather_double
625    
626     subroutine gather_double_2d( sbuffer, rbuffer, this_plan, status)
627    
628     type (gs_plan), intent(in) :: this_plan
629     real( kind = DP ), dimension(:,:), intent(inout) :: sbuffer
630     real( kind = DP ), dimension(:,:), intent(inout) :: rbuffer
631     integer :: noffset,i,ierror
632     integer, intent(out), optional :: status
633 gezelter 2204
634 chuckv 1619 external mpi_allgatherv
635    
636 gezelter 2204 if (present(status)) status = 0
637    
638     ! noffset = this_plan%displs(this_plan%me)
639 chuckv 1619 #ifdef PROFILE
640 gezelter 2204 call cpu_time(commTimeInitial)
641 chuckv 1619 #endif
642    
643 gezelter 2758 #ifdef SINGLE_PRECISION
644     call mpi_allgatherv(sbuffer,this_plan%gsPlanSize, mpi_real, &
645     rbuffer,this_plan%counts,this_plan%displs,mpi_real, &
646     this_plan%myPlanComm, mpi_err)
647     #else
648 chuckv 1619 call mpi_allgatherv(sbuffer,this_plan%gsPlanSize, mpi_double_precision, &
649 gezelter 2204 rbuffer,this_plan%counts,this_plan%displs,mpi_double_precision, &
650     this_plan%myPlanComm, mpi_err)
651 gezelter 2758 #endif
652 chuckv 1619
653     #ifdef PROFILE
654     call cpu_time(commTimeFinal)
655     commTime = commTime + commTimeFinal - commTimeInitial
656     #endif
657    
658     if (mpi_err /= 0) then
659 gezelter 2204 if (present(status)) status = -1
660 chuckv 1619 endif
661    
662 gezelter 2204 end subroutine gather_double_2d
663 chuckv 1619
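                   !! In the scatter routines below, every process contributes a full
                   !! row/column-sized buffer; mpi_reduce_scatter sums the contributions
                   !! element-wise and returns to each process only its own
                   !! counts(myPlanRank)-long segment.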
664     subroutine scatter_double( sbuffer, rbuffer, this_plan, status)
665    
666     type (gs_plan), intent(in) :: this_plan
667     real( kind = DP ), dimension(:), intent(inout) :: sbuffer
668     real( kind = DP ), dimension(:), intent(inout) :: rbuffer
669     integer, intent(out), optional :: status
670     external mpi_reduce_scatter
671    
672 gezelter 2204 if (present(status)) status = 0
673 chuckv 1619
674     #ifdef PROFILE
675 gezelter 2204 call cpu_time(commTimeInitial)
676 chuckv 1619 #endif
677 gezelter 2758 #ifdef SINGLE_PRECISION
678 chuckv 1619 call mpi_reduce_scatter(sbuffer,rbuffer, this_plan%counts, &
679 gezelter 2758 mpi_real, MPI_SUM, this_plan%myPlanComm, mpi_err)
680     #else
681     call mpi_reduce_scatter(sbuffer,rbuffer, this_plan%counts, &
682 chuckv 1619 mpi_double_precision, MPI_SUM, this_plan%myPlanComm, mpi_err)
683 gezelter 2758 #endif
684 chuckv 1619 #ifdef PROFILE
685     call cpu_time(commTimeFinal)
686     commTime = commTime + commTimeFinal - commTimeInitial
687     #endif
688    
689     if (mpi_err /= 0) then
690 gezelter 2204 if (present(status)) status = -1
691 chuckv 1619 endif
692    
693     end subroutine scatter_double
694    
695     subroutine scatter_double_2d( sbuffer, rbuffer, this_plan, status)
696    
697     type (gs_plan), intent(in) :: this_plan
698     real( kind = DP ), dimension(:,:), intent(inout) :: sbuffer
699     real( kind = DP ), dimension(:,:), intent(inout) :: rbuffer
700     integer, intent(out), optional :: status
701     external mpi_reduce_scatter
702 gezelter 2204
703     if (present(status)) status = 0
704 chuckv 1619 #ifdef PROFILE
705 gezelter 2204 call cpu_time(commTimeInitial)
706 chuckv 1619 #endif
707 gezelter 2758 #ifdef SINGLE_PRECISION
708 chuckv 1619 call mpi_reduce_scatter(sbuffer,rbuffer, this_plan%counts, &
709 gezelter 2758 mpi_real, MPI_SUM, this_plan%myPlanComm, mpi_err)
710     #else
711     call mpi_reduce_scatter(sbuffer,rbuffer, this_plan%counts, &
712 chuckv 1619 mpi_double_precision, MPI_SUM, this_plan%myPlanComm, mpi_err)
713 gezelter 2758 #endif
714 chuckv 1619 #ifdef PROFILE
715     call cpu_time(commTimeFinal)
716     commTime = commTime + commTimeFinal - commTimeInitial
717     #endif
718    
719     if (mpi_err /= 0) then
720 gezelter 2204 if (present(status)) status = -1
721 chuckv 1619 endif
722    
723     end subroutine scatter_double_2d
724 gezelter 2204
725 chuckv 1619 subroutine setAtomTags(tags, status)
726     integer, dimension(:) :: tags
727     integer :: status
728    
729     integer :: alloc_stat
730 gezelter 2204
731 chuckv 1619 integer :: nAtomsInCol
732     integer :: nAtomsInRow
733    
734     status = 0
735 gezelter 2204 ! allocate row arrays
736 chuckv 1619 nAtomsInRow = getNatomsInRow(plan_atom_row)
737     nAtomsInCol = getNatomsInCol(plan_atom_col)
738 gezelter 2204
739 chuckv 1619 if(.not. allocated(AtomLocalToGlobal)) then
740     allocate(AtomLocalToGlobal(size(tags)),STAT=alloc_stat)
741 gezelter 2204 if (alloc_stat /= 0 ) then
742 chuckv 1619 status = -1
743     return
744     endif
745     else
746     deallocate(AtomLocalToGlobal)
747     allocate(AtomLocalToGlobal(size(tags)),STAT=alloc_stat)
748     if (alloc_stat /= 0 ) then
749     status = -1
750     return
751     endif
752    
753     endif
754    
755     AtomLocalToGlobal = tags
756    
757     if (.not. allocated(AtomRowToGlobal)) then
758     allocate(AtomRowToGlobal(nAtomsInRow),STAT=alloc_stat)
759     if (alloc_stat /= 0 ) then
760     status = -1
761     return
762     endif
763     else
764     deallocate(AtomRowToGlobal)
765     allocate(AtomRowToGlobal(nAtomsInRow),STAT=alloc_stat)
766     if (alloc_stat /= 0 ) then
767     status = -1
768     return
769     endif
770    
771     endif
772 gezelter 2204 ! allocate column arrays
773 chuckv 1619 if (.not. allocated(AtomColToGlobal)) then
774     allocate(AtomColToGlobal(nAtomsInCol),STAT=alloc_stat)
775     if (alloc_stat /= 0 ) then
776     status = -1
777     return
778     endif
779     else
780     deallocate(AtomColToGlobal)
781     allocate(AtomColToGlobal(nAtomsInCol),STAT=alloc_stat)
782     if (alloc_stat /= 0 ) then
783     status = -1
784     return
785     endif
786     endif
787 gezelter 2204
788 chuckv 1619 call gather(tags, AtomRowToGlobal, plan_atom_row)
789     call gather(tags, AtomColToGlobal, plan_atom_col)
790 gezelter 2204
791 chuckv 1619 end subroutine setAtomTags
792    
793     subroutine setGroupTags(tags, status)
794     integer, dimension(:) :: tags
795     integer :: status
796    
797     integer :: alloc_stat
798 gezelter 2204
799 chuckv 1619 integer :: nGroupsInCol
800     integer :: nGroupsInRow
801    
802     status = 0
803    
804     nGroupsInRow = getNgroupsInRow(plan_group_row)
805     nGroupsInCol = getNgroupsInCol(plan_group_col)
806 gezelter 2204
807 chuckv 1619 if(allocated(GroupLocalToGlobal)) then
808     deallocate(GroupLocalToGlobal)
809     endif
810     allocate(GroupLocalToGlobal(size(tags)),STAT=alloc_stat)
811     if (alloc_stat /= 0 ) then
812     status = -1
813     return
814     endif
815 gezelter 2204
816 chuckv 1619 GroupLocalToGlobal = tags
817    
818     if(allocated(GroupRowToGlobal)) then
819     deallocate(GroupRowToGlobal)
820     endif
821     allocate(GroupRowToGlobal(nGroupsInRow),STAT=alloc_stat)
822     if (alloc_stat /= 0 ) then
823     status = -1
824     return
825     endif
826    
827     if(allocated(GroupColToGlobal)) then
828     deallocate(GroupColToGlobal)
829     endif
830     allocate(GroupColToGlobal(nGroupsInCol),STAT=alloc_stat)
831     if (alloc_stat /= 0 ) then
832     status = -1
833     return
834     endif
835 gezelter 2204
836 chuckv 1619 call gather(tags, GroupRowToGlobal, plan_group_row)
837     call gather(tags, GroupColToGlobal, plan_group_col)
838 gezelter 2204
839 chuckv 1619 end subroutine setGroupTags
840 gezelter 2204
841 chuckv 1619 function getNatomsInCol(thisplan) result(nInCol)
842     type (gs_plan), intent(in) :: thisplan
843     integer :: nInCol
844     nInCol = thisplan%gsComponentPlan%nAtomsInColumn
845     end function getNatomsInCol
846    
847     function getNatomsInRow(thisplan) result(nInRow)
848     type (gs_plan), intent(in) :: thisplan
849     integer :: nInRow
850     nInRow = thisplan%gsComponentPlan%nAtomsInRow
851     end function getNatomsInRow
852 gezelter 2204
853 chuckv 1619 function getNgroupsInCol(thisplan) result(nInCol)
854     type (gs_plan), intent(in) :: thisplan
855     integer :: nInCol
856     nInCol = thisplan%gsComponentPlan%nGroupsInColumn
857     end function getNgroupsInCol
858    
859     function getNgroupsInRow(thisplan) result(nInRow)
860     type (gs_plan), intent(in) :: thisplan
861     integer :: nInRow
862     nInRow = thisplan%gsComponentPlan%nGroupsInRow
863     end function getNgroupsInRow
864 gezelter 2204
865 chuckv 1619 function isMPISimSet() result(isthisSimSet)
866     logical :: isthisSimSet
867     if (isSimSet) then
868     isthisSimSet = .true.
869     else
870     isthisSimSet = .false.
871     endif
872     end function isMPISimSet
873 gezelter 2204
874 chuckv 1619 subroutine printComponentPlan(this_plan,printNode)
875    
876     type (mpiComponentPlan), intent(in) :: this_plan
877     integer, optional :: printNode
878     logical :: print_me = .false.
879    
880     if (present(printNode)) then
881     if (printNode == mpiSim%myNode) print_me = .true.
882     else
883     print_me = .true.
884     endif
885    
886     if (print_me) then
887     write(default_error,*) "SetupSimParallel: writing component plan"
888 gezelter 2204
889 chuckv 1619 write(default_error,*) "nMolGlobal: ", mpiSim%nMolGlobal
890     write(default_error,*) "nAtomsGlobal: ", mpiSim%nAtomsGlobal
891     write(default_error,*) "nAtomsLocal: ", mpiSim%nAtomsLocal
892     write(default_error,*) "myNode: ", mpiSim%myNode
893     write(default_error,*) "nProcessors: ", mpiSim%nProcessors
894     write(default_error,*) "rowComm: ", mpiSim%rowComm
895     write(default_error,*) "columnComm: ", mpiSim%columnComm
896     write(default_error,*) "nRows: ", mpiSim%nRows
897     write(default_error,*) "nColumns: ", mpiSim%nColumns
898     write(default_error,*) "nAtomsInRow: ", mpiSim%nAtomsInRow
899     write(default_error,*) "nAtomsInColumn: ", mpiSim%nAtomsInColumn
900     write(default_error,*) "rowIndex: ", mpiSim%rowIndex
901     write(default_error,*) "columnIndex: ", mpiSim%columnIndex
902     endif
903     end subroutine printComponentPlan
904    
905     function getMyNode() result(myNode)
906     integer :: myNode
907     myNode = mpiSim%myNode
908     end function getMyNode
909    
910     #ifdef PROFILE
911     subroutine printCommTime()
912     write(*,*) "MPI communication time is: ", commTime
913     end subroutine printCommTime
914    
915     function getCommTime() result(comm_time)
916     real :: comm_time
917     comm_time = commTime
918     end function getCommTime
919    
920     #endif
921    
922     #endif // is_mpi
923     end module mpiSimulation
924