root/group/trunk/OOPSE-3.0/src/UseTheForce/DarkSide/simParallel.F90
Revision: 1948
Committed: Fri Jan 14 20:31:16 2005 UTC by gezelter
File size: 28939 byte(s)
Log Message:
separating modules and C/Fortran interface subroutines

File Contents

!!
!! Copyright (c) 2005 The University of Notre Dame. All Rights Reserved.
!!
!! The University of Notre Dame grants you ("Licensee") a
!! non-exclusive, royalty free, license to use, modify and
!! redistribute this software in source and binary code form, provided
!! that the following conditions are met:
!!
!! 1. Acknowledgement of the program authors must be made in any
!!    publication of scientific results based in part on use of the
!!    program.  An acceptable form of acknowledgement is citation of
!!    the article in which the program was described (Matthew
!!    A. Meineke, Charles F. Vardeman II, Teng Lin, Christopher
!!    J. Fennell and J. Daniel Gezelter, "OOPSE: An Object-Oriented
!!    Parallel Simulation Engine for Molecular Dynamics,"
!!    J. Comput. Chem. 26, pp. 252-271 (2005))
!!
!! 2. Redistributions of source code must retain the above copyright
!!    notice, this list of conditions and the following disclaimer.
!!
!! 3. Redistributions in binary form must reproduce the above copyright
!!    notice, this list of conditions and the following disclaimer in the
!!    documentation and/or other materials provided with the
!!    distribution.
!!
!! This software is provided "AS IS," without a warranty of any
!! kind. All express or implied conditions, representations and
!! warranties, including any implied warranty of merchantability,
!! fitness for a particular purpose or non-infringement, are hereby
!! excluded.  The University of Notre Dame and its licensors shall not
!! be liable for any damages suffered by licensee as a result of
!! using, modifying or distributing the software or its
!! derivatives. In no event will the University of Notre Dame or its
!! licensors be liable for any lost revenue, profit or data, or for
!! direct, indirect, special, consequential, incidental or punitive
!! damages, however caused and regardless of the theory of liability,
!! arising out of the use of or inability to use software, even if the
!! University of Notre Dame has been advised of the possibility of
!! such damages.
!!

!! MPI support for long range forces using force decomposition
!! on a square grid of processors.
!! Corresponds to mpiSimulation.cpp for C++.
!! This module also contains a private interface for MPI F90 routines.
!!
!! @author Charles F. Vardeman II
!! @author Matthew Meineke
!! @version $Id: simParallel.F90,v 1.3 2005-01-14 20:31:16 gezelter Exp $, $Date: 2005-01-14 20:31:16 $, $Name: not supported by cvs2svn $, $Revision: 1.3 $

module mpiSimulation
  use definitions
#ifdef IS_MPI
  use oopseMPI
  implicit none
  PRIVATE

  !! PUBLIC subroutines contained in this module.
  !! gather and scatter are generic interfaces
  !! to the gather and scatter routines.
  public :: gather, scatter
  public :: setupSimParallel
  public :: replanSimParallel
  public :: getNatomsInCol
  public :: getNatomsInRow
  public :: getNgroupsInCol
  public :: getNgroupsInRow
  public :: isMPISimSet
  public :: printComponentPlan
  public :: getMyNode

  !! PUBLIC subroutines contained in the MPI module
  public :: mpi_bcast
  public :: mpi_allreduce
  ! public :: mpi_reduce
  public :: mpi_send
  public :: mpi_recv
  public :: mpi_get_processor_name
  public :: mpi_finalize

  !! PUBLIC MPI variables
  public :: mpi_comm_world
  public :: mpi_character
  public :: mpi_integer
  public :: mpi_double_precision
  public :: mpi_sum
  public :: mpi_max
  public :: mpi_status_size
  public :: mpi_any_source

  !! Safety logical to prevent access to the ComponentPlan until
  !! it has been set by C++.
  logical, save :: ComponentPlanSet = .false.

  !! Generic MPI error declaration.
  integer, public :: mpi_err

#ifdef PROFILE
  public :: printCommTime
  public :: getCommTime
  real, save :: commTime = 0.0
  real :: commTimeInitial, commTimeFinal
#endif

  !! Include the mpiComponentPlan type. mpiComponentPlan is a
  !! dual header file for both C and Fortran.
#define __FORTRAN90
#include "UseTheForce/mpiComponentPlan.h"

  !! Tags used during the force loop for parallel simulation
  integer, public, allocatable, dimension(:) :: AtomLocalToGlobal
  integer, public, allocatable, dimension(:) :: AtomRowToGlobal
  integer, public, allocatable, dimension(:) :: AtomColToGlobal
  integer, public, allocatable, dimension(:) :: GroupLocalToGlobal
  integer, public, allocatable, dimension(:) :: GroupRowToGlobal
  integer, public, allocatable, dimension(:) :: GroupColToGlobal

  !! Logical set true if mpiSimulation has been initialized
  logical, save :: isSimSet = .false.

  type (mpiComponentPlan), save :: mpiSim

  !! gs_plan contains plans for the gather and scatter routines
  type, public :: gs_plan
     private
     type (mpiComponentPlan), pointer :: gsComponentPlan => NULL()
     integer :: gsPlanSize       !! size of this plan (nDim*nComponents)
     integer :: globalPlanSize   !! size of all components in plan
     integer, dimension(:), pointer :: displs !! Displacements array for MPI, indexed from 0.
     integer, dimension(:), pointer :: counts !! Counts array for MPI, indexed from 0.
     integer :: myPlanComm  !! My communicator for this plan
     integer :: myPlanRank  !! My rank in this plan
     integer :: planNprocs  !! Number of processors in this plan
  end type gs_plan

  ! plans for different decompositions
  type (gs_plan), public, save :: plan_atom_row
  type (gs_plan), public, save :: plan_atom_row_3d
  type (gs_plan), public, save :: plan_atom_col
  type (gs_plan), public, save :: plan_atom_col_3d
  type (gs_plan), public, save :: plan_atom_row_Rotation
  type (gs_plan), public, save :: plan_atom_col_Rotation
  type (gs_plan), public, save :: plan_group_row
  type (gs_plan), public, save :: plan_group_col
  type (gs_plan), public, save :: plan_group_row_3d
  type (gs_plan), public, save :: plan_group_col_3d

  type (mpiComponentPlan), pointer :: simComponentPlan

  ! interfaces for the gather and scatter routines
  !! Generic interface for gather.
  !! Gathers a local array into a row or column array.
  !! Interface provided for integer, double, and rank-2
  !! double arrays.
  interface gather
     module procedure gather_integer
     module procedure gather_double
     module procedure gather_double_2d
  end interface

  !! Generic interface for scatter.
  !! Scatters a row or column array, adding components
  !! and reducing them to a local nComponent array.
  !! Interface provided for double and rank-2 double arrays.
  interface scatter
     module procedure scatter_double
     module procedure scatter_double_2d
  end interface
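
  !! A minimal sketch (not part of this module) of how the gather/scatter
  !! pair is typically used around a force evaluation: positions are
  !! gathered into row and column arrays, partial forces are computed on
  !! the (row x column) block, and then scattered (summed) back onto the
  !! local atoms.  The names q, q_Row, q_Col, f_Row, f_Col, f, and f_temp
  !! are illustrative, not identifiers from this module:
  !!
  !!   call gather(q, q_Row, plan_atom_row_3d)
  !!   call gather(q, q_Col, plan_atom_col_3d)
  !!   ! ... compute partial forces f_Row, f_Col from q_Row, q_Col ...
  !!   call scatter(f_Row, f, plan_atom_row_3d)
  !!   call scatter(f_Col, f_temp, plan_atom_col_3d)
  !!   f = f + f_temp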

contains

  !! Sets up the mpiComponentPlan with the structure passed from C++.
  subroutine setupSimParallel(thisComponentPlan, nAtomTags, atomTags, &
       nGroupTags, groupTags, status)
    !! Passed Arguments
    !! mpiComponentPlan struct from C
    type (mpiComponentPlan), intent(inout) :: thisComponentPlan
    !! Number of tags passed
    integer, intent(in) :: nAtomTags, nGroupTags
    !! Result status: 0 = normal, -1 = error
    integer, intent(out) :: status
    integer :: localStatus
    !! Global reference tags for local particles
    integer, dimension(nAtomTags), intent(inout) :: atomTags
    integer, dimension(nGroupTags), intent(inout) :: groupTags

    !write(*,*) 'mpiSim_mod thinks node', thisComponentPlan%myNode, &
    !     ' has atomTags(1) = ', atomTags(1)

    status = 0
    if (componentPlanSet) then
       return
    endif
    componentPlanSet = .true.

    !! copy the C component plan to Fortran
    mpiSim = thisComponentPlan
    !write(*,*) "Setting up simParallel"

    call make_Force_Grid(mpiSim, localStatus)
    if (localStatus /= 0) then
       write(default_error,*) "Error creating force grid"
       status = -1
       return
    endif

    call updateGridComponents(mpiSim, localStatus)
    if (localStatus /= 0) then
       write(default_error,*) "Error updating grid components"
       status = -1
       return
    endif

    !! initialize the gather and scatter plans used in this simulation
    call plan_gather_scatter(1, mpiSim%nAtomsLocal, &
         mpiSim, mpiSim%rowComm, plan_atom_row)
    call plan_gather_scatter(nDim, mpiSim%nAtomsLocal, &
         mpiSim, mpiSim%rowComm, plan_atom_row_3d)
    call plan_gather_scatter(9, mpiSim%nAtomsLocal, &
         mpiSim, mpiSim%rowComm, plan_atom_row_Rotation)
    call plan_gather_scatter(1, mpiSim%nGroupsLocal, &
         mpiSim, mpiSim%rowComm, plan_group_row)
    call plan_gather_scatter(nDim, mpiSim%nGroupsLocal, &
         mpiSim, mpiSim%rowComm, plan_group_row_3d)

    call plan_gather_scatter(1, mpiSim%nAtomsLocal, &
         mpiSim, mpiSim%columnComm, plan_atom_col)
    call plan_gather_scatter(nDim, mpiSim%nAtomsLocal, &
         mpiSim, mpiSim%columnComm, plan_atom_col_3d)
    call plan_gather_scatter(9, mpiSim%nAtomsLocal, &
         mpiSim, mpiSim%columnComm, plan_atom_col_Rotation)
    call plan_gather_scatter(1, mpiSim%nGroupsLocal, &
         mpiSim, mpiSim%columnComm, plan_group_col)
    call plan_gather_scatter(nDim, mpiSim%nGroupsLocal, &
         mpiSim, mpiSim%columnComm, plan_group_col_3d)

    ! Initialize tags

    call setAtomTags(atomTags, localStatus)
    if (localStatus /= 0) then
       status = -1
       return
    endif

    call setGroupTags(groupTags, localStatus)
    if (localStatus /= 0) then
       status = -1
       return
    endif

    isSimSet = .true.

    ! call printComponentPlan(mpiSim,0)
  end subroutine setupSimParallel
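
  !! A minimal sketch of the expected call from the interface layer (the
  !! real caller lives on the C++ side; the names plan, atomTags,
  !! groupTags, and the error handling shown here are illustrative only):
  !!
  !!   integer :: status
  !!   call setupSimParallel(plan, nAtomsLocal, atomTags, &
  !!        nGroupsLocal, groupTags, status)
  !!   if (status /= 0) write(default_error,*) "simParallel setup failed"
  !!
  !! Note that a second call is a silent no-op (componentPlanSet guards
  !! re-entry); use replanSimParallel when the decomposition must be
  !! rebuilt.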

  subroutine replanSimParallel(thisComponentPlan, status)
    !! Passed Arguments
    !! mpiComponentPlan struct from C
    type (mpiComponentPlan), intent(inout) :: thisComponentPlan
    integer, intent(out) :: status
    integer :: localStatus
    integer :: mpierror
    status = 0

    call updateGridComponents(thisComponentPlan, localStatus)
    if (localStatus /= 0) then
       status = -1
       return
    endif

    !! Unplan the gather and scatter plans
    call unplan_gather_scatter(plan_atom_row)
    call unplan_gather_scatter(plan_atom_row_3d)
    call unplan_gather_scatter(plan_atom_row_Rotation)
    call unplan_gather_scatter(plan_group_row)
    call unplan_gather_scatter(plan_group_row_3d)

    call unplan_gather_scatter(plan_atom_col)
    call unplan_gather_scatter(plan_atom_col_3d)
    call unplan_gather_scatter(plan_atom_col_Rotation)
    call unplan_gather_scatter(plan_group_col)
    call unplan_gather_scatter(plan_group_col_3d)

    !! re-initialize the gather and scatter plans used in this simulation
    call plan_gather_scatter(1, mpiSim%nAtomsLocal, &
         mpiSim, mpiSim%rowComm, plan_atom_row)
    call plan_gather_scatter(nDim, mpiSim%nAtomsLocal, &
         mpiSim, mpiSim%rowComm, plan_atom_row_3d)
    call plan_gather_scatter(9, mpiSim%nAtomsLocal, &
         mpiSim, mpiSim%rowComm, plan_atom_row_Rotation)
    call plan_gather_scatter(1, mpiSim%nGroupsLocal, &
         mpiSim, mpiSim%rowComm, plan_group_row)
    call plan_gather_scatter(nDim, mpiSim%nGroupsLocal, &
         mpiSim, mpiSim%rowComm, plan_group_row_3d)

    call plan_gather_scatter(1, mpiSim%nAtomsLocal, &
         mpiSim, mpiSim%columnComm, plan_atom_col)
    call plan_gather_scatter(nDim, mpiSim%nAtomsLocal, &
         mpiSim, mpiSim%columnComm, plan_atom_col_3d)
    call plan_gather_scatter(9, mpiSim%nAtomsLocal, &
         mpiSim, mpiSim%columnComm, plan_atom_col_Rotation)
    call plan_gather_scatter(1, mpiSim%nGroupsLocal, &
         mpiSim, mpiSim%columnComm, plan_group_col)
    call plan_gather_scatter(nDim, mpiSim%nGroupsLocal, &
         mpiSim, mpiSim%columnComm, plan_group_col_3d)

  end subroutine replanSimParallel

  !! Updates the number of row and column components for long range forces.
  subroutine updateGridComponents(thisComponentPlan, status)
    type (mpiComponentPlan) :: thisComponentPlan !! mpiComponentPlan

    !! Status return
    !! -  0 Success
    !! - -1 Failure
    integer, intent(out) :: status
    integer :: nAtomsLocal
    integer :: nAtomsInRow = 0
    integer :: nAtomsInColumn = 0
    integer :: nGroupsLocal
    integer :: nGroupsInRow = 0
    integer :: nGroupsInColumn = 0
    integer :: mpiErrors

    status = 0
    if (.not. componentPlanSet) return

    if (thisComponentPlan%nAtomsLocal == 0) then
       status = -1
       return
    endif
    if (thisComponentPlan%nGroupsLocal == 0) then
       write(*,*) 'tcp%ngl = ', thisComponentPlan%nGroupsLocal
       status = -1
       return
    endif

    nAtomsLocal = thisComponentPlan%nAtomsLocal
    nGroupsLocal = thisComponentPlan%nGroupsLocal

    call mpi_allreduce(nAtomsLocal, nAtomsInRow, 1, mpi_integer, &
         mpi_sum, thisComponentPlan%rowComm, mpiErrors)
    if (mpiErrors /= 0) then
       status = -1
       return
    endif

    call mpi_allreduce(nAtomsLocal, nAtomsInColumn, 1, mpi_integer, &
         mpi_sum, thisComponentPlan%columnComm, mpiErrors)
    if (mpiErrors /= 0) then
       status = -1
       return
    endif

    call mpi_allreduce(nGroupsLocal, nGroupsInRow, 1, mpi_integer, &
         mpi_sum, thisComponentPlan%rowComm, mpiErrors)
    if (mpiErrors /= 0) then
       status = -1
       return
    endif

    call mpi_allreduce(nGroupsLocal, nGroupsInColumn, 1, mpi_integer, &
         mpi_sum, thisComponentPlan%columnComm, mpiErrors)
    if (mpiErrors /= 0) then
       status = -1
       return
    endif

    thisComponentPlan%nAtomsInRow = nAtomsInRow
    thisComponentPlan%nAtomsInColumn = nAtomsInColumn
    thisComponentPlan%nGroupsInRow = nGroupsInRow
    thisComponentPlan%nGroupsInColumn = nGroupsInColumn

  end subroutine updateGridComponents
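
  !! Worked example (assumed numbers, for illustration only): on a 2x2
  !! grid where every processor owns 100 atoms, each mpi_allreduce above
  !! sums nAtomsLocal over the 2 members of a row or column communicator,
  !! so nAtomsInRow = nAtomsInColumn = 200 on every node.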

  !! Creates a square force decomposition of the processors into row and
  !! column communicators.
  subroutine make_Force_Grid(thisComponentPlan, status)
    type (mpiComponentPlan) :: thisComponentPlan
    integer, intent(out) :: status !! status returns -1 on error
    integer :: nColumnsMax   !! Maximum number of columns
    integer :: nWorldProcessors !! Total number of processors in World comm.
    integer :: rowIndex      !! Row for this processor.
    integer :: columnIndex   !! Column for this processor.
    integer :: nRows         !! Total number of rows.
    integer :: nColumns      !! Total number of columns.
    integer :: mpiErrors     !! MPI errors.
    integer :: rowCommunicator !! MPI row communicator.
    integer :: columnCommunicator
    integer :: myWorldRank
    integer :: i

    ! status must be set before the early return below, since it is
    ! intent(out) and would otherwise be undefined.
    status = 0
    if (.not. ComponentPlanSet) return

    !! We make a dangerous assumption here that if numberProcessors is
    !! zero, then we need to get the information from MPI.
    if (thisComponentPlan%nProcessors == 0) then
       call mpi_comm_size(MPI_COMM_WORLD, nWorldProcessors, mpiErrors)
       if (mpiErrors /= 0) then
          status = -1
          return
       endif
       call mpi_comm_rank(MPI_COMM_WORLD, myWorldRank, mpiErrors)
       if (mpiErrors /= 0) then
          status = -1
          return
       endif
    else
       nWorldProcessors = thisComponentPlan%nProcessors
       myWorldRank = thisComponentPlan%myNode
    endif

    ! The column count is the largest divisor of nWorldProcessors that
    ! does not exceed nint(sqrt(nWorldProcessors)), so nRows >= nColumns.
    nColumnsMax = nint(sqrt(real(nWorldProcessors, kind=dp)))

    do i = 1, nColumnsMax
       if (mod(nWorldProcessors, i) == 0) nColumns = i
    end do

    nRows = nWorldProcessors/nColumns
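
    ! Worked example (illustrative): with 12 processors,
    ! nColumnsMax = nint(sqrt(12.0)) = 3, the divisors of 12 up to 3 are
    ! {1, 2, 3}, so nColumns = 3 and nRows = 4.  Rank 7 then lands at
    ! rowIndex = 7/3 = 2 and columnIndex = mod(7,3) = 1 below.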

    rowIndex = myWorldRank/nColumns

    call mpi_comm_split(mpi_comm_world, rowIndex, 0, rowCommunicator, mpiErrors)
    if (mpiErrors /= 0) then
       write(default_error,*) "MPI comm split failed at row communicator"
       status = -1
       return
    endif

    columnIndex = mod(myWorldRank, nColumns)
    call mpi_comm_split(mpi_comm_world, columnIndex, 0, columnCommunicator, mpiErrors)
    if (mpiErrors /= 0) then
       write(default_error,*) "MPI comm split failed at column communicator"
       status = -1
       return
    endif

    ! Set the appropriate components of thisComponentPlan
    thisComponentPlan%rowComm = rowCommunicator
    thisComponentPlan%columnComm = columnCommunicator
    thisComponentPlan%rowIndex = rowIndex
    thisComponentPlan%columnIndex = columnIndex
    thisComponentPlan%nRows = nRows
    thisComponentPlan%nColumns = nColumns

  end subroutine make_Force_Grid

  !! Initializes a gather/scatter plan.
  subroutine plan_gather_scatter(nDim, nObjects, thisComponentPlan, &
       thisComm, this_plan, status)
    integer, intent(in) :: nDim !! Number of dimensions for the gather/scatter plan
    integer, intent(in) :: nObjects
    type (mpiComponentPlan), intent(in), target :: thisComponentPlan
    type (gs_plan), intent(out) :: this_plan !! MPI Component Plan
    integer, intent(in) :: thisComm !! MPI communicator for this plan

    integer :: arraySize !! size to allocate plan for
    integer, intent(out), optional :: status
    integer :: ierror
    integer :: i, junk

    if (present(status)) status = 0

    !! Set the gsComponentPlan pointer
    !! to the component plan we want to use for this gather/scatter plan.
    !! WARNING: this could be dangerous, since thisComponentPlan was
    !! originally allocated in C++ and there is a significant difference
    !! between C and F95 pointers....
    this_plan%gsComponentPlan => thisComponentPlan

    ! Set this plan's size for the displs array.
    this_plan%gsPlanSize = nDim * nObjects

    ! Duplicate the communicator for this plan
    call mpi_comm_dup(thisComm, this_plan%myPlanComm, mpi_err)
    if (mpi_err /= 0) then
       if (present(status)) status = -1
       return
    end if
    call mpi_comm_rank(this_plan%myPlanComm, this_plan%myPlanRank, mpi_err)
    if (mpi_err /= 0) then
       if (present(status)) status = -1
       return
    end if

    call mpi_comm_size(this_plan%myPlanComm, this_plan%planNprocs, mpi_err)
    if (mpi_err /= 0) then
       if (present(status)) status = -1
       return
    end if

    !! The counts and displacements arrays are indexed from 0 to be
    !! compatible with MPI arrays.
    allocate (this_plan%counts(0:this_plan%planNprocs-1), STAT=ierror)
    if (ierror /= 0) then
       if (present(status)) status = -1
       return
    end if

    allocate (this_plan%displs(0:this_plan%planNprocs-1), STAT=ierror)
    if (ierror /= 0) then
       if (present(status)) status = -1
       return
    end if

    !! Gather all the local plan sizes into an array of length planNprocs.
    call mpi_allgather(this_plan%gsPlanSize, 1, mpi_integer, this_plan%counts, &
         1, mpi_integer, thisComm, mpi_err)
    if (mpi_err /= 0) then
       if (present(status)) status = -1
       return
    end if

    !! Figure out the total number of components in this plan.
    this_plan%globalPlanSize = sum(this_plan%counts)

    !! Initialize the plan displacements as a running sum of the counts.
    this_plan%displs(0) = 0
    do i = 1, this_plan%planNprocs - 1, 1
       this_plan%displs(i) = this_plan%displs(i-1) + this_plan%counts(i-1)
    end do
  end subroutine plan_gather_scatter
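
  !! Worked example (illustrative numbers): for a 3-processor plan with
  !! nDim = 3 and local object counts {10, 20, 15}, the allgather above
  !! yields counts = (/30, 60, 45/), so globalPlanSize = 135 and the
  !! running-sum loop gives displs = (/0, 30, 90/) -- exactly the
  !! receive offsets mpi_allgatherv expects.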

  subroutine unplan_gather_scatter(this_plan)
    type (gs_plan), intent(inout) :: this_plan

    this_plan%gsComponentPlan => null()
    call mpi_comm_free(this_plan%myPlanComm, mpi_err)

    deallocate(this_plan%counts)
    deallocate(this_plan%displs)

  end subroutine unplan_gather_scatter

  subroutine gather_integer(sbuffer, rbuffer, this_plan, status)

    type (gs_plan), intent(inout) :: this_plan
    integer, dimension(:), intent(inout) :: sbuffer
    integer, dimension(:), intent(inout) :: rbuffer
    integer :: noffset
    integer, intent(out), optional :: status
    integer :: i

    if (present(status)) status = 0
    noffset = this_plan%displs(this_plan%myPlanRank)

    ! if (getmyNode() == 1) then
    !    write(*,*) "Node 0 printing allgatherv vars"
    !    write(*,*) "Noffset: ", noffset
    !    write(*,*) "PlanSize: ", this_plan%gsPlanSize
    !    write(*,*) "PlanComm: ", this_plan%myPlanComm
    ! end if

    call mpi_allgatherv(sbuffer, this_plan%gsPlanSize, mpi_integer, &
         rbuffer, this_plan%counts, this_plan%displs, mpi_integer, &
         this_plan%myPlanComm, mpi_err)

    if (mpi_err /= 0) then
       if (present(status)) status = -1
    endif

  end subroutine gather_integer

  subroutine gather_double(sbuffer, rbuffer, this_plan, status)

    type (gs_plan), intent(in) :: this_plan
    real( kind = DP ), dimension(:), intent(inout) :: sbuffer
    real( kind = DP ), dimension(:), intent(inout) :: rbuffer
    integer :: noffset
    integer, intent(out), optional :: status

    if (present(status)) status = 0
    noffset = this_plan%displs(this_plan%myPlanRank)
#ifdef PROFILE
    call cpu_time(commTimeInitial)
#endif
    call mpi_allgatherv(sbuffer, this_plan%gsPlanSize, mpi_double_precision, &
         rbuffer, this_plan%counts, this_plan%displs, mpi_double_precision, &
         this_plan%myPlanComm, mpi_err)
#ifdef PROFILE
    call cpu_time(commTimeFinal)
    commTime = commTime + commTimeFinal - commTimeInitial
#endif

    if (mpi_err /= 0) then
       if (present(status)) status = -1
    endif

  end subroutine gather_double

  subroutine gather_double_2d(sbuffer, rbuffer, this_plan, status)

    type (gs_plan), intent(in) :: this_plan
    real( kind = DP ), dimension(:,:), intent(inout) :: sbuffer
    real( kind = DP ), dimension(:,:), intent(inout) :: rbuffer
    integer :: noffset, i, ierror
    integer, intent(out), optional :: status

    external mpi_allgatherv

    if (present(status)) status = 0

    ! noffset = this_plan%displs(this_plan%me)
#ifdef PROFILE
    call cpu_time(commTimeInitial)
#endif

    call mpi_allgatherv(sbuffer, this_plan%gsPlanSize, mpi_double_precision, &
         rbuffer, this_plan%counts, this_plan%displs, mpi_double_precision, &
         this_plan%myPlanComm, mpi_err)

#ifdef PROFILE
    call cpu_time(commTimeFinal)
    commTime = commTime + commTimeFinal - commTimeInitial
#endif

    if (mpi_err /= 0) then
       if (present(status)) status = -1
    endif

  end subroutine gather_double_2d

  subroutine scatter_double(sbuffer, rbuffer, this_plan, status)

    type (gs_plan), intent(in) :: this_plan
    real( kind = DP ), dimension(:), intent(inout) :: sbuffer
    real( kind = DP ), dimension(:), intent(inout) :: rbuffer
    integer, intent(out), optional :: status
    external mpi_reduce_scatter

    if (present(status)) status = 0

#ifdef PROFILE
    call cpu_time(commTimeInitial)
#endif
    call mpi_reduce_scatter(sbuffer, rbuffer, this_plan%counts, &
         mpi_double_precision, MPI_SUM, this_plan%myPlanComm, mpi_err)
#ifdef PROFILE
    call cpu_time(commTimeFinal)
    commTime = commTime + commTimeFinal - commTimeInitial
#endif

    if (mpi_err /= 0) then
       if (present(status)) status = -1
    endif

  end subroutine scatter_double
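
  !! Note: mpi_reduce_scatter is the communication inverse of the
  !! allgatherv used in gather -- it element-wise sums sbuffer across the
  !! plan's communicator and hands each rank the counts(rank)-sized slice
  !! at its own displacement.  Consequently, scatter(gather(x)) applied to
  !! identical data on every rank returns planNprocs * x, not x; the force
  !! loop relies on this summation to accumulate partial forces.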

  subroutine scatter_double_2d(sbuffer, rbuffer, this_plan, status)

    type (gs_plan), intent(in) :: this_plan
    real( kind = DP ), dimension(:,:), intent(inout) :: sbuffer
    real( kind = DP ), dimension(:,:), intent(inout) :: rbuffer
    integer, intent(out), optional :: status
    external mpi_reduce_scatter

    if (present(status)) status = 0
#ifdef PROFILE
    call cpu_time(commTimeInitial)
#endif

    call mpi_reduce_scatter(sbuffer, rbuffer, this_plan%counts, &
         mpi_double_precision, MPI_SUM, this_plan%myPlanComm, mpi_err)
#ifdef PROFILE
    call cpu_time(commTimeFinal)
    commTime = commTime + commTimeFinal - commTimeInitial
#endif

    if (mpi_err /= 0) then
       if (present(status)) status = -1
    endif

  end subroutine scatter_double_2d

  subroutine setAtomTags(tags, status)
    integer, dimension(:) :: tags
    integer :: status

    integer :: alloc_stat

    integer :: nAtomsInCol
    integer :: nAtomsInRow

    status = 0
    ! allocate row arrays
    nAtomsInRow = getNatomsInRow(plan_atom_row)
    nAtomsInCol = getNatomsInCol(plan_atom_col)

    if (allocated(AtomLocalToGlobal)) then
       deallocate(AtomLocalToGlobal)
    endif
    allocate(AtomLocalToGlobal(size(tags)), STAT=alloc_stat)
    if (alloc_stat /= 0) then
       status = -1
       return
    endif

    AtomLocalToGlobal = tags

    if (allocated(AtomRowToGlobal)) then
       deallocate(AtomRowToGlobal)
    endif
    allocate(AtomRowToGlobal(nAtomsInRow), STAT=alloc_stat)
    if (alloc_stat /= 0) then
       status = -1
       return
    endif

    ! allocate column arrays
    if (allocated(AtomColToGlobal)) then
       deallocate(AtomColToGlobal)
    endif
    allocate(AtomColToGlobal(nAtomsInCol), STAT=alloc_stat)
    if (alloc_stat /= 0) then
       status = -1
       return
    endif

    call gather(tags, AtomRowToGlobal, plan_atom_row)
    call gather(tags, AtomColToGlobal, plan_atom_col)

  end subroutine setAtomTags

  subroutine setGroupTags(tags, status)
    integer, dimension(:) :: tags
    integer :: status

    integer :: alloc_stat

    integer :: nGroupsInCol
    integer :: nGroupsInRow

    status = 0

    nGroupsInRow = getNgroupsInRow(plan_group_row)
    nGroupsInCol = getNgroupsInCol(plan_group_col)

    if (allocated(GroupLocalToGlobal)) then
       deallocate(GroupLocalToGlobal)
    endif
    allocate(GroupLocalToGlobal(size(tags)), STAT=alloc_stat)
    if (alloc_stat /= 0) then
       status = -1
       return
    endif

    GroupLocalToGlobal = tags

    if (allocated(GroupRowToGlobal)) then
       deallocate(GroupRowToGlobal)
    endif
    allocate(GroupRowToGlobal(nGroupsInRow), STAT=alloc_stat)
    if (alloc_stat /= 0) then
       status = -1
       return
    endif

    if (allocated(GroupColToGlobal)) then
       deallocate(GroupColToGlobal)
    endif
    allocate(GroupColToGlobal(nGroupsInCol), STAT=alloc_stat)
    if (alloc_stat /= 0) then
       status = -1
       return
    endif

    call gather(tags, GroupRowToGlobal, plan_group_row)
    call gather(tags, GroupColToGlobal, plan_group_col)

  end subroutine setGroupTags

  function getNatomsInCol(thisplan) result(nInCol)
    type (gs_plan), intent(in) :: thisplan
    integer :: nInCol
    nInCol = thisplan%gsComponentPlan%nAtomsInColumn
  end function getNatomsInCol

  function getNatomsInRow(thisplan) result(nInRow)
    type (gs_plan), intent(in) :: thisplan
    integer :: nInRow
    nInRow = thisplan%gsComponentPlan%nAtomsInRow
  end function getNatomsInRow

  function getNgroupsInCol(thisplan) result(nInCol)
    type (gs_plan), intent(in) :: thisplan
    integer :: nInCol
    nInCol = thisplan%gsComponentPlan%nGroupsInColumn
  end function getNgroupsInCol

  function getNgroupsInRow(thisplan) result(nInRow)
    type (gs_plan), intent(in) :: thisplan
    integer :: nInRow
    nInRow = thisplan%gsComponentPlan%nGroupsInRow
  end function getNgroupsInRow

  function isMPISimSet() result(isthisSimSet)
    logical :: isthisSimSet
    isthisSimSet = isSimSet
  end function isMPISimSet

  subroutine printComponentPlan(this_plan, printNode)

    type (mpiComponentPlan), intent(in) :: this_plan
    integer, optional :: printNode
    ! Note: print_me is initialized at run time; an initializer in the
    ! declaration would give it the SAVE attribute, so a .true. from a
    ! previous call would stick across calls.
    logical :: print_me

    print_me = .false.
    if (present(printNode)) then
       if (printNode == mpiSim%myNode) print_me = .true.
    else
       print_me = .true.
    endif

    if (print_me) then
       write(default_error,*) "SetupSimParallel: writing component plan"

       write(default_error,*) "nMolGlobal: ", mpiSim%nMolGlobal
       write(default_error,*) "nAtomsGlobal: ", mpiSim%nAtomsGlobal
       write(default_error,*) "nAtomsLocal: ", mpiSim%nAtomsLocal
       write(default_error,*) "myNode: ", mpiSim%myNode
       write(default_error,*) "nProcessors: ", mpiSim%nProcessors
       write(default_error,*) "rowComm: ", mpiSim%rowComm
       write(default_error,*) "columnComm: ", mpiSim%columnComm
       write(default_error,*) "nRows: ", mpiSim%nRows
       write(default_error,*) "nColumns: ", mpiSim%nColumns
       write(default_error,*) "nAtomsInRow: ", mpiSim%nAtomsInRow
       write(default_error,*) "nAtomsInColumn: ", mpiSim%nAtomsInColumn
       write(default_error,*) "rowIndex: ", mpiSim%rowIndex
       write(default_error,*) "columnIndex: ", mpiSim%columnIndex
    endif
  end subroutine printComponentPlan

  function getMyNode() result(myNode)
    integer :: myNode
    myNode = mpiSim%myNode
  end function getMyNode

#ifdef PROFILE
  subroutine printCommTime()
    write(*,*) "MPI communication time is: ", commTime
  end subroutine printCommTime

  function getCommTime() result(comm_time)
    real :: comm_time
    comm_time = commTime
  end function getCommTime

#endif

#endif // is_mpi
end module mpiSimulation