!! MPI support for long range forces using force decomposition
!! on a square grid of processors.
!! Corresponds to mpiSimulation.cpp for C++.
!! mpi_module also contains a private interface for mpi f90 routines.
!!
!! @author Charles F. Vardeman II
!! @author Matthew Meineke
!! @version $Id: mpiSimulation_module.F90,v 1.13 2004-05-27 00:48:12 tim Exp $, $Date: 2004-05-27 00:48:12 $, $Name: not supported by cvs2svn $, $Revision: 1.13 $

module mpiSimulation
  use definitions
#ifdef IS_MPI
  use oopseMPI
  implicit none
  PRIVATE

  !! PUBLIC Subroutines contained in this module.
  !! gather and scatter are generic interfaces
  !! to the gather and scatter routines.
  public :: gather, scatter
  public :: setupSimParallel
  public :: replanSimParallel
  public :: getNatomsInCol
  public :: getNatomsInRow
  public :: getNgroupsInCol
  public :: getNgroupsInRow
  public :: isMPISimSet
  public :: printComponentPlan
  public :: getMyNode

  !! PUBLIC Subroutines contained in the MPI module
  public :: mpi_bcast
  public :: mpi_allreduce
  ! public :: mpi_reduce
  public :: mpi_send
  public :: mpi_recv
  public :: mpi_get_processor_name
  public :: mpi_finalize

  !! PUBLIC mpi variables
  public :: mpi_comm_world
  public :: mpi_character
  public :: mpi_integer
  public :: mpi_double_precision
  public :: mpi_sum
  public :: mpi_max
  public :: mpi_status_size
  public :: mpi_any_source

  !! Safety logical to prevent access to the ComponentPlan until
  !! it is set by C++.
  logical, save :: ComponentPlanSet = .false.

  !! generic mpi error declaration.
  integer, public :: mpi_err

#ifdef PROFILE
  public :: printCommTime
  public :: getCommTime
  real, save :: commTime = 0.0
  real :: commTimeInitial, commTimeFinal
#endif

  !! Include the mpiComponentPlan type. mpiComponentPlan is a
  !! dual header file for both C and Fortran.
#define __FORTRAN90
#include "mpiComponentPlan.h"

  !! Tags used during the force loop for parallel simulation
  integer, public, allocatable, dimension(:) :: AtomLocalToGlobal
  integer, public, allocatable, dimension(:) :: AtomRowToGlobal
  integer, public, allocatable, dimension(:) :: AtomColToGlobal
  integer, public, allocatable, dimension(:) :: GroupLocalToGlobal
  integer, public, allocatable, dimension(:) :: GroupRowToGlobal
  integer, public, allocatable, dimension(:) :: GroupColToGlobal

  !! Logical set true if mpiSimulation has been initialized
  logical, save :: isSimSet = .false.

  type (mpiComponentPlan), save :: mpiSim

  !! gs_plan contains plans for the gather and scatter routines
  type, public :: gs_plan
     private
     type (mpiComponentPlan), pointer :: gsComponentPlan => NULL()
     integer :: gsPlanSize       !! size of this plan (nDim*nComponents)
     integer :: globalPlanSize   !! size of all components in plan
     integer, dimension(:), pointer :: displs !! Displacements array for mpi indexed from 0.
     integer, dimension(:), pointer :: counts !! Counts array for mpi indexed from 0.
     integer :: myPlanComm  !! My communicator for this plan
     integer :: myPlanRank  !! My rank in this plan
     integer :: planNprocs  !! Number of processors in this plan
  end type gs_plan

  ! plans for different decompositions
  type (gs_plan), public, save :: plan_atom_row
  type (gs_plan), public, save :: plan_atom_row_3d
  type (gs_plan), public, save :: plan_atom_col
  type (gs_plan), public, save :: plan_atom_col_3d
  type (gs_plan), public, save :: plan_atom_row_Rotation
  type (gs_plan), public, save :: plan_atom_col_Rotation
  type (gs_plan), public, save :: plan_group_row
  type (gs_plan), public, save :: plan_group_col
  type (gs_plan), public, save :: plan_group_row_3d
  type (gs_plan), public, save :: plan_group_col_3d

  type (mpiComponentPlan), pointer :: simComponentPlan

  ! interface for gather scatter routines
  !! Generic interface for gather.
  !! Gathers a local array into a row or column array.
  !! Interface provided for integer, double, and double
  !! rank 2 arrays.
  interface gather
     module procedure gather_integer
     module procedure gather_double
     module procedure gather_double_2d
  end interface

  !! Generic interface for scatter.
  !! Scatters a row or column array, adding components
  !! and reducing them to a local nComponent array.
  !! Interface provided for double and double rank=2 arrays.
  interface scatter
     module procedure scatter_double
     module procedure scatter_double_2d
  end interface

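  !! Hypothetical usage sketch (names below are illustrative, not
  !! variables declared in this module): gather a local position array
  !! q(nDim, nAtomsLocal) into a row-indexed array q_Row(nDim, nAtomsInRow),
  !! accumulate pair forces there, then scatter-reduce the row
  !! contributions back onto the locally owned atoms:
  !!
  !!   call gather(q, q_Row, plan_atom_row_3d)
  !!   ! ... accumulate pair forces into f_Row(nDim, nAtomsInRow) ...
  !!   call scatter(f_Row, f_local, plan_atom_row_3d)
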
contains

  !! Sets up the mpiComponentPlan with the structure passed from C++.
  subroutine setupSimParallel(thisComponentPlan, nAtomTags, atomTags, status)
    ! Passed Arguments
    !! mpiComponentPlan struct from C
    type (mpiComponentPlan), intent(inout) :: thisComponentPlan
    !! Number of tags passed, nlocal
    integer, intent(in) :: nAtomTags
    !! Result status, 0 = normal, -1 = error
    integer, intent(out) :: status
    integer :: localStatus
    !! Global reference tags for the local particles
    integer, dimension(nAtomTags), intent(inout) :: atomTags

    write(*,*) 'mpiSim_mod thinks node', thisComponentPlan%myNode, &
         ' has atomTags(1) = ', atomTags(1)

    status = 0
    if (componentPlanSet) then
       return
    endif
    componentPlanSet = .true.

    !! copy the C component plan to fortran
    mpiSim = thisComponentPlan
    write(*,*) "Setting up simParallel"

    call make_Force_Grid(mpiSim, localStatus)
    if (localStatus /= 0) then
       write(default_error,*) "Error creating force grid"
       status = -1
       return
    endif

    call updateGridComponents(mpiSim, localStatus)
    if (localStatus /= 0) then
       write(default_error,*) "Error updating grid components"
       status = -1
       return
    endif

    !! initialize the gather and scatter plans used in this simulation
    call plan_gather_scatter(1, mpiSim%nAtomsLocal, &
         mpiSim, mpiSim%rowComm, plan_atom_row)
    call plan_gather_scatter(nDim, mpiSim%nAtomsLocal, &
         mpiSim, mpiSim%rowComm, plan_atom_row_3d)
    call plan_gather_scatter(9, mpiSim%nAtomsLocal, &
         mpiSim, mpiSim%rowComm, plan_atom_row_Rotation)
    call plan_gather_scatter(1, mpiSim%nGroupsLocal, &
         mpiSim, mpiSim%rowComm, plan_group_row)
    call plan_gather_scatter(nDim, mpiSim%nGroupsLocal, &
         mpiSim, mpiSim%rowComm, plan_group_row_3d)

    call plan_gather_scatter(1, mpiSim%nAtomsLocal, &
         mpiSim, mpiSim%columnComm, plan_atom_col)
    call plan_gather_scatter(nDim, mpiSim%nAtomsLocal, &
         mpiSim, mpiSim%columnComm, plan_atom_col_3d)
    call plan_gather_scatter(9, mpiSim%nAtomsLocal, &
         mpiSim, mpiSim%columnComm, plan_atom_col_Rotation)
    call plan_gather_scatter(1, mpiSim%nGroupsLocal, &
         mpiSim, mpiSim%columnComm, plan_group_col)
    call plan_gather_scatter(nDim, mpiSim%nGroupsLocal, &
         mpiSim, mpiSim%columnComm, plan_group_col_3d)

    ! Initialize tags
    call setAtomTags(atomTags, localStatus)
    if (localStatus /= 0) then
       status = -1
       return
    endif
    isSimSet = .true.

    ! call printComponentPlan(mpiSim,0)
  end subroutine setupSimParallel

  subroutine replanSimParallel(thisComponentPlan, status)
    ! Passed Arguments
    !! mpiComponentPlan struct from C
    type (mpiComponentPlan), intent(inout) :: thisComponentPlan
    integer, intent(out) :: status
    integer :: localStatus
    integer :: mpierror
    status = 0

    call updateGridComponents(thisComponentPlan, localStatus)
    if (localStatus /= 0) then
       status = -1
       return
    endif

    !! Unplan Gather Scatter plans
    call unplan_gather_scatter(plan_atom_row)
    call unplan_gather_scatter(plan_atom_row_3d)
    call unplan_gather_scatter(plan_atom_row_Rotation)
    call unplan_gather_scatter(plan_group_row)
    call unplan_gather_scatter(plan_group_row_3d)

    call unplan_gather_scatter(plan_atom_col)
    call unplan_gather_scatter(plan_atom_col_3d)
    call unplan_gather_scatter(plan_atom_col_Rotation)
    call unplan_gather_scatter(plan_group_col)
    call unplan_gather_scatter(plan_group_col_3d)

    !! initialize the gather and scatter plans used in this simulation
    call plan_gather_scatter(1, mpiSim%nAtomsLocal, &
         mpiSim, mpiSim%rowComm, plan_atom_row)
    call plan_gather_scatter(nDim, mpiSim%nAtomsLocal, &
         mpiSim, mpiSim%rowComm, plan_atom_row_3d)
    call plan_gather_scatter(9, mpiSim%nAtomsLocal, &
         mpiSim, mpiSim%rowComm, plan_atom_row_Rotation)
    call plan_gather_scatter(1, mpiSim%nGroupsLocal, &
         mpiSim, mpiSim%rowComm, plan_group_row)
    call plan_gather_scatter(nDim, mpiSim%nGroupsLocal, &
         mpiSim, mpiSim%rowComm, plan_group_row_3d)

    call plan_gather_scatter(1, mpiSim%nAtomsLocal, &
         mpiSim, mpiSim%columnComm, plan_atom_col)
    call plan_gather_scatter(nDim, mpiSim%nAtomsLocal, &
         mpiSim, mpiSim%columnComm, plan_atom_col_3d)
    call plan_gather_scatter(9, mpiSim%nAtomsLocal, &
         mpiSim, mpiSim%columnComm, plan_atom_col_Rotation)
    call plan_gather_scatter(1, mpiSim%nGroupsLocal, &
         mpiSim, mpiSim%columnComm, plan_group_col)
    call plan_gather_scatter(nDim, mpiSim%nGroupsLocal, &
         mpiSim, mpiSim%columnComm, plan_group_col_3d)

  end subroutine replanSimParallel

  !! Updates number of row and column components for long range forces.
  subroutine updateGridComponents(thisComponentPlan, status)
    type (mpiComponentPlan) :: thisComponentPlan !! mpiComponentPlan

    !! Status return
    !! -  0 Success
    !! - -1 Failure
    integer, intent(out) :: status
    integer :: nAtomsLocal
    integer :: nAtomsInRow = 0
    integer :: nAtomsInColumn = 0
    integer :: nGroupsLocal
    integer :: nGroupsInRow = 0
    integer :: nGroupsInColumn = 0
    integer :: mpiErrors

    status = 0
    if (.not. componentPlanSet) return

    if (thisComponentPlan%nAtomsLocal == 0) then
       status = -1
       return
    endif
    if (thisComponentPlan%nGroupsLocal == 0) then
       status = -1
       return
    endif

    nAtomsLocal = thisComponentPlan%nAtomsLocal
    nGroupsLocal = thisComponentPlan%nGroupsLocal

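    !! Illustrative note (added; not in the original source): each
    !! allreduce below sums the local counts over one communicator. If,
    !! for example, the three processors in this processor's row own 10,
    !! 12, and 11 atoms, every one of them obtains nAtomsInRow = 33.
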
    call mpi_allreduce(nAtomsLocal, nAtomsInRow, 1, mpi_integer, &
         mpi_sum, thisComponentPlan%rowComm, mpiErrors)
    if (mpiErrors /= 0) then
       status = -1
       return
    endif

    call mpi_allreduce(nAtomsLocal, nAtomsInColumn, 1, mpi_integer, &
         mpi_sum, thisComponentPlan%columnComm, mpiErrors)
    if (mpiErrors /= 0) then
       status = -1
       return
    endif

    call mpi_allreduce(nGroupsLocal, nGroupsInRow, 1, mpi_integer, &
         mpi_sum, thisComponentPlan%rowComm, mpiErrors)
    if (mpiErrors /= 0) then
       status = -1
       return
    endif

    call mpi_allreduce(nGroupsLocal, nGroupsInColumn, 1, mpi_integer, &
         mpi_sum, thisComponentPlan%columnComm, mpiErrors)
    if (mpiErrors /= 0) then
       status = -1
       return
    endif

    thisComponentPlan%nAtomsInRow = nAtomsInRow
    thisComponentPlan%nAtomsInColumn = nAtomsInColumn
    thisComponentPlan%nGroupsInRow = nGroupsInRow
    thisComponentPlan%nGroupsInColumn = nGroupsInColumn

  end subroutine updateGridComponents

  !! Creates a square force decomposition of processors into row and column
  !! communicators.
  subroutine make_Force_Grid(thisComponentPlan, status)
    type (mpiComponentPlan) :: thisComponentPlan
    integer, intent(out) :: status !! status returns -1 if error
    integer :: nColumnsMax !! Maximum number of columns
    integer :: nWorldProcessors !! Total number of processors in World comm.
    integer :: rowIndex !! Row for this processor.
    integer :: columnIndex !! Column for this processor.
    integer :: nRows !! Total number of rows.
    integer :: nColumns !! Total number of columns.
    integer :: mpiErrors !! MPI errors.
    integer :: rowCommunicator !! MPI row communicator.
    integer :: columnCommunicator
    integer :: myWorldRank
    integer :: i

    if (.not. ComponentPlanSet) return
    status = 0

    !! We make a dangerous assumption here: if nProcessors is
    !! zero, then we need to get the information from MPI.
    if (thisComponentPlan%nProcessors == 0) then
       call mpi_comm_size(MPI_COMM_WORLD, nWorldProcessors, mpiErrors)
       if (mpiErrors /= 0) then
          status = -1
          return
       endif
       call mpi_comm_rank(MPI_COMM_WORLD, myWorldRank, mpiErrors)
       if (mpiErrors /= 0) then
          status = -1
          return
       endif
    else
       nWorldProcessors = thisComponentPlan%nProcessors
       myWorldRank = thisComponentPlan%myNode
    endif

    nColumnsMax = nint(sqrt(real(nWorldProcessors, kind=dp)))

    do i = 1, nColumnsMax
       if (mod(nWorldProcessors, i) == 0) nColumns = i
    end do

    nRows = nWorldProcessors/nColumns
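
    !! Illustrative note (added; not in the original source): the loop
    !! above keeps the largest divisor of nWorldProcessors that is at most
    !! nint(sqrt(nWorldProcessors)), so the grid is as close to square as
    !! the processor count allows. With 12 processors, nColumnsMax = 3 and
    !! the divisors tested are 1, 2, and 3, giving nColumns = 3, nRows = 4.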

    rowIndex = myWorldRank/nColumns

    call mpi_comm_split(mpi_comm_world, rowIndex, 0, rowCommunicator, mpiErrors)
    if (mpiErrors /= 0) then
       write(default_error,*) "MPI comm split failed at row communicator"
       status = -1
       return
    endif

    columnIndex = mod(myWorldRank, nColumns)
    call mpi_comm_split(mpi_comm_world, columnIndex, 0, columnCommunicator, mpiErrors)
    if (mpiErrors /= 0) then
       write(default_error,*) "MPI comm split failed at column communicator"
       status = -1
       return
    endif

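    !! Illustrative note (added; not in the original source): world ranks
    !! are laid out row-major, so with nColumns = 3, rank 7 gets
    !! rowIndex = 7/3 = 2 and columnIndex = mod(7,3) = 1. Each
    !! mpi_comm_split call groups the ranks sharing an index value into
    !! one communicator.
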
    ! Set appropriate components of thisComponentPlan
    thisComponentPlan%rowComm = rowCommunicator
    thisComponentPlan%columnComm = columnCommunicator
    thisComponentPlan%rowIndex = rowIndex
    thisComponentPlan%columnIndex = columnIndex
    thisComponentPlan%nRows = nRows
    thisComponentPlan%nColumns = nColumns

  end subroutine make_Force_Grid

  !! initializes a gather scatter plan
  subroutine plan_gather_scatter(nDim, nObjects, thisComponentPlan, &
       thisComm, this_plan, status)
    integer, intent(in) :: nDim !! Number of dimensions for gather scatter plan
    integer, intent(in) :: nObjects
    type (mpiComponentPlan), intent(in), target :: thisComponentPlan
    type (gs_plan), intent(out) :: this_plan !! MPI Component Plan
    integer, intent(in) :: thisComm !! MPI communicator for this plan

    integer :: arraySize !! size to allocate plan for
    integer, intent(out), optional :: status
    integer :: ierror
    integer :: i, junk

    if (present(status)) status = 0

    !! Set the gsComponentPlan pointer
    !! to the component plan we want to use for this gather scatter plan.
    !! WARNING: this could be dangerous, since thisComponentPlan was
    !! originally allocated in C++ and there is a significant difference
    !! between C and f95 pointers....
    this_plan%gsComponentPlan => thisComponentPlan

    ! Set this plan's size for the displs array.
    this_plan%gsPlanSize = nDim * nObjects

    ! Duplicate the communicator for this plan
    call mpi_comm_dup(thisComm, this_plan%myPlanComm, mpi_err)
    if (mpi_err /= 0) then
       if (present(status)) status = -1
       return
    end if
    call mpi_comm_rank(this_plan%myPlanComm, this_plan%myPlanRank, mpi_err)
    if (mpi_err /= 0) then
       if (present(status)) status = -1
       return
    end if

    call mpi_comm_size(this_plan%myPlanComm, this_plan%planNprocs, mpi_err)
    if (mpi_err /= 0) then
       if (present(status)) status = -1
       return
    end if

    !! The counts and displacements arrays are indexed from 0 to be
    !! compatible with MPI arrays.
    allocate (this_plan%counts(0:this_plan%planNprocs-1), STAT=ierror)
    if (ierror /= 0) then
       if (present(status)) status = -1
       return
    end if

    allocate (this_plan%displs(0:this_plan%planNprocs-1), STAT=ierror)
    if (ierror /= 0) then
       if (present(status)) status = -1
       return
    end if

    !! gather all the local sizes into an array of length planNprocs.
    call mpi_allgather(this_plan%gsPlanSize, 1, mpi_integer, this_plan%counts, &
         1, mpi_integer, thisComm, mpi_err)
    if (mpi_err /= 0) then
       if (present(status)) status = -1
       return
    end if

    !! figure out the total number of particles in this plan
    this_plan%globalPlanSize = sum(this_plan%counts)

    !! initialize the plan displacements.
    this_plan%displs(0) = 0
    do i = 1, this_plan%planNprocs - 1
       this_plan%displs(i) = this_plan%displs(i-1) + this_plan%counts(i-1)
    end do
  end subroutine plan_gather_scatter

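  !! Worked example (added for illustration): if the allgather in
  !! plan_gather_scatter fills counts with (/ 4, 6, 5 /), the displacement
  !! loop produces displs = (/ 0, 4, 10 /); each rank's block in the
  !! gathered array starts where the previous rank's block ends.
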
  subroutine unplan_gather_scatter(this_plan)
    type (gs_plan), intent(inout) :: this_plan

    this_plan%gsComponentPlan => null()
    call mpi_comm_free(this_plan%myPlanComm, mpi_err)

    deallocate(this_plan%counts)
    deallocate(this_plan%displs)

  end subroutine unplan_gather_scatter

  subroutine gather_integer(sbuffer, rbuffer, this_plan, status)

    type (gs_plan), intent(inout) :: this_plan
    integer, dimension(:), intent(inout) :: sbuffer
    integer, dimension(:), intent(inout) :: rbuffer
    integer :: noffset
    integer, intent(out), optional :: status
    integer :: i

    if (present(status)) status = 0
    noffset = this_plan%displs(this_plan%myPlanRank)

!    if (getmyNode() == 1) then
!       write(*,*) "Node 0 printing allgatherv vars"
!       write(*,*) "Noffset: ", noffset
!       write(*,*) "PlanSize: ", this_plan%gsPlanSize
!       write(*,*) "PlanComm: ", this_plan%myPlanComm
!    end if

    call mpi_allgatherv(sbuffer, this_plan%gsPlanSize, mpi_integer, &
         rbuffer, this_plan%counts, this_plan%displs, mpi_integer, &
         this_plan%myPlanComm, mpi_err)

    if (mpi_err /= 0) then
       if (present(status)) status = -1
    endif

  end subroutine gather_integer

  subroutine gather_double(sbuffer, rbuffer, this_plan, status)

    type (gs_plan), intent(in) :: this_plan
    real( kind = DP ), dimension(:), intent(inout) :: sbuffer
    real( kind = DP ), dimension(:), intent(inout) :: rbuffer
    integer :: noffset
    integer, intent(out), optional :: status

    if (present(status)) status = 0
    noffset = this_plan%displs(this_plan%myPlanRank)
#ifdef PROFILE
    call cpu_time(commTimeInitial)
#endif
    call mpi_allgatherv(sbuffer, this_plan%gsPlanSize, mpi_double_precision, &
         rbuffer, this_plan%counts, this_plan%displs, mpi_double_precision, &
         this_plan%myPlanComm, mpi_err)
#ifdef PROFILE
    call cpu_time(commTimeFinal)
    commTime = commTime + commTimeFinal - commTimeInitial
#endif

    if (mpi_err /= 0) then
       if (present(status)) status = -1
    endif

  end subroutine gather_double

  subroutine gather_double_2d(sbuffer, rbuffer, this_plan, status)

    type (gs_plan), intent(in) :: this_plan
    real( kind = DP ), dimension(:,:), intent(inout) :: sbuffer
    real( kind = DP ), dimension(:,:), intent(inout) :: rbuffer
    integer :: noffset, i, ierror
    integer, intent(out), optional :: status

    external mpi_allgatherv

    if (present(status)) status = 0

!    noffset = this_plan%displs(this_plan%me)
#ifdef PROFILE
    call cpu_time(commTimeInitial)
#endif

    call mpi_allgatherv(sbuffer, this_plan%gsPlanSize, mpi_double_precision, &
         rbuffer, this_plan%counts, this_plan%displs, mpi_double_precision, &
         this_plan%myPlanComm, mpi_err)

#ifdef PROFILE
    call cpu_time(commTimeFinal)
    commTime = commTime + commTimeFinal - commTimeInitial
#endif

    if (mpi_err /= 0) then
       if (present(status)) status = -1
    endif

  end subroutine gather_double_2d

  subroutine scatter_double(sbuffer, rbuffer, this_plan, status)

    type (gs_plan), intent(in) :: this_plan
    real( kind = DP ), dimension(:), intent(inout) :: sbuffer
    real( kind = DP ), dimension(:), intent(inout) :: rbuffer
    integer, intent(out), optional :: status
    external mpi_reduce_scatter

    if (present(status)) status = 0

#ifdef PROFILE
    call cpu_time(commTimeInitial)
#endif
    call mpi_reduce_scatter(sbuffer, rbuffer, this_plan%counts, &
         mpi_double_precision, MPI_SUM, this_plan%myPlanComm, mpi_err)
#ifdef PROFILE
    call cpu_time(commTimeFinal)
    commTime = commTime + commTimeFinal - commTimeInitial
#endif

    if (mpi_err /= 0) then
       if (present(status)) status = -1
    endif

  end subroutine scatter_double

  subroutine scatter_double_2d(sbuffer, rbuffer, this_plan, status)

    type (gs_plan), intent(in) :: this_plan
    real( kind = DP ), dimension(:,:), intent(inout) :: sbuffer
    real( kind = DP ), dimension(:,:), intent(inout) :: rbuffer
    integer, intent(out), optional :: status
    external mpi_reduce_scatter

    if (present(status)) status = 0
#ifdef PROFILE
    call cpu_time(commTimeInitial)
#endif

    call mpi_reduce_scatter(sbuffer, rbuffer, this_plan%counts, &
         mpi_double_precision, MPI_SUM, this_plan%myPlanComm, mpi_err)
#ifdef PROFILE
    call cpu_time(commTimeFinal)
    commTime = commTime + commTimeFinal - commTimeInitial
#endif

    if (mpi_err /= 0) then
       if (present(status)) status = -1
    endif

  end subroutine scatter_double_2d

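  !! Note (added for illustration): both scatter routines rely on
  !! mpi_reduce_scatter, which performs an element-wise MPI_SUM across the
  !! plan communicator and then hands counts(i) consecutive elements of
  !! the result to rank i. This is how force contributions computed in row
  !! or column space are summed back onto the processors owning the atoms.
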
  subroutine setAtomTags(tags, status)
    integer, dimension(:) :: tags
    integer :: status

    integer :: alloc_stat

    integer :: nAtomsInCol
    integer :: nAtomsInRow

    status = 0
    ! allocate row arrays
    nAtomsInRow = getNatomsInRow(plan_atom_row)
    nAtomsInCol = getNatomsInCol(plan_atom_col)

    if (.not. allocated(AtomLocalToGlobal)) then
       allocate(AtomLocalToGlobal(size(tags)), STAT=alloc_stat)
       if (alloc_stat /= 0) then
          status = -1
          return
       endif
    else
       deallocate(AtomLocalToGlobal)
       allocate(AtomLocalToGlobal(size(tags)), STAT=alloc_stat)
       if (alloc_stat /= 0) then
          status = -1
          return
       endif
    endif

    AtomLocalToGlobal = tags

    if (.not. allocated(AtomRowToGlobal)) then
       allocate(AtomRowToGlobal(nAtomsInRow), STAT=alloc_stat)
       if (alloc_stat /= 0) then
          status = -1
          return
       endif
    else
       deallocate(AtomRowToGlobal)
       allocate(AtomRowToGlobal(nAtomsInRow), STAT=alloc_stat)
       if (alloc_stat /= 0) then
          status = -1
          return
       endif
    endif

    ! allocate column arrays
    if (.not. allocated(AtomColToGlobal)) then
       allocate(AtomColToGlobal(nAtomsInCol), STAT=alloc_stat)
       if (alloc_stat /= 0) then
          status = -1
          return
       endif
    else
       deallocate(AtomColToGlobal)
       allocate(AtomColToGlobal(nAtomsInCol), STAT=alloc_stat)
       if (alloc_stat /= 0) then
          status = -1
          return
       endif
    endif

    call gather(tags, AtomRowToGlobal, plan_atom_row)
    call gather(tags, AtomColToGlobal, plan_atom_col)

  end subroutine setAtomTags

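  !! Note (added for illustration): after the two gathers in setAtomTags,
  !! AtomRowToGlobal(i) and AtomColToGlobal(i) should hold the global tag
  !! of the i-th atom in this processor's row and column respectively,
  !! letting row- and column-indexed arrays be mapped back to global atom
  !! identities.
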
  function getNatomsInCol(thisplan) result(nInCol)
    type (gs_plan), intent(in) :: thisplan
    integer :: nInCol
    nInCol = thisplan%gsComponentPlan%nAtomsInColumn
  end function getNatomsInCol

  function getNatomsInRow(thisplan) result(nInRow)
    type (gs_plan), intent(in) :: thisplan
    integer :: nInRow
    nInRow = thisplan%gsComponentPlan%nAtomsInRow
  end function getNatomsInRow

  function getNgroupsInCol(thisplan) result(nInCol)
    type (gs_plan), intent(in) :: thisplan
    integer :: nInCol
    nInCol = thisplan%gsComponentPlan%nGroupsInColumn
  end function getNgroupsInCol

  function getNgroupsInRow(thisplan) result(nInRow)
    type (gs_plan), intent(in) :: thisplan
    integer :: nInRow
    nInRow = thisplan%gsComponentPlan%nGroupsInRow
  end function getNgroupsInRow

  function isMPISimSet() result(isthisSimSet)
    logical :: isthisSimSet
    if (isSimSet) then
       isthisSimSet = .true.
    else
       isthisSimSet = .false.
    endif
  end function isMPISimSet

  subroutine printComponentPlan(this_plan, printNode)

    type (mpiComponentPlan), intent(in) :: this_plan
    integer, optional :: printNode
    logical :: print_me

    print_me = .false.
    if (present(printNode)) then
       if (printNode == mpiSim%myNode) print_me = .true.
    else
       print_me = .true.
    endif

    if (print_me) then
       write(default_error,*) "SetupSimParallel: writing component plan"

       write(default_error,*) "nMolGlobal: ", mpiSim%nMolGlobal
       write(default_error,*) "nAtomsGlobal: ", mpiSim%nAtomsGlobal
       write(default_error,*) "nBondsGlobal: ", mpiSim%nBondsGlobal
       write(default_error,*) "nTorsionsGlobal: ", mpiSim%nTorsionsGlobal
       write(default_error,*) "nSRIGlobal: ", mpiSim%nSRIGlobal
       write(default_error,*) "nAtomsLocal: ", mpiSim%nAtomsLocal
       write(default_error,*) "myNode: ", mpiSim%myNode
       write(default_error,*) "nProcessors: ", mpiSim%nProcessors
       write(default_error,*) "rowComm: ", mpiSim%rowComm
       write(default_error,*) "columnComm: ", mpiSim%columnComm
       write(default_error,*) "nRows: ", mpiSim%nRows
       write(default_error,*) "nColumns: ", mpiSim%nColumns
       write(default_error,*) "nAtomsInRow: ", mpiSim%nAtomsInRow
       write(default_error,*) "nAtomsInColumn: ", mpiSim%nAtomsInColumn
       write(default_error,*) "rowIndex: ", mpiSim%rowIndex
       write(default_error,*) "columnIndex: ", mpiSim%columnIndex
    endif
  end subroutine printComponentPlan

  function getMyNode() result(myNode)
    integer :: myNode
    myNode = mpiSim%myNode
  end function getMyNode

#ifdef PROFILE
  subroutine printCommTime()
    write(*,*) "MPI communication time is: ", commTime
  end subroutine printCommTime

  function getCommTime() result(comm_time)
    real :: comm_time
    comm_time = commTime
  end function getCommTime
#endif

#endif // is_mpi
end module mpiSimulation