root/group/trunk/OOPSE/libmdtools/mpiSimulation_module.F90

Comparing trunk/OOPSE/libmdtools/mpiSimulation_module.F90 (file contents):
Revision 882 by chuckv, Wed Dec 17 20:13:33 2003 UTC vs.
Revision 1214 by gezelter, Tue Jun 1 18:42:58 2004 UTC

# Line 7 | Line 7
7   !!
8   !! @author Charles F. Vardeman II
9   !! @author Matthew Meineke
10 < !! @version $Id: mpiSimulation_module.F90,v 1.10 2003-12-17 20:13:33 chuckv Exp $, $Date: 2003-12-17 20:13:33 $, $Name: not supported by cvs2svn $, $Revision: 1.10 $
10 > !! @version $Id: mpiSimulation_module.F90,v 1.15 2004-06-01 18:42:58 gezelter Exp $, $Date: 2004-06-01 18:42:58 $, $Name: not supported by cvs2svn $, $Revision: 1.15 $
11  
12   module mpiSimulation  
13    use definitions
# Line 23 | Line 23 | module mpiSimulation  
23    public :: gather, scatter
24    public :: setupSimParallel
25    public :: replanSimParallel
26 <  public :: getNcol
27 <  public :: getNrow
26 >  public :: getNatomsInCol
27 >  public :: getNatomsInRow
28 >  public :: getNgroupsInCol
29 >  public :: getNgroupsInRow
30    public :: isMPISimSet
31    public :: printComponentPlan
32    public :: getMyNode
# Line 60 | Line 62 | module mpiSimulation  
62  
63   #ifdef PROFILE
64    public :: printCommTime
65 <
66 <  real(kind = dp ), save :: commTime = 0.0_dp
67 <  real(kind = dp ) :: commTimeInitial,commTimeFinal
65 >  public :: getCommTime
66 >  real,save   :: commTime = 0.0
67 >  real   :: commTimeInitial,commTimeFinal
68   #endif
69  
70   !! Include mpiComponentPlan type. mpiComponentPlan is a
# Line 71 | Line 73 | module mpiSimulation  
73   #include "mpiComponentPlan.h"
74  
75  
74
76   !! Tags used during force loop for parallel simulation
77 <  integer, public, allocatable, dimension(:) :: tagLocal
78 <  integer, public, allocatable, dimension(:) :: tagRow
79 <  integer, public, allocatable, dimension(:) :: tagColumn
77 >  integer, public, allocatable, dimension(:) :: AtomLocalToGlobal
78 >  integer, public, allocatable, dimension(:) :: AtomRowToGlobal
79 >  integer, public, allocatable, dimension(:) :: AtomColToGlobal
80 >  integer, public, allocatable, dimension(:) :: GroupLocalToGlobal
81 >  integer, public, allocatable, dimension(:) :: GroupRowToGlobal
82 >  integer, public, allocatable, dimension(:) :: GroupColToGlobal
83  
84   !! Logical set true if mpiSimulation has been initialized
85    logical, save :: isSimSet = .false.
# Line 97 | Line 101 | module mpiSimulation  
101    end type gs_plan
102  
103   ! plans for different decompositions
104 <  type (gs_plan), public, save :: plan_row
105 <  type (gs_plan), public, save :: plan_row3d
106 <  type (gs_plan), public, save :: plan_col
107 <  type (gs_plan), public, save :: plan_col3d
108 <  type (gs_plan),  public, save :: plan_row_Rotation
109 <  type (gs_plan),  public, save :: plan_col_Rotation
104 >  type (gs_plan), public, save :: plan_atom_row
105 >  type (gs_plan), public, save :: plan_atom_row_3d
106 >  type (gs_plan), public, save :: plan_atom_col
107 >  type (gs_plan), public, save :: plan_atom_col_3d
108 >  type (gs_plan),  public, save :: plan_atom_row_Rotation
109 >  type (gs_plan),  public, save :: plan_atom_col_Rotation
110 >  type (gs_plan),  public, save :: plan_group_row
111 >  type (gs_plan),  public, save :: plan_group_col
112 >  type (gs_plan),  public, save :: plan_group_row_3d
113 >  type (gs_plan),  public, save :: plan_group_col_3d
114  
115    type (mpiComponentPlan), pointer :: simComponentPlan
116  
# Line 131 | Line 139 | contains
139  
140   contains
141  
142 < !! Sets up mpiComponentPlan with structure passed from C++.
143 <  subroutine setupSimParallel(thisComponentPlan,ntags,tags,status)
144 < !  Passed Arguments
145 < !    integer, intent(inout) :: nDim !! Number of dimensions
142 >  !! Sets up mpiComponentPlan with structure passed from C++.
143 >  subroutine setupSimParallel(thisComponentPlan, nAtomTags, atomTags, &
144 >       nGroupTags, groupTags, status)
145 >    !! Passed Arguments
146      !! mpiComponentPlan struct from C
147      type (mpiComponentPlan), intent(inout) :: thisComponentPlan
148 < !! Number of tags passed, nlocal  
149 <    integer, intent(in) :: ntags
150 < !! Result status, 0 = normal, -1 = error
148 >    !! Number of tags passed
149 >    integer, intent(in) :: nAtomTags, nGroupTags
150 >    !! Result status, 0 = normal, -1 = error
151      integer, intent(out) :: status
152      integer :: localStatus
153 < !! Global reference tag for local particles
154 <    integer, dimension(ntags),intent(inout) :: tags
153 >    !! Global reference tag for local particles
154 >    integer, dimension(nAtomTags), intent(inout) :: atomTags
155 >    integer, dimension(nGroupTags), intent(inout) :: groupTags
156  
157 <    write(*,*) 'mpiSim_mod thinks node', thisComponentPlan%myNode, ' has tags(1) = ', tags(1)
158 <
159 <
151 <
157 >    !write(*,*) 'mpiSim_mod thinks node', thisComponentPlan%myNode, &
158 >    !     ' has atomTags(1) = ', atomTags(1)
159 >    
160      status = 0
161      if (componentPlanSet) then
162         return
163      endif
164      componentPlanSet = .true.
165      
166 < !! copy c component plan to fortran  
166 >    !! copy c component plan to fortran  
167      mpiSim = thisComponentPlan
168 <    write(*,*) "Seting up simParallel"
169 <
170 <    call make_Force_Grid(mpiSim,localStatus)
168 >    !write(*,*) "Seting up simParallel"
169 >    
170 >    call make_Force_Grid(mpiSim, localStatus)
171      if (localStatus /= 0) then
172         write(default_error,*) "Error creating force grid"
173         status = -1
174         return
175      endif
176 <
177 <    call updateGridComponents(mpiSim,localStatus)
176 >    
177 >    call updateGridComponents(mpiSim, localStatus)
178      if (localStatus /= 0) then
179         write(default_error,*) "Error updating grid components"
180         status = -1
181         return
182      endif
175    
183  
184      !! initialize gather and scatter plans used in this simulation
185 <    call plan_gather_scatter(1,mpiSim%myNlocal,&
186 <         mpiSim,mpiSim%rowComm,plan_row)
187 <    call plan_gather_scatter(nDim,mpiSim%myNlocal,&
188 <         mpiSim,mpiSim%rowComm,plan_row3d)
189 <    call plan_gather_scatter(9,mpiSim%myNlocal,&
190 <         mpiSim,mpiSim%rowComm,plan_row_Rotation)
191 <    call plan_gather_scatter(1,mpiSim%myNlocal,&
192 <         mpiSim,mpiSim%columnComm,plan_col)
193 <    call plan_gather_scatter(nDim,mpiSim%myNlocal,&
194 <         mpiSim,mpiSim%columnComm,plan_col3d)
195 <   call plan_gather_scatter(9,mpiSim%myNlocal,&
196 <         mpiSim,mpiSim%columnComm,plan_col_Rotation)
185 >    call plan_gather_scatter(1, mpiSim%nAtomsLocal, &
186 >         mpiSim, mpiSim%rowComm, plan_atom_row)
187 >    call plan_gather_scatter(nDim, mpiSim%nAtomsLocal, &
188 >         mpiSim, mpiSim%rowComm, plan_atom_row_3d)
189 >    call plan_gather_scatter(9, mpiSim%nAtomsLocal, &
190 >         mpiSim, mpiSim%rowComm, plan_atom_row_Rotation)
191 >    call plan_gather_scatter(1, mpiSim%nGroupsLocal, &
192 >         mpiSim, mpiSim%rowComm, plan_group_row)
193 >    call plan_gather_scatter(nDim, mpiSim%nGroupsLocal, &
194 >         mpiSim, mpiSim%rowComm, plan_group_row_3d)
195 >        
196 >    call plan_gather_scatter(1, mpiSim%nAtomsLocal, &
197 >         mpiSim, mpiSim%columnComm, plan_atom_col)
198 >    call plan_gather_scatter(nDim, mpiSim%nAtomsLocal, &
199 >         mpiSim, mpiSim%columnComm, plan_atom_col_3d)
200 >    call plan_gather_scatter(9, mpiSim%nAtomsLocal, &
201 >         mpiSim, mpiSim%columnComm, plan_atom_col_Rotation)
202 >    call plan_gather_scatter(1, mpiSim%nGroupsLocal, &
203 >         mpiSim, mpiSim%columnComm, plan_group_col)
204 >    call plan_gather_scatter(nDim, mpiSim%nGroupsLocal, &
205 >         mpiSim, mpiSim%columnComm, plan_group_col_3d)
206 >    
207 > !  Initialize tags    
208  
209 +    call setAtomTags(atomTags,localStatus)
210 +    if (localStatus /= 0) then
211 +       status = -1
212 +       return
213 +    endif
214  
215  
216 < !  Initialize tags    
194 <    call setTags(tags,localStatus)
216 >    call setGroupTags(groupTags,localStatus)
217      if (localStatus /= 0) then
218         status = -1
219         return
220      endif
221 +
222      isSimSet = .true.
223  
224   !    call printComponentPlan(mpiSim,0)
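
The ten plan_gather_scatter calls in the new setupSimParallel differ only in the data width paired with each communicator: width 1 for scalar quantities, nDim for vectors, and 9 for flattened 3x3 rotation matrices. A minimal sketch of the resulting per-processor block sizes, using hypothetical counts (the gsPlanSize = width * count rule is confirmed in plan_gather_scatter below):

    program plan_width_demo
      implicit none
      integer, parameter :: nDim = 3
      integer :: nAtomsLocal, nGroupsLocal

      ! hypothetical per-processor object counts
      nAtomsLocal  = 100
      nGroupsLocal = 40

      write(*,*) 'atom scalar plan block   :', 1    * nAtomsLocal
      write(*,*) 'atom vector plan block   :', nDim * nAtomsLocal
      write(*,*) 'atom rotation plan block :', 9    * nAtomsLocal
      write(*,*) 'group scalar plan block  :', 1    * nGroupsLocal
      write(*,*) 'group vector plan block  :', nDim * nGroupsLocal
    end program plan_width_demo
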
# Line 217 | Line 240 | contains
240      endif
241      
242      !! Unplan Gather Scatter plans
243 <    call unplan_gather_scatter(plan_row)
244 <    call unplan_gather_scatter(plan_row3d)
245 <    call unplan_gather_scatter(plan_row_Rotation)
246 <    call unplan_gather_scatter(plan_col)
247 <    call unplan_gather_scatter(plan_col3d)
225 <    call unplan_gather_scatter(plan_col_Rotation)
243 >    call unplan_gather_scatter(plan_atom_row)
244 >    call unplan_gather_scatter(plan_atom_row_3d)
245 >    call unplan_gather_scatter(plan_atom_row_Rotation)
246 >    call unplan_gather_scatter(plan_group_row)
247 >    call unplan_gather_scatter(plan_group_row_3d)
248  
249 <    !! initialize gather and scatter plans used in this simulation
250 <    call plan_gather_scatter(1,thisComponentPlan%myNlocal,&
251 <         thisComponentPlan,thisComponentPlan%rowComm,plan_row)
252 <    call plan_gather_scatter(nDim,thisComponentPlan%myNlocal,&
253 <         thisComponentPlan,thisComponentPlan%rowComm,plan_row3d)
232 <    call plan_gather_scatter(9,thisComponentPlan%myNlocal,&
233 <         thisComponentPlan,thisComponentPlan%rowComm,plan_row_Rotation)
234 <    call plan_gather_scatter(1,thisComponentPlan%myNlocal,&
235 <         thisComponentPlan,thisComponentPlan%columnComm,plan_col)
236 <    call plan_gather_scatter(nDim,thisComponentPlan%myNlocal,&
237 <         thisComponentPlan,thisComponentPlan%rowComm,plan_col3d)
238 <    call plan_gather_scatter(9,thisComponentPlan%myNlocal,&
239 <         thisComponentPlan,thisComponentPlan%rowComm,plan_col_Rotation)
249 >    call unplan_gather_scatter(plan_atom_col)
250 >    call unplan_gather_scatter(plan_atom_col_3d)
251 >    call unplan_gather_scatter(plan_atom_col_Rotation)
252 >    call unplan_gather_scatter(plan_group_col)
253 >    call unplan_gather_scatter(plan_group_col_3d)
254  
255 <
256 <
255 >    !! initialize gather and scatter plans used in this simulation
256 >    call plan_gather_scatter(1, mpiSim%nAtomsLocal, &
257 >         mpiSim, mpiSim%rowComm, plan_atom_row)
258 >    call plan_gather_scatter(nDim, mpiSim%nAtomsLocal, &
259 >         mpiSim, mpiSim%rowComm, plan_atom_row_3d)
260 >    call plan_gather_scatter(9, mpiSim%nAtomsLocal, &
261 >         mpiSim, mpiSim%rowComm, plan_atom_row_Rotation)
262 >    call plan_gather_scatter(1, mpiSim%nGroupsLocal, &
263 >         mpiSim, mpiSim%rowComm, plan_group_row)
264 >    call plan_gather_scatter(nDim, mpiSim%nGroupsLocal, &
265 >         mpiSim, mpiSim%rowComm, plan_group_row_3d)
266 >        
267 >    call plan_gather_scatter(1, mpiSim%nAtomsLocal, &
268 >         mpiSim, mpiSim%columnComm, plan_atom_col)
269 >    call plan_gather_scatter(nDim, mpiSim%nAtomsLocal, &
270 >         mpiSim, mpiSim%columnComm, plan_atom_col_3d)
271 >    call plan_gather_scatter(9, mpiSim%nAtomsLocal, &
272 >         mpiSim, mpiSim%columnComm, plan_atom_col_Rotation)
273 >    call plan_gather_scatter(1, mpiSim%nGroupsLocal, &
274 >         mpiSim, mpiSim%columnComm, plan_group_col)
275 >    call plan_gather_scatter(nDim, mpiSim%nGroupsLocal, &
276 >         mpiSim, mpiSim%columnComm, plan_group_col_3d)
277 >        
278    end subroutine replanSimParallel
279  
280 < !! Updates number of row and column components for long range forces.
281 <  subroutine updateGridComponents(thisComponentPlan,status)
280 >  !! Updates number of row and column components for long range forces.
281 >  subroutine updateGridComponents(thisComponentPlan, status)
282      type (mpiComponentPlan) :: thisComponentPlan !! mpiComponentPlan
283 <
284 < !! Status return
285 < !! -  0 Success
286 < !! - -1 Failure
283 >    
284 >    !! Status return
285 >    !! -  0 Success
286 >    !! - -1 Failure
287      integer, intent(out) :: status
288 <    integer :: nComponentsLocal
289 <    integer :: nComponentsRow = 0
290 <    integer :: nComponentsColumn = 0
288 >    integer :: nAtomsLocal
289 >    integer :: nAtomsInRow = 0
290 >    integer :: nAtomsInColumn = 0
291 >    integer :: nGroupsLocal
292 >    integer :: nGroupsInRow = 0
293 >    integer :: nGroupsInColumn = 0
294      integer :: mpiErrors
295  
296      status = 0
297      if (.not. componentPlanSet) return
298  
299 <    if (thisComponentPlan%myNlocal == 0 ) then
299 >    if (thisComponentPlan%nAtomsLocal == 0) then
300         status = -1
301         return
302 +    endif  
303 +    if (thisComponentPlan%nGroupsLocal == 0) then
304 +       write(*,*) 'tcp%ngl = ', thisComponentPlan%nGroupsLocal
305 +       status = -1
306 +       return
307      endif
308      
309 <    nComponentsLocal = thisComponentPlan%myNlocal
309 >    nAtomsLocal = thisComponentPlan%nAtomsLocal
310 >    nGroupsLocal = thisComponentPlan%nGroupsLocal
311  
312 <    write(*,*) "UpdateGridComponents: myNlocal ", nComponentsLocal
313 <    call mpi_allreduce(nComponentsLocal,nComponentsRow,1,mpi_integer,&
270 <         mpi_sum,thisComponentPlan%rowComm,mpiErrors)
312 >    call mpi_allreduce(nAtomsLocal, nAtomsInRow, 1, mpi_integer, &
313 >         mpi_sum, thisComponentPlan%rowComm, mpiErrors)
314      if (mpiErrors /= 0) then
315         status = -1
316         return
317      endif
318  
319 <    call mpi_allreduce(nComponentsLocal,nComponentsColumn,1,mpi_integer, &
320 <         mpi_sum,thisComponentPlan%columnComm,mpiErrors)    
319 >    call mpi_allreduce(nAtomsLocal, nAtomsInColumn, 1, mpi_integer, &
320 >         mpi_sum, thisComponentPlan%columnComm, mpiErrors)    
321      if (mpiErrors /= 0) then
322         status = -1
323         return
324      endif
325 +        
326 +    call mpi_allreduce(nGroupsLocal, nGroupsInRow, 1, mpi_integer, &
327 +         mpi_sum, thisComponentPlan%rowComm, mpiErrors)
328 +    if (mpiErrors /= 0) then
329 +       status = -1
330 +       return
331 +    endif
332  
333 <    thisComponentPlan%nComponentsRow = nComponentsRow
334 <    thisComponentPlan%nComponentsColumn = nComponentsColumn
335 <    write(*,*) "UpdateGridComponents: myNRow ",&
336 <         thisComponentPlan%nComponentsRow
337 <    write(*,*) "UpdateGridComponents: myNColumn ",&
338 <         thisComponentPlan%nComponentsColumn
333 >    call mpi_allreduce(nGroupsLocal, nGroupsInColumn, 1, mpi_integer, &
334 >         mpi_sum, thisComponentPlan%columnComm, mpiErrors)    
335 >    if (mpiErrors /= 0) then
336 >       status = -1
337 >       return
338 >    endif
339  
340 +    thisComponentPlan%nAtomsInRow = nAtomsInRow
341 +    thisComponentPlan%nAtomsInColumn = nAtomsInColumn
342 +    thisComponentPlan%nGroupsInRow = nGroupsInRow
343 +    thisComponentPlan%nGroupsInColumn = nGroupsInColumn
344 +
345    end subroutine updateGridComponents
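
updateGridComponents fills in the row and column totals by summing each processor's local count over the row and column communicators with mpi_allreduce. A self-contained sketch of that pattern, assuming a toy two-column layout (all names here are hypothetical stand-ins, not the module's own):

    program row_count_demo
      implicit none
      include 'mpif.h'
      integer :: ierr, rank, nprocs, rowComm, rowIndex
      integer :: nLocal, nInRow

      call mpi_init(ierr)
      call mpi_comm_rank(MPI_COMM_WORLD, rank, ierr)
      call mpi_comm_size(MPI_COMM_WORLD, nprocs, ierr)

      ! toy layout: two columns per row, split as in make_Force_Grid below
      rowIndex = rank / 2
      call mpi_comm_split(MPI_COMM_WORLD, rowIndex, 0, rowComm, ierr)

      nLocal = rank + 1            ! stand-in for nAtomsLocal
      call mpi_allreduce(nLocal, nInRow, 1, mpi_integer, mpi_sum, &
           rowComm, ierr)

      write(*,*) 'rank ', rank, ' sees ', nInRow, ' atoms in its row'
      call mpi_finalize(ierr)
    end program row_count_demo
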
346  
347  
348 < !! Creates a square force decomposition of processors into row and column
349 < !! communicators.
348 >  !! Creates a square force decomposition of processors into row and column
349 >  !! communicators.
350    subroutine make_Force_Grid(thisComponentPlan,status)
351      type (mpiComponentPlan) :: thisComponentPlan
352      integer, intent(out) :: status !! status returns -1 if error
353 <    integer ::  nColumnsMax !! Maximum number of columns
354 <    integer ::  nWorldProcessors !! Total number of processors in World comm.
353 >    integer :: nColumnsMax !! Maximum number of columns
354 >    integer :: nWorldProcessors !! Total number of processors in World comm.
355      integer :: rowIndex !! Row for this processor.
356      integer :: columnIndex !! Column for this processor.
357      integer :: nRows !! Total number of rows.
# Line 311 | Line 366 | contains
366      if (.not. ComponentPlanSet) return
367      status = 0
368    
369 < !! We make a dangerous assumption here that if numberProcessors is
370 < !! zero, then we need to get the information from MPI.
371 <    if (thisComponentPlan%numberProcessors == 0 ) then
369 >    !! We make a dangerous assumption here that if numberProcessors is
370 >    !! zero, then we need to get the information from MPI.
371 >    if (thisComponentPlan%nProcessors == 0 ) then
372         call mpi_comm_size( MPI_COMM_WORLD, nWorldProcessors,mpiErrors)
373         if ( mpiErrors /= 0 ) then
374            status = -1
# Line 324 | Line 379 | contains
379            status = -1
380            return
381         endif
382 <
382 >      
383      else
384 <       nWorldProcessors = thisComponentPlan%numberProcessors
384 >       nWorldProcessors = thisComponentPlan%nProcessors
385         myWorldRank = thisComponentPlan%myNode
386      endif
387 <
333 <
334 <
335 <
387 >    
388      nColumnsMax = nint(sqrt(real(nWorldProcessors,kind=dp)))
389 <
389 >    
390      do i = 1, nColumnsMax
391         if (mod(nWorldProcessors,i) == 0) nColumns = i
392      end do
393 <
393 >    
394      nRows = nWorldProcessors/nColumns
395 <
395 >    
396      rowIndex = myWorldRank/nColumns
397 <
346 <
347 <
397 >    
398      call mpi_comm_split(mpi_comm_world,rowIndex,0,rowCommunicator,mpiErrors)
399      if ( mpiErrors /= 0 ) then
400         write(default_error,*) "MPI comm split failed at row communicator"
401         status = -1
402         return
403      endif
404 <
404 >    
405      columnIndex = mod(myWorldRank,nColumns)
406      call mpi_comm_split(mpi_comm_world,columnIndex,0,columnCommunicator,mpiErrors)
407      if ( mpiErrors /= 0 ) then
# Line 359 | Line 409 | contains
409         status = -1
410         return
411      endif
412 <
413 < ! Set appropriate components of thisComponentPlan
412 >    
413 >    ! Set appropriate components of thisComponentPlan
414      thisComponentPlan%rowComm = rowCommunicator
415      thisComponentPlan%columnComm = columnCommunicator
416      thisComponentPlan%rowIndex = rowIndex
417      thisComponentPlan%columnIndex = columnIndex
418 <    thisComponentPlan%numberRows = nRows
419 <    thisComponentPlan%numberColumns = nColumns
418 >    thisComponentPlan%nRows = nRows
419 >    thisComponentPlan%nColumns = nColumns
420  
371
421    end subroutine make_Force_Grid
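
The divisor loop in make_Force_Grid keeps the largest divisor of the processor count that does not exceed nint(sqrt(nWorldProcessors)), so the grid comes out as close to square as the divisors allow. A standalone sketch of just that arithmetic, no MPI required (the world size is a hypothetical example value):

    program grid_shape_demo
      implicit none
      integer :: nWorldProcessors, nColumnsMax, nColumns, nRows, i

      nWorldProcessors = 12        ! hypothetical world size
      nColumnsMax = nint(sqrt(real(nWorldProcessors)))

      nColumns = 1                 ! i = 1 always divides, so the loop sets this
      do i = 1, nColumnsMax
         if (mod(nWorldProcessors, i) == 0) nColumns = i
      end do
      nRows = nWorldProcessors / nColumns

      ! for 12 processors: nColumnsMax = 3, nColumns = 3, nRows = 4
      write(*,*) nRows, ' rows x ', nColumns, ' columns'
    end program grid_shape_demo
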
422 <
374 <
422 >  
423    !! initializes a gather scatter plan
424 <  subroutine plan_gather_scatter( nDim,nComponents,thisComponentPlan, &
425 <       thisComm, this_plan,status)  
424 >  subroutine plan_gather_scatter( nDim, nObjects, thisComponentPlan, &
425 >       thisComm, this_plan, status)  
426      integer, intent(in) :: nDim !! Number of dimensions for gather scatter plan
427 <    integer, intent(in) :: nComponents
427 >    integer, intent(in) :: nObjects
428      type (mpiComponentPlan), intent(in), target :: thisComponentPlan
429      type (gs_plan), intent(out) :: this_plan !! MPI Component Plan
430      integer, intent(in) :: thisComm !! MPI communicator for this plan
# Line 387 | Line 435 | contains
435      integer :: i,junk
436  
437      if (present(status)) status = 0
390    
391  
438  
439 < !! Set gsComponetPlan pointer
439 > !! Set gsComponentPlan pointer
440   !! to the componet plan we want to use for this gather scatter plan.
441   !! WARNING this could be dangerous since thisComponentPlan was originally
442   !! allocated in C++ and there is a significant difference between c and
# Line 398 | Line 444 | contains
444      this_plan%gsComponentPlan => thisComponentPlan
445  
446   ! Set this plan size for displs array.
447 <    this_plan%gsPlanSize = nDim * nComponents
447 >    this_plan%gsPlanSize = nDim * nObjects
448  
449   ! Duplicate communicator for this plan
450 <    call mpi_comm_dup(thisComm,this_plan%myPlanComm,mpi_err)
450 >    call mpi_comm_dup(thisComm, this_plan%myPlanComm, mpi_err)
451      if (mpi_err /= 0) then
452         if (present(status)) status = -1
453         return
454      end if
455 <    call mpi_comm_rank(this_plan%myPlanComm,this_plan%myPlanRank,mpi_err)
455 >    call mpi_comm_rank(this_plan%myPlanComm, this_plan%myPlanRank, mpi_err)
456      if (mpi_err /= 0) then
457         if (present(status)) status = -1
458         return
459      end if
460  
461 <    call mpi_comm_size(this_plan%myPlanComm,this_plan%planNprocs,mpi_err)
461 >    call mpi_comm_size(this_plan%myPlanComm, this_plan%planNprocs, mpi_err)
462  
463      if (mpi_err /= 0) then
464         if (present(status)) status = -1
# Line 442 | Line 488 | contains
488         return
489      end if
490    
445
491      !! figure out the total number of particles in this plan
492      this_plan%globalPlanSize = sum(this_plan%counts)
493    
449
494      !! initialize plan displacements.
495      this_plan%displs(0) = 0
496      do i = 1, this_plan%planNprocs - 1,1
497         this_plan%displs(i) = this_plan%displs(i-1) + this_plan%counts(i-1)
498      end do
455
456
499    end subroutine plan_gather_scatter
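
The counts and displs arrays built here follow standard mpi_allgatherv bookkeeping: every rank contributes gsPlanSize elements, and rank i's block starts at the running sum of the blocks before it. A small sketch of that exclusive prefix sum, with hypothetical per-rank sizes:

    program displs_demo
      implicit none
      integer, parameter :: planNprocs = 4
      integer :: counts(0:planNprocs-1), displs(0:planNprocs-1), i

      counts = (/ 3, 5, 2, 4 /)    ! hypothetical per-rank block sizes

      displs(0) = 0                ! exclusive prefix sum, as in the loop above
      do i = 1, planNprocs - 1
         displs(i) = displs(i-1) + counts(i-1)
      end do

      ! displs = 0 3 8 10, and globalPlanSize = sum(counts) = 14
      write(*,*) 'displs = ', displs, ' total = ', sum(counts)
    end program displs_demo
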
500  
459
501    subroutine unplan_gather_scatter(this_plan)
502      type (gs_plan), intent(inout) :: this_plan
503      
463    
504      this_plan%gsComponentPlan => null()
505      call mpi_comm_free(this_plan%myPlanComm,mpi_err)
506  
# Line 478 | Line 518 | contains
518      integer, intent(out), optional :: status
519      integer :: i
520  
481
482    
521      if (present(status)) status = 0
522      noffset = this_plan%displs(this_plan%myPlanRank)
523  
# Line 512 | Line 550 | contains
550      if (present(status)) status = 0
551      noffset = this_plan%displs(this_plan%myPlanRank)
552   #ifdef PROFILE
553 <    commTimeInitial = mpi_wtime()
553 >    call cpu_time(commTimeInitial)
554   #endif
555      call mpi_allgatherv(sbuffer,this_plan%gsPlanSize, mpi_double_precision, &
556           rbuffer,this_plan%counts,this_plan%displs,mpi_double_precision, &
557           this_plan%myPlanComm, mpi_err)
558   #ifdef PROFILE
559 <    commTimeFinal = mpi_wtime()
559 >    call cpu_time(commTimeFinal)
560      commTime = commTime + commTimeFinal - commTimeInitial
561   #endif
562  
# Line 542 | Line 580 | contains
580  
581   !    noffset = this_plan%displs(this_plan%me)
582   #ifdef PROFILE
583 <   commTimeInitial = mpi_wtime()
583 >   call cpu_time(commTimeInitial)
584   #endif
585  
586      call mpi_allgatherv(sbuffer,this_plan%gsPlanSize, mpi_double_precision, &
# Line 550 | Line 588 | contains
588          this_plan%myPlanComm, mpi_err)
589  
590   #ifdef PROFILE
591 <    commTimeFinal = mpi_wtime()
591 >    call cpu_time(commTimeFinal)
592      commTime = commTime + commTimeFinal - commTimeInitial
593   #endif
594  
# Line 571 | Line 609 | contains
609     if (present(status)) status = 0
610  
611   #ifdef PROFILE
612 <   commTimeInitial = mpi_wtime()
612 >   call cpu_time(commTimeInitial)
613   #endif
614      call mpi_reduce_scatter(sbuffer,rbuffer, this_plan%counts, &
615           mpi_double_precision, MPI_SUM, this_plan%myPlanComm, mpi_err)
616   #ifdef PROFILE
617 <    commTimeFinal = mpi_wtime()
617 >    call cpu_time(commTimeFinal)
618      commTime = commTime + commTimeFinal - commTimeInitial
619   #endif
620  
# Line 596 | Line 634 | contains
634  
635     if (present(status)) status = 0
636   #ifdef PROFILE
637 <   commTimeInitial = mpi_wtime()
637 >   call cpu_time(commTimeInitial)
638   #endif
639  
640      call mpi_reduce_scatter(sbuffer,rbuffer, this_plan%counts, &
641           mpi_double_precision, MPI_SUM, this_plan%myPlanComm, mpi_err)
642   #ifdef PROFILE
643 <    commTimeFinal = mpi_wtime()
643 >    call cpu_time(commTimeFinal)
644      commTime = commTime + commTimeFinal - commTimeInitial
645   #endif
646  
# Line 611 | Line 649 | contains
649      endif
650  
651    end subroutine scatter_double_2d
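
The gather and scatter routines are duals of one another: mpi_allgatherv replicates every rank's block on all ranks of the plan's communicator, while mpi_reduce_scatter sums the replicated contributions element-wise and hands each rank back only its own block. A self-contained round trip with raw MPI calls, assuming a hypothetical two elements per rank:

    program gather_scatter_duality
      implicit none
      include 'mpif.h'
      integer :: ierr, rank, nprocs, i
      integer, allocatable :: counts(:), displs(:)
      double precision, allocatable :: sbuf(:), gathered(:), summed(:)

      call mpi_init(ierr)
      call mpi_comm_rank(MPI_COMM_WORLD, rank, ierr)
      call mpi_comm_size(MPI_COMM_WORLD, nprocs, ierr)

      allocate(counts(0:nprocs-1), displs(0:nprocs-1))
      counts = 2                   ! two elements from every rank
      displs(0) = 0
      do i = 1, nprocs - 1
         displs(i) = displs(i-1) + counts(i-1)
      end do

      allocate(sbuf(2), gathered(2*nprocs), summed(2))
      sbuf = dble(rank)

      ! forward: every rank ends up with every block (the "gather")
      call mpi_allgatherv(sbuf, 2, mpi_double_precision, gathered, counts, &
           displs, mpi_double_precision, MPI_COMM_WORLD, ierr)

      ! reverse: element-wise sum, each rank keeps only its own block
      call mpi_reduce_scatter(gathered, summed, counts, &
           mpi_double_precision, MPI_SUM, MPI_COMM_WORLD, ierr)

      write(*,*) 'rank ', rank, ' summed block: ', summed
      call mpi_finalize(ierr)
    end program gather_scatter_duality
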
652 <
653 <
616 <  subroutine setTags(tags,status)
652 >  
653 >  subroutine setAtomTags(tags, status)
654      integer, dimension(:) :: tags
655      integer :: status
656  
657      integer :: alloc_stat
658      
659 <    integer :: ncol
660 <    integer :: nrow
659 >    integer :: nAtomsInCol
660 >    integer :: nAtomsInRow
661  
662      status = 0
663   ! allocate row arrays
664 <    nrow = getNrow(plan_row)
665 <    ncol = getNcol(plan_col)
664 >    nAtomsInRow = getNatomsInRow(plan_atom_row)
665 >    nAtomsInCol = getNatomsInCol(plan_atom_col)
666      
667 <    if(.not. allocated(tagLocal)) then
668 <       allocate(tagLocal(size(tags)),STAT=alloc_stat)
667 >    if(.not. allocated(AtomLocalToGlobal)) then
668 >       allocate(AtomLocalToGlobal(size(tags)),STAT=alloc_stat)
669          if (alloc_stat /= 0 ) then
670            status = -1
671            return
672         endif
673      else
674 <       deallocate(tagLocal)
675 <       allocate(tagLocal(size(tags)),STAT=alloc_stat)
674 >       deallocate(AtomLocalToGlobal)
675 >       allocate(AtomLocalToGlobal(size(tags)),STAT=alloc_stat)
676         if (alloc_stat /= 0 ) then
677            status = -1
678            return
# Line 643 | Line 680 | contains
680  
681      endif
682  
683 <    tagLocal = tags
683 >    AtomLocalToGlobal = tags
684  
685 <    
686 <    if (.not. allocated(tagRow)) then
650 <       allocate(tagRow(nrow),STAT=alloc_stat)
685 >    if (.not. allocated(AtomRowToGlobal)) then
686 >       allocate(AtomRowToGlobal(nAtomsInRow),STAT=alloc_stat)
687         if (alloc_stat /= 0 ) then
688            status = -1
689            return
690         endif
691      else
692 <       deallocate(tagRow)
693 <       allocate(tagRow(nrow),STAT=alloc_stat)
692 >       deallocate(AtomRowToGlobal)
693 >       allocate(AtomRowToGlobal(nAtomsInRow),STAT=alloc_stat)
694         if (alloc_stat /= 0 ) then
695            status = -1
696            return
# Line 662 | Line 698 | contains
698  
699      endif
700   ! allocate column arrays
701 <    if (.not. allocated(tagColumn)) then
702 <       allocate(tagColumn(ncol),STAT=alloc_stat)
701 >    if (.not. allocated(AtomColToGlobal)) then
702 >       allocate(AtomColToGlobal(nAtomsInCol),STAT=alloc_stat)
703         if (alloc_stat /= 0 ) then
704            status = -1
705            return
706         endif
707      else
708 <       deallocate(tagColumn)
709 <       allocate(tagColumn(ncol),STAT=alloc_stat)
708 >       deallocate(AtomColToGlobal)
709 >       allocate(AtomColToGlobal(nAtomsInCol),STAT=alloc_stat)
710         if (alloc_stat /= 0 ) then
711            status = -1
712            return
713         endif
714      endif
715      
716 <    call gather(tags,tagRow,plan_row)
717 <    call gather(tags,tagColumn,plan_col)
716 >    call gather(tags, AtomRowToGlobal, plan_atom_row)
717 >    call gather(tags, AtomColToGlobal, plan_atom_col)
718 >    
719 >  end subroutine setAtomTags
720  
721 <  
722 <  end subroutine setTags
721 >  subroutine setGroupTags(tags, status)
722 >    integer, dimension(:) :: tags
723 >    integer :: status
724  
725 < !  pure function getNcol(thisplan) result(ncol)
726 <  function getNcol(thisplan) result(ncol)
725 >    integer :: alloc_stat
726 >    
727 >    integer :: nGroupsInCol
728 >    integer :: nGroupsInRow
729 >
730 >    status = 0
731 >
732 >    nGroupsInRow = getNgroupsInRow(plan_group_row)
733 >    nGroupsInCol = getNgroupsInCol(plan_group_col)
734 >    
735 >    if(allocated(GroupLocalToGlobal)) then
736 >       deallocate(GroupLocalToGlobal)
737 >    endif
738 >    allocate(GroupLocalToGlobal(size(tags)),STAT=alloc_stat)
739 >    if (alloc_stat /= 0 ) then
740 >       status = -1
741 >       return
742 >    endif
743 >    
744 >    GroupLocalToGlobal = tags
745 >
746 >    if(allocated(GroupRowToGlobal)) then
747 >       deallocate(GroupRowToGlobal)
748 >    endif
749 >    allocate(GroupRowToGlobal(nGroupsInRow),STAT=alloc_stat)
750 >    if (alloc_stat /= 0 ) then
751 >       status = -1
752 >       return
753 >    endif
754 >
755 >    if(allocated(GroupColToGlobal)) then
756 >       deallocate(GroupColToGlobal)
757 >    endif
758 >    allocate(GroupColToGlobal(nGroupsInCol),STAT=alloc_stat)
759 >    if (alloc_stat /= 0 ) then
760 >       status = -1
761 >       return
762 >    endif
763 >    
764 >    call gather(tags, GroupRowToGlobal, plan_group_row)
765 >    call gather(tags, GroupColToGlobal, plan_group_col)
766 >    
767 >  end subroutine setGroupTags
768 >  
769 >  function getNatomsInCol(thisplan) result(nInCol)
770      type (gs_plan), intent(in) :: thisplan
771 <    integer :: ncol
772 <    ncol = thisplan%gsComponentPlan%nComponentsColumn
773 <  end function getNcol
771 >    integer :: nInCol
772 >    nInCol = thisplan%gsComponentPlan%nAtomsInColumn
773 >  end function getNatomsInCol
774  
775 < !  pure function getNrow(thisplan) result(nrow)
694 <  function getNrow(thisplan) result(nrow)
775 >  function getNatomsInRow(thisplan) result(nInRow)
776      type (gs_plan), intent(in) :: thisplan
777 <    integer :: nrow
778 <    nrow = thisplan%gsComponentPlan%nComponentsRow
779 <  end function getNrow
777 >    integer :: nInRow
778 >    nInRow = thisplan%gsComponentPlan%nAtomsInRow
779 >  end function getNatomsInRow
780 >
781 >  function getNgroupsInCol(thisplan) result(nInCol)
782 >    type (gs_plan), intent(in) :: thisplan
783 >    integer :: nInCol
784 >    nInCol = thisplan%gsComponentPlan%nGroupsInColumn
785 >  end function getNgroupsInCol
786  
787 +  function getNgroupsInRow(thisplan) result(nInRow)
788 +    type (gs_plan), intent(in) :: thisplan
789 +    integer :: nInRow
790 +    nInRow = thisplan%gsComponentPlan%nGroupsInRow
791 +  end function getNgroupsInRow
792 +  
793    function isMPISimSet() result(isthisSimSet)
794      logical :: isthisSimSet
795      if (isSimSet) then
# Line 706 | Line 799 | contains
799      endif
800    end function isMPISimSet
801    
709
710
802    subroutine printComponentPlan(this_plan,printNode)
803  
804      type (mpiComponentPlan), intent(in) :: this_plan
# Line 728 | Line 819 | contains
819         write(default_error,*) "nBondGlobal: ", mpiSim%nBondsGlobal
820         write(default_error,*) "nTorsionsGlobal: ", mpiSim%nTorsionsGlobal
821         write(default_error,*) "nSRIGlobal: ", mpiSim%nSRIGlobal
822 <       write(default_error,*) "myNlocal: ", mpiSim%myNlocal
822 >       write(default_error,*) "nAtomsLocal: ", mpiSim%nAtomsLocal
823         write(default_error,*) "myNode: ", mpiSim%myNode
824 <       write(default_error,*) "numberProcessors: ", mpiSim%numberProcessors
824 >       write(default_error,*) "nProcessors: ", mpiSim%nProcessors
825         write(default_error,*) "rowComm: ", mpiSim%rowComm
826         write(default_error,*) "columnComm: ", mpiSim%columnComm
827 <       write(default_error,*) "numberRows: ", mpiSim%numberRows
828 <       write(default_error,*) "numberColumns: ", mpiSim%numberColumns
829 <       write(default_error,*) "nComponentsRow: ", mpiSim%nComponentsRow
830 <       write(default_error,*) "nComponentsColumn: ", mpiSim%nComponentsColumn
827 >       write(default_error,*) "nRows: ", mpiSim%nRows
828 >       write(default_error,*) "nColumns: ", mpiSim%nColumns
829 >       write(default_error,*) "nAtomsInRow: ", mpiSim%nAtomsInRow
830 >       write(default_error,*) "nAtomsInColumn: ", mpiSim%nAtomsInColumn
831         write(default_error,*) "rowIndex: ", mpiSim%rowIndex
832         write(default_error,*) "columnIndex: ", mpiSim%columnIndex
833      endif
# Line 749 | Line 840 | contains
840  
841   #ifdef PROFILE
842    subroutine printCommTime()
752
843      write(*,*) "MPI communication time is: ", commTime
754
844    end subroutine printCommTime
845 +
846 +  function getCommTime() result(comm_time)
847 +    real :: comm_time
848 +    comm_time = commTime
849 +  end function getCommTime
850 +
851   #endif
852  
853   #endif // is_mpi
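
In the PROFILE build above, each communication call is bracketed by a pair of cpu_time calls and the difference is accumulated into commTime (note that cpu_time reports processor time, where mpi_wtime would report elapsed wall-clock time). A standalone sketch of that accumulation pattern, with a busy loop standing in for the communication call:

    program timing_demo
      implicit none
      real    :: tInitial, tFinal, total
      real    :: x
      integer :: i

      total = 0.0
      x = 0.0

      call cpu_time(tInitial)
      do i = 1, 1000000            ! stand-in for a communication call
         x = x + sin(real(i))
      end do
      call cpu_time(tFinal)

      total = total + tFinal - tInitial
      write(*,*) 'accumulated time: ', total, ' (work: ', x, ')'
    end program timing_demo
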

Diff Legend

  Removed lines (old revision only)
+ Added lines (new revision only)
< Changed lines (old revision)
> Changed lines (new revision)