!! MPI support for long range forces using force decomposition
!! on a square grid of processors.
!! Corresponds to mpiSimulation.cpp for C++.
!! mpi_module also contains a private interface for mpi f90 routines.
!!
!! @author Charles F. Vardeman II
!! @author Matthew Meineke
!! @version $Id: mpiSimulation_module.F90,v 1.14 2004-05-27 18:59:15 gezelter Exp $, $Date: 2004-05-27 18:59:15 $, $Name: not supported by cvs2svn $, $Revision: 1.14 $

module mpiSimulation
  use definitions
#ifdef IS_MPI
  use oopseMPI
  implicit none
  PRIVATE

  !! PUBLIC Subroutines contained in this module.
  !! gather and scatter provide a generic interface
  !! to the gather and scatter routines.
  public :: gather, scatter
  public :: setupSimParallel
  public :: replanSimParallel
  public :: getNatomsInCol
  public :: getNatomsInRow
  public :: getNgroupsInCol
  public :: getNgroupsInRow
  public :: isMPISimSet
  public :: printComponentPlan
  public :: getMyNode

  !! PUBLIC Subroutines contained in the MPI module
  public :: mpi_bcast
  public :: mpi_allreduce
  ! public :: mpi_reduce
  public :: mpi_send
  public :: mpi_recv
  public :: mpi_get_processor_name
  public :: mpi_finalize

  !! PUBLIC mpi variables
  public :: mpi_comm_world
  public :: mpi_character
  public :: mpi_integer
  public :: mpi_double_precision
  public :: mpi_sum
  public :: mpi_max
  public :: mpi_status_size
  public :: mpi_any_source

  !! Safety logical to prevent access to the ComponentPlan until
  !! it is set by C++.
  logical, save :: ComponentPlanSet = .false.

  !! generic mpi error declaration.
  integer, public :: mpi_err
#ifdef PROFILE
  public :: printCommTime
  public :: getCommTime
  real, save :: commTime = 0.0
  real :: commTimeInitial, commTimeFinal
#endif

  !! Include the mpiComponentPlan type. mpiComponentPlan is a
  !! dual header file for both C and Fortran.
#define __FORTRAN90
#include "mpiComponentPlan.h"

  !! Tags used during force loop for parallel simulation
  integer, public, allocatable, dimension(:) :: AtomLocalToGlobal
  integer, public, allocatable, dimension(:) :: AtomRowToGlobal
  integer, public, allocatable, dimension(:) :: AtomColToGlobal
  integer, public, allocatable, dimension(:) :: GroupLocalToGlobal
  integer, public, allocatable, dimension(:) :: GroupRowToGlobal
  integer, public, allocatable, dimension(:) :: GroupColToGlobal

  !! Logical set true if mpiSimulation has been initialized
  logical, save :: isSimSet = .false.

  type (mpiComponentPlan), save :: mpiSim
  !! gs_plan contains plans for gather and scatter routines
  type, public :: gs_plan
    private
    type (mpiComponentPlan), pointer :: gsComponentPlan => NULL()
    integer :: gsPlanSize     !! size of this plan (nDim*nComponents)
    integer :: globalPlanSize !! size of all components in plan
    integer, dimension(:), pointer :: displs !! Displacements array for mpi indexed from 0.
    integer, dimension(:), pointer :: counts !! Counts array for mpi indexed from 0.
    integer :: myPlanComm  !! My communicator for this plan
    integer :: myPlanRank  !! My rank in this plan
    integer :: planNprocs  !! Number of processors in this plan
  end type gs_plan

  ! plans for different decompositions
  type (gs_plan), public, save :: plan_atom_row
  type (gs_plan), public, save :: plan_atom_row_3d
  type (gs_plan), public, save :: plan_atom_col
  type (gs_plan), public, save :: plan_atom_col_3d
  type (gs_plan), public, save :: plan_atom_row_Rotation
  type (gs_plan), public, save :: plan_atom_col_Rotation
  type (gs_plan), public, save :: plan_group_row
  type (gs_plan), public, save :: plan_group_col
  type (gs_plan), public, save :: plan_group_row_3d
  type (gs_plan), public, save :: plan_group_col_3d

  type (mpiComponentPlan), pointer :: simComponentPlan

  ! interface for the gather/scatter routines
  !! Generic interface for gather.
  !! Gathers a local array into a row or column array.
  !! Interfaces are provided for integer, double, and
  !! rank 2 double arrays.
  interface gather
    module procedure gather_integer
    module procedure gather_double
    module procedure gather_double_2d
  end interface

  !! Generic interface for scatter.
  !! Scatters a row or column array, adding components
  !! and reducing them to a local nComponent array.
  !! Interfaces are provided for double and rank 2 double arrays.
  interface scatter
    module procedure scatter_double
    module procedure scatter_double_2d
  end interface
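  !! A minimal usage sketch (illustrative only; q, q_Row, f, and f_Row
  !! are hypothetical local and row-replicated arrays of shape
  !! (nDim, nAtomsLocal) and (nDim, nAtomsInRow)):
  !!
  !!   call gather(q, q_Row, plan_atom_row_3d)   ! replicate positions along the row
  !!   ! ...accumulate partial forces into f_Row...
  !!   call scatter(f_Row, f, plan_atom_row_3d)  ! sum-reduce forces back to local atoms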
|

contains

  !! Sets up the mpiComponentPlan with the structure passed from C++.
  subroutine setupSimParallel(thisComponentPlan, nAtomTags, atomTags, &
      status)
    !! Passed Arguments
    !! mpiComponentPlan struct from C
    type (mpiComponentPlan), intent(inout) :: thisComponentPlan
    !! Number of tags passed
    integer, intent(in) :: nAtomTags
    !! Result status: 0 = normal, -1 = error
    integer, intent(out) :: status
    integer :: localStatus
    !! Global reference tags for the local particles
    integer, dimension(nAtomTags), intent(inout) :: atomTags

    !write(*,*) 'mpiSim_mod thinks node', thisComponentPlan%myNode, &
    !    ' has atomTags(1) = ', atomTags(1)

    status = 0
    if (componentPlanSet) then
      return
    endif
    componentPlanSet = .true.

    !! copy the C component plan to Fortran
    mpiSim = thisComponentPlan
    !write(*,*) "Setting up simParallel"

    call make_Force_Grid(mpiSim, localStatus)
    if (localStatus /= 0) then
      write(default_error,*) "Error creating force grid"
      status = -1
      return
    endif

    call updateGridComponents(mpiSim, localStatus)
    if (localStatus /= 0) then
      write(default_error,*) "Error updating grid components"
      status = -1
      return
    endif

    !! initialize the gather and scatter plans used in this simulation
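    !! (The first argument of plan_gather_scatter is the number of values
    !! carried per object: 1 for scalars, nDim for vectors, and 9 for
    !! flattened 3x3 rotation matrices.)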
    call plan_gather_scatter(1, mpiSim%nAtomsLocal, &
        mpiSim, mpiSim%rowComm, plan_atom_row)
    call plan_gather_scatter(nDim, mpiSim%nAtomsLocal, &
        mpiSim, mpiSim%rowComm, plan_atom_row_3d)
    call plan_gather_scatter(9, mpiSim%nAtomsLocal, &
        mpiSim, mpiSim%rowComm, plan_atom_row_Rotation)
    call plan_gather_scatter(1, mpiSim%nGroupsLocal, &
        mpiSim, mpiSim%rowComm, plan_group_row)
    call plan_gather_scatter(nDim, mpiSim%nGroupsLocal, &
        mpiSim, mpiSim%rowComm, plan_group_row_3d)

    call plan_gather_scatter(1, mpiSim%nAtomsLocal, &
        mpiSim, mpiSim%columnComm, plan_atom_col)
    call plan_gather_scatter(nDim, mpiSim%nAtomsLocal, &
        mpiSim, mpiSim%columnComm, plan_atom_col_3d)
    call plan_gather_scatter(9, mpiSim%nAtomsLocal, &
        mpiSim, mpiSim%columnComm, plan_atom_col_Rotation)
    call plan_gather_scatter(1, mpiSim%nGroupsLocal, &
        mpiSim, mpiSim%columnComm, plan_group_col)
    call plan_gather_scatter(nDim, mpiSim%nGroupsLocal, &
        mpiSim, mpiSim%columnComm, plan_group_col_3d)

    ! Initialize tags

    call setAtomTags(atomTags, localStatus)
    if (localStatus /= 0) then
      status = -1
      return
    endif
    isSimSet = .true.

    ! call printComponentPlan(mpiSim,0)
  end subroutine setupSimParallel
217 |
|
218 |
subroutine replanSimParallel(thisComponentPlan,status) |
219 |
! Passed Arguments |
220 |
!! mpiComponentPlan struct from C |
221 |
type (mpiComponentPlan), intent(inout) :: thisComponentPlan |
222 |
integer, intent(out) :: status |
223 |
integer :: localStatus |
224 |
integer :: mpierror |
225 |
status = 0 |
226 |
|
227 |
call updateGridComponents(thisComponentPlan,localStatus) |
228 |
if (localStatus /= 0) then |
229 |
status = -1 |
230 |
return |
231 |
endif |
232 |
|
233 |
!! Unplan Gather Scatter plans |
234 |
call unplan_gather_scatter(plan_atom_row) |
235 |
call unplan_gather_scatter(plan_atom_row_3d) |
236 |
call unplan_gather_scatter(plan_atom_row_Rotation) |
237 |
call unplan_gather_scatter(plan_group_row) |
238 |
call unplan_gather_scatter(plan_group_row_3d) |
239 |
|
240 |
call unplan_gather_scatter(plan_atom_col) |
241 |
call unplan_gather_scatter(plan_atom_col_3d) |
242 |
call unplan_gather_scatter(plan_atom_col_Rotation) |
243 |
call unplan_gather_scatter(plan_group_col) |
244 |
call unplan_gather_scatter(plan_group_col_3d) |
245 |
|
246 |
!! initialize gather and scatter plans used in this simulation |
247 |
call plan_gather_scatter(1, mpiSim%nAtomsLocal, & |
248 |
mpiSim, mpiSim%rowComm, plan_atom_row) |
249 |
call plan_gather_scatter(nDim, mpiSim%nAtomsLocal, & |
250 |
mpiSim, mpiSim%rowComm, plan_atom_row_3d) |
251 |
call plan_gather_scatter(9, mpiSim%nAtomsLocal, & |
252 |
mpiSim, mpiSim%rowComm, plan_atom_row_Rotation) |
253 |
call plan_gather_scatter(1, mpiSim%nGroupsLocal, & |
254 |
mpiSim, mpiSim%rowComm, plan_group_row) |
255 |
call plan_gather_scatter(nDim, mpiSim%nGroupsLocal, & |
256 |
mpiSim, mpiSim%rowComm, plan_group_row_3d) |
257 |
|
258 |
call plan_gather_scatter(1, mpiSim%nAtomsLocal, & |
259 |
mpiSim, mpiSim%columnComm, plan_atom_col) |
260 |
call plan_gather_scatter(nDim, mpiSim%nAtomsLocal, & |
261 |
mpiSim, mpiSim%columnComm, plan_atom_col_3d) |
262 |
call plan_gather_scatter(9, mpiSim%nAtomsLocal, & |
263 |
mpiSim, mpiSim%columnComm, plan_atom_col_Rotation) |
264 |
call plan_gather_scatter(1, mpiSim%nGroupsLocal, & |
265 |
mpiSim, mpiSim%columnComm, plan_group_col) |
266 |
call plan_gather_scatter(nDim, mpiSim%nGroupsLocal, & |
267 |
mpiSim, mpiSim%columnComm, plan_group_col_3d) |
268 |
|
269 |
end subroutine replanSimParallel |
270 |
|
271 |
!! Updates number of row and column components for long range forces. |
272 |
subroutine updateGridComponents(thisComponentPlan, status) |
273 |
type (mpiComponentPlan) :: thisComponentPlan !! mpiComponentPlan |
274 |
|
275 |
!! Status return |
276 |
!! - 0 Success |
277 |
!! - -1 Failure |
278 |
integer, intent(out) :: status |
279 |
integer :: nAtomsLocal |
280 |
integer :: nAtomsInRow = 0 |
281 |
integer :: nAtomsInColumn = 0 |
282 |
integer :: nGroupsLocal |
283 |
integer :: nGroupsInRow = 0 |
284 |
integer :: nGroupsInColumn = 0 |
285 |
integer :: mpiErrors |
286 |
|
287 |
status = 0 |
288 |
if (.not. componentPlanSet) return |
289 |
|
290 |
if (thisComponentPlan%nAtomsLocal == 0) then |
291 |
status = -1 |
292 |
return |
293 |
endif |
294 |
if (thisComponentPlan%nGroupsLocal == 0) then |
295 |
write(*,*) 'tcp%ngl = ', thisComponentPlan%nGroupsLocal |
296 |
status = -1 |
297 |
return |
298 |
endif |
299 |
|
300 |
nAtomsLocal = thisComponentPlan%nAtomsLocal |
301 |
nGroupsLocal = thisComponentPlan%nGroupsLocal |
302 |
|
303 |
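    !! Summing each processor's local count over the row (column)
    !! communicator gives the total number of atoms and groups held by
    !! this processor's row (column) of the force grid.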
    call mpi_allreduce(nAtomsLocal, nAtomsInRow, 1, mpi_integer, &
        mpi_sum, thisComponentPlan%rowComm, mpiErrors)
    if (mpiErrors /= 0) then
      status = -1
      return
    endif

    call mpi_allreduce(nAtomsLocal, nAtomsInColumn, 1, mpi_integer, &
        mpi_sum, thisComponentPlan%columnComm, mpiErrors)
    if (mpiErrors /= 0) then
      status = -1
      return
    endif

    call mpi_allreduce(nGroupsLocal, nGroupsInRow, 1, mpi_integer, &
        mpi_sum, thisComponentPlan%rowComm, mpiErrors)
    if (mpiErrors /= 0) then
      status = -1
      return
    endif

    call mpi_allreduce(nGroupsLocal, nGroupsInColumn, 1, mpi_integer, &
        mpi_sum, thisComponentPlan%columnComm, mpiErrors)
    if (mpiErrors /= 0) then
      status = -1
      return
    endif

    thisComponentPlan%nAtomsInRow = nAtomsInRow
    thisComponentPlan%nAtomsInColumn = nAtomsInColumn
    thisComponentPlan%nGroupsInRow = nGroupsInRow
    thisComponentPlan%nGroupsInColumn = nGroupsInColumn

  end subroutine updateGridComponents
337 |
|
338 |
|
339 |
!! Creates a square force decomposition of processors into row and column |
340 |
!! communicators. |
341 |
subroutine make_Force_Grid(thisComponentPlan,status) |
342 |
type (mpiComponentPlan) :: thisComponentPlan |
343 |
integer, intent(out) :: status !! status returns -1 if error |
344 |
integer :: nColumnsMax !! Maximum number of columns |
345 |
integer :: nWorldProcessors !! Total number of processors in World comm. |
346 |
integer :: rowIndex !! Row for this processor. |
347 |
integer :: columnIndex !! Column for this processor. |
348 |
integer :: nRows !! Total number of rows. |
349 |
integer :: nColumns !! Total number of columns. |
350 |
integer :: mpiErrors !! MPI errors. |
351 |
integer :: rowCommunicator !! MPI row communicator. |
352 |
integer :: columnCommunicator |
353 |
integer :: myWorldRank |
354 |
integer :: i |
355 |
|
356 |
|
357 |
if (.not. ComponentPlanSet) return |
358 |
status = 0 |
359 |
|
360 |
!! We make a dangerous assumption here that if numberProcessors is |
361 |
!! zero, then we need to get the information from MPI. |
362 |
if (thisComponentPlan%nProcessors == 0 ) then |
363 |
call mpi_comm_size( MPI_COMM_WORLD, nWorldProcessors,mpiErrors) |
364 |
if ( mpiErrors /= 0 ) then |
365 |
status = -1 |
366 |
return |
367 |
endif |
368 |
call mpi_comm_rank( MPI_COMM_WORLD,myWorldRank,mpiErrors) |
369 |
if ( mpiErrors /= 0 ) then |
370 |
status = -1 |
371 |
return |
372 |
endif |
373 |
|
374 |
else |
375 |
nWorldProcessors = thisComponentPlan%nProcessors |
376 |
myWorldRank = thisComponentPlan%myNode |
377 |
endif |
378 |
|
379 |
nColumnsMax = nint(sqrt(real(nWorldProcessors,kind=dp))) |
380 |
|
381 |
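    !! Choose nColumns as the largest divisor of nWorldProcessors that
    !! does not exceed sqrt(nWorldProcessors); nRows = nWorldProcessors/nColumns
    !! then yields a processor grid as close to square as possible, with
    !! nRows >= nColumns.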
    do i = 1, nColumnsMax
      if (mod(nWorldProcessors, i) == 0) nColumns = i
    end do

    nRows = nWorldProcessors/nColumns

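    !! mpi_comm_split groups processors that share a color into a new
    !! communicator: all processors with the same rowIndex form one row
    !! communicator, and all with the same columnIndex form one column
    !! communicator. With key = 0, ranks keep their relative world order.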
    rowIndex = myWorldRank/nColumns

    call mpi_comm_split(mpi_comm_world, rowIndex, 0, rowCommunicator, mpiErrors)
    if (mpiErrors /= 0) then
      write(default_error,*) "MPI comm split failed at row communicator"
      status = -1
      return
    endif

    columnIndex = mod(myWorldRank, nColumns)
    call mpi_comm_split(mpi_comm_world, columnIndex, 0, columnCommunicator, mpiErrors)
    if (mpiErrors /= 0) then
      write(default_error,*) "MPI comm split failed at column communicator"
      status = -1
      return
    endif

    ! Set the appropriate components of thisComponentPlan
    thisComponentPlan%rowComm = rowCommunicator
    thisComponentPlan%columnComm = columnCommunicator
    thisComponentPlan%rowIndex = rowIndex
    thisComponentPlan%columnIndex = columnIndex
    thisComponentPlan%nRows = nRows
    thisComponentPlan%nColumns = nColumns

  end subroutine make_Force_Grid

  !! Initializes a gather/scatter plan.
  subroutine plan_gather_scatter(nDim, nObjects, thisComponentPlan, &
      thisComm, this_plan, status)
    integer, intent(in) :: nDim !! Number of dimensions for the gather/scatter plan
    integer, intent(in) :: nObjects
    type (mpiComponentPlan), intent(in), target :: thisComponentPlan
    type (gs_plan), intent(out) :: this_plan !! MPI Component Plan
    integer, intent(in) :: thisComm !! MPI communicator for this plan

    integer :: arraySize !! size to allocate plan for
    integer, intent(out), optional :: status
    integer :: ierror
    integer :: i, junk

    if (present(status)) status = 0

    !! Point gsComponentPlan at the component plan we want to use for
    !! this gather/scatter plan.
    !! WARNING: this could be dangerous, since thisComponentPlan was
    !! originally allocated in C++ and there is a significant difference
    !! between C and F95 pointers...
    this_plan%gsComponentPlan => thisComponentPlan

    ! Set this plan's size for the displs array.
    this_plan%gsPlanSize = nDim * nObjects

    ! Duplicate the communicator for this plan
    call mpi_comm_dup(thisComm, this_plan%myPlanComm, mpi_err)
    if (mpi_err /= 0) then
      if (present(status)) status = -1
      return
    end if
    call mpi_comm_rank(this_plan%myPlanComm, this_plan%myPlanRank, mpi_err)
    if (mpi_err /= 0) then
      if (present(status)) status = -1
      return
    end if

    call mpi_comm_size(this_plan%myPlanComm, this_plan%planNprocs, mpi_err)
    if (mpi_err /= 0) then
      if (present(status)) status = -1
      return
    end if

    !! The counts and displacements arrays are indexed from 0 to be
    !! compatible with MPI arrays.
    allocate(this_plan%counts(0:this_plan%planNprocs-1), STAT=ierror)
    if (ierror /= 0) then
      if (present(status)) status = -1
      return
    end if

    allocate(this_plan%displs(0:this_plan%planNprocs-1), STAT=ierror)
    if (ierror /= 0) then
      if (present(status)) status = -1
      return
    end if

    !! gather all the local plan sizes into an array of length planNprocs.
    call mpi_allgather(this_plan%gsPlanSize, 1, mpi_integer, this_plan%counts, &
        1, mpi_integer, thisComm, mpi_err)
    if (mpi_err /= 0) then
      if (present(status)) status = -1
      return
    end if

    !! figure out the total number of elements in this plan
    this_plan%globalPlanSize = sum(this_plan%counts)

    !! initialize the plan displacements.
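    !! displs(i) is the exclusive prefix sum of counts: the offset at
    !! which rank i's contribution begins in the gathered array, exactly
    !! as mpi_allgatherv expects.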
    this_plan%displs(0) = 0
    do i = 1, this_plan%planNprocs - 1
      this_plan%displs(i) = this_plan%displs(i-1) + this_plan%counts(i-1)
    end do
  end subroutine plan_gather_scatter

  subroutine unplan_gather_scatter(this_plan)
    type (gs_plan), intent(inout) :: this_plan

    this_plan%gsComponentPlan => null()
    call mpi_comm_free(this_plan%myPlanComm, mpi_err)

    deallocate(this_plan%counts)
    deallocate(this_plan%displs)

  end subroutine unplan_gather_scatter
|
  subroutine gather_integer(sbuffer, rbuffer, this_plan, status)

    type (gs_plan), intent(inout) :: this_plan
    integer, dimension(:), intent(inout) :: sbuffer
    integer, dimension(:), intent(inout) :: rbuffer
    integer :: noffset
    integer, intent(out), optional :: status
    integer :: i

    if (present(status)) status = 0
    noffset = this_plan%displs(this_plan%myPlanRank)

    ! if (getmyNode() == 1) then
    !   write(*,*) "Node 0 printing allgatherv vars"
    !   write(*,*) "Noffset: ", noffset
    !   write(*,*) "PlanSize: ", this_plan%gsPlanSize
    !   write(*,*) "PlanComm: ", this_plan%myPlanComm
    ! end if

    call mpi_allgatherv(sbuffer, this_plan%gsPlanSize, mpi_integer, &
        rbuffer, this_plan%counts, this_plan%displs, mpi_integer, &
        this_plan%myPlanComm, mpi_err)

    if (mpi_err /= 0) then
      if (present(status)) status = -1
    endif

  end subroutine gather_integer

  subroutine gather_double(sbuffer, rbuffer, this_plan, status)

    type (gs_plan), intent(in) :: this_plan
    real( kind = DP ), dimension(:), intent(inout) :: sbuffer
    real( kind = DP ), dimension(:), intent(inout) :: rbuffer
    integer :: noffset
    integer, intent(out), optional :: status

    if (present(status)) status = 0
    noffset = this_plan%displs(this_plan%myPlanRank)
#ifdef PROFILE
    call cpu_time(commTimeInitial)
#endif
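    !! Each rank contributes its gsPlanSize local values; allgatherv
    !! concatenates the contributions into rbuffer according to counts
    !! and displs, so every rank in the plan ends up holding the full
    !! row (or column) array.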
    call mpi_allgatherv(sbuffer, this_plan%gsPlanSize, mpi_double_precision, &
        rbuffer, this_plan%counts, this_plan%displs, mpi_double_precision, &
        this_plan%myPlanComm, mpi_err)
#ifdef PROFILE
    call cpu_time(commTimeFinal)
    commTime = commTime + commTimeFinal - commTimeInitial
#endif

    if (mpi_err /= 0) then
      if (present(status)) status = -1
    endif

  end subroutine gather_double

  subroutine gather_double_2d(sbuffer, rbuffer, this_plan, status)

    type (gs_plan), intent(in) :: this_plan
    real( kind = DP ), dimension(:,:), intent(inout) :: sbuffer
    real( kind = DP ), dimension(:,:), intent(inout) :: rbuffer
    integer :: noffset, i, ierror
    integer, intent(out), optional :: status

    external mpi_allgatherv

    if (present(status)) status = 0

    ! noffset = this_plan%displs(this_plan%me)
#ifdef PROFILE
    call cpu_time(commTimeInitial)
#endif

    call mpi_allgatherv(sbuffer, this_plan%gsPlanSize, mpi_double_precision, &
        rbuffer, this_plan%counts, this_plan%displs, mpi_double_precision, &
        this_plan%myPlanComm, mpi_err)

#ifdef PROFILE
    call cpu_time(commTimeFinal)
    commTime = commTime + commTimeFinal - commTimeInitial
#endif

    if (mpi_err /= 0) then
      if (present(status)) status = -1
    endif

  end subroutine gather_double_2d

  subroutine scatter_double(sbuffer, rbuffer, this_plan, status)

    type (gs_plan), intent(in) :: this_plan
    real( kind = DP ), dimension(:), intent(inout) :: sbuffer
    real( kind = DP ), dimension(:), intent(inout) :: rbuffer
    integer, intent(out), optional :: status
    external mpi_reduce_scatter

    if (present(status)) status = 0

#ifdef PROFILE
    call cpu_time(commTimeInitial)
#endif
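    !! mpi_reduce_scatter sums sbuffer element-wise across all ranks of
    !! the plan communicator, then leaves each rank with only its own
    !! counts(myPlanRank)-long segment of the result: the inverse of the
    !! gather operation above.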
    call mpi_reduce_scatter(sbuffer, rbuffer, this_plan%counts, &
        mpi_double_precision, MPI_SUM, this_plan%myPlanComm, mpi_err)
#ifdef PROFILE
    call cpu_time(commTimeFinal)
    commTime = commTime + commTimeFinal - commTimeInitial
#endif

    if (mpi_err /= 0) then
      if (present(status)) status = -1
    endif

  end subroutine scatter_double

  subroutine scatter_double_2d(sbuffer, rbuffer, this_plan, status)

    type (gs_plan), intent(in) :: this_plan
    real( kind = DP ), dimension(:,:), intent(inout) :: sbuffer
    real( kind = DP ), dimension(:,:), intent(inout) :: rbuffer
    integer, intent(out), optional :: status
    external mpi_reduce_scatter

    if (present(status)) status = 0
#ifdef PROFILE
    call cpu_time(commTimeInitial)
#endif

    call mpi_reduce_scatter(sbuffer, rbuffer, this_plan%counts, &
        mpi_double_precision, MPI_SUM, this_plan%myPlanComm, mpi_err)
#ifdef PROFILE
    call cpu_time(commTimeFinal)
    commTime = commTime + commTimeFinal - commTimeInitial
#endif

    if (mpi_err /= 0) then
      if (present(status)) status = -1
    endif

  end subroutine scatter_double_2d

  subroutine setAtomTags(tags, status)
    integer, dimension(:) :: tags
    integer :: status

    integer :: alloc_stat

    integer :: nAtomsInCol
    integer :: nAtomsInRow

    status = 0
    ! allocate row arrays
    nAtomsInRow = getNatomsInRow(plan_atom_row)
    nAtomsInCol = getNatomsInCol(plan_atom_col)

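    !! The tag arrays are deallocated and reallocated on every call, so
    !! that a replanned simulation with different local sizes ends up
    !! with correctly sized arrays.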
    if (.not. allocated(AtomLocalToGlobal)) then
      allocate(AtomLocalToGlobal(size(tags)), STAT=alloc_stat)
      if (alloc_stat /= 0) then
        status = -1
        return
      endif
    else
      deallocate(AtomLocalToGlobal)
      allocate(AtomLocalToGlobal(size(tags)), STAT=alloc_stat)
      if (alloc_stat /= 0) then
        status = -1
        return
      endif
    endif

    AtomLocalToGlobal = tags

    if (.not. allocated(AtomRowToGlobal)) then
      allocate(AtomRowToGlobal(nAtomsInRow), STAT=alloc_stat)
      if (alloc_stat /= 0) then
        status = -1
        return
      endif
    else
      deallocate(AtomRowToGlobal)
      allocate(AtomRowToGlobal(nAtomsInRow), STAT=alloc_stat)
      if (alloc_stat /= 0) then
        status = -1
        return
      endif
    endif

    ! allocate column arrays
    if (.not. allocated(AtomColToGlobal)) then
      allocate(AtomColToGlobal(nAtomsInCol), STAT=alloc_stat)
      if (alloc_stat /= 0) then
        status = -1
        return
      endif
    else
      deallocate(AtomColToGlobal)
      allocate(AtomColToGlobal(nAtomsInCol), STAT=alloc_stat)
      if (alloc_stat /= 0) then
        status = -1
        return
      endif
    endif

    call gather(tags, AtomRowToGlobal, plan_atom_row)
    call gather(tags, AtomColToGlobal, plan_atom_col)

  end subroutine setAtomTags

  function getNatomsInCol(thisplan) result(nInCol)
    type (gs_plan), intent(in) :: thisplan
    integer :: nInCol
    nInCol = thisplan%gsComponentPlan%nAtomsInColumn
  end function getNatomsInCol

  function getNatomsInRow(thisplan) result(nInRow)
    type (gs_plan), intent(in) :: thisplan
    integer :: nInRow
    nInRow = thisplan%gsComponentPlan%nAtomsInRow
  end function getNatomsInRow

  function getNgroupsInCol(thisplan) result(nInCol)
    type (gs_plan), intent(in) :: thisplan
    integer :: nInCol
    nInCol = thisplan%gsComponentPlan%nGroupsInColumn
  end function getNgroupsInCol

  function getNgroupsInRow(thisplan) result(nInRow)
    type (gs_plan), intent(in) :: thisplan
    integer :: nInRow
    nInRow = thisplan%gsComponentPlan%nGroupsInRow
  end function getNgroupsInRow

  function isMPISimSet() result(isthisSimSet)
    logical :: isthisSimSet
    isthisSimSet = isSimSet
  end function isMPISimSet
|
  subroutine printComponentPlan(this_plan, printNode)

    type (mpiComponentPlan), intent(in) :: this_plan
    integer, optional :: printNode
    logical :: print_me

    ! print_me must be reset on every call; initializing it in the
    ! declaration would give it the SAVE attribute and leave it .true.
    ! after the first call that printed.
    print_me = .false.

    if (present(printNode)) then
      if (printNode == mpiSim%myNode) print_me = .true.
    else
      print_me = .true.
    endif

    if (print_me) then
      write(default_error,*) "SetupSimParallel: writing component plan"

      write(default_error,*) "nMolGlobal: ", mpiSim%nMolGlobal
      write(default_error,*) "nAtomsGlobal: ", mpiSim%nAtomsGlobal
      write(default_error,*) "nBondsGlobal: ", mpiSim%nBondsGlobal
      write(default_error,*) "nTorsionsGlobal: ", mpiSim%nTorsionsGlobal
      write(default_error,*) "nSRIGlobal: ", mpiSim%nSRIGlobal
      write(default_error,*) "nAtomsLocal: ", mpiSim%nAtomsLocal
      write(default_error,*) "myNode: ", mpiSim%myNode
      write(default_error,*) "nProcessors: ", mpiSim%nProcessors
      write(default_error,*) "rowComm: ", mpiSim%rowComm
      write(default_error,*) "columnComm: ", mpiSim%columnComm
      write(default_error,*) "nRows: ", mpiSim%nRows
      write(default_error,*) "nColumns: ", mpiSim%nColumns
      write(default_error,*) "nAtomsInRow: ", mpiSim%nAtomsInRow
      write(default_error,*) "nAtomsInColumn: ", mpiSim%nAtomsInColumn
      write(default_error,*) "rowIndex: ", mpiSim%rowIndex
      write(default_error,*) "columnIndex: ", mpiSim%columnIndex
    endif
  end subroutine printComponentPlan

  function getMyNode() result(myNode)
    integer :: myNode
    myNode = mpiSim%myNode
  end function getMyNode

#ifdef PROFILE
  subroutine printCommTime()
    write(*,*) "MPI communication time is: ", commTime
  end subroutine printCommTime

  function getCommTime() result(comm_time)
    real :: comm_time
    comm_time = commTime
  end function getCommTime
#endif

#endif // is_mpi
end module mpiSimulation