root/group/trunk/OOPSE-2.0/src/UseTheForce/DarkSide/mpiSimulation_module.F90
Revision: 1526
Committed: Tue Oct 5 19:37:38 2004 UTC by tim
File size: 27076 byte(s)
Log Message:
move mpiSimulation_module to DarkSide

File Contents

!! MPI support for long range forces using force decomposition
!! on a square grid of processors.
!! Corresponds to mpiSimulation.cpp for C++.
!! mpi_module also contains a private interface for mpi f90 routines.
!!
!! @author Charles F. Vardeman II
!! @author Matthew Meineke
!! @version $Id: mpiSimulation_module.F90,v 1.1 2004-10-05 19:37:38 tim Exp $, $Date: 2004-10-05 19:37:38 $, $Name: not supported by cvs2svn $, $Revision: 1.1 $
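
!! Overview: the gather plans defined in this module replicate each
!! processor's local atom/group arrays into shared "row" and "column"
!! arrays over the row and column communicators of a (nearly) square
!! processor grid; the matching scatter plans sum row/column contributions
!! back down into local arrays. The grid itself is built in make_Force_Grid.
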
module mpiSimulation
  use definitions
#ifdef IS_MPI
  use oopseMPI
  implicit none
  PRIVATE

  !! PUBLIC Subroutines contained in this module
  !! gather and scatter are a generic interface
  !! to gather and scatter routines
  public :: gather, scatter
  public :: setupSimParallel
  public :: replanSimParallel
  public :: getNatomsInCol
  public :: getNatomsInRow
  public :: getNgroupsInCol
  public :: getNgroupsInRow
  public :: isMPISimSet
  public :: printComponentPlan
  public :: getMyNode

  !! PUBLIC Subroutines contained in MPI module
  public :: mpi_bcast
  public :: mpi_allreduce
  ! public :: mpi_reduce
  public :: mpi_send
  public :: mpi_recv
  public :: mpi_get_processor_name
  public :: mpi_finalize

  !! PUBLIC mpi variables
  public :: mpi_comm_world
  public :: mpi_character
  public :: mpi_integer
  public :: mpi_double_precision
  public :: mpi_sum
  public :: mpi_max
  public :: mpi_status_size
  public :: mpi_any_source

  !! Safety logical to prevent access to ComponentPlan until
  !! set by C++.
  logical, save :: ComponentPlanSet = .false.

  !! generic mpi error declaration.
  integer, public :: mpi_err

#ifdef PROFILE
  public :: printCommTime
  public :: getCommTime
  real, save :: commTime = 0.0
  real :: commTimeInitial, commTimeFinal
#endif

  !! Include mpiComponentPlan type. mpiComponentPlan is a
  !! dual header file for both C and Fortran.
#define __FORTRAN90
#include "UseTheForce/mpiComponentPlan.h"

  !! Tags used during force loop for parallel simulation
  integer, public, allocatable, dimension(:) :: AtomLocalToGlobal
  integer, public, allocatable, dimension(:) :: AtomRowToGlobal
  integer, public, allocatable, dimension(:) :: AtomColToGlobal
  integer, public, allocatable, dimension(:) :: GroupLocalToGlobal
  integer, public, allocatable, dimension(:) :: GroupRowToGlobal
  integer, public, allocatable, dimension(:) :: GroupColToGlobal
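
  !! These maps relate storage indices to global particle indices:
  !! AtomLocalToGlobal(i) is the global index of the i-th locally stored
  !! atom, while AtomRowToGlobal(i) / AtomColToGlobal(i) give the global
  !! index of the i-th entry of the gathered row / column arrays. They are
  !! filled in setAtomTags and setGroupTags by gathering the local tag
  !! arrays with the atom and group row/column plans.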

  !! Logical set true if mpiSimulation has been initialized
  logical, save :: isSimSet = .false.

  type (mpiComponentPlan), save :: mpiSim

  !! gs_plan contains plans for gather and scatter routines
  type, public :: gs_plan
     private
     type (mpiComponentPlan), pointer :: gsComponentPlan => NULL()
     integer :: gsPlanSize       !! size of this plan (nDim*nComponents)
     integer :: globalPlanSize   !! size of all components in plan
     integer, dimension(:), pointer :: displs  !! Displacements array for mpi indexed from 0.
     integer, dimension(:), pointer :: counts  !! Counts array for mpi indexed from 0.
     integer :: myPlanComm   !! My communicator for this plan
     integer :: myPlanRank   !! My rank in this plan
     integer :: planNprocs   !! Number of processors in this plan
  end type gs_plan
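
  !! For a gs_plan, counts(p) is the gsPlanSize reported by processor p of
  !! the plan's communicator, displs(p) is the running sum
  !! counts(0) + ... + counts(p-1) with displs(0) = 0, and globalPlanSize is
  !! sum(counts). counts feeds both mpi_allgatherv and mpi_reduce_scatter,
  !! while displs is used by mpi_allgatherv.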

  ! plans for different decompositions
  type (gs_plan), public, save :: plan_atom_row
  type (gs_plan), public, save :: plan_atom_row_3d
  type (gs_plan), public, save :: plan_atom_col
  type (gs_plan), public, save :: plan_atom_col_3d
  type (gs_plan), public, save :: plan_atom_row_Rotation
  type (gs_plan), public, save :: plan_atom_col_Rotation
  type (gs_plan), public, save :: plan_group_row
  type (gs_plan), public, save :: plan_group_col
  type (gs_plan), public, save :: plan_group_row_3d
  type (gs_plan), public, save :: plan_group_col_3d

  type (mpiComponentPlan), pointer :: simComponentPlan

  ! interface for gather scatter routines
  !! Generic interface for gather.
  !! Gathers a local array into a row or column array.
  !! Interface provided for integer, double and double
  !! rank 2 arrays.
  interface gather
     module procedure gather_integer
     module procedure gather_double
     module procedure gather_double_2d
  end interface

  !! Generic interface for scatter.
  !! Scatters a row or column array, adding components
  !! and reducing them to a local nComponent array.
  !! Interface provided for double and double rank=2 arrays.
  interface scatter
     module procedure scatter_double
     module procedure scatter_double_2d
  end interface
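
  !! Illustrative usage sketch (the names nLocal, q_local, q_Row, f_Row and
  !! f_local are placeholders, not entities of this module). After
  !! setupSimParallel has run, a rank-2 position array can be replicated
  !! across a row and force contributions summed back as follows:
  !!
  !!    real(kind=dp), allocatable :: q_local(:,:), q_Row(:,:)
  !!    real(kind=dp), allocatable :: f_Row(:,:), f_local(:,:)
  !!    allocate(q_local(nDim, nLocal), f_local(nDim, nLocal))
  !!    allocate(q_Row(nDim, getNatomsInRow(plan_atom_row)))
  !!    allocate(f_Row(nDim, getNatomsInRow(plan_atom_row)))
  !!    call gather(q_local, q_Row, plan_atom_row_3d)
  !!    ! ... accumulate force contributions into f_Row ...
  !!    call scatter(f_Row, f_local, plan_atom_row_3d)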

contains

  !! Sets up mpiComponentPlan with structure passed from C++.
  subroutine setupSimParallel(thisComponentPlan, nAtomTags, atomTags, &
       nGroupTags, groupTags, status)
    !! Passed Arguments
    !! mpiComponentPlan struct from C
    type (mpiComponentPlan), intent(inout) :: thisComponentPlan
    !! Number of tags passed
    integer, intent(in) :: nAtomTags, nGroupTags
    !! Result status, 0 = normal, -1 = error
    integer, intent(out) :: status
    integer :: localStatus
    !! Global reference tag for local particles
    integer, dimension(nAtomTags), intent(inout) :: atomTags
    integer, dimension(nGroupTags), intent(inout) :: groupTags

    !write(*,*) 'mpiSim_mod thinks node', thisComponentPlan%myNode, &
    !     ' has atomTags(1) = ', atomTags(1)

    status = 0
    if (componentPlanSet) then
       return
    endif
    componentPlanSet = .true.

    !! copy c component plan to fortran
    mpiSim = thisComponentPlan
    !write(*,*) "Setting up simParallel"

    call make_Force_Grid(mpiSim, localStatus)
    if (localStatus /= 0) then
       write(default_error,*) "Error creating force grid"
       status = -1
       return
    endif

    call updateGridComponents(mpiSim, localStatus)
    if (localStatus /= 0) then
       write(default_error,*) "Error updating grid components"
       status = -1
       return
    endif

    !! initialize gather and scatter plans used in this simulation
    call plan_gather_scatter(1, mpiSim%nAtomsLocal, &
         mpiSim, mpiSim%rowComm, plan_atom_row)
    call plan_gather_scatter(nDim, mpiSim%nAtomsLocal, &
         mpiSim, mpiSim%rowComm, plan_atom_row_3d)
    call plan_gather_scatter(9, mpiSim%nAtomsLocal, &
         mpiSim, mpiSim%rowComm, plan_atom_row_Rotation)
    call plan_gather_scatter(1, mpiSim%nGroupsLocal, &
         mpiSim, mpiSim%rowComm, plan_group_row)
    call plan_gather_scatter(nDim, mpiSim%nGroupsLocal, &
         mpiSim, mpiSim%rowComm, plan_group_row_3d)

    call plan_gather_scatter(1, mpiSim%nAtomsLocal, &
         mpiSim, mpiSim%columnComm, plan_atom_col)
    call plan_gather_scatter(nDim, mpiSim%nAtomsLocal, &
         mpiSim, mpiSim%columnComm, plan_atom_col_3d)
    call plan_gather_scatter(9, mpiSim%nAtomsLocal, &
         mpiSim, mpiSim%columnComm, plan_atom_col_Rotation)
    call plan_gather_scatter(1, mpiSim%nGroupsLocal, &
         mpiSim, mpiSim%columnComm, plan_group_col)
    call plan_gather_scatter(nDim, mpiSim%nGroupsLocal, &
         mpiSim, mpiSim%columnComm, plan_group_col_3d)

    ! Initialize tags

    call setAtomTags(atomTags, localStatus)
    if (localStatus /= 0) then
       status = -1
       return
    endif

    call setGroupTags(groupTags, localStatus)
    if (localStatus /= 0) then
       status = -1
       return
    endif

    isSimSet = .true.

    ! call printComponentPlan(mpiSim,0)
  end subroutine setupSimParallel
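
  !! setupSimParallel is the entry point used to initialize this module: it
  !! copies the C-side component plan into mpiSim, builds the processor
  !! grid (make_Force_Grid), sums the per-row and per-column atom and group
  !! counts (updateGridComponents), creates all ten gather/scatter plans,
  !! and records the local-to-global tag maps.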

  subroutine replanSimParallel(thisComponentPlan, status)
    ! Passed Arguments
    !! mpiComponentPlan struct from C
    type (mpiComponentPlan), intent(inout) :: thisComponentPlan
    integer, intent(out) :: status
    integer :: localStatus
    integer :: mpierror

    status = 0

    call updateGridComponents(thisComponentPlan, localStatus)
    if (localStatus /= 0) then
       status = -1
       return
    endif

    !! Unplan Gather Scatter plans
    call unplan_gather_scatter(plan_atom_row)
    call unplan_gather_scatter(plan_atom_row_3d)
    call unplan_gather_scatter(plan_atom_row_Rotation)
    call unplan_gather_scatter(plan_group_row)
    call unplan_gather_scatter(plan_group_row_3d)

    call unplan_gather_scatter(plan_atom_col)
    call unplan_gather_scatter(plan_atom_col_3d)
    call unplan_gather_scatter(plan_atom_col_Rotation)
    call unplan_gather_scatter(plan_group_col)
    call unplan_gather_scatter(plan_group_col_3d)

    !! initialize gather and scatter plans used in this simulation
    call plan_gather_scatter(1, mpiSim%nAtomsLocal, &
         mpiSim, mpiSim%rowComm, plan_atom_row)
    call plan_gather_scatter(nDim, mpiSim%nAtomsLocal, &
         mpiSim, mpiSim%rowComm, plan_atom_row_3d)
    call plan_gather_scatter(9, mpiSim%nAtomsLocal, &
         mpiSim, mpiSim%rowComm, plan_atom_row_Rotation)
    call plan_gather_scatter(1, mpiSim%nGroupsLocal, &
         mpiSim, mpiSim%rowComm, plan_group_row)
    call plan_gather_scatter(nDim, mpiSim%nGroupsLocal, &
         mpiSim, mpiSim%rowComm, plan_group_row_3d)

    call plan_gather_scatter(1, mpiSim%nAtomsLocal, &
         mpiSim, mpiSim%columnComm, plan_atom_col)
    call plan_gather_scatter(nDim, mpiSim%nAtomsLocal, &
         mpiSim, mpiSim%columnComm, plan_atom_col_3d)
    call plan_gather_scatter(9, mpiSim%nAtomsLocal, &
         mpiSim, mpiSim%columnComm, plan_atom_col_Rotation)
    call plan_gather_scatter(1, mpiSim%nGroupsLocal, &
         mpiSim, mpiSim%columnComm, plan_group_col)
    call plan_gather_scatter(nDim, mpiSim%nGroupsLocal, &
         mpiSim, mpiSim%columnComm, plan_group_col_3d)

  end subroutine replanSimParallel
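
  !! replanSimParallel rebuilds every gather/scatter plan: it refreshes the
  !! row/column totals via updateGridComponents, frees the old plans
  !! (releasing their duplicated communicators), and then re-creates the
  !! same ten plans built by setupSimParallel.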

  !! Updates number of row and column components for long range forces.
  subroutine updateGridComponents(thisComponentPlan, status)
    type (mpiComponentPlan) :: thisComponentPlan !! mpiComponentPlan

    !! Status return
    !! - 0 Success
    !! - -1 Failure
    integer, intent(out) :: status
    integer :: nAtomsLocal
    integer :: nAtomsInRow = 0
    integer :: nAtomsInColumn = 0
    integer :: nGroupsLocal
    integer :: nGroupsInRow = 0
    integer :: nGroupsInColumn = 0
    integer :: mpiErrors

    status = 0
    if (.not. componentPlanSet) return

    if (thisComponentPlan%nAtomsLocal == 0) then
       status = -1
       return
    endif
    if (thisComponentPlan%nGroupsLocal == 0) then
       write(*,*) 'tcp%ngl = ', thisComponentPlan%nGroupsLocal
       status = -1
       return
    endif

    nAtomsLocal = thisComponentPlan%nAtomsLocal
    nGroupsLocal = thisComponentPlan%nGroupsLocal

    call mpi_allreduce(nAtomsLocal, nAtomsInRow, 1, mpi_integer, &
         mpi_sum, thisComponentPlan%rowComm, mpiErrors)
    if (mpiErrors /= 0) then
       status = -1
       return
    endif

    call mpi_allreduce(nAtomsLocal, nAtomsInColumn, 1, mpi_integer, &
         mpi_sum, thisComponentPlan%columnComm, mpiErrors)
    if (mpiErrors /= 0) then
       status = -1
       return
    endif

    call mpi_allreduce(nGroupsLocal, nGroupsInRow, 1, mpi_integer, &
         mpi_sum, thisComponentPlan%rowComm, mpiErrors)
    if (mpiErrors /= 0) then
       status = -1
       return
    endif

    call mpi_allreduce(nGroupsLocal, nGroupsInColumn, 1, mpi_integer, &
         mpi_sum, thisComponentPlan%columnComm, mpiErrors)
    if (mpiErrors /= 0) then
       status = -1
       return
    endif

    thisComponentPlan%nAtomsInRow = nAtomsInRow
    thisComponentPlan%nAtomsInColumn = nAtomsInColumn
    thisComponentPlan%nGroupsInRow = nGroupsInRow
    thisComponentPlan%nGroupsInColumn = nGroupsInColumn

  end subroutine updateGridComponents
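
  !! updateGridComponents obtains nAtomsInRow, nAtomsInColumn, nGroupsInRow
  !! and nGroupsInColumn as mpi_allreduce sums of the local counts over the
  !! row and column communicators, so every processor in a given row (or
  !! column) ends up holding the same total.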

  !! Creates a square force decomposition of processors into row and column
  !! communicators.
  subroutine make_Force_Grid(thisComponentPlan, status)
    type (mpiComponentPlan) :: thisComponentPlan
    integer, intent(out) :: status  !! status returns -1 if error
    integer :: nColumnsMax          !! Maximum number of columns
    integer :: nWorldProcessors     !! Total number of processors in World comm.
    integer :: rowIndex             !! Row for this processor.
    integer :: columnIndex          !! Column for this processor.
    integer :: nRows                !! Total number of rows.
    integer :: nColumns             !! Total number of columns.
    integer :: mpiErrors            !! MPI errors.
    integer :: rowCommunicator      !! MPI row communicator.
    integer :: columnCommunicator
    integer :: myWorldRank
    integer :: i

    if (.not. ComponentPlanSet) return
    status = 0

    !! We make a dangerous assumption here that if numberProcessors is
    !! zero, then we need to get the information from MPI.
    if (thisComponentPlan%nProcessors == 0) then
       call mpi_comm_size(MPI_COMM_WORLD, nWorldProcessors, mpiErrors)
       if (mpiErrors /= 0) then
          status = -1
          return
       endif
       call mpi_comm_rank(MPI_COMM_WORLD, myWorldRank, mpiErrors)
       if (mpiErrors /= 0) then
          status = -1
          return
       endif
    else
       nWorldProcessors = thisComponentPlan%nProcessors
       myWorldRank = thisComponentPlan%myNode
    endif

    nColumnsMax = nint(sqrt(real(nWorldProcessors, kind=dp)))

    do i = 1, nColumnsMax
       if (mod(nWorldProcessors, i) == 0) nColumns = i
    end do

    nRows = nWorldProcessors/nColumns

    rowIndex = myWorldRank/nColumns

    call mpi_comm_split(mpi_comm_world, rowIndex, 0, rowCommunicator, mpiErrors)
    if (mpiErrors /= 0) then
       write(default_error,*) "MPI comm split failed at row communicator"
       status = -1
       return
    endif

    columnIndex = mod(myWorldRank, nColumns)
    call mpi_comm_split(mpi_comm_world, columnIndex, 0, columnCommunicator, mpiErrors)
    if (mpiErrors /= 0) then
       write(default_error,*) "MPI comm split failed at columnCommunicator"
       status = -1
       return
    endif

    ! Set appropriate components of thisComponentPlan
    thisComponentPlan%rowComm = rowCommunicator
    thisComponentPlan%columnComm = columnCommunicator
    thisComponentPlan%rowIndex = rowIndex
    thisComponentPlan%columnIndex = columnIndex
    thisComponentPlan%nRows = nRows
    thisComponentPlan%nColumns = nColumns

  end subroutine make_Force_Grid
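
  !! Worked example: with nWorldProcessors = 6, nColumnsMax =
  !! nint(sqrt(6.0)) = 2; the largest divisor of 6 not exceeding 2 is 2, so
  !! nColumns = 2 and nRows = 3. Rank r is then placed at rowIndex = r/2,
  !! columnIndex = mod(r,2), and mpi_comm_split collects ranks sharing a
  !! rowIndex into one row communicator and ranks sharing a columnIndex
  !! into one column communicator.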

  !! Initializes a gather scatter plan
  subroutine plan_gather_scatter(nDim, nObjects, thisComponentPlan, &
       thisComm, this_plan, status)
    integer, intent(in) :: nDim      !! Number of dimensions for gather scatter plan
    integer, intent(in) :: nObjects
    type (mpiComponentPlan), intent(in), target :: thisComponentPlan
    type (gs_plan), intent(out) :: this_plan  !! MPI Component Plan
    integer, intent(in) :: thisComm  !! MPI communicator for this plan

    integer :: arraySize             !! size to allocate plan for
    integer, intent(out), optional :: status
    integer :: ierror
    integer :: i, junk

    if (present(status)) status = 0

    !! Set gsComponentPlan pointer
    !! to the component plan we want to use for this gather scatter plan.
    !! WARNING this could be dangerous since thisComponentPlan was originally
    !! allocated in C++ and there is a significant difference between c and
    !! f95 pointers....
    this_plan%gsComponentPlan => thisComponentPlan

    ! Set this plan size for displs array.
    this_plan%gsPlanSize = nDim * nObjects

    ! Duplicate communicator for this plan
    call mpi_comm_dup(thisComm, this_plan%myPlanComm, mpi_err)
    if (mpi_err /= 0) then
       if (present(status)) status = -1
       return
    end if
    call mpi_comm_rank(this_plan%myPlanComm, this_plan%myPlanRank, mpi_err)
    if (mpi_err /= 0) then
       if (present(status)) status = -1
       return
    end if

    call mpi_comm_size(this_plan%myPlanComm, this_plan%planNprocs, mpi_err)
    if (mpi_err /= 0) then
       if (present(status)) status = -1
       return
    end if

    !! counts and displacements arrays are indexed from 0 to be compatible
    !! with MPI arrays.
    allocate (this_plan%counts(0:this_plan%planNprocs-1), STAT=ierror)
    if (ierror /= 0) then
       if (present(status)) status = -1
       return
    end if

    allocate (this_plan%displs(0:this_plan%planNprocs-1), STAT=ierror)
    if (ierror /= 0) then
       if (present(status)) status = -1
       return
    end if

    !! gather all the local sizes into a size # processors array.
    call mpi_allgather(this_plan%gsPlanSize, 1, mpi_integer, this_plan%counts, &
         1, mpi_integer, thisComm, mpi_err)
    if (mpi_err /= 0) then
       if (present(status)) status = -1
       return
    end if

    !! figure out the total number of particles in this plan
    this_plan%globalPlanSize = sum(this_plan%counts)

    !! initialize plan displacements.
    this_plan%displs(0) = 0
    do i = 1, this_plan%planNprocs - 1, 1
       this_plan%displs(i) = this_plan%displs(i-1) + this_plan%counts(i-1)
    end do
  end subroutine plan_gather_scatter
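
  !! Worked example: for a three-processor communicator whose local
  !! gsPlanSize values are 6, 9 and 3, the allgather yields
  !! counts = (/ 6, 9, 3 /), globalPlanSize = 18, and the loop above gives
  !! displs = (/ 0, 6, 15 /); mpi_allgatherv uses both arrays, while
  !! mpi_reduce_scatter only needs the counts.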

  subroutine unplan_gather_scatter(this_plan)
    type (gs_plan), intent(inout) :: this_plan

    this_plan%gsComponentPlan => null()
    call mpi_comm_free(this_plan%myPlanComm, mpi_err)

    deallocate(this_plan%counts)
    deallocate(this_plan%displs)

  end subroutine unplan_gather_scatter

  subroutine gather_integer(sbuffer, rbuffer, this_plan, status)

    type (gs_plan), intent(inout) :: this_plan
    integer, dimension(:), intent(inout) :: sbuffer
    integer, dimension(:), intent(inout) :: rbuffer
    integer :: noffset
    integer, intent(out), optional :: status
    integer :: i

    if (present(status)) status = 0
    noffset = this_plan%displs(this_plan%myPlanRank)

    ! if (getmyNode() == 1) then
    !    write(*,*) "Node 0 printing allgatherv vars"
    !    write(*,*) "Noffset: ", noffset
    !    write(*,*) "PlanSize: ", this_plan%gsPlanSize
    !    write(*,*) "PlanComm: ", this_plan%myPlanComm
    ! end if

    call mpi_allgatherv(sbuffer, this_plan%gsPlanSize, mpi_integer, &
         rbuffer, this_plan%counts, this_plan%displs, mpi_integer, &
         this_plan%myPlanComm, mpi_err)

    if (mpi_err /= 0) then
       if (present(status)) status = -1
    endif

  end subroutine gather_integer

  subroutine gather_double(sbuffer, rbuffer, this_plan, status)

    type (gs_plan), intent(in) :: this_plan
    real( kind = DP ), dimension(:), intent(inout) :: sbuffer
    real( kind = DP ), dimension(:), intent(inout) :: rbuffer
    integer :: noffset
    integer, intent(out), optional :: status

    if (present(status)) status = 0
    noffset = this_plan%displs(this_plan%myPlanRank)
#ifdef PROFILE
    call cpu_time(commTimeInitial)
#endif
    call mpi_allgatherv(sbuffer, this_plan%gsPlanSize, mpi_double_precision, &
         rbuffer, this_plan%counts, this_plan%displs, mpi_double_precision, &
         this_plan%myPlanComm, mpi_err)
#ifdef PROFILE
    call cpu_time(commTimeFinal)
    commTime = commTime + commTimeFinal - commTimeInitial
#endif

    if (mpi_err /= 0) then
       if (present(status)) status = -1
    endif

  end subroutine gather_double

  subroutine gather_double_2d(sbuffer, rbuffer, this_plan, status)

    type (gs_plan), intent(in) :: this_plan
    real( kind = DP ), dimension(:,:), intent(inout) :: sbuffer
    real( kind = DP ), dimension(:,:), intent(inout) :: rbuffer
    integer :: noffset, i, ierror
    integer, intent(out), optional :: status

    external mpi_allgatherv

    if (present(status)) status = 0

    ! noffset = this_plan%displs(this_plan%me)
#ifdef PROFILE
    call cpu_time(commTimeInitial)
#endif

    call mpi_allgatherv(sbuffer, this_plan%gsPlanSize, mpi_double_precision, &
         rbuffer, this_plan%counts, this_plan%displs, mpi_double_precision, &
         this_plan%myPlanComm, mpi_err)

#ifdef PROFILE
    call cpu_time(commTimeFinal)
    commTime = commTime + commTimeFinal - commTimeInitial
#endif

    if (mpi_err /= 0) then
       if (present(status)) status = -1
    endif

  end subroutine gather_double_2d
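
  !! Note on the rank-2 gathers above: the whole array is passed as a single
  !! contiguous buffer, which is sufficient because gsPlanSize already
  !! includes the nDim factor (the plan was created with nDim * nObjects
  !! elements), so no per-dimension bookkeeping is needed.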

  subroutine scatter_double(sbuffer, rbuffer, this_plan, status)

    type (gs_plan), intent(in) :: this_plan
    real( kind = DP ), dimension(:), intent(inout) :: sbuffer
    real( kind = DP ), dimension(:), intent(inout) :: rbuffer
    integer, intent(out), optional :: status
    external mpi_reduce_scatter

    if (present(status)) status = 0

#ifdef PROFILE
    call cpu_time(commTimeInitial)
#endif
    call mpi_reduce_scatter(sbuffer, rbuffer, this_plan%counts, &
         mpi_double_precision, MPI_SUM, this_plan%myPlanComm, mpi_err)
#ifdef PROFILE
    call cpu_time(commTimeFinal)
    commTime = commTime + commTimeFinal - commTimeInitial
#endif

    if (mpi_err /= 0) then
       if (present(status)) status = -1
    endif

  end subroutine scatter_double

  subroutine scatter_double_2d(sbuffer, rbuffer, this_plan, status)

    type (gs_plan), intent(in) :: this_plan
    real( kind = DP ), dimension(:,:), intent(inout) :: sbuffer
    real( kind = DP ), dimension(:,:), intent(inout) :: rbuffer
    integer, intent(out), optional :: status
    external mpi_reduce_scatter

    if (present(status)) status = 0
#ifdef PROFILE
    call cpu_time(commTimeInitial)
#endif

    call mpi_reduce_scatter(sbuffer, rbuffer, this_plan%counts, &
         mpi_double_precision, MPI_SUM, this_plan%myPlanComm, mpi_err)
#ifdef PROFILE
    call cpu_time(commTimeFinal)
    commTime = commTime + commTimeFinal - commTimeInitial
#endif

    if (mpi_err /= 0) then
       if (present(status)) status = -1
    endif

  end subroutine scatter_double_2d
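
  !! Both scatter routines rely on mpi_reduce_scatter with MPI_SUM:
  !! contributions to the same element computed on different processors of
  !! the row (or column) are summed element-wise, and each processor then
  !! receives its own block of counts(myPlanRank) values back.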

  subroutine setAtomTags(tags, status)
    integer, dimension(:) :: tags
    integer :: status

    integer :: alloc_stat

    integer :: nAtomsInCol
    integer :: nAtomsInRow

    status = 0
    ! allocate row arrays
    nAtomsInRow = getNatomsInRow(plan_atom_row)
    nAtomsInCol = getNatomsInCol(plan_atom_col)

    if (.not. allocated(AtomLocalToGlobal)) then
       allocate(AtomLocalToGlobal(size(tags)), STAT=alloc_stat)
       if (alloc_stat /= 0) then
          status = -1
          return
       endif
    else
       deallocate(AtomLocalToGlobal)
       allocate(AtomLocalToGlobal(size(tags)), STAT=alloc_stat)
       if (alloc_stat /= 0) then
          status = -1
          return
       endif
    endif

    AtomLocalToGlobal = tags

    if (.not. allocated(AtomRowToGlobal)) then
       allocate(AtomRowToGlobal(nAtomsInRow), STAT=alloc_stat)
       if (alloc_stat /= 0) then
          status = -1
          return
       endif
    else
       deallocate(AtomRowToGlobal)
       allocate(AtomRowToGlobal(nAtomsInRow), STAT=alloc_stat)
       if (alloc_stat /= 0) then
          status = -1
          return
       endif
    endif

    ! allocate column arrays
    if (.not. allocated(AtomColToGlobal)) then
       allocate(AtomColToGlobal(nAtomsInCol), STAT=alloc_stat)
       if (alloc_stat /= 0) then
          status = -1
          return
       endif
    else
       deallocate(AtomColToGlobal)
       allocate(AtomColToGlobal(nAtomsInCol), STAT=alloc_stat)
       if (alloc_stat /= 0) then
          status = -1
          return
       endif
    endif

    call gather(tags, AtomRowToGlobal, plan_atom_row)
    call gather(tags, AtomColToGlobal, plan_atom_col)

  end subroutine setAtomTags

  subroutine setGroupTags(tags, status)
    integer, dimension(:) :: tags
    integer :: status

    integer :: alloc_stat

    integer :: nGroupsInCol
    integer :: nGroupsInRow

    status = 0

    nGroupsInRow = getNgroupsInRow(plan_group_row)
    nGroupsInCol = getNgroupsInCol(plan_group_col)

    if (allocated(GroupLocalToGlobal)) then
       deallocate(GroupLocalToGlobal)
    endif
    allocate(GroupLocalToGlobal(size(tags)), STAT=alloc_stat)
    if (alloc_stat /= 0) then
       status = -1
       return
    endif

    GroupLocalToGlobal = tags

    if (allocated(GroupRowToGlobal)) then
       deallocate(GroupRowToGlobal)
    endif
    allocate(GroupRowToGlobal(nGroupsInRow), STAT=alloc_stat)
    if (alloc_stat /= 0) then
       status = -1
       return
    endif

    if (allocated(GroupColToGlobal)) then
       deallocate(GroupColToGlobal)
    endif
    allocate(GroupColToGlobal(nGroupsInCol), STAT=alloc_stat)
    if (alloc_stat /= 0) then
       status = -1
       return
    endif

    call gather(tags, GroupRowToGlobal, plan_group_row)
    call gather(tags, GroupColToGlobal, plan_group_col)

  end subroutine setGroupTags

  function getNatomsInCol(thisplan) result(nInCol)
    type (gs_plan), intent(in) :: thisplan
    integer :: nInCol
    nInCol = thisplan%gsComponentPlan%nAtomsInColumn
  end function getNatomsInCol

  function getNatomsInRow(thisplan) result(nInRow)
    type (gs_plan), intent(in) :: thisplan
    integer :: nInRow
    nInRow = thisplan%gsComponentPlan%nAtomsInRow
  end function getNatomsInRow

  function getNgroupsInCol(thisplan) result(nInCol)
    type (gs_plan), intent(in) :: thisplan
    integer :: nInCol
    nInCol = thisplan%gsComponentPlan%nGroupsInColumn
  end function getNgroupsInCol

  function getNgroupsInRow(thisplan) result(nInRow)
    type (gs_plan), intent(in) :: thisplan
    integer :: nInRow
    nInRow = thisplan%gsComponentPlan%nGroupsInRow
  end function getNgroupsInRow

  function isMPISimSet() result(isthisSimSet)
    logical :: isthisSimSet
    if (isSimSet) then
       isthisSimSet = .true.
    else
       isthisSimSet = .false.
    endif
  end function isMPISimSet

  subroutine printComponentPlan(this_plan, printNode)

    type (mpiComponentPlan), intent(in) :: this_plan
    integer, optional :: printNode
    logical :: print_me = .false.

    if (present(printNode)) then
       if (printNode == mpiSim%myNode) print_me = .true.
    else
       print_me = .true.
    endif

    if (print_me) then
       write(default_error,*) "SetupSimParallel: writing component plan"

       write(default_error,*) "nMolGlobal: ", mpiSim%nMolGlobal
       write(default_error,*) "nAtomsGlobal: ", mpiSim%nAtomsGlobal
       write(default_error,*) "nBondsGlobal: ", mpiSim%nBondsGlobal
       write(default_error,*) "nTorsionsGlobal: ", mpiSim%nTorsionsGlobal
       write(default_error,*) "nSRIGlobal: ", mpiSim%nSRIGlobal
       write(default_error,*) "nAtomsLocal: ", mpiSim%nAtomsLocal
       write(default_error,*) "myNode: ", mpiSim%myNode
       write(default_error,*) "nProcessors: ", mpiSim%nProcessors
       write(default_error,*) "rowComm: ", mpiSim%rowComm
       write(default_error,*) "columnComm: ", mpiSim%columnComm
       write(default_error,*) "nRows: ", mpiSim%nRows
       write(default_error,*) "nColumns: ", mpiSim%nColumns
       write(default_error,*) "nAtomsInRow: ", mpiSim%nAtomsInRow
       write(default_error,*) "nAtomsInColumn: ", mpiSim%nAtomsInColumn
       write(default_error,*) "rowIndex: ", mpiSim%rowIndex
       write(default_error,*) "columnIndex: ", mpiSim%columnIndex
    endif
  end subroutine printComponentPlan

  function getMyNode() result(myNode)
    integer :: myNode
    myNode = mpiSim%myNode
  end function getMyNode

#ifdef PROFILE
  subroutine printCommTime()
    write(*,*) "MPI communication time is: ", commTime
  end subroutine printCommTime

  function getCommTime() result(comm_time)
    real :: comm_time
    comm_time = commTime
  end function getCommTime
#endif

#endif // is_mpi
end module mpiSimulation