root/group/trunk/OOPSE-4/src/UseTheForce/DarkSide/simParallel.F90
Revision: 2682
Committed: Mon Apr 3 15:37:43 2006 UTC by chuckv
File size: 29348 byte(s)
Log Message:
Added status module to simParallel for error reporting.

File Contents

1 !!
2 !! Copyright (c) 2005 The University of Notre Dame. All Rights Reserved.
3 !!
4 !! The University of Notre Dame grants you ("Licensee") a
5 !! non-exclusive, royalty free, license to use, modify and
6 !! redistribute this software in source and binary code form, provided
7 !! that the following conditions are met:
8 !!
9 !! 1. Acknowledgement of the program authors must be made in any
10 !! publication of scientific results based in part on use of the
11 !! program. An acceptable form of acknowledgement is citation of
12 !! the article in which the program was described (Matthew
13 !! A. Meineke, Charles F. Vardeman II, Teng Lin, Christopher
14 !! J. Fennell and J. Daniel Gezelter, "OOPSE: An Object-Oriented
15 !! Parallel Simulation Engine for Molecular Dynamics,"
16 !! J. Comput. Chem. 26, pp. 252-271 (2005))
17 !!
18 !! 2. Redistributions of source code must retain the above copyright
19 !! notice, this list of conditions and the following disclaimer.
20 !!
21 !! 3. Redistributions in binary form must reproduce the above copyright
22 !! notice, this list of conditions and the following disclaimer in the
23 !! documentation and/or other materials provided with the
24 !! distribution.
25 !!
26 !! This software is provided "AS IS," without a warranty of any
27 !! kind. All express or implied conditions, representations and
28 !! warranties, including any implied warranty of merchantability,
29 !! fitness for a particular purpose or non-infringement, are hereby
30 !! excluded. The University of Notre Dame and its licensors shall not
31 !! be liable for any damages suffered by licensee as a result of
32 !! using, modifying or distributing the software or its
33 !! derivatives. In no event will the University of Notre Dame or its
34 !! licensors be liable for any lost revenue, profit or data, or for
35 !! direct, indirect, special, consequential, incidental or punitive
36 !! damages, however caused and regardless of the theory of liability,
37 !! arising out of the use of or inability to use software, even if the
38 !! University of Notre Dame has been advised of the possibility of
39 !! such damages.
40 !!
41
42
43 !! MPI support for long range forces using force decomposition
44 !! on a square grid of processors.
45 !! Corresponds to mpiSimulation.cpp for C++.
46 !! This module also contains a private interface for the MPI F90 routines.
47 !!
48 !! @author Charles F. Vardeman II
49 !! @author Matthew Meineke
50 !! @version $Id: simParallel.F90,v 1.6 2006-04-03 15:37:43 chuckv Exp $, $Date: 2006-04-03 15:37:43 $, $Name: not supported by cvs2svn $, $Revision: 1.6 $
51
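!! Illustrative force-loop pattern (an added sketch, not part of the original
!! source; the real driver lives on the C++ side, and the array names q, f,
!! q_Row, q_Col, f_Row, f_Col and f_temp are hypothetical):
!!   call gather(q, q_Row, plan_atom_row_3d)        ! replicate positions along my row
!!   call gather(q, q_Col, plan_atom_col_3d)        ! replicate positions along my column
!!   ...compute pair forces between row atoms and column atoms...
!!   call scatter(f_Row, f_temp, plan_atom_row_3d)  ! sum row contributions back to local atoms
!!   f = f + f_temp
!!   call scatter(f_Col, f_temp, plan_atom_col_3d)  ! sum column contributions back to local atoms
!!   f = f + f_temp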
52 module mpiSimulation
53 use definitions
54 use status
55 #ifdef IS_MPI
56 use oopseMPI
57 implicit none
58 PRIVATE
59
60
61 !! PUBLIC Subroutines contained in this module
62 !! gather and scatter are generic interfaces
63 !! to the gather and scatter routines.
64 public :: gather, scatter
65 public :: setupSimParallel
66 public :: replanSimParallel
67 public :: getNatomsInCol
68 public :: getNatomsInRow
69 public :: getNgroupsInCol
70 public :: getNgroupsInRow
71 public :: isMPISimSet
72 public :: printComponentPlan
73 public :: getMyNode
74
75 !! PUBLIC Subroutines contained in MPI module
76 public :: mpi_bcast
77 public :: mpi_allreduce
78 ! public :: mpi_reduce
79 public :: mpi_send
80 public :: mpi_recv
81 public :: mpi_get_processor_name
82 public :: mpi_finalize
83
84 !! PUBLIC mpi variables
85 public :: mpi_comm_world
86 public :: mpi_character
87 public :: mpi_integer
88 public :: mpi_lor
89 public :: mpi_logical
90 public :: mpi_double_precision
91 public :: mpi_sum
92 public :: mpi_max
93 public :: mpi_status_size
94 public :: mpi_any_source
95
96
97
98 !! Safety logical to prevent access to the ComponentPlan until
99 !! set by C++.
100 logical, save :: ComponentPlanSet = .false.
101
102
103 !! generic mpi error declaration.
104 integer, public :: mpi_err
105 character(len = statusMsgSize) :: errMsg
106
107 #ifdef PROFILE
108 public :: printCommTime
109 public :: getCommTime
110 real,save :: commTime = 0.0
111 real :: commTimeInitial,commTimeFinal
112 #endif
113
114 !! Include mpiComponentPlan type. mpiComponentPlan is a
115 !! dual header file for both C and Fortran.
116 #define __FORTRAN90
117 #include "UseTheForce/mpiComponentPlan.h"
118
119
120 !! Tags used during force loop for parallel simulation
121 integer, public, allocatable, dimension(:) :: AtomLocalToGlobal
122 integer, public, allocatable, dimension(:) :: AtomRowToGlobal
123 integer, public, allocatable, dimension(:) :: AtomColToGlobal
124 integer, public, allocatable, dimension(:) :: GroupLocalToGlobal
125 integer, public, allocatable, dimension(:) :: GroupRowToGlobal
126 integer, public, allocatable, dimension(:) :: GroupColToGlobal
127
128 !! Logical set true if mpiSimulation has been initialized
129 logical, save :: isSimSet = .false.
130
131
132 type (mpiComponentPlan), save :: mpiSim
133
134 !! gs_plan contains plans for gather and scatter routines
135 type, public :: gs_plan
136 private
137 type (mpiComponentPlan), pointer :: gsComponentPlan => NULL()
138 integer :: gsPlanSize !! size of this plan (nDim*nComponents)
139 integer :: globalPlanSize !! size of all components in plan
140 integer, dimension(:), pointer :: displs !! Displacements array for mpi indexed from 0.
141 integer, dimension(:), pointer :: counts !! Counts array for mpi indexed from 0.
142 integer :: myPlanComm !! My communicator for this plan
143 integer :: myPlanRank !! My rank in this plan
144 integer :: planNprocs !! Number of processors in this plan
145 end type gs_plan
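!! Illustrative gs_plan contents (an added sketch): for a hypothetical 3-processor
!! row plan with 3 coordinates per atom and 10, 20 and 15 local atoms on the three
!! ranks, the plan built by plan_gather_scatter would hold
!!   gsPlanSize     = 30, 60 or 45 (nDim*nComponents, depending on the rank)
!!   counts         = (/ 30, 60, 45 /)   ! every rank's gsPlanSize, allgathered
!!   displs         = (/ 0, 30, 90 /)    ! running offsets into the gathered array
!!   globalPlanSize = 135                ! sum(counts)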
146
147 ! plans for different decompositions
148 type (gs_plan), public, save :: plan_atom_row
149 type (gs_plan), public, save :: plan_atom_row_3d
150 type (gs_plan), public, save :: plan_atom_col
151 type (gs_plan), public, save :: plan_atom_col_3d
152 type (gs_plan), public, save :: plan_atom_row_Rotation
153 type (gs_plan), public, save :: plan_atom_col_Rotation
154 type (gs_plan), public, save :: plan_group_row
155 type (gs_plan), public, save :: plan_group_col
156 type (gs_plan), public, save :: plan_group_row_3d
157 type (gs_plan), public, save :: plan_group_col_3d
158
159 type (mpiComponentPlan), pointer :: simComponentPlan
160
161 ! interface for gather scatter routines
162 !! Generic interface for gather.
163 !! Gathers a local array into a row or column array; see the usage sketch
164 !! after the scatter interface below. Interface provided for integer,
165 !! double precision, and double precision rank-2 arrays.
166 interface gather
167 module procedure gather_integer
168 module procedure gather_double
169 module procedure gather_double_2d
170 end interface
171
172 !! Generic interface for scatter.
173 !! Scatters a row or column array, adding components
174 !! and reducing them to a local nComponent array.
175 !! Interface provided for double precision and double precision rank-2 arrays.
176
177 interface scatter
178 module procedure scatter_double
179 module procedure scatter_double_2d
180 end interface
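!! Illustrative use of the generic interfaces (an added sketch; the array names
!! and extents here are hypothetical):
!!   real(kind=dp) :: q(nDim, nAtomsLocal)         ! local positions
!!   real(kind=dp) :: q_Row(nDim, nAtomsInRow)     ! positions of every atom in my row
!!   real(kind=dp) :: f_Row(nDim, nAtomsInRow), f_temp(nDim, nAtomsLocal)
!!   call gather(q, q_Row, plan_atom_row_3d)       ! mpi_allgatherv over the row communicator
!!   call scatter(f_Row, f_temp, plan_atom_row_3d) ! mpi_reduce_scatter (MPI_SUM) back to local atoms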
181
182
183
184 contains
185
186 !! Sets up mpiComponentPlan with structure passed from C++.
187 subroutine setupSimParallel(thisComponentPlan, nAtomTags, atomTags, &
188 nGroupTags, groupTags, status)
189 !! Passed Arguments
190 !! mpiComponentPlan struct from C
191 type (mpiComponentPlan), intent(inout) :: thisComponentPlan
192 !! Number of tags passed
193 integer, intent(in) :: nAtomTags, nGroupTags
194 !! Result status, 0 = normal, -1 = error
195 integer, intent(out) :: status
196 integer :: localStatus
197 !! Global reference tag for local particles
198 integer, dimension(nAtomTags), intent(inout) :: atomTags
199 integer, dimension(nGroupTags), intent(inout) :: groupTags
200
201 !write(*,*) 'mpiSim_mod thinks node', thisComponentPlan%myNode, &
202 ! ' has atomTags(1) = ', atomTags(1)
203
204 status = 0
205 if (componentPlanSet) then
206 return
207 endif
208 componentPlanSet = .true.
209
210 !! copy c component plan to fortran
211 mpiSim = thisComponentPlan
212 write(*,*) "Setting up simParallel"
213
214 call make_Force_Grid(mpiSim, localStatus)
215 if (localStatus /= 0) then
216 write(errMsg, *) 'An error in making the force grid has occurred'
217 call handleError("setupSimParallel", errMsg)
218 status = -1
219 return
220 endif
221
222 call updateGridComponents(mpiSim, localStatus)
223 if (localStatus /= 0) then
224 write(errMsg,*) "Error updating grid components"
225 call handleError("setupSimParallel", errMsg)
226 status = -1
227 return
228 endif
229
230 !! initialize gather and scatter plans used in this simulation
231 call plan_gather_scatter(1, mpiSim%nAtomsLocal, &
232 mpiSim, mpiSim%rowComm, plan_atom_row)
233 call plan_gather_scatter(nDim, mpiSim%nAtomsLocal, &
234 mpiSim, mpiSim%rowComm, plan_atom_row_3d)
235 call plan_gather_scatter(9, mpiSim%nAtomsLocal, &
236 mpiSim, mpiSim%rowComm, plan_atom_row_Rotation)
237 call plan_gather_scatter(1, mpiSim%nGroupsLocal, &
238 mpiSim, mpiSim%rowComm, plan_group_row)
239 call plan_gather_scatter(nDim, mpiSim%nGroupsLocal, &
240 mpiSim, mpiSim%rowComm, plan_group_row_3d)
241
242 call plan_gather_scatter(1, mpiSim%nAtomsLocal, &
243 mpiSim, mpiSim%columnComm, plan_atom_col)
244 call plan_gather_scatter(nDim, mpiSim%nAtomsLocal, &
245 mpiSim, mpiSim%columnComm, plan_atom_col_3d)
246 call plan_gather_scatter(9, mpiSim%nAtomsLocal, &
247 mpiSim, mpiSim%columnComm, plan_atom_col_Rotation)
248 call plan_gather_scatter(1, mpiSim%nGroupsLocal, &
249 mpiSim, mpiSim%columnComm, plan_group_col)
250 call plan_gather_scatter(nDim, mpiSim%nGroupsLocal, &
251 mpiSim, mpiSim%columnComm, plan_group_col_3d)
252
253 ! Initialize tags
254
255 call setAtomTags(atomTags,localStatus)
256 if (localStatus /= 0) then
257 write(errMsg, *) 'An error in setting Atom Tags has occurred'
258 call handleError("setupSimParallel", errMsg)
259 status = -1
260 return
261 endif
262
263
264 call setGroupTags(groupTags,localStatus)
265 if (localStatus /= 0) then
266 write(errMsg, *) 'An error in setting Group Tags has occurred'
267 call handleError("setupSimParallel", errMsg)
268 status = -1
269 return
270 endif
271
272 isSimSet = .true.
273
274 ! call printComponentPlan(mpiSim,0)
275 end subroutine setupSimParallel
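   !! Illustrative call (an added sketch; setupSimParallel is actually invoked from
   !! the C++ mpiSimulation layer, and the caller-side names used here are hypothetical):
   !!   call setupSimParallel(cPlan, nLocalAtoms, atomGlobalIndex, &
   !!        nLocalGroups, groupGlobalIndex, stat)
   !!   if (stat /= 0) ... ! setup failed; the error has already been reported via handleError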
276
277 subroutine replanSimParallel(thisComponentPlan,status)
278 ! Passed Arguments
279 !! mpiComponentPlan struct from C
280 type (mpiComponentPlan), intent(inout) :: thisComponentPlan
281 integer, intent(out) :: status
282 integer :: localStatus
283 integer :: mpierror
284 status = 0
285
286 call updateGridComponents(thisComponentPlan,localStatus)
287 if (localStatus /= 0) then
288 status = -1
289 return
290 endif
291
292 !! Unplan Gather Scatter plans
293 call unplan_gather_scatter(plan_atom_row)
294 call unplan_gather_scatter(plan_atom_row_3d)
295 call unplan_gather_scatter(plan_atom_row_Rotation)
296 call unplan_gather_scatter(plan_group_row)
297 call unplan_gather_scatter(plan_group_row_3d)
298
299 call unplan_gather_scatter(plan_atom_col)
300 call unplan_gather_scatter(plan_atom_col_3d)
301 call unplan_gather_scatter(plan_atom_col_Rotation)
302 call unplan_gather_scatter(plan_group_col)
303 call unplan_gather_scatter(plan_group_col_3d)
304
305 !! initialize gather and scatter plans used in this simulation
306 call plan_gather_scatter(1, mpiSim%nAtomsLocal, &
307 mpiSim, mpiSim%rowComm, plan_atom_row)
308 call plan_gather_scatter(nDim, mpiSim%nAtomsLocal, &
309 mpiSim, mpiSim%rowComm, plan_atom_row_3d)
310 call plan_gather_scatter(9, mpiSim%nAtomsLocal, &
311 mpiSim, mpiSim%rowComm, plan_atom_row_Rotation)
312 call plan_gather_scatter(1, mpiSim%nGroupsLocal, &
313 mpiSim, mpiSim%rowComm, plan_group_row)
314 call plan_gather_scatter(nDim, mpiSim%nGroupsLocal, &
315 mpiSim, mpiSim%rowComm, plan_group_row_3d)
316
317 call plan_gather_scatter(1, mpiSim%nAtomsLocal, &
318 mpiSim, mpiSim%columnComm, plan_atom_col)
319 call plan_gather_scatter(nDim, mpiSim%nAtomsLocal, &
320 mpiSim, mpiSim%columnComm, plan_atom_col_3d)
321 call plan_gather_scatter(9, mpiSim%nAtomsLocal, &
322 mpiSim, mpiSim%columnComm, plan_atom_col_Rotation)
323 call plan_gather_scatter(1, mpiSim%nGroupsLocal, &
324 mpiSim, mpiSim%columnComm, plan_group_col)
325 call plan_gather_scatter(nDim, mpiSim%nGroupsLocal, &
326 mpiSim, mpiSim%columnComm, plan_group_col_3d)
327
328 end subroutine replanSimParallel
329
330 !! Updates number of row and column components for long range forces.
331 subroutine updateGridComponents(thisComponentPlan, status)
332 type (mpiComponentPlan) :: thisComponentPlan !! mpiComponentPlan
333
334 !! Status return
335 !! - 0 Success
336 !! - -1 Failure
337 integer, intent(out) :: status
338 integer :: nAtomsLocal
339 integer :: nAtomsInRow = 0
340 integer :: nAtomsInColumn = 0
341 integer :: nGroupsLocal
342 integer :: nGroupsInRow = 0
343 integer :: nGroupsInColumn = 0
344 integer :: mpiErrors
345
346 status = 0
347 if (.not. componentPlanSet) return
348
349 if (thisComponentPlan%nAtomsLocal == 0) then
350 status = -1
351 return
352 endif
353 if (thisComponentPlan%nGroupsLocal == 0) then
354 write(*,*) 'tcp%ngl = ', thisComponentPlan%nGroupsLocal
355 status = -1
356 return
357 endif
358
359 nAtomsLocal = thisComponentPlan%nAtomsLocal
360 nGroupsLocal = thisComponentPlan%nGroupsLocal
361
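   !! Illustrative totals (an added sketch): on a grid with nRows = 3 and nColumns = 2
   !! where every processor owns 100 atoms, a row communicator spans 2 processors and a
   !! column communicator spans 3, so the allreduce calls below give
   !! nAtomsInRow = 200 and nAtomsInColumn = 300.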
362 call mpi_allreduce(nAtomsLocal, nAtomsInRow, 1, mpi_integer, &
363 mpi_sum, thisComponentPlan%rowComm, mpiErrors)
364 if (mpiErrors /= 0) then
365 status = -1
366 return
367 endif
368
369 call mpi_allreduce(nAtomsLocal, nAtomsInColumn, 1, mpi_integer, &
370 mpi_sum, thisComponentPlan%columnComm, mpiErrors)
371 if (mpiErrors /= 0) then
372 status = -1
373 return
374 endif
375
376 call mpi_allreduce(nGroupsLocal, nGroupsInRow, 1, mpi_integer, &
377 mpi_sum, thisComponentPlan%rowComm, mpiErrors)
378 if (mpiErrors /= 0) then
379 status = -1
380 return
381 endif
382
383 call mpi_allreduce(nGroupsLocal, nGroupsInColumn, 1, mpi_integer, &
384 mpi_sum, thisComponentPlan%columnComm, mpiErrors)
385 if (mpiErrors /= 0) then
386 status = -1
387 return
388 endif
389
390 thisComponentPlan%nAtomsInRow = nAtomsInRow
391 thisComponentPlan%nAtomsInColumn = nAtomsInColumn
392 thisComponentPlan%nGroupsInRow = nGroupsInRow
393 thisComponentPlan%nGroupsInColumn = nGroupsInColumn
394
395 end subroutine updateGridComponents
396
397
398 !! Creates a square force decomposition of processors into row and column
399 !! communicators.
400 subroutine make_Force_Grid(thisComponentPlan,status)
401 type (mpiComponentPlan) :: thisComponentPlan
402 integer, intent(out) :: status !! status returns -1 if error
403 integer :: nColumnsMax !! Maximum number of columns
404 integer :: nWorldProcessors !! Total number of processors in World comm.
405 integer :: rowIndex !! Row for this processor.
406 integer :: columnIndex !! Column for this processor.
407 integer :: nRows !! Total number of rows.
408 integer :: nColumns !! Total number of columns.
409 integer :: mpiErrors !! MPI errors.
410 integer :: rowCommunicator !! MPI row communicator.
411 integer :: columnCommunicator
412 integer :: myWorldRank
413 integer :: i
414
415
416 if (.not. ComponentPlanSet) return
417 status = 0
418
419 !! We make a dangerous assumption here that if nProcessors is
420 !! zero, then we need to get the information from MPI.
421 if (thisComponentPlan%nProcessors == 0 ) then
422 call mpi_comm_size( MPI_COMM_WORLD, nWorldProcessors,mpiErrors)
423 if ( mpiErrors /= 0 ) then
424 status = -1
425 return
426 endif
427 call mpi_comm_rank( MPI_COMM_WORLD,myWorldRank,mpiErrors)
428 if ( mpiErrors /= 0 ) then
429 status = -1
430 return
431 endif
432
433 else
434 nWorldProcessors = thisComponentPlan%nProcessors
435 myWorldRank = thisComponentPlan%myNode
436 endif
437
438 nColumnsMax = nint(sqrt(real(nWorldProcessors,kind=dp)))
439
440 do i = 1, nColumnsMax
441 if (mod(nWorldProcessors,i) == 0) nColumns = i
442 end do
443
444 nRows = nWorldProcessors/nColumns
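   !! Worked example (an added sketch): with nWorldProcessors = 6, nColumnsMax = nint(sqrt(6.0)) = 2;
   !! the largest divisor of 6 that does not exceed 2 is nColumns = 2, so nRows = 3.
   !! Rank 4 then lands at rowIndex = 4/2 = 2 and columnIndex = mod(4,2) = 0.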
445
446 rowIndex = myWorldRank/nColumns
447
448 call mpi_comm_split(mpi_comm_world,rowIndex,0,rowCommunicator,mpiErrors)
449 if ( mpiErrors /= 0 ) then
450 write(default_error,*) "MPI comm split failed at row communicator"
451 status = -1
452 return
453 endif
454
455 columnIndex = mod(myWorldRank,nColumns)
456 call mpi_comm_split(mpi_comm_world,columnIndex,0,columnCommunicator,mpiErrors)
457 if ( mpiErrors /= 0 ) then
458 write(default_error,*) "MPI comm split failed at columnCommunicator"
459 status = -1
460 return
461 endif
462
463 ! Set appropriate components of thisComponentPlan
464 thisComponentPlan%rowComm = rowCommunicator
465 thisComponentPlan%columnComm = columnCommunicator
466 thisComponentPlan%rowIndex = rowIndex
467 thisComponentPlan%columnIndex = columnIndex
468 thisComponentPlan%nRows = nRows
469 thisComponentPlan%nColumns = nColumns
470
471 end subroutine make_Force_Grid
472
473 !! Initializes a gather/scatter plan.
474 subroutine plan_gather_scatter( nDim, nObjects, thisComponentPlan, &
475 thisComm, this_plan, status)
476 integer, intent(in) :: nDim !! Number of dimensions for gather scatter plan
477 integer, intent(in) :: nObjects
478 type (mpiComponentPlan), intent(in), target :: thisComponentPlan
479 type (gs_plan), intent(out) :: this_plan !! MPI Component Plan
480 integer, intent(in) :: thisComm !! MPI communicator for this plan
481
482 integer :: arraySize !! size to allocate plan for
483 integer, intent(out), optional :: status
484 integer :: ierror
485 integer :: i,junk
486
487 if (present(status)) status = 0
488
489 !! Set gsComponentPlan pointer
490 !! to the component plan we want to use for this gather/scatter plan.
491 !! WARNING: this could be dangerous since thisComponentPlan was originally
492 !! allocated in C++ and there is a significant difference between C and
493 !! F95 pointers.
494 this_plan%gsComponentPlan => thisComponentPlan
495
496 ! Set this plan size for displs array.
497 this_plan%gsPlanSize = nDim * nObjects
498
499 ! Duplicate communicator for this plan
500 call mpi_comm_dup(thisComm, this_plan%myPlanComm, mpi_err)
501 if (mpi_err /= 0) then
502 if (present(status)) status = -1
503 return
504 end if
505 call mpi_comm_rank(this_plan%myPlanComm, this_plan%myPlanRank, mpi_err)
506 if (mpi_err /= 0) then
507 if (present(status)) status = -1
508 return
509 end if
510
511 call mpi_comm_size(this_plan%myPlanComm, this_plan%planNprocs, mpi_err)
512
513 if (mpi_err /= 0) then
514 if (present(status)) status = -1
515 return
516 end if
517
518 !! counts and displacements arrays are indexed from 0 to be compatible
519 !! with MPI arrays.
520 allocate (this_plan%counts(0:this_plan%planNprocs-1),STAT=ierror)
521 if (ierror /= 0) then
522 if (present(status)) status = -1
523 return
524 end if
525
526 allocate (this_plan%displs(0:this_plan%planNprocs-1),STAT=ierror)
527 if (ierror /= 0) then
528 if (present(status)) status = -1
529 return
530 end if
531
532 !! Gather all of the local plan sizes into an array of length planNprocs.
533 call mpi_allgather(this_plan%gsPlanSize,1,mpi_integer,this_plan%counts, &
534 1,mpi_integer,thisComm,mpi_err)
535
536 if (mpi_err /= 0) then
537 if (present(status)) status = -1
538 return
539 end if
540
541 !! figure out the total size of this plan summed over all processors
542 this_plan%globalPlanSize = sum(this_plan%counts)
543
544 !! initialize plan displacements.
545 this_plan%displs(0) = 0
546 do i = 1, this_plan%planNprocs - 1,1
547 this_plan%displs(i) = this_plan%displs(i-1) + this_plan%counts(i-1)
548 end do
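   !! Worked example (an added sketch): counts = (/ 8, 4, 12, 4 /) yields
   !! displs = (/ 0, 8, 12, 24 /) and globalPlanSize = 28.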
549 end subroutine plan_gather_scatter
550
551 subroutine unplan_gather_scatter(this_plan)
552 type (gs_plan), intent(inout) :: this_plan
553
554 this_plan%gsComponentPlan => null()
555 call mpi_comm_free(this_plan%myPlanComm,mpi_err)
556
557 deallocate(this_plan%counts)
558 deallocate(this_plan%displs)
559
560 end subroutine unplan_gather_scatter
561
562 subroutine gather_integer( sbuffer, rbuffer, this_plan, status)
563
564 type (gs_plan), intent(inout) :: this_plan
565 integer, dimension(:), intent(inout) :: sbuffer
566 integer, dimension(:), intent(inout) :: rbuffer
567 integer :: noffset
568 integer, intent(out), optional :: status
569 integer :: i
570
571 if (present(status)) status = 0
572 noffset = this_plan%displs(this_plan%myPlanRank)
573
574 ! if (getmyNode() == 1) then
575 ! write(*,*) "Node 0 printing allgatherv vars"
576 ! write(*,*) "Noffset: ", noffset
577 ! write(*,*) "PlanSize: ", this_plan%gsPlanSize
578 ! write(*,*) "PlanComm: ", this_plan%myPlanComm
579 ! end if
580
581 call mpi_allgatherv(sbuffer,this_plan%gsPlanSize, mpi_integer, &
582 rbuffer,this_plan%counts,this_plan%displs,mpi_integer, &
583 this_plan%myPlanComm, mpi_err)
584
585 if (mpi_err /= 0) then
586 if (present(status)) status = -1
587 endif
588
589 end subroutine gather_integer
590
591 subroutine gather_double( sbuffer, rbuffer, this_plan, status)
592
593 type (gs_plan), intent(in) :: this_plan
594 real( kind = DP ), dimension(:), intent(inout) :: sbuffer
595 real( kind = DP ), dimension(:), intent(inout) :: rbuffer
596 integer :: noffset
597 integer, intent(out), optional :: status
598
599
600 if (present(status)) status = 0
601 noffset = this_plan%displs(this_plan%myPlanRank)
602 #ifdef PROFILE
603 call cpu_time(commTimeInitial)
604 #endif
605 call mpi_allgatherv(sbuffer,this_plan%gsPlanSize, mpi_double_precision, &
606 rbuffer,this_plan%counts,this_plan%displs,mpi_double_precision, &
607 this_plan%myPlanComm, mpi_err)
608 #ifdef PROFILE
609 call cpu_time(commTimeFinal)
610 commTime = commTime + commTimeFinal - commTimeInitial
611 #endif
612
613 if (mpi_err /= 0) then
614 if (present(status)) status = -1
615 endif
616
617 end subroutine gather_double
618
619 subroutine gather_double_2d( sbuffer, rbuffer, this_plan, status)
620
621 type (gs_plan), intent(in) :: this_plan
622 real( kind = DP ), dimension(:,:), intent(inout) :: sbuffer
623 real( kind = DP ), dimension(:,:), intent(inout) :: rbuffer
624 integer :: noffset,i,ierror
625 integer, intent(out), optional :: status
626
627 external mpi_allgatherv
628
629 if (present(status)) status = 0
630
631 ! noffset = this_plan%displs(this_plan%me)
632 #ifdef PROFILE
633 call cpu_time(commTimeInitial)
634 #endif
635
636 call mpi_allgatherv(sbuffer,this_plan%gsPlanSize, mpi_double_precision, &
637 rbuffer,this_plan%counts,this_plan%displs,mpi_double_precision, &
638 this_plan%myPlanComm, mpi_err)
639
640 #ifdef PROFILE
641 call cpu_time(commTimeFinal)
642 commTime = commTime + commTimeFinal - commTimeInitial
643 #endif
644
645 if (mpi_err /= 0) then
646 if (present(status)) status = -1
647 endif
648
649 end subroutine gather_double_2d
650
651 subroutine scatter_double( sbuffer, rbuffer, this_plan, status)
652
653 type (gs_plan), intent(in) :: this_plan
654 real( kind = DP ), dimension(:), intent(inout) :: sbuffer
655 real( kind = DP ), dimension(:), intent(inout) :: rbuffer
656 integer, intent(out), optional :: status
657 external mpi_reduce_scatter
658
659 if (present(status)) status = 0
660
661 #ifdef PROFILE
662 call cpu_time(commTimeInitial)
663 #endif
664 call mpi_reduce_scatter(sbuffer,rbuffer, this_plan%counts, &
665 mpi_double_precision, MPI_SUM, this_plan%myPlanComm, mpi_err)
666 #ifdef PROFILE
667 call cpu_time(commTimeFinal)
668 commTime = commTime + commTimeFinal - commTimeInitial
669 #endif
670
671 if (mpi_err /= 0) then
672 if (present(status)) status = -1
673 endif
674
675 end subroutine scatter_double
676
677 subroutine scatter_double_2d( sbuffer, rbuffer, this_plan, status)
678
679 type (gs_plan), intent(in) :: this_plan
680 real( kind = DP ), dimension(:,:), intent(inout) :: sbuffer
681 real( kind = DP ), dimension(:,:), intent(inout) :: rbuffer
682 integer, intent(out), optional :: status
683 external mpi_reduce_scatter
684
685 if (present(status)) status = 0
686 #ifdef PROFILE
687 call cpu_time(commTimeInitial)
688 #endif
689
690 call mpi_reduce_scatter(sbuffer,rbuffer, this_plan%counts, &
691 mpi_double_precision, MPI_SUM, this_plan%myPlanComm, mpi_err)
692 #ifdef PROFILE
693 call cpu_time(commTimeFinal)
694 commTime = commTime + commTimeFinal - commTimeInitial
695 #endif
696
697 if (mpi_err /= 0) then
698 if (present(status)) status = -1
699 endif
700
701 end subroutine scatter_double_2d
702
703 subroutine setAtomTags(tags, status)
704 integer, dimension(:) :: tags
705 integer :: status
706
707 integer :: alloc_stat
708
709 integer :: nAtomsInCol
710 integer :: nAtomsInRow
711
712 status = 0
713 ! allocate row arrays
714 nAtomsInRow = getNatomsInRow(plan_atom_row)
715 nAtomsInCol = getNatomsInCol(plan_atom_col)
716
717 if(.not. allocated(AtomLocalToGlobal)) then
718 allocate(AtomLocalToGlobal(size(tags)),STAT=alloc_stat)
719 if (alloc_stat /= 0 ) then
720 status = -1
721 return
722 endif
723 else
724 deallocate(AtomLocalToGlobal)
725 allocate(AtomLocalToGlobal(size(tags)),STAT=alloc_stat)
726 if (alloc_stat /= 0 ) then
727 status = -1
728 return
729 endif
730
731 endif
732
733 AtomLocalToGlobal = tags
734
735 if (.not. allocated(AtomRowToGlobal)) then
736 allocate(AtomRowToGlobal(nAtomsInRow),STAT=alloc_stat)
737 if (alloc_stat /= 0 ) then
738 status = -1
739 return
740 endif
741 else
742 deallocate(AtomRowToGlobal)
743 allocate(AtomRowToGlobal(nAtomsInRow),STAT=alloc_stat)
744 if (alloc_stat /= 0 ) then
745 status = -1
746 return
747 endif
748
749 endif
750 ! allocate column arrays
751 if (.not. allocated(AtomColToGlobal)) then
752 allocate(AtomColToGlobal(nAtomsInCol),STAT=alloc_stat)
753 if (alloc_stat /= 0 ) then
754 status = -1
755 return
756 endif
757 else
758 deallocate(AtomColToGlobal)
759 allocate(AtomColToGlobal(nAtomsInCol),STAT=alloc_stat)
760 if (alloc_stat /= 0 ) then
761 status = -1
762 return
763 endif
764 endif
765
766 call gather(tags, AtomRowToGlobal, plan_atom_row)
767 call gather(tags, AtomColToGlobal, plan_atom_col)
768
769 end subroutine setAtomTags
770
771 subroutine setGroupTags(tags, status)
772 integer, dimension(:) :: tags
773 integer :: status
774
775 integer :: alloc_stat
776
777 integer :: nGroupsInCol
778 integer :: nGroupsInRow
779
780 status = 0
781
782 nGroupsInRow = getNgroupsInRow(plan_group_row)
783 nGroupsInCol = getNgroupsInCol(plan_group_col)
784
785 if(allocated(GroupLocalToGlobal)) then
786 deallocate(GroupLocalToGlobal)
787 endif
788 allocate(GroupLocalToGlobal(size(tags)),STAT=alloc_stat)
789 if (alloc_stat /= 0 ) then
790 status = -1
791 return
792 endif
793
794 GroupLocalToGlobal = tags
795
796 if(allocated(GroupRowToGlobal)) then
797 deallocate(GroupRowToGlobal)
798 endif
799 allocate(GroupRowToGlobal(nGroupsInRow),STAT=alloc_stat)
800 if (alloc_stat /= 0 ) then
801 status = -1
802 return
803 endif
804
805 if(allocated(GroupColToGlobal)) then
806 deallocate(GroupColToGlobal)
807 endif
808 allocate(GroupColToGlobal(nGroupsInCol),STAT=alloc_stat)
809 if (alloc_stat /= 0 ) then
810 status = -1
811 return
812 endif
813
814 call gather(tags, GroupRowToGlobal, plan_group_row)
815 call gather(tags, GroupColToGlobal, plan_group_col)
816
817 end subroutine setGroupTags
818
819 function getNatomsInCol(thisplan) result(nInCol)
820 type (gs_plan), intent(in) :: thisplan
821 integer :: nInCol
822 nInCol = thisplan%gsComponentPlan%nAtomsInColumn
823 end function getNatomsInCol
824
825 function getNatomsInRow(thisplan) result(nInRow)
826 type (gs_plan), intent(in) :: thisplan
827 integer :: nInRow
828 nInRow = thisplan%gsComponentPlan%nAtomsInRow
829 end function getNatomsInRow
830
831 function getNgroupsInCol(thisplan) result(nInCol)
832 type (gs_plan), intent(in) :: thisplan
833 integer :: nInCol
834 nInCol = thisplan%gsComponentPlan%nGroupsInColumn
835 end function getNgroupsInCol
836
837 function getNgroupsInRow(thisplan) result(nInRow)
838 type (gs_plan), intent(in) :: thisplan
839 integer :: nInRow
840 nInRow = thisplan%gsComponentPlan%nGroupsInRow
841 end function getNgroupsInRow
842
843 function isMPISimSet() result(isthisSimSet)
844 logical :: isthisSimSet
845 if (isSimSet) then
846 isthisSimSet = .true.
847 else
848 isthisSimSet = .false.
849 endif
850 end function isMPISimSet
851
852 subroutine printComponentPlan(this_plan,printNode)
853
854 type (mpiComponentPlan), intent(in) :: this_plan
855 integer, optional :: printNode
856 logical :: print_me
857 print_me = .false.  ! reset each call; initializing in the declaration would imply SAVE
858 if (present(printNode)) then
859 if (printNode == mpiSim%myNode) print_me = .true.
860 else
861 print_me = .true.
862 endif
863
864 if (print_me) then
865 write(default_error,*) "SetupSimParallel: writing component plan"
866
867 write(default_error,*) "nMolGlobal: ", mpiSim%nMolGlobal
868 write(default_error,*) "nAtomsGlobal: ", mpiSim%nAtomsGlobal
869 write(default_error,*) "nAtomsLocal: ", mpiSim%nAtomsLocal
870 write(default_error,*) "myNode: ", mpiSim%myNode
871 write(default_error,*) "nProcessors: ", mpiSim%nProcessors
872 write(default_error,*) "rowComm: ", mpiSim%rowComm
873 write(default_error,*) "columnComm: ", mpiSim%columnComm
874 write(default_error,*) "nRows: ", mpiSim%nRows
875 write(default_error,*) "nColumns: ", mpiSim%nColumns
876 write(default_error,*) "nAtomsInRow: ", mpiSim%nAtomsInRow
877 write(default_error,*) "nAtomsInColumn: ", mpiSim%nAtomsInColumn
878 write(default_error,*) "rowIndex: ", mpiSim%rowIndex
879 write(default_error,*) "columnIndex: ", mpiSim%columnIndex
880 endif
881 end subroutine printComponentPlan
882
883 function getMyNode() result(myNode)
884 integer :: myNode
885 myNode = mpiSim%myNode
886 end function getMyNode
887
888 #ifdef PROFILE
889 subroutine printCommTime()
890 write(*,*) "MPI communication time is: ", commTime
891 end subroutine printCommTime
892
893 function getCommTime() result(comm_time)
894 real :: comm_time
895 comm_time = commTime
896 end function getCommTime
897
898 #endif
899
900 #endif // is_mpi
901 end module mpiSimulation
902