ViewVC Help
View File | Revision Log | Show Annotations | View Changeset | Root Listing
root/group/trunk/OOPSE/libmdtools/mpiSimulation_module.F90
Revision: 1150
Committed: Fri May 7 21:35:05 2004 UTC (20 years, 2 months ago) by gezelter
File size: 24777 byte(s)
Log Message:
Many changes to get group-based cutoffs to work

File Contents

# Content
1
2
3 !! MPI support for long range forces using force decomposition
4 !! on a square grid of processors.
5 !! Corresponds to mpiSimulation.cpp for C++
6 !! mpi_module also contains a private interface for mpi f90 routines.
7 !!
8 !! @author Charles F. Vardeman II
9 !! @author Matthew Meineke
10 !! @version $Id: mpiSimulation_module.F90,v 1.12 2004-05-07 21:35:04 gezelter Exp $, $Date: 2004-05-07 21:35:04 $, $Name: not supported by cvs2svn $, $Revision: 1.12 $
11
12 module mpiSimulation
13 use definitions
14 #ifdef IS_MPI
15 use oopseMPI
16 implicit none
17 PRIVATE
18
19
20 !! PUBLIC Subroutines contained in this module
21 !! gather and scatter are a generic interface
22 !! to gather and scatter routines
23 public :: gather, scatter
24 public :: setupSimParallel
25 public :: replanSimParallel
26 public :: getNcol
27 public :: getNrow
28 public :: getNcolGroup
29 public :: getNrowGroup
30 public :: isMPISimSet
31 public :: printComponentPlan
32 public :: getMyNode
33
34 !! PUBLIC Subroutines contained in MPI module
35 public :: mpi_bcast
36 public :: mpi_allreduce
37 ! public :: mpi_reduce
38 public :: mpi_send
39 public :: mpi_recv
40 public :: mpi_get_processor_name
41 public :: mpi_finalize
42
43 !! PUBLIC mpi variables
44 public :: mpi_comm_world
45 public :: mpi_character
46 public :: mpi_integer
47 public :: mpi_double_precision
48 public :: mpi_sum
49 public :: mpi_max
50 public :: mpi_status_size
51 public :: mpi_any_source
52
53
54
55 !! Safety logical to prevent access to ComponetPlan until
56 !! set by C++.
57 logical, save :: ComponentPlanSet = .false.
58
59
60 !! generic mpi error declaration.
61 integer, public :: mpi_err
62
63 #ifdef PROFILE
64 public :: printCommTime
65 public :: getCommTime
66 real,save :: commTime = 0.0
67 real :: commTimeInitial,commTimeFinal
68 #endif
69
70 !! Include mpiComponentPlan type. mpiComponentPlan is a
71 !! dual header file for both c and fortran.
72 #define __FORTRAN90
73 #include "mpiComponentPlan.h"
74
75
76 !! Tags used during force loop for parallel simulation
77 integer, public, allocatable, dimension(:) :: tagLocal
78 integer, public, allocatable, dimension(:) :: tagRow
79 integer, public, allocatable, dimension(:) :: tagColumn
80
81 !! Logical set true if mpiSimulation has been initialized
82 logical, save :: isSimSet = .false.
83
84
85 type (mpiComponentPlan), save :: mpiSim
86
87 !! gs_plan contains plans for gather and scatter routines
88 type, public :: gs_plan
89 private
90 type (mpiComponentPlan), pointer :: gsComponentPlan => NULL()
91 integer :: gsPlanSize !! size of this plan (nDim*nComponents)
92 integer :: globalPlanSize !! size of all components in plan
93 integer, dimension(:), pointer :: displs !! Displacements array for mpi indexed from 0.
94 integer, dimension(:), pointer :: counts !! Counts array for mpi indexed from 0.
95 integer :: myPlanComm !! My communicator for this plan
96 integer :: myPlanRank !! My rank in this plan
97 integer :: planNprocs !! Number of processors in this plan
98 end type gs_plan
99
100 ! plans for different decompositions
101 type (gs_plan), public, save :: plan_row
102 type (gs_plan), public, save :: plan_row3d
103 type (gs_plan), public, save :: plan_col
104 type (gs_plan), public, save :: plan_col3d
105 type (gs_plan), public, save :: plan_row_Rotation
106 type (gs_plan), public, save :: plan_col_Rotation
107 type (gs_plan), public, save :: plan_row_Group
108 type (gs_plan), public, save :: plan_col_Group
109 type (gs_plan), public, save :: plan_row_Group_3d
110 type (gs_plan), public, save :: plan_col_Group_3d
111
112 type (mpiComponentPlan), pointer :: simComponentPlan
113
114 ! interface for gather scatter routines
115 !! Generic interface for gather.
116 !! Gathers an local array into row or column array
117 !! Interface provided for integer, double and double
118 !! rank 2 arrays.
119 interface gather
120 module procedure gather_integer
121 module procedure gather_double
122 module procedure gather_double_2d
123 end interface
124
125 !! Generic interface for scatter.
126 !! Scatters a row or column array, adding componets
127 !! and reducing them to a local nComponent array.
128 !! Interface provided for double and double rank=2 arrays.
129
130 interface scatter
131 module procedure scatter_double
132 module procedure scatter_double_2d
133 end interface
134
135
136
137 contains
138
139 !! Sets up mpiComponentPlan with structure passed from C++.
140 subroutine setupSimParallel(thisComponentPlan,ntags,tags,status)
141 ! Passed Arguments
142 ! integer, intent(inout) :: nDim !! Number of dimensions
143 !! mpiComponentPlan struct from C
144 type (mpiComponentPlan), intent(inout) :: thisComponentPlan
145 !! Number of tags passed, nlocal
146 integer, intent(in) :: ntags
147 !! Result status, 0 = normal, -1 = error
148 integer, intent(out) :: status
149 integer :: localStatus
150 !! Global reference tag for local particles
151 integer, dimension(ntags),intent(inout) :: tags
152
153 write(*,*) 'mpiSim_mod thinks node', thisComponentPlan%myNode, ' has tags(1) = ', tags(1)
154
155 status = 0
156 if (componentPlanSet) then
157 return
158 endif
159 componentPlanSet = .true.
160
161 !! copy c component plan to fortran
162 mpiSim = thisComponentPlan
163 write(*,*) "Seting up simParallel"
164
165 call make_Force_Grid(mpiSim,localStatus)
166 if (localStatus /= 0) then
167 write(default_error,*) "Error creating force grid"
168 status = -1
169 return
170 endif
171
172 call updateGridComponents(mpiSim,localStatus)
173 if (localStatus /= 0) then
174 write(default_error,*) "Error updating grid components"
175 status = -1
176 return
177 endif
178
179
180 !! initialize gather and scatter plans used in this simulation
181 call plan_gather_scatter(1,mpiSim%myNlocal,&
182 mpiSim,mpiSim%rowComm,plan_row)
183 call plan_gather_scatter(nDim,mpiSim%myNlocal,&
184 mpiSim,mpiSim%rowComm,plan_row3d)
185 call plan_gather_scatter(9,mpiSim%myNlocal,&
186 mpiSim,mpiSim%rowComm,plan_row_Rotation)
187 call plan_gather_scatter(1,mpiSim%myNgroup,&
188 mpiSim,mpiSim%rowComm,plan_row_Group)
189 call plan_gather_scatter(nDim,mpiSim%myNgroup,&
190 mpiSim,mpiSim%rowComm,plan_row_Group_3d)
191
192 call plan_gather_scatter(1,mpiSim%myNlocal,&
193 mpiSim,mpiSim%columnComm,plan_col)
194 call plan_gather_scatter(nDim,mpiSim%myNlocal,&
195 mpiSim,mpiSim%columnComm,plan_col3d)
196 call plan_gather_scatter(9,mpiSim%myNlocal,&
197 mpiSim,mpiSim%columnComm,plan_col_Rotation)
198 call plan_gather_scatter(1,mpiSim%myNgroup,&
199 mpiSim,mpiSim%columnComm,plan_col_Group)
200 call plan_gather_scatter(nDim,mpiSim%myNgroup,&
201 mpiSim,mpiSim%columnComm,plan_col_Group_3d)
202
203 ! Initialize tags
204 call setTags(tags,localStatus)
205 if (localStatus /= 0) then
206 status = -1
207 return
208 endif
209 isSimSet = .true.
210
211 ! call printComponentPlan(mpiSim,0)
212 end subroutine setupSimParallel
213
214 subroutine replanSimParallel(thisComponentPlan,status)
215 ! Passed Arguments
216 !! mpiComponentPlan struct from C
217 type (mpiComponentPlan), intent(inout) :: thisComponentPlan
218 integer, intent(out) :: status
219 integer :: localStatus
220 integer :: mpierror
221 status = 0
222
223 call updateGridComponents(thisComponentPlan,localStatus)
224 if (localStatus /= 0) then
225 status = -1
226 return
227 endif
228
229 !! Unplan Gather Scatter plans
230 call unplan_gather_scatter(plan_row)
231 call unplan_gather_scatter(plan_row3d)
232 call unplan_gather_scatter(plan_row_Rotation)
233 call unplan_gather_scatter(plan_row_Group)
234 call unplan_gather_scatter(plan_row_Group_3d)
235
236 call unplan_gather_scatter(plan_col)
237 call unplan_gather_scatter(plan_col3d)
238 call unplan_gather_scatter(plan_col_Rotation)
239 call unplan_gather_scatter(plan_col_Group)
240 call unplan_gather_scatter(plan_col_Group_3d)
241
242 !! initialize gather and scatter plans used in this simulation
243 call plan_gather_scatter(1,thisComponentPlan%myNlocal,&
244 thisComponentPlan,thisComponentPlan%rowComm,plan_row)
245 call plan_gather_scatter(nDim,thisComponentPlan%myNlocal,&
246 thisComponentPlan,thisComponentPlan%rowComm,plan_row3d)
247 call plan_gather_scatter(9,thisComponentPlan%myNlocal,&
248 thisComponentPlan,thisComponentPlan%rowComm,plan_row_Rotation)
249 call plan_gather_scatter(1,thisComponentPlan%myNgroup,&
250 thisComponentPlan,thisComponentPlan%rowComm,plan_row_Group)
251 call plan_gather_scatter(nDim,thisComponentPlan%myNgroup,&
252 thisComponentPlan,thisComponentPlan%rowComm,plan_row_Group_3d)
253
254 call plan_gather_scatter(1,thisComponentPlan%myNlocal,&
255 thisComponentPlan,thisComponentPlan%columnComm,plan_col)
256 call plan_gather_scatter(nDim,thisComponentPlan%myNlocal,&
257 thisComponentPlan,thisComponentPlan%columnComm,plan_col3d)
258 call plan_gather_scatter(9,thisComponentPlan%myNlocal,&
259 thisComponentPlan,thisComponentPlan%columnComm,plan_col_Rotation)
260 call plan_gather_scatter(1,thisComponentPlan%myNgroup,&
261 thisComponentPlan,thisComponentPlan%columnComm,plan_col_Group)
262 call plan_gather_scatter(nDim,thisComponentPlan%myNgroup,&
263 thisComponentPlan,thisComponentPlan%columnComm,plan_col_Group_3d)
264
265 end subroutine replanSimParallel
266
267 !! Updates number of row and column components for long range forces.
268 subroutine updateGridComponents(thisComponentPlan,status)
269 type (mpiComponentPlan) :: thisComponentPlan !! mpiComponentPlan
270
271 !! Status return
272 !! - 0 Success
273 !! - -1 Failure
274 integer, intent(out) :: status
275 integer :: nComponentsLocal
276 integer :: nComponentsRow = 0
277 integer :: nComponentsColumn = 0
278 integer :: mpiErrors
279
280 status = 0
281 if (.not. componentPlanSet) return
282
283 if (thisComponentPlan%myNlocal == 0 ) then
284 status = -1
285 return
286 endif
287
288 nComponentsLocal = thisComponentPlan%myNlocal
289
290 write(*,*) "UpdateGridComponents: myNlocal ", nComponentsLocal
291 call mpi_allreduce(nComponentsLocal,nComponentsRow,1,mpi_integer,&
292 mpi_sum,thisComponentPlan%rowComm,mpiErrors)
293 if (mpiErrors /= 0) then
294 status = -1
295 return
296 endif
297
298 call mpi_allreduce(nComponentsLocal,nComponentsColumn,1,mpi_integer, &
299 mpi_sum,thisComponentPlan%columnComm,mpiErrors)
300 if (mpiErrors /= 0) then
301 status = -1
302 return
303 endif
304
305 thisComponentPlan%nComponentsRow = nComponentsRow
306 thisComponentPlan%nComponentsColumn = nComponentsColumn
307 write(*,*) "UpdateGridComponents: myNRow ",&
308 thisComponentPlan%nComponentsRow
309 write(*,*) "UpdateGridComponents: myNColumn ",&
310 thisComponentPlan%nComponentsColumn
311
312 end subroutine updateGridComponents
313
314
315 !! Creates a square force decomposition of processors into row and column
316 !! communicators.
317 subroutine make_Force_Grid(thisComponentPlan,status)
318 type (mpiComponentPlan) :: thisComponentPlan
319 integer, intent(out) :: status !! status returns -1 if error
320 integer :: nColumnsMax !! Maximum number of columns
321 integer :: nWorldProcessors !! Total number of processors in World comm.
322 integer :: rowIndex !! Row for this processor.
323 integer :: columnIndex !! Column for this processor.
324 integer :: nRows !! Total number of rows.
325 integer :: nColumns !! Total number of columns.
326 integer :: mpiErrors !! MPI errors.
327 integer :: rowCommunicator !! MPI row communicator.
328 integer :: columnCommunicator
329 integer :: myWorldRank
330 integer :: i
331
332
333 if (.not. ComponentPlanSet) return
334 status = 0
335
336 !! We make a dangerous assumption here that if numberProcessors is
337 !! zero, then we need to get the information from MPI.
338 if (thisComponentPlan%numberProcessors == 0 ) then
339 call mpi_comm_size( MPI_COMM_WORLD, nWorldProcessors,mpiErrors)
340 if ( mpiErrors /= 0 ) then
341 status = -1
342 return
343 endif
344 call mpi_comm_rank( MPI_COMM_WORLD,myWorldRank,mpiErrors)
345 if ( mpiErrors /= 0 ) then
346 status = -1
347 return
348 endif
349
350 else
351 nWorldProcessors = thisComponentPlan%numberProcessors
352 myWorldRank = thisComponentPlan%myNode
353 endif
354
355
356
357
358 nColumnsMax = nint(sqrt(real(nWorldProcessors,kind=dp)))
359
360 do i = 1, nColumnsMax
361 if (mod(nWorldProcessors,i) == 0) nColumns = i
362 end do
363
364 nRows = nWorldProcessors/nColumns
365
366 rowIndex = myWorldRank/nColumns
367
368
369
370 call mpi_comm_split(mpi_comm_world,rowIndex,0,rowCommunicator,mpiErrors)
371 if ( mpiErrors /= 0 ) then
372 write(default_error,*) "MPI comm split failed at row communicator"
373 status = -1
374 return
375 endif
376
377 columnIndex = mod(myWorldRank,nColumns)
378 call mpi_comm_split(mpi_comm_world,columnIndex,0,columnCommunicator,mpiErrors)
379 if ( mpiErrors /= 0 ) then
380 write(default_error,*) "MPI comm split faild at columnCommunicator"
381 status = -1
382 return
383 endif
384
385 ! Set appropriate components of thisComponentPlan
386 thisComponentPlan%rowComm = rowCommunicator
387 thisComponentPlan%columnComm = columnCommunicator
388 thisComponentPlan%rowIndex = rowIndex
389 thisComponentPlan%columnIndex = columnIndex
390 thisComponentPlan%numberRows = nRows
391 thisComponentPlan%numberColumns = nColumns
392
393
394 end subroutine make_Force_Grid
395
396
397 !! initalizes a gather scatter plan
398 subroutine plan_gather_scatter( nDim,nComponents,thisComponentPlan, &
399 thisComm, this_plan,status)
400 integer, intent(in) :: nDim !! Number of dimensions for gather scatter plan
401 integer, intent(in) :: nComponents
402 type (mpiComponentPlan), intent(in), target :: thisComponentPlan
403 type (gs_plan), intent(out) :: this_plan !! MPI Component Plan
404 integer, intent(in) :: thisComm !! MPI communicator for this plan
405
406 integer :: arraySize !! size to allocate plan for
407 integer, intent(out), optional :: status
408 integer :: ierror
409 integer :: i,junk
410
411 if (present(status)) status = 0
412
413
414
415 !! Set gsComponetPlan pointer
416 !! to the componet plan we want to use for this gather scatter plan.
417 !! WARNING this could be dangerous since thisComponentPlan was origionally
418 !! allocated in C++ and there is a significant difference between c and
419 !! f95 pointers....
420 this_plan%gsComponentPlan => thisComponentPlan
421
422 ! Set this plan size for displs array.
423 this_plan%gsPlanSize = nDim * nComponents
424
425 ! Duplicate communicator for this plan
426 call mpi_comm_dup(thisComm,this_plan%myPlanComm,mpi_err)
427 if (mpi_err /= 0) then
428 if (present(status)) status = -1
429 return
430 end if
431 call mpi_comm_rank(this_plan%myPlanComm,this_plan%myPlanRank,mpi_err)
432 if (mpi_err /= 0) then
433 if (present(status)) status = -1
434 return
435 end if
436
437 call mpi_comm_size(this_plan%myPlanComm,this_plan%planNprocs,mpi_err)
438
439 if (mpi_err /= 0) then
440 if (present(status)) status = -1
441 return
442 end if
443
444 !! counts and displacements arrays are indexed from 0 to be compatable
445 !! with MPI arrays.
446 allocate (this_plan%counts(0:this_plan%planNprocs-1),STAT=ierror)
447 if (ierror /= 0) then
448 if (present(status)) status = -1
449 return
450 end if
451
452 allocate (this_plan%displs(0:this_plan%planNprocs-1),STAT=ierror)
453 if (ierror /= 0) then
454 if (present(status)) status = -1
455 return
456 end if
457
458 !! gather all the local sizes into a size # processors array.
459 call mpi_allgather(this_plan%gsPlanSize,1,mpi_integer,this_plan%counts, &
460 1,mpi_integer,thisComm,mpi_err)
461
462 if (mpi_err /= 0) then
463 if (present(status)) status = -1
464 return
465 end if
466
467
468 !! figure out the total number of particles in this plan
469 this_plan%globalPlanSize = sum(this_plan%counts)
470
471
472 !! initialize plan displacements.
473 this_plan%displs(0) = 0
474 do i = 1, this_plan%planNprocs - 1,1
475 this_plan%displs(i) = this_plan%displs(i-1) + this_plan%counts(i-1)
476 end do
477
478
479 end subroutine plan_gather_scatter
480
481
482 subroutine unplan_gather_scatter(this_plan)
483 type (gs_plan), intent(inout) :: this_plan
484
485
486 this_plan%gsComponentPlan => null()
487 call mpi_comm_free(this_plan%myPlanComm,mpi_err)
488
489 deallocate(this_plan%counts)
490 deallocate(this_plan%displs)
491
492 end subroutine unplan_gather_scatter
493
494 subroutine gather_integer( sbuffer, rbuffer, this_plan, status)
495
496 type (gs_plan), intent(inout) :: this_plan
497 integer, dimension(:), intent(inout) :: sbuffer
498 integer, dimension(:), intent(inout) :: rbuffer
499 integer :: noffset
500 integer, intent(out), optional :: status
501 integer :: i
502
503
504
505 if (present(status)) status = 0
506 noffset = this_plan%displs(this_plan%myPlanRank)
507
508 ! if (getmyNode() == 1) then
509 ! write(*,*) "Node 0 printing allgatherv vars"
510 ! write(*,*) "Noffset: ", noffset
511 ! write(*,*) "PlanSize: ", this_plan%gsPlanSize
512 ! write(*,*) "PlanComm: ", this_plan%myPlanComm
513 ! end if
514
515 call mpi_allgatherv(sbuffer,this_plan%gsPlanSize, mpi_integer, &
516 rbuffer,this_plan%counts,this_plan%displs,mpi_integer, &
517 this_plan%myPlanComm, mpi_err)
518
519 if (mpi_err /= 0) then
520 if (present(status)) status = -1
521 endif
522
523 end subroutine gather_integer
524
525 subroutine gather_double( sbuffer, rbuffer, this_plan, status)
526
527 type (gs_plan), intent(in) :: this_plan
528 real( kind = DP ), dimension(:), intent(inout) :: sbuffer
529 real( kind = DP ), dimension(:), intent(inout) :: rbuffer
530 integer :: noffset
531 integer, intent(out), optional :: status
532
533
534 if (present(status)) status = 0
535 noffset = this_plan%displs(this_plan%myPlanRank)
536 #ifdef PROFILE
537 call cpu_time(commTimeInitial)
538 #endif
539 call mpi_allgatherv(sbuffer,this_plan%gsPlanSize, mpi_double_precision, &
540 rbuffer,this_plan%counts,this_plan%displs,mpi_double_precision, &
541 this_plan%myPlanComm, mpi_err)
542 #ifdef PROFILE
543 call cpu_time(commTimeFinal)
544 commTime = commTime + commTimeFinal - commTimeInitial
545 #endif
546
547 if (mpi_err /= 0) then
548 if (present(status)) status = -1
549 endif
550
551 end subroutine gather_double
552
553 subroutine gather_double_2d( sbuffer, rbuffer, this_plan, status)
554
555 type (gs_plan), intent(in) :: this_plan
556 real( kind = DP ), dimension(:,:), intent(inout) :: sbuffer
557 real( kind = DP ), dimension(:,:), intent(inout) :: rbuffer
558 integer :: noffset,i,ierror
559 integer, intent(out), optional :: status
560
561 external mpi_allgatherv
562
563 if (present(status)) status = 0
564
565 ! noffset = this_plan%displs(this_plan%me)
566 #ifdef PROFILE
567 call cpu_time(commTimeInitial)
568 #endif
569
570 call mpi_allgatherv(sbuffer,this_plan%gsPlanSize, mpi_double_precision, &
571 rbuffer,this_plan%counts,this_plan%displs,mpi_double_precision, &
572 this_plan%myPlanComm, mpi_err)
573
574 #ifdef PROFILE
575 call cpu_time(commTimeFinal)
576 commTime = commTime + commTimeFinal - commTimeInitial
577 #endif
578
579 if (mpi_err /= 0) then
580 if (present(status)) status = -1
581 endif
582
583 end subroutine gather_double_2d
584
585 subroutine scatter_double( sbuffer, rbuffer, this_plan, status)
586
587 type (gs_plan), intent(in) :: this_plan
588 real( kind = DP ), dimension(:), intent(inout) :: sbuffer
589 real( kind = DP ), dimension(:), intent(inout) :: rbuffer
590 integer, intent(out), optional :: status
591 external mpi_reduce_scatter
592
593 if (present(status)) status = 0
594
595 #ifdef PROFILE
596 call cpu_time(commTimeInitial)
597 #endif
598 call mpi_reduce_scatter(sbuffer,rbuffer, this_plan%counts, &
599 mpi_double_precision, MPI_SUM, this_plan%myPlanComm, mpi_err)
600 #ifdef PROFILE
601 call cpu_time(commTimeFinal)
602 commTime = commTime + commTimeFinal - commTimeInitial
603 #endif
604
605 if (mpi_err /= 0) then
606 if (present(status)) status = -1
607 endif
608
609 end subroutine scatter_double
610
611 subroutine scatter_double_2d( sbuffer, rbuffer, this_plan, status)
612
613 type (gs_plan), intent(in) :: this_plan
614 real( kind = DP ), dimension(:,:), intent(inout) :: sbuffer
615 real( kind = DP ), dimension(:,:), intent(inout) :: rbuffer
616 integer, intent(out), optional :: status
617 external mpi_reduce_scatter
618
619 if (present(status)) status = 0
620 #ifdef PROFILE
621 call cpu_time(commTimeInitial)
622 #endif
623
624 call mpi_reduce_scatter(sbuffer,rbuffer, this_plan%counts, &
625 mpi_double_precision, MPI_SUM, this_plan%myPlanComm, mpi_err)
626 #ifdef PROFILE
627 call cpu_time(commTimeFinal)
628 commTime = commTime + commTimeFinal - commTimeInitial
629 #endif
630
631 if (mpi_err /= 0) then
632 if (present(status)) status = -1
633 endif
634
635 end subroutine scatter_double_2d
636
637
638 subroutine setTags(tags,status)
639 integer, dimension(:) :: tags
640 integer :: status
641
642 integer :: alloc_stat
643
644 integer :: ncol
645 integer :: nrow
646
647 status = 0
648 ! allocate row arrays
649 nrow = getNrow(plan_row)
650 ncol = getNcol(plan_col)
651
652 if(.not. allocated(tagLocal)) then
653 allocate(tagLocal(size(tags)),STAT=alloc_stat)
654 if (alloc_stat /= 0 ) then
655 status = -1
656 return
657 endif
658 else
659 deallocate(tagLocal)
660 allocate(tagLocal(size(tags)),STAT=alloc_stat)
661 if (alloc_stat /= 0 ) then
662 status = -1
663 return
664 endif
665
666 endif
667
668 tagLocal = tags
669
670
671 if (.not. allocated(tagRow)) then
672 allocate(tagRow(nrow),STAT=alloc_stat)
673 if (alloc_stat /= 0 ) then
674 status = -1
675 return
676 endif
677 else
678 deallocate(tagRow)
679 allocate(tagRow(nrow),STAT=alloc_stat)
680 if (alloc_stat /= 0 ) then
681 status = -1
682 return
683 endif
684
685 endif
686 ! allocate column arrays
687 if (.not. allocated(tagColumn)) then
688 allocate(tagColumn(ncol),STAT=alloc_stat)
689 if (alloc_stat /= 0 ) then
690 status = -1
691 return
692 endif
693 else
694 deallocate(tagColumn)
695 allocate(tagColumn(ncol),STAT=alloc_stat)
696 if (alloc_stat /= 0 ) then
697 status = -1
698 return
699 endif
700 endif
701
702 call gather(tags,tagRow,plan_row)
703 call gather(tags,tagColumn,plan_col)
704
705
706 end subroutine setTags
707
708 ! pure function getNcol(thisplan) result(ncol)
709 function getNcol(thisplan) result(ncol)
710 type (gs_plan), intent(in) :: thisplan
711 integer :: ncol
712 ncol = thisplan%gsComponentPlan%nComponentsColumn
713 end function getNcol
714
715 ! pure function getNrow(thisplan) result(nrow)
716 function getNrow(thisplan) result(nrow)
717 type (gs_plan), intent(in) :: thisplan
718 integer :: nrow
719 nrow = thisplan%gsComponentPlan%nComponentsRow
720 end function getNrow
721
722 function getNcolGroup(thisplan) result(ncol_group)
723 type (gs_plan), intent(in) :: thisplan
724 integer :: ncol_group
725 ncol_group = thisplan%gsComponentPlan%nGroupColumn
726 end function getNcolGroup
727
728 function getNrowGroup(thisplan) result(nrow_group)
729 type (gs_plan), intent(in) :: thisplan
730 integer :: nrow_group
731 nrow_group = thisplan%gsComponentPlan%nGroupRow
732 end function getNrowGroup
733
734 function isMPISimSet() result(isthisSimSet)
735 logical :: isthisSimSet
736 if (isSimSet) then
737 isthisSimSet = .true.
738 else
739 isthisSimSet = .false.
740 endif
741 end function isMPISimSet
742
743
744
745 subroutine printComponentPlan(this_plan,printNode)
746
747 type (mpiComponentPlan), intent(in) :: this_plan
748 integer, optional :: printNode
749 logical :: print_me = .false.
750
751 if (present(printNode)) then
752 if (printNode == mpiSim%myNode) print_me = .true.
753 else
754 print_me = .true.
755 endif
756
757 if (print_me) then
758 write(default_error,*) "SetupSimParallel: writing component plan"
759
760 write(default_error,*) "nMolGlobal: ", mpiSim%nMolGlobal
761 write(default_error,*) "nAtomsGlobal: ", mpiSim%nAtomsGlobal
762 write(default_error,*) "nBondGlobal: ", mpiSim%nBondsGlobal
763 write(default_error,*) "nTorsionsGlobal: ", mpiSim%nTorsionsGlobal
764 write(default_error,*) "nSRIGlobal: ", mpiSim%nSRIGlobal
765 write(default_error,*) "myNlocal: ", mpiSim%myNlocal
766 write(default_error,*) "myNode: ", mpiSim%myNode
767 write(default_error,*) "numberProcessors: ", mpiSim%numberProcessors
768 write(default_error,*) "rowComm: ", mpiSim%rowComm
769 write(default_error,*) "columnComm: ", mpiSim%columnComm
770 write(default_error,*) "numberRows: ", mpiSim%numberRows
771 write(default_error,*) "numberColumns: ", mpiSim%numberColumns
772 write(default_error,*) "nComponentsRow: ", mpiSim%nComponentsRow
773 write(default_error,*) "nComponentsColumn: ", mpiSim%nComponentsColumn
774 write(default_error,*) "rowIndex: ", mpiSim%rowIndex
775 write(default_error,*) "columnIndex: ", mpiSim%columnIndex
776 endif
777 end subroutine printComponentPlan
778
779 function getMyNode() result(myNode)
780 integer :: myNode
781 myNode = mpiSim%myNode
782 end function getMyNode
783
784 #ifdef PROFILE
785 subroutine printCommTime()
786 write(*,*) "MPI communication time is: ", commTime
787 end subroutine printCommTime
788
789 function getCommTime() result(comm_time)
790 real :: comm_time
791 comm_time = commTime
792 end function getCommTime
793
794 #endif
795
796 #endif // is_mpi
797 end module mpiSimulation
798
799