 47    !!
 48    !! @author Charles F. Vardeman II
 49    !! @author Matthew Meineke
 50 <  !! @version $Id: simParallel.F90,v 1.3 2005-01-14 20:31:16 gezelter Exp $, $Date: 2005-01-14 20:31:16 $, $Name: not supported by cvs2svn $, $Revision: 1.3 $
 50 >  !! @version $Id: simParallel.F90,v 1.4 2005-04-15 22:03:48 gezelter Exp $, $Date: 2005-04-15 22:03:48 $, $Name: not supported by cvs2svn $, $Revision: 1.4 $
 51
 52    module mpiSimulation
 53    use definitions
 57    PRIVATE
 58
 59
 60 <  !! PUBLIC Subroutines contained in this module
 61 <  !! gather and scatter are a generic interface
 62 <  !! to gather and scatter routines
 60 >  !! PUBLIC Subroutines contained in this module
 61 >  !! gather and scatter are a generic interface
 62 >  !! to gather and scatter routines
 63    public :: gather, scatter
 64    public :: setupSimParallel
 65    public :: replanSimParallel
 71    public :: printComponentPlan
 72    public :: getMyNode
 73
 74 <  !! PUBLIC Subroutines contained in MPI module
 74 >  !! PUBLIC Subroutines contained in MPI module
 75    public :: mpi_bcast
 76    public :: mpi_allreduce
 77 <  ! public :: mpi_reduce
 77 >  ! public :: mpi_reduce
 78    public :: mpi_send
 79    public :: mpi_recv
 80    public :: mpi_get_processor_name
 81    public :: mpi_finalize
 82
 83 <  !! PUBLIC mpi variables
 83 >  !! PUBLIC mpi variables
 84    public :: mpi_comm_world
 85    public :: mpi_character
 86    public :: mpi_integer
 92
 93
 94
 95 <  !! Safety logical to prevent access to ComponentPlan until
 96 <  !! set by C++.
 95 >  !! Safety logical to prevent access to ComponentPlan until
 96 >  !! set by C++.
 97    logical, save :: ComponentPlanSet = .false.
 98
 99
100 <  !! generic mpi error declaration.
100 >  !! generic mpi error declaration.
101    integer, public :: mpi_err
102
103    #ifdef PROFILE
107    real :: commTimeInitial,commTimeFinal
108    #endif
109
110 <  !! Include mpiComponentPlan type. mpiComponentPlan is a
111 <  !! dual header file for both c and fortran.
110 >  !! Include mpiComponentPlan type. mpiComponentPlan is a
111 >  !! dual header file for both c and fortran.
112    #define __FORTRAN90
113    #include "UseTheForce/mpiComponentPlan.h"
114
115
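These two preprocessor lines are the whole "dual header" trick the comment describes: mpiComponentPlan.h declares the same plan structure for both languages, and defining __FORTRAN90 before the #include selects the Fortran branch. A minimal sketch of how such a header can be laid out, assuming a simple #ifdef switch (the member list is illustrative, not the actual contents of UseTheForce/mpiComponentPlan.h; sequence keeps the Fortran layout compatible with the C struct):

  #ifdef __FORTRAN90
    type, public :: mpiComponentPlan
      sequence
      integer :: nProcessors, myNode
      integer :: rowComm, columnComm, nRows, nColumns
      integer :: nAtomsGlobal, nAtomsLocal
    end type mpiComponentPlan
  #else
    typedef struct {
      int nProcessors, myNode;
      int rowComm, columnComm, nRows, nColumns;
      int nAtomsGlobal, nAtomsLocal;
    } mpiComponentPlan;
  #endif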
116 <  !! Tags used during force loop for parallel simulation
116 >  !! Tags used during force loop for parallel simulation
117    integer, public, allocatable, dimension(:) :: AtomLocalToGlobal
118    integer, public, allocatable, dimension(:) :: AtomRowToGlobal
119    integer, public, allocatable, dimension(:) :: AtomColToGlobal
121    integer, public, allocatable, dimension(:) :: GroupRowToGlobal
122    integer, public, allocatable, dimension(:) :: GroupColToGlobal
123
124 <  !! Logical set true if mpiSimulation has been initialized
124 >  !! Logical set true if mpiSimulation has been initialized
125    logical, save :: isSimSet = .false.
126
127
128    type (mpiComponentPlan), save :: mpiSim
129
130 <  !! gs_plan contains plans for gather and scatter routines
130 >  !! gs_plan contains plans for gather and scatter routines
131    type, public :: gs_plan
132    private
133    type (mpiComponentPlan), pointer :: gsComponentPlan => NULL()
140    integer :: planNprocs  !! Number of processors in this plan
141    end type gs_plan
142
143 <  ! plans for different decompositions
143 >  ! plans for different decompositions
144    type (gs_plan), public, save :: plan_atom_row
145    type (gs_plan), public, save :: plan_atom_row_3d
146    type (gs_plan), public, save :: plan_atom_col
154
155    type (mpiComponentPlan), pointer :: simComponentPlan
156
157 <  ! interface for gather scatter routines
158 <  !! Generic interface for gather.
159 <  !! Gathers a local array into a row or column array.
160 <  !! Interface provided for integer, double and double
161 <  !! rank 2 arrays.
157 >  ! interface for gather scatter routines
158 >  !! Generic interface for gather.
159 >  !! Gathers a local array into a row or column array.
160 >  !! Interface provided for integer, double and double
161 >  !! rank 2 arrays.
162    interface gather
163    module procedure gather_integer
164    module procedure gather_double
165    module procedure gather_double_2d
166    end interface
167
168 <  !! Generic interface for scatter.
169 <  !! Scatters a row or column array, adding components
170 <  !! and reducing them to a local nComponent array.
171 <  !! Interface provided for double and double rank=2 arrays.
168 >  !! Generic interface for scatter.
169 >  !! Scatters a row or column array, adding components
170 >  !! and reducing them to a local nComponent array.
171 >  !! Interface provided for double and double rank=2 arrays.
172
173    interface scatter
174    module procedure scatter_double
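Together, the saved plans and these two generic interfaces give the force loop a compact calling convention: gather replicates a local array along a row or column of the processor grid, and scatter sums the row or column contributions back down to a local array. A hedged usage sketch (q, q_Row, f_Row, and f are illustrative names, not variables of this module):

  ! replicate local positions across this processor's row
  call gather(q, q_Row, plan_atom_row_3d)
  ! ... accumulate pair forces into f_Row ...
  ! reduce the row contributions back into the local force array
  call scatter(f_Row, f, plan_atom_row_3d)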
196
197    !write(*,*) 'mpiSim_mod thinks node', thisComponentPlan%myNode, &
198    !     ' has atomTags(1) = ', atomTags(1)
199 <
199 >
200    status = 0
201    if (componentPlanSet) then
202    return
203    endif
204    componentPlanSet = .true.
205 <
205 >
206    !! copy c component plan to fortran
207    mpiSim = thisComponentPlan
208    !write(*,*) "Setting up simParallel"
209 <
209 >
210    call make_Force_Grid(mpiSim, localStatus)
211    if (localStatus /= 0) then
212    write(default_error,*) "Error creating force grid"
213    status = -1
214    return
215    endif
216 <
216 >
217    call updateGridComponents(mpiSim, localStatus)
218    if (localStatus /= 0) then
219    write(default_error,*) "Error updating grid components"
232         mpiSim, mpiSim%rowComm, plan_group_row)
233    call plan_gather_scatter(nDim, mpiSim%nGroupsLocal, &
234         mpiSim, mpiSim%rowComm, plan_group_row_3d)
235 <
235 >
236    call plan_gather_scatter(1, mpiSim%nAtomsLocal, &
237         mpiSim, mpiSim%columnComm, plan_atom_col)
238    call plan_gather_scatter(nDim, mpiSim%nAtomsLocal, &
243         mpiSim, mpiSim%columnComm, plan_group_col)
244    call plan_gather_scatter(nDim, mpiSim%nGroupsLocal, &
245         mpiSim, mpiSim%columnComm, plan_group_col_3d)
246 -
247 -  ! Initialize tags
246
247 +  ! Initialize tags
248 +
249    call setAtomTags(atomTags,localStatus)
250    if (localStatus /= 0) then
251    status = -1
261
262    isSimSet = .true.
263
264 <  ! call printComponentPlan(mpiSim,0)
264 >  ! call printComponentPlan(mpiSim,0)
265    end subroutine setupSimParallel
266
267    subroutine replanSimParallel(thisComponentPlan,status)
268 <  ! Passed Arguments
268 >  ! Passed Arguments
269    !! mpiComponentPlan struct from C
270    type (mpiComponentPlan), intent(inout) :: thisComponentPlan
271    integer, intent(out) :: status
278    status = -1
279    return
280    endif
281 <
281 >
282    !! Unplan Gather Scatter plans
283    call unplan_gather_scatter(plan_atom_row)
284    call unplan_gather_scatter(plan_atom_row_3d)
303         mpiSim, mpiSim%rowComm, plan_group_row)
304    call plan_gather_scatter(nDim, mpiSim%nGroupsLocal, &
305         mpiSim, mpiSim%rowComm, plan_group_row_3d)
306 <
306 >
307    call plan_gather_scatter(1, mpiSim%nAtomsLocal, &
308         mpiSim, mpiSim%columnComm, plan_atom_col)
309    call plan_gather_scatter(nDim, mpiSim%nAtomsLocal, &
314         mpiSim, mpiSim%columnComm, plan_group_col)
315    call plan_gather_scatter(nDim, mpiSim%nGroupsLocal, &
316         mpiSim, mpiSim%columnComm, plan_group_col_3d)
317 <
317 >
318    end subroutine replanSimParallel
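replanSimParallel tears every plan down before rebuilding it, and the pairing matters: plan_gather_scatter duplicates its communicator with mpi_comm_dup (line 490 below), so each unplan_gather_scatter must release that duplicate with mpi_comm_free (line 545 below) or every replan would leak one communicator per plan. The cycle in outline:

  call unplan_gather_scatter(plan_atom_row)       ! frees the plan's duplicated communicator
  call plan_gather_scatter(1, mpiSim%nAtomsLocal, &
       mpiSim, mpiSim%rowComm, plan_atom_row)     ! duplicates a fresh one, rebuilds counts/displs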
319
320    !! Updates number of row and column components for long range forces.
321    subroutine updateGridComponents(thisComponentPlan, status)
322    type (mpiComponentPlan) :: thisComponentPlan  !! mpiComponentPlan
323 <
323 >
324    !! Status return
325    !!  -  0 Success
326    !!  - -1 Failure
339    if (thisComponentPlan%nAtomsLocal == 0) then
340    status = -1
341    return
342 <  endif
342 >  endif
343    if (thisComponentPlan%nGroupsLocal == 0) then
344    write(*,*) 'tcp%ngl = ', thisComponentPlan%nGroupsLocal
345    status = -1
346    return
347    endif
348 <
348 >
349    nAtomsLocal = thisComponentPlan%nAtomsLocal
350    nGroupsLocal = thisComponentPlan%nGroupsLocal
351
362    status = -1
363    return
364    endif
365 <
365 >
366    call mpi_allreduce(nGroupsLocal, nGroupsInRow, 1, mpi_integer, &
367         mpi_sum, thisComponentPlan%rowComm, mpiErrors)
368    if (mpiErrors /= 0) then
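The allreduce is how every processor learns the size of its own row: each rank contributes its nGroupsLocal, and the mpi_sum reduction over rowComm leaves the row-wide total in nGroupsInRow on every member of that row. The elided lines just above presumably do the same for the atom counts; a hedged reconstruction:

  call mpi_allreduce(nAtomsLocal, nAtomsInRow, 1, mpi_integer, &
       mpi_sum, thisComponentPlan%rowComm, mpiErrors)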
401    integer :: columnCommunicator
402    integer :: myWorldRank
403    integer :: i
404 -
404
405 +
406    if (.not. ComponentPlanSet) return
407    status = 0
408 <
408 >
409    !! We make a dangerous assumption here that if numberProcessors is
410    !! zero, then we need to get the information from MPI.
411    if (thisComponentPlan%nProcessors == 0 ) then
419    status = -1
420    return
421    endif
422 <
422 >
423    else
424    nWorldProcessors = thisComponentPlan%nProcessors
425    myWorldRank = thisComponentPlan%myNode
426    endif
427 <
427 >
428    nColumnsMax = nint(sqrt(real(nWorldProcessors,kind=dp)))
429 <
429 >
430    do i = 1, nColumnsMax
431    if (mod(nWorldProcessors,i) == 0) nColumns = i
432    end do
433 <
433 >
434    nRows = nWorldProcessors/nColumns
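Lines 428-434 pick the shape of the force grid: the loop leaves nColumns as the largest divisor of nWorldProcessors that does not exceed nint(sqrt(nWorldProcessors)), so nRows * nColumns reproduces the processor count exactly and the grid is as close to square as the factorization allows. A self-contained check of that logic (a standalone sketch; the module's real(...,kind=dp) is simplified to a default real here):

  program gridShape
    implicit none
    integer :: nWorldProcessors, nColumnsMax, nColumns, nRows, i
    nWorldProcessors = 12
    nColumnsMax = nint(sqrt(real(nWorldProcessors)))
    nColumns = 1
    do i = 1, nColumnsMax
       if (mod(nWorldProcessors, i) == 0) nColumns = i
    end do
    nRows = nWorldProcessors/nColumns
    write(*,*) nRows, nColumns   ! 12 processors -> 4 rows x 3 columns
  end program gridShape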
435 <
435 >
436    rowIndex = myWorldRank/nColumns
437 <
437 >
438    call mpi_comm_split(mpi_comm_world,rowIndex,0,rowCommunicator,mpiErrors)
439    if ( mpiErrors /= 0 ) then
440    write(default_error,*) "MPI comm split failed at row communicator"
441    status = -1
442    return
443    endif
444 <
444 >
445    columnIndex = mod(myWorldRank,nColumns)
446    call mpi_comm_split(mpi_comm_world,columnIndex,0,columnCommunicator,mpiErrors)
447    if ( mpiErrors /= 0 ) then
449    status = -1
450    return
451    endif
452 <
452 >
453    ! Set appropriate components of thisComponentPlan
454    thisComponentPlan%rowComm = rowCommunicator
455    thisComponentPlan%columnComm = columnCommunicator
459    thisComponentPlan%nColumns = nColumns
460
461    end subroutine make_Force_Grid
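World ranks are laid onto that grid row-major: rowIndex = rank/nColumns and columnIndex = mod(rank, nColumns). Each mpi_comm_split call uses one of those indices as its color, so all ranks sharing a row (or a column) land in the same sub-communicator. Worked out for six processors on the resulting 2 x 3 grid:

  world rank:    0  1  2  3  4  5
  rowIndex:      0  0  0  1  1  1   (color for rowComm    -> row groups {0,1,2}, {3,4,5})
  columnIndex:   0  1  2  0  1  2   (color for columnComm -> column groups {0,3}, {1,4}, {2,5})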
462 <
462 >
463    !! initializes a gather scatter plan
464    subroutine plan_gather_scatter( nDim, nObjects, thisComponentPlan, &
465         thisComm, this_plan, status)
476
477    if (present(status)) status = 0
478
479 <  !! Set gsComponentPlan pointer
480 <  !! to the component plan we want to use for this gather scatter plan.
481 <  !! WARNING this could be dangerous since thisComponentPlan was originally
482 <  !! allocated in C++ and there is a significant difference between c and
483 <  !! f95 pointers....
479 >  !! Set gsComponentPlan pointer
480 >  !! to the component plan we want to use for this gather scatter plan.
481 >  !! WARNING this could be dangerous since thisComponentPlan was originally
482 >  !! allocated in C++ and there is a significant difference between c and
483 >  !! f95 pointers....
484    this_plan%gsComponentPlan => thisComponentPlan
485
486 <  ! Set this plan size for displs array.
486 >  ! Set this plan size for displs array.
487    this_plan%gsPlanSize = nDim * nObjects
488
489 <  ! Duplicate communicator for this plan
489 >  ! Duplicate communicator for this plan
490    call mpi_comm_dup(thisComm, this_plan%myPlanComm, mpi_err)
491    if (mpi_err /= 0) then
492    if (present(status)) status = -1
519    return
520    end if
521
522 <  !! gather all the local sizes into a size # processors array.
522 >  !! gather all the local sizes into a size # processors array.
523    call mpi_allgather(this_plan%gsPlanSize,1,mpi_integer,this_plan%counts, &
524         1,mpi_integer,thisComm,mpi_err)
525
527    if (present(status)) status = -1
528    return
529    end if
530 <
530 >
531    !! figure out the total number of particles in this plan
532    this_plan%globalPlanSize = sum(this_plan%counts)
533 <
533 >
534    !! initialize plan displacements.
535    this_plan%displs(0) = 0
536    do i = 1, this_plan%planNprocs - 1,1
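displs is the exclusive prefix sum of counts: entry p marks where rank p's block begins in the assembled row or column array. The loop body is elided from this hunk, but given displs(0) = 0 it is presumably the usual recurrence:

  this_plan%displs(i) = this_plan%displs(i-1) + this_plan%counts(i-1)

For counts = (/6, 9, 3/), that yields displs = (/0, 6, 15/) and globalPlanSize = 18.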
540
541    subroutine unplan_gather_scatter(this_plan)
542    type (gs_plan), intent(inout) :: this_plan
543 <
543 >
544    this_plan%gsComponentPlan => null()
545    call mpi_comm_free(this_plan%myPlanComm,mpi_err)
546
561    if (present(status)) status = 0
562    noffset = this_plan%displs(this_plan%myPlanRank)
563
564 <  ! if (getmyNode() == 1) then
565 <  ! write(*,*) "Node 0 printing allgatherv vars"
566 <  ! write(*,*) "Noffset: ", noffset
567 <  ! write(*,*) "PlanSize: ", this_plan%gsPlanSize
568 <  ! write(*,*) "PlanComm: ", this_plan%myPlanComm
569 <  ! end if
564 >  ! if (getmyNode() == 1) then
565 >  ! write(*,*) "Node 0 printing allgatherv vars"
566 >  ! write(*,*) "Noffset: ", noffset
567 >  ! write(*,*) "PlanSize: ", this_plan%gsPlanSize
568 >  ! write(*,*) "PlanComm: ", this_plan%myPlanComm
569 >  ! end if
570
571    call mpi_allgatherv(sbuffer,this_plan%gsPlanSize, mpi_integer, &
572         rbuffer,this_plan%counts,this_plan%displs,mpi_integer, &
573         this_plan%myPlanComm, mpi_err)
574
575    if (mpi_err /= 0) then
576 <  if (present(status)) status = -1
576 >  if (present(status)) status = -1
577    endif
578
579    end subroutine gather_integer
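gather_integer shows why plan_gather_scatter cached counts and displs: mpi_allgatherv lets every rank contribute a different number of elements, and after the call each rank holds the full concatenation. In the 1-indexed receive buffer:

  ! rank p's gsPlanSize elements land in
  !   rbuffer(this_plan%displs(p)+1 : this_plan%displs(p)+this_plan%counts(p))
  ! and every rank in the plan's communicator gets the identical assembled rbuffer.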
601    #endif
602
603    if (mpi_err /= 0) then
604 <  if (present(status)) status = -1
604 >  if (present(status)) status = -1
605    endif
606
607    end subroutine gather_double
613    real( kind = DP ), dimension(:,:), intent(inout) :: rbuffer
614    integer :: noffset,i,ierror
615    integer, intent(out), optional :: status
616 <
616 >
617    external mpi_allgatherv
618 -
619 -  if (present(status)) status = 0
618
619 <  ! noffset = this_plan%displs(this_plan%me)
619 >  if (present(status)) status = 0
620 >
621 >  ! noffset = this_plan%displs(this_plan%me)
622    #ifdef PROFILE
623 <  call cpu_time(commTimeInitial)
623 >  call cpu_time(commTimeInitial)
624    #endif
625
626    call mpi_allgatherv(sbuffer,this_plan%gsPlanSize, mpi_double_precision, &
627 <       rbuffer,this_plan%counts,this_plan%displs,mpi_double_precision, &
628 <       this_plan%myPlanComm, mpi_err)
627 >       rbuffer,this_plan%counts,this_plan%displs,mpi_double_precision, &
628 >       this_plan%myPlanComm, mpi_err)
629
630    #ifdef PROFILE
631    call cpu_time(commTimeFinal)
633    #endif
634
635    if (mpi_err /= 0) then
636 <  if (present(status)) status = -1
636 >  if (present(status)) status = -1
637    endif
638
639 <  end subroutine gather_double_2d
639 >  end subroutine gather_double_2d
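Every communication call in the module is bracketed the same way under PROFILE: cpu_time before and after the collective. The accumulation lines fall outside these hunks; presumably they add the elapsed difference into a running communication-time total, along the lines of (commTime is a hypothetical accumulator name, not a variable shown in this diff):

  #ifdef PROFILE
    call cpu_time(commTimeFinal)
    commTime = commTime + commTimeFinal - commTimeInitial   ! accumulate communication time
  #endif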
640
641    subroutine scatter_double( sbuffer, rbuffer, this_plan, status)
642
646    integer, intent(out), optional :: status
647    external mpi_reduce_scatter
648
649 <  if (present(status)) status = 0
649 >  if (present(status)) status = 0
650
651    #ifdef PROFILE
652 <  call cpu_time(commTimeInitial)
652 >  call cpu_time(commTimeInitial)
653    #endif
654    call mpi_reduce_scatter(sbuffer,rbuffer, this_plan%counts, &
655         mpi_double_precision, MPI_SUM, this_plan%myPlanComm, mpi_err)
659    #endif
660
661    if (mpi_err /= 0) then
662 <  if (present(status)) status = -1
662 >  if (present(status)) status = -1
663    endif
664
665    end subroutine scatter_double
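mpi_reduce_scatter is the mirror image of the gathers above, and is exactly the behavior promised by the scatter interface comment (add components, reduce to a local array): the send buffers are summed element-wise across the plan's communicator, and each rank keeps only its own counts-sized slice of the result. With two ranks and counts = (/2, 2/):

  ! P0 sends (a1, a2, a3, a4),  P1 sends (b1, b2, b3, b4)
  ! after the mpi_sum reduce-scatter:
  !   P0 receives (a1+b1, a2+b2)
  !   P1 receives (a3+b3, a4+b4)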
671    real( kind = DP ), dimension(:,:), intent(inout) :: rbuffer
672    integer, intent(out), optional :: status
673    external mpi_reduce_scatter
674 <
675 <  if (present(status)) status = 0
674 >
675 >  if (present(status)) status = 0
676    #ifdef PROFILE
677 <  call cpu_time(commTimeInitial)
677 >  call cpu_time(commTimeInitial)
678    #endif
679
680    call mpi_reduce_scatter(sbuffer,rbuffer, this_plan%counts, &
685    #endif
686
687    if (mpi_err /= 0) then
688 <  if (present(status)) status = -1
688 >  if (present(status)) status = -1
689    endif
690
691    end subroutine scatter_double_2d
692 <
692 >
693    subroutine setAtomTags(tags, status)
694    integer, dimension(:) :: tags
695    integer :: status
696
697    integer :: alloc_stat
698 <
698 >
699    integer :: nAtomsInCol
700    integer :: nAtomsInRow
701
702    status = 0
703 <  ! allocate row arrays
703 >  ! allocate row arrays
704    nAtomsInRow = getNatomsInRow(plan_atom_row)
705    nAtomsInCol = getNatomsInCol(plan_atom_col)
706 <
706 >
707    if(.not. allocated(AtomLocalToGlobal)) then
708    allocate(AtomLocalToGlobal(size(tags)),STAT=alloc_stat)
709 <  if (alloc_stat /= 0 ) then
709 >  if (alloc_stat /= 0 ) then
710    status = -1
711    return
712    endif
737    endif
738
739    endif
740 <  ! allocate column arrays
740 >  ! allocate column arrays
741    if (.not. allocated(AtomColToGlobal)) then
742    allocate(AtomColToGlobal(nAtomsInCol),STAT=alloc_stat)
743    if (alloc_stat /= 0 ) then
752    return
753    endif
754    endif
755 <
755 >
756    call gather(tags, AtomRowToGlobal, plan_atom_row)
757    call gather(tags, AtomColToGlobal, plan_atom_col)
758 <
758 >
759    end subroutine setAtomTags
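setAtomTags sizes the row and column maps from the plans and allocates each map only when it is not already allocated. The row-array branch is mostly elided above (lines 713-736), but from the column branch at lines 741-743 it presumably follows the same guard/allocate/check pattern before the two gathers fill the maps:

  if (.not. allocated(AtomRowToGlobal)) then
     allocate(AtomRowToGlobal(nAtomsInRow), STAT=alloc_stat)
     if (alloc_stat /= 0) then
        status = -1
        return
     endif
  endif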
760
761    subroutine setGroupTags(tags, status)
763    integer :: status
764
765    integer :: alloc_stat
766 <
766 >
767    integer :: nGroupsInCol
768    integer :: nGroupsInRow
769
771
772    nGroupsInRow = getNgroupsInRow(plan_group_row)
773    nGroupsInCol = getNgroupsInCol(plan_group_col)
774 <
774 >
775    if(allocated(GroupLocalToGlobal)) then
776    deallocate(GroupLocalToGlobal)
777    endif
780    status = -1
781    return
782    endif
783 <
783 >
784    GroupLocalToGlobal = tags
785
786    if(allocated(GroupRowToGlobal)) then
800    status = -1
801    return
802    endif
803 <
803 >
804    call gather(tags, GroupRowToGlobal, plan_group_row)
805    call gather(tags, GroupColToGlobal, plan_group_col)
806 <
806 >
807    end subroutine setGroupTags
808 <
808 >
809    function getNatomsInCol(thisplan) result(nInCol)
810    type (gs_plan), intent(in) :: thisplan
811    integer :: nInCol
817    integer :: nInRow
818    nInRow = thisplan%gsComponentPlan%nAtomsInRow
819    end function getNatomsInRow
820 <
820 >
821    function getNgroupsInCol(thisplan) result(nInCol)
822    type (gs_plan), intent(in) :: thisplan
823    integer :: nInCol
829    integer :: nInRow
830    nInRow = thisplan%gsComponentPlan%nGroupsInRow
831    end function getNgroupsInRow
832 <
832 >
833    function isMPISimSet() result(isthisSimSet)
834    logical :: isthisSimSet
835    if (isSimSet) then
838    isthisSimSet = .false.
839    endif
840    end function isMPISimSet
841 <
841 >
842    subroutine printComponentPlan(this_plan,printNode)
843
844    type (mpiComponentPlan), intent(in) :: this_plan
853
854    if (print_me) then
855    write(default_error,*) "SetupSimParallel: writing component plan"
856 <
856 >
857    write(default_error,*) "nMolGlobal: ", mpiSim%nMolGlobal
858    write(default_error,*) "nAtomsGlobal: ", mpiSim%nAtomsGlobal
859    write(default_error,*) "nAtomsLocal: ", mpiSim%nAtomsLocal