root/group/trunk/OOPSE-4/src/UseTheForce/DarkSide/simParallel.F90

Comparing trunk/OOPSE-4/src/UseTheForce/DarkSide/simParallel.F90 (file contents):
Revision 1948 by gezelter, Fri Jan 14 20:31:16 2005 UTC vs.
Revision 2287 by chuckv, Wed Sep 7 22:23:20 2005 UTC

# Line 47 | Line 47
47   !!
48   !! @author Charles F. Vardeman II
49   !! @author Matthew Meineke
50 < !! @version $Id: simParallel.F90,v 1.3 2005-01-14 20:31:16 gezelter Exp $, $Date: 2005-01-14 20:31:16 $, $Name: not supported by cvs2svn $, $Revision: 1.3 $
50 > !! @version $Id: simParallel.F90,v 1.5 2005-09-07 22:23:20 chuckv Exp $, $Date: 2005-09-07 22:23:20 $, $Name: not supported by cvs2svn $, $Revision: 1.5 $
51  
52   module mpiSimulation  
53    use definitions
# Line 57 | Line 57 | module mpiSimulation  
57    PRIVATE
58  
59  
60 < !! PUBLIC  Subroutines contained in this module
61 < !! gather and scatter are a generic interface
62 < !! to gather and scatter routines
60 >  !! PUBLIC  Subroutines contained in this module
61 >  !! gather and scatter are a generic interface
62 >  !! to gather and scatter routines
63    public :: gather, scatter
64    public :: setupSimParallel
65    public :: replanSimParallel
# Line 71 | Line 71 | module mpiSimulation  
71    public :: printComponentPlan
72    public :: getMyNode
73  
74 < !! PUBLIC  Subroutines contained in MPI module
74 >  !! PUBLIC  Subroutines contained in MPI module
75    public :: mpi_bcast
76    public :: mpi_allreduce
77 < !  public :: mpi_reduce
77 >  !  public :: mpi_reduce
78    public :: mpi_send
79    public :: mpi_recv
80    public :: mpi_get_processor_name
81    public :: mpi_finalize
82  
83 < !! PUBLIC mpi variables
83 >  !! PUBLIC mpi variables
84    public :: mpi_comm_world
85    public :: mpi_character
86    public :: mpi_integer
87 +  public :: mpi_lor
88 +  public :: mpi_logical
89    public :: mpi_double_precision
90    public :: mpi_sum
91    public :: mpi_max
# Line 92 | Line 94 | module mpiSimulation  
94  
95  
96  
97 < !! Safety logical to prevent access to ComponentPlan until
98 < !! set by C++.
97 >  !! Safety logical to prevent access to ComponentPlan until
98 >  !! set by C++.
99    logical, save :: ComponentPlanSet = .false.
100  
101  
102 < !! generic mpi error declaration.
102 >  !! generic mpi error declaration.
103    integer, public :: mpi_err
104  
105   #ifdef PROFILE
# Line 107 | Line 109 | module mpiSimulation  
109    real   :: commTimeInitial,commTimeFinal
110   #endif
111  
112 < !! Include mpiComponentPlan type. mpiComponentPlan is a
113 < !! dual header file for both c and fortran.
112 >  !! Include mpiComponentPlan type. mpiComponentPlan is a
113 >  !! dual header file for both c and fortran.
114   #define __FORTRAN90
115   #include "UseTheForce/mpiComponentPlan.h"
116  
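
The #define __FORTRAN90 guard above is the usual dual-header trick: mpiComponentPlan.h tests the macro and expands to a Fortran derived type when included here, and to the equivalent C struct when included from the C++ side, so both languages share one layout. A minimal sketch of the pattern, assuming only the nProcessors and myNode members (the real header carries many more):

   #ifdef __FORTRAN90
     type, public :: mpiComponentPlan
        sequence               ! sequential storage, to match the C struct layout
        integer :: nProcessors
        integer :: myNode
     end type mpiComponentPlan
   #else
     typedef struct {
       int nProcessors;
       int myNode;
     } mpiComponentPlan;
   #endif
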
117  
118 < !! Tags used during force loop for parallel simulation
118 >  !! Tags used during force loop for parallel simulation
119    integer, public, allocatable, dimension(:) :: AtomLocalToGlobal
120    integer, public, allocatable, dimension(:) :: AtomRowToGlobal
121    integer, public, allocatable, dimension(:) :: AtomColToGlobal
# Line 121 | Line 123 | module mpiSimulation  
123    integer, public, allocatable, dimension(:) :: GroupRowToGlobal
124    integer, public, allocatable, dimension(:) :: GroupColToGlobal
125  
126 < !! Logical set true if mpiSimulation has been initialized
126 >  !! Logical set true if mpiSimulation has been initialized
127    logical, save :: isSimSet = .false.
128  
129  
130    type (mpiComponentPlan), save :: mpiSim
131  
132 < !! gs_plan contains plans for gather and scatter routines
132 >  !! gs_plan contains plans for gather and scatter routines
133    type, public :: gs_plan
134       private
135       type (mpiComponentPlan), pointer :: gsComponentPlan => NULL()
# Line 140 | Line 142 | module mpiSimulation  
142       integer :: planNprocs  !! Number of processors in this plan
143    end type gs_plan
144  
145 < ! plans for different decompositions
145 >  ! plans for different decompositions
146    type (gs_plan), public, save :: plan_atom_row
147    type (gs_plan), public, save :: plan_atom_row_3d
148    type (gs_plan), public, save :: plan_atom_col
# Line 154 | Line 156 | module mpiSimulation  
156  
157    type (mpiComponentPlan), pointer :: simComponentPlan
158  
159 < ! interface for gather scatter routines
160 < !! Generic interface for gather.
161 < !! Gathers a local array into a row or column array
162 < !! Interface provided for integer, double and double
163 < !! rank 2 arrays.
159 >  ! interface for gather scatter routines
160 >  !! Generic interface for gather.
161 >  !! Gathers a local array into a row or column array
162 >  !! Interface provided for integer, double and double
163 >  !! rank 2 arrays.
164    interface gather
165       module procedure gather_integer
166       module procedure gather_double
167       module procedure gather_double_2d
168    end interface
169  
170 < !! Generic interface for scatter.
171 < !! Scatters a row or column array, adding components
172 < !! and reducing them to a local nComponent array.
173 < !! Interface provided for double and double rank=2 arrays.
170 >  !! Generic interface for scatter.
171 >  !! Scatters a row or column array, adding components
172 >  !! and reducing them to a local nComponent array.
173 >  !! Interface provided for double and double rank=2 arrays.
174  
175    interface scatter
176       module procedure scatter_double
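
The interface blocks above give callers one gather/scatter name each; the compiler resolves every call to gather_integer, gather_double, or gather_double_2d from the type and rank of the actual arguments. A minimal sketch of that dispatch mechanism with MPI stripped out (the procedure bodies are placeholders, not the module's real routines):

   module dispatch_demo
     implicit none
     interface gather
        module procedure gather_integer
        module procedure gather_double
     end interface
   contains
     subroutine gather_integer(sbuf, rbuf)
       integer, intent(in)    :: sbuf(:)
       integer, intent(inout) :: rbuf(:)
       rbuf(1:size(sbuf)) = sbuf          ! stand-in for the real mpi_allgatherv
     end subroutine gather_integer
     subroutine gather_double(sbuf, rbuf)
       double precision, intent(in)    :: sbuf(:)
       double precision, intent(inout) :: rbuf(:)
       rbuf(1:size(sbuf)) = sbuf
     end subroutine gather_double
   end module dispatch_demo

A call such as call gather(tags, rowTags) with integer arrays binds to gather_integer, while the same statement with double precision arrays binds to gather_double.
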
# Line 196 | Line 198 | contains
198  
199      !write(*,*) 'mpiSim_mod thinks node', thisComponentPlan%myNode, &
200      !     ' has atomTags(1) = ', atomTags(1)
201 <    
201 >
202      status = 0
203      if (componentPlanSet) then
204         return
205      endif
206      componentPlanSet = .true.
207 <    
207 >
208      !! copy c component plan to fortran  
209      mpiSim = thisComponentPlan
210      !write(*,*) "Setting up simParallel"
211 <    
211 >
212      call make_Force_Grid(mpiSim, localStatus)
213      if (localStatus /= 0) then
214         write(default_error,*) "Error creating force grid"
215         status = -1
216         return
217      endif
218 <    
218 >
219      call updateGridComponents(mpiSim, localStatus)
220      if (localStatus /= 0) then
221         write(default_error,*) "Error updating grid components"
# Line 232 | Line 234 | contains
234           mpiSim, mpiSim%rowComm, plan_group_row)
235      call plan_gather_scatter(nDim, mpiSim%nGroupsLocal, &
236           mpiSim, mpiSim%rowComm, plan_group_row_3d)
237 <        
237 >
238      call plan_gather_scatter(1, mpiSim%nAtomsLocal, &
239           mpiSim, mpiSim%columnComm, plan_atom_col)
240      call plan_gather_scatter(nDim, mpiSim%nAtomsLocal, &
# Line 243 | Line 245 | contains
245           mpiSim, mpiSim%columnComm, plan_group_col)
246      call plan_gather_scatter(nDim, mpiSim%nGroupsLocal, &
247           mpiSim, mpiSim%columnComm, plan_group_col_3d)
246    
247 !  Initialize tags    
248  
249 +    !  Initialize tags    
250 +
251      call setAtomTags(atomTags,localStatus)
252      if (localStatus /= 0) then
253         status = -1
# Line 261 | Line 263 | contains
263  
264      isSimSet = .true.
265  
266 < !    call printComponentPlan(mpiSim,0)
266 >    !    call printComponentPlan(mpiSim,0)
267    end subroutine setupSimParallel
268  
269    subroutine replanSimParallel(thisComponentPlan,status)
270 < !  Passed Arguments
270 >    !  Passed Arguments
271      !! mpiComponentPlan struct from C
272      type (mpiComponentPlan), intent(inout) :: thisComponentPlan  
273      integer, intent(out) :: status
# Line 278 | Line 280 | contains
280         status = -1
281         return
282      endif
283 <    
283 >
284      !! Unplan Gather Scatter plans
285      call unplan_gather_scatter(plan_atom_row)
286      call unplan_gather_scatter(plan_atom_row_3d)
# Line 303 | Line 305 | contains
305           mpiSim, mpiSim%rowComm, plan_group_row)
306      call plan_gather_scatter(nDim, mpiSim%nGroupsLocal, &
307           mpiSim, mpiSim%rowComm, plan_group_row_3d)
308 <        
308 >
309      call plan_gather_scatter(1, mpiSim%nAtomsLocal, &
310           mpiSim, mpiSim%columnComm, plan_atom_col)
311      call plan_gather_scatter(nDim, mpiSim%nAtomsLocal, &
# Line 314 | Line 316 | contains
316           mpiSim, mpiSim%columnComm, plan_group_col)
317      call plan_gather_scatter(nDim, mpiSim%nGroupsLocal, &
318           mpiSim, mpiSim%columnComm, plan_group_col_3d)
319 <        
319 >
320    end subroutine replanSimParallel
321  
322    !! Updates number of row and column components for long range forces.
323    subroutine updateGridComponents(thisComponentPlan, status)
324      type (mpiComponentPlan) :: thisComponentPlan !! mpiComponentPlan
325 <    
325 >
326      !! Status return
327      !! -  0 Success
328      !! - -1 Failure
# Line 339 | Line 341 | contains
341      if (thisComponentPlan%nAtomsLocal == 0) then
342         status = -1
343         return
344 <    endif  
344 >    endif
345      if (thisComponentPlan%nGroupsLocal == 0) then
346         write(*,*) 'tcp%ngl = ', thisComponentPlan%nGroupsLocal
347         status = -1
348         return
349      endif
350 <    
350 >
351      nAtomsLocal = thisComponentPlan%nAtomsLocal
352      nGroupsLocal = thisComponentPlan%nGroupsLocal
353  
# Line 362 | Line 364 | contains
364         status = -1
365         return
366      endif
367 <        
367 >
368      call mpi_allreduce(nGroupsLocal, nGroupsInRow, 1, mpi_integer, &
369           mpi_sum, thisComponentPlan%rowComm, mpiErrors)
370      if (mpiErrors /= 0) then
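
Aside from its error checks, updateGridComponents is two collectives: mpi_allreduce with mpi_sum over the row communicator and again over the column communicator, so every rank learns the total atom and group counts for its own row and column. A self-contained sketch of the same reduction, with mpi_comm_world standing in for the row or column communicator:

   program allreduce_demo
     use mpi                   ! MPI-2 module; older codes include 'mpif.h' instead
     implicit none
     integer :: ierr, rank, nLocal, nInRow

     call mpi_init(ierr)
     call mpi_comm_rank(mpi_comm_world, rank, ierr)

     nLocal = rank + 10        ! stand-in for nAtomsLocal on this rank

     ! every rank receives the sum of all local counts
     call mpi_allreduce(nLocal, nInRow, 1, mpi_integer, mpi_sum, &
          mpi_comm_world, ierr)

     call mpi_finalize(ierr)
   end program allreduce_demo
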
# Line 401 | Line 403 | contains
403      integer :: columnCommunicator
404      integer :: myWorldRank
405      integer :: i
404    
406  
407 +
408      if (.not. ComponentPlanSet) return
409      status = 0
410 <  
410 >
411      !! We make a dangerous assumption here that if nProcessors is
412      !! zero, then we need to get the information from MPI.
413      if (thisComponentPlan%nProcessors == 0 ) then
# Line 419 | Line 421 | contains
421            status = -1
422            return
423         endif
424 <      
424 >
425      else
426         nWorldProcessors = thisComponentPlan%nProcessors
427         myWorldRank = thisComponentPlan%myNode
428      endif
429 <    
429 >
430      nColumnsMax = nint(sqrt(real(nWorldProcessors,kind=dp)))
431 <    
431 >
432      do i = 1, nColumnsMax
433         if (mod(nWorldProcessors,i) == 0) nColumns = i
434      end do
435 <    
435 >
436      nRows = nWorldProcessors/nColumns
437 <    
437 >
438      rowIndex = myWorldRank/nColumns
439 <    
439 >
440      call mpi_comm_split(mpi_comm_world,rowIndex,0,rowCommunicator,mpiErrors)
441      if ( mpiErrors /= 0 ) then
442         write(default_error,*) "MPI comm split failed at row communicator"
443         status = -1
444         return
445      endif
446 <    
446 >
447      columnIndex = mod(myWorldRank,nColumns)
448      call mpi_comm_split(mpi_comm_world,columnIndex,0,columnCommunicator,mpiErrors)
449      if ( mpiErrors /= 0 ) then
# Line 449 | Line 451 | contains
451         status = -1
452         return
453      endif
454 <    
454 >
455      ! Set appropriate components of thisComponentPlan
456      thisComponentPlan%rowComm = rowCommunicator
457      thisComponentPlan%columnComm = columnCommunicator
# Line 459 | Line 461 | contains
461      thisComponentPlan%nColumns = nColumns
462  
463    end subroutine make_Force_Grid
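
make_Force_Grid picks nColumns as the largest divisor of the world size that does not exceed its square root, which makes the grid as close to square as the processor count allows, then calls mpi_comm_split twice: ranks sharing rowIndex land in one row communicator, ranks sharing columnIndex in one column communicator. A standalone sketch of the same decomposition (6 ranks, for example, give a 3 x 2 grid):

   program grid_demo
     use mpi
     implicit none
     integer :: ierr, rank, nprocs, i
     integer :: nColumns, nRows, rowIndex, columnIndex
     integer :: rowComm, colComm

     call mpi_init(ierr)
     call mpi_comm_rank(mpi_comm_world, rank, ierr)
     call mpi_comm_size(mpi_comm_world, nprocs, ierr)

     ! largest divisor of nprocs that is at most sqrt(nprocs)
     nColumns = 1
     do i = 1, nint(sqrt(real(nprocs)))
        if (mod(nprocs, i) == 0) nColumns = i
     end do
     nRows = nprocs / nColumns

     rowIndex    = rank / nColumns        ! ranks sharing rowIndex form a row
     columnIndex = mod(rank, nColumns)    ! ranks sharing columnIndex form a column

     call mpi_comm_split(mpi_comm_world, rowIndex,    0, rowComm, ierr)
     call mpi_comm_split(mpi_comm_world, columnIndex, 0, colComm, ierr)

     call mpi_finalize(ierr)
   end program grid_demo
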
464 <  
464 >
465    !! initializes a gather scatter plan
466    subroutine plan_gather_scatter( nDim, nObjects, thisComponentPlan, &
467         thisComm, this_plan, status)  
# Line 476 | Line 478 | contains
478  
479      if (present(status)) status = 0
480  
481 < !! Set gsComponentPlan pointer
482 < !! to the componet plan we want to use for this gather scatter plan.
483 < !! WARNING this could be dangerous since thisComponentPlan was origionally
484 < !! allocated in C++ and there is a significant difference between c and
485 < !! f95 pointers....  
481 >    !! Set gsComponentPlan pointer
482 >    !! to the componet plan we want to use for this gather scatter plan.
483 >    !! WARNING this could be dangerous since thisComponentPlan was origionally
484 >    !! allocated in C++ and there is a significant difference between c and
485 >    !! f95 pointers....  
486      this_plan%gsComponentPlan => thisComponentPlan
487  
488 < ! Set this plan size for displs array.
488 >    ! Set this plan size for displs array.
489      this_plan%gsPlanSize = nDim * nObjects
490  
491 < ! Duplicate communicator for this plan
491 >    ! Duplicate communicator for this plan
492      call mpi_comm_dup(thisComm, this_plan%myPlanComm, mpi_err)
493      if (mpi_err /= 0) then
494         if (present(status)) status = -1
# Line 519 | Line 521 | contains
521         return
522      end if
523  
524 <   !! gather all the local sizes into a size # processors array.
524 <   !! gather all the local sizes into an array of length planNprocs.
524 >    !! gather all the local sizes into an array of length planNprocs.
526           1,mpi_integer,thisComm,mpi_err)
527  
# Line 527 | Line 529 | contains
529         if (present(status)) status = -1
530         return
531      end if
532 <  
532 >
533      !! figure out the total number of particles in this plan
534      this_plan%globalPlanSize = sum(this_plan%counts)
535 <  
535 >
536      !! initialize plan displacements.
537      this_plan%displs(0) = 0
538      do i = 1, this_plan%planNprocs - 1,1
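
The hunk above ends inside the displacement loop, but the shape of the plan is the standard mpi_allgatherv recipe: each rank publishes its contribution size with mpi_allgather, the counts sum to the replicated buffer size, and the displacements are the exclusive prefix sum of the counts. A sketch of that bookkeeping (the recurrence below is the usual form, not a quotation of the elided loop body):

   program displs_demo
     use mpi
     implicit none
     integer :: ierr, rank, nprocs, i, localSize
     integer, allocatable :: counts(:), displs(:)

     call mpi_init(ierr)
     call mpi_comm_rank(mpi_comm_world, rank, ierr)
     call mpi_comm_size(mpi_comm_world, nprocs, ierr)
     allocate(counts(0:nprocs-1), displs(0:nprocs-1))

     localSize = 3 * (rank + 1)           ! stand-in for nDim * nObjects

     ! every rank learns every rank's contribution size
     call mpi_allgather(localSize, 1, mpi_integer, &
          counts, 1, mpi_integer, mpi_comm_world, ierr)

     displs(0) = 0
     do i = 1, nprocs - 1
        displs(i) = displs(i-1) + counts(i-1)   ! exclusive prefix sum
     end do

     call mpi_finalize(ierr)
   end program displs_demo
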
# Line 540 | Line 542 | contains
542  
543    subroutine unplan_gather_scatter(this_plan)
544      type (gs_plan), intent(inout) :: this_plan
545 <    
545 >
546      this_plan%gsComponentPlan => null()
547      call mpi_comm_free(this_plan%myPlanComm,mpi_err)
548  
# Line 561 | Line 563 | contains
563      if (present(status)) status = 0
564      noffset = this_plan%displs(this_plan%myPlanRank)
565  
566 < !    if (getmyNode() == 1) then
567 < !       write(*,*) "Node 0 printing allgatherv vars"
568 < !       write(*,*) "Noffset: ", noffset
569 < !       write(*,*) "PlanSize: ", this_plan%gsPlanSize
570 < !       write(*,*) "PlanComm: ", this_plan%myPlanComm
571 < !    end if
566 >    !    if (getmyNode() == 1) then
567 >    !       write(*,*) "Node 0 printing allgatherv vars"
568 >    !       write(*,*) "Noffset: ", noffset
569 >    !       write(*,*) "PlanSize: ", this_plan%gsPlanSize
570 >    !       write(*,*) "PlanComm: ", this_plan%myPlanComm
571 >    !    end if
572  
573      call mpi_allgatherv(sbuffer,this_plan%gsPlanSize, mpi_integer, &
574           rbuffer,this_plan%counts,this_plan%displs,mpi_integer, &
575           this_plan%myPlanComm, mpi_err)
576  
577      if (mpi_err /= 0) then
578 <      if (present(status)) status  = -1
578 >       if (present(status)) status  = -1
579      endif
580  
581    end subroutine gather_integer
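
gather_integer is a thin wrapper over mpi_allgatherv: every rank contributes its local slice, and every rank receives the concatenation laid out by the plan's counts and displs. A self-contained sketch with uniform slice sizes (buffer names here are illustrative):

   program gatherv_demo
     use mpi
     implicit none
     integer, parameter :: n = 4
     integer :: ierr, rank, nprocs, i
     integer :: sbuf(n)
     integer, allocatable :: rbuf(:), counts(:), displs(:)

     call mpi_init(ierr)
     call mpi_comm_rank(mpi_comm_world, rank, ierr)
     call mpi_comm_size(mpi_comm_world, nprocs, ierr)

     sbuf = rank                          ! this rank's local slice
     allocate(counts(0:nprocs-1), displs(0:nprocs-1), rbuf(n*nprocs))
     counts = n                           ! uniform sizes keep the sketch short
     displs = (/ (i*n, i = 0, nprocs-1) /)

     ! every rank ends up holding the concatenation of all slices
     call mpi_allgatherv(sbuf, n, mpi_integer, rbuf, counts, displs, &
          mpi_integer, mpi_comm_world, ierr)

     call mpi_finalize(ierr)
   end program gatherv_demo
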
# Line 601 | Line 603 | contains
603   #endif
604  
605      if (mpi_err /= 0) then
606 <      if (present(status)) status  = -1
606 >       if (present(status)) status  = -1
607      endif
608  
609    end subroutine gather_double
# Line 613 | Line 615 | contains
615      real( kind = DP ), dimension(:,:), intent(inout) :: rbuffer
616      integer :: noffset,i,ierror
617      integer, intent(out), optional :: status
618 <    
618 >
619      external  mpi_allgatherv
618    
619   if (present(status)) status = 0
620  
621 < !    noffset = this_plan%displs(this_plan%me)
621 >    if (present(status)) status = 0
622 >
623 >    !    noffset = this_plan%displs(this_plan%me)
624   #ifdef PROFILE
625 <   call cpu_time(commTimeInitial)
625 >    call cpu_time(commTimeInitial)
626   #endif
627  
628      call mpi_allgatherv(sbuffer,this_plan%gsPlanSize, mpi_double_precision, &
629 <        rbuffer,this_plan%counts,this_plan%displs,mpi_double_precision, &
630 <        this_plan%myPlanComm, mpi_err)
629 >         rbuffer,this_plan%counts,this_plan%displs,mpi_double_precision, &
630 >         this_plan%myPlanComm, mpi_err)
631  
632   #ifdef PROFILE
633      call cpu_time(commTimeFinal)
# Line 633 | Line 635 | contains
635   #endif
636  
637      if (mpi_err /= 0) then
638 <      if (present(status)) status = -1
638 >       if (present(status)) status = -1
639      endif
640  
641 < end subroutine gather_double_2d
641 >  end subroutine gather_double_2d
642  
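
Each communication routine brackets its collective with the same #ifdef PROFILE instrumentation: cpu_time samples before and after the call, and the difference is accumulated into a module-level total. A sketch of the pattern, assuming an accumulator named commTime (its declaration falls outside the hunks shown):

   #ifdef PROFILE
     call cpu_time(commTimeInitial)
   #endif

     ! ... the mpi_allgatherv or mpi_reduce_scatter call goes here ...

   #ifdef PROFILE
     call cpu_time(commTimeFinal)
     commTime = commTime + commTimeFinal - commTimeInitial
   #endif
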
643    subroutine scatter_double( sbuffer, rbuffer, this_plan, status)
644  
# Line 646 | Line 648 | contains
648      integer, intent(out), optional :: status
649      external mpi_reduce_scatter
650  
651 <   if (present(status)) status = 0
651 >    if (present(status)) status = 0
652  
653   #ifdef PROFILE
654 <   call cpu_time(commTimeInitial)
654 >    call cpu_time(commTimeInitial)
655   #endif
656      call mpi_reduce_scatter(sbuffer,rbuffer, this_plan%counts, &
657           mpi_double_precision, MPI_SUM, this_plan%myPlanComm, mpi_err)
# Line 659 | Line 661 | contains
661   #endif
662  
663      if (mpi_err /= 0) then
664 <     if (present(status))  status = -1
664 >       if (present(status))  status = -1
665      endif
666  
667    end subroutine scatter_double
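
scatter is gather's inverse with a reduction folded in: mpi_reduce_scatter sums the replicated row or column array element-wise across all ranks in the plan's communicator, then hands each rank only the counts(rank) entries it owns. A standalone sketch in which every entry of the result comes out equal to the number of ranks:

   program reduce_scatter_demo
     use mpi
     implicit none
     integer, parameter :: n = 2          ! entries owned by each rank
     integer :: ierr, rank, nprocs
     integer, allocatable :: counts(:)
     double precision, allocatable :: sbuf(:), rbuf(:)

     call mpi_init(ierr)
     call mpi_comm_rank(mpi_comm_world, rank, ierr)
     call mpi_comm_size(mpi_comm_world, nprocs, ierr)

     allocate(counts(0:nprocs-1), sbuf(n*nprocs), rbuf(n))
     counts = n                           ! every rank owns n entries
     sbuf   = 1.0d0                       ! every rank contributes ones

     ! element-wise sum across ranks, then scatter counts(i) entries to rank i
     call mpi_reduce_scatter(sbuf, rbuf, counts, mpi_double_precision, &
          mpi_sum, mpi_comm_world, ierr)

     call mpi_finalize(ierr)
   end program reduce_scatter_demo
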
# Line 671 | Line 673 | contains
673      real( kind = DP ), dimension(:,:), intent(inout) :: rbuffer
674      integer, intent(out), optional :: status
675      external mpi_reduce_scatter
676 <
677 <   if (present(status)) status = 0
676 >
677 >    if (present(status)) status = 0
678   #ifdef PROFILE
679 <   call cpu_time(commTimeInitial)
679 >    call cpu_time(commTimeInitial)
680   #endif
681  
682      call mpi_reduce_scatter(sbuffer,rbuffer, this_plan%counts, &
# Line 685 | Line 687 | contains
687   #endif
688  
689      if (mpi_err /= 0) then
690 <      if (present(status)) status = -1
690 >       if (present(status)) status = -1
691      endif
692  
693    end subroutine scatter_double_2d
694 <  
694 >
695    subroutine setAtomTags(tags, status)
696      integer, dimension(:) :: tags
697      integer :: status
698  
699      integer :: alloc_stat
700 <    
700 >
701      integer :: nAtomsInCol
702      integer :: nAtomsInRow
703  
704      status = 0
705 < ! allocate row arrays
705 >    ! allocate row arrays
706      nAtomsInRow = getNatomsInRow(plan_atom_row)
707      nAtomsInCol = getNatomsInCol(plan_atom_col)
708 <    
708 >
709      if(.not. allocated(AtomLocalToGlobal)) then
710         allocate(AtomLocalToGlobal(size(tags)),STAT=alloc_stat)
711 <        if (alloc_stat /= 0 ) then
711 >       if (alloc_stat /= 0 ) then
712            status = -1
713            return
714         endif
# Line 737 | Line 739 | contains
739         endif
740  
741      endif
742 < ! allocate column arrays
742 >    ! allocate column arrays
743      if (.not. allocated(AtomColToGlobal)) then
744         allocate(AtomColToGlobal(nAtomsInCol),STAT=alloc_stat)
745         if (alloc_stat /= 0 ) then
# Line 752 | Line 754 | contains
754            return
755         endif
756      endif
757 <    
757 >
758      call gather(tags, AtomRowToGlobal, plan_atom_row)
759      call gather(tags, AtomColToGlobal, plan_atom_col)
760 <    
760 >
761    end subroutine setAtomTags
762  
763    subroutine setGroupTags(tags, status)
# Line 763 | Line 765 | contains
765      integer :: status
766  
767      integer :: alloc_stat
768 <    
768 >
769      integer :: nGroupsInCol
770      integer :: nGroupsInRow
771  
# Line 771 | Line 773 | contains
773  
774      nGroupsInRow = getNgroupsInRow(plan_group_row)
775      nGroupsInCol = getNgroupsInCol(plan_group_col)
776 <    
776 >
777      if(allocated(GroupLocalToGlobal)) then
778         deallocate(GroupLocalToGlobal)
779      endif
# Line 780 | Line 782 | contains
782         status = -1
783         return
784      endif
785 <    
785 >
786      GroupLocalToGlobal = tags
787  
788      if(allocated(GroupRowToGlobal)) then
# Line 800 | Line 802 | contains
802         status = -1
803         return
804      endif
805 <    
805 >
806      call gather(tags, GroupRowToGlobal, plan_group_row)
807      call gather(tags, GroupColToGlobal, plan_group_col)
808 <    
808 >
809    end subroutine setGroupTags
810 <  
810 >
811    function getNatomsInCol(thisplan) result(nInCol)
812      type (gs_plan), intent(in) :: thisplan
813      integer :: nInCol
# Line 817 | Line 819 | contains
819      integer :: nInRow
820      nInRow = thisplan%gsComponentPlan%nAtomsInRow
821    end function getNatomsInRow
822 <
822 >
823    function getNgroupsInCol(thisplan) result(nInCol)
824      type (gs_plan), intent(in) :: thisplan
825      integer :: nInCol
# Line 829 | Line 831 | contains
831      integer :: nInRow
832      nInRow = thisplan%gsComponentPlan%nGroupsInRow
833    end function getNgroupsInRow
834 <  
834 >
835    function isMPISimSet() result(isthisSimSet)
836      logical :: isthisSimSet
837      if (isSimSet) then
# Line 838 | Line 840 | contains
840         isthisSimSet = .false.
841      endif
842    end function isMPISimSet
843 <  
843 >
844    subroutine printComponentPlan(this_plan,printNode)
845  
846      type (mpiComponentPlan), intent(in) :: this_plan
# Line 853 | Line 855 | contains
855  
856      if (print_me) then
857         write(default_error,*) "SetupSimParallel: writing component plan"
858 <      
858 >
859         write(default_error,*) "nMolGlobal: ", mpiSim%nMolGlobal
860         write(default_error,*) "nAtomsGlobal: ", mpiSim%nAtomsGlobal
861         write(default_error,*) "nAtomsLocal: ", mpiSim%nAtomsLocal

Diff Legend

Removed lines
+ Added lines
< Changed lines
> Changed lines