--- trunk/OOPSE/libmdtools/mpiSimulation_module.F90 2004/05/27 00:48:12 1198
+++ trunk/OOPSE/libmdtools/mpiSimulation_module.F90 2004/06/01 18:42:58 1214
@@ -7,7 +7,7 @@
 !!
 !! @author Charles F. Vardeman II
 !! @author Matthew Meineke
-!! @version $Id: mpiSimulation_module.F90,v 1.13 2004-05-27 00:48:12 tim Exp $, $Date: 2004-05-27 00:48:12 $, $Name: not supported by cvs2svn $, $Revision: 1.13 $
+!! @version $Id: mpiSimulation_module.F90,v 1.15 2004-06-01 18:42:58 gezelter Exp $, $Date: 2004-06-01 18:42:58 $, $Name: not supported by cvs2svn $, $Revision: 1.15 $
 
 module mpiSimulation
   use definitions
@@ -139,45 +139,47 @@ contains
 
 contains
 
-!! Sets up mpiComponentPlan with structure passed from C++.
-  subroutine setupSimParallel(thisComponentPlan,nAtomTags,atomTags,status)
-! Passed Arguments
+  !! Sets up mpiComponentPlan with structure passed from C++.
+  subroutine setupSimParallel(thisComponentPlan, nAtomTags, atomTags, &
+       nGroupTags, groupTags, status)
+    !! Passed Arguments
     !! mpiComponentPlan struct from C
     type (mpiComponentPlan), intent(inout) :: thisComponentPlan
-!! Number of tags passed, nlocal
-    integer, intent(in) :: nAtomTags
-!! Result status, 0 = normal, -1 = error
+    !! Number of tags passed
+    integer, intent(in) :: nAtomTags, nGroupTags
+    !! Result status, 0 = normal, -1 = error
     integer, intent(out) :: status
     integer :: localStatus
-!! Global reference tag for local particles
-    integer, dimension(nAtomTags),intent(inout) :: atomTags
+    !! Global reference tag for local particles
+    integer, dimension(nAtomTags), intent(inout) :: atomTags
+    integer, dimension(nGroupTags), intent(inout) :: groupTags
 
-    write(*,*) 'mpiSim_mod thinks node', thisComponentPlan%myNode, &
-         ' has atomTags(1) = ', atomTags(1)
-    
+    !write(*,*) 'mpiSim_mod thinks node', thisComponentPlan%myNode, &
+    !     ' has atomTags(1) = ', atomTags(1)
+
     status = 0
     if (componentPlanSet) then
       return
    endif
    componentPlanSet = .true.
 
-    !! copy c component plan to fortran  
+    !! copy c component plan to fortran
    mpiSim = thisComponentPlan
-    write(*,*) "Seting up simParallel"
-    
+    !write(*,*) "Seting up simParallel"
+
    call make_Force_Grid(mpiSim, localStatus)
    if (localStatus /= 0) then
       write(default_error,*) "Error creating force grid"
       status = -1
       return
    endif
-    
+
    call updateGridComponents(mpiSim, localStatus)
    if (localStatus /= 0) then
       write(default_error,*) "Error updating grid components"
       status = -1
       return
-    endif 
+    endif
 
    !! initialize gather and scatter plans used in this simulation
    call plan_gather_scatter(1, mpiSim%nAtomsLocal, &
@@ -209,6 +211,14 @@ contains
       status = -1
       return
    endif
+
+
+    call setGroupTags(groupTags,localStatus)
+    if (localStatus /= 0) then
+       status = -1
+       return
+    endif
+
    isSimSet = .true.
 
 !    call printComponentPlan(mpiSim,0)
@@ -267,13 +277,13 @@ contains
 
  end subroutine replanSimParallel
 
-!! Updates number of row and column components for long range forces.
-  subroutine updateGridComponents(thisComponentPlan,status)
+  !! Updates number of row and column components for long range forces.
+  subroutine updateGridComponents(thisComponentPlan, status)
    type (mpiComponentPlan) :: thisComponentPlan !! mpiComponentPlan
-    
-!! Status return
-!!  - 0 Success
-!!  - -1 Failure
+
+    !! Status return
+    !!  - 0 Success
+    !!  - -1 Failure
    integer, intent(out) :: status
    integer :: nAtomsLocal
    integer :: nAtomsInRow = 0
@@ -291,6 +301,7 @@ contains
       return
    endif
    if (thisComponentPlan%nGroupsLocal == 0) then
+       write(*,*) 'tcp%ngl = ', thisComponentPlan%nGroupsLocal
       status = -1
       return
    endif
@@ -334,13 +345,13 @@ contains
 
  end subroutine updateGridComponents
 
-!! Creates a square force decomposition of processors into row and column
-!! communicators.
+  !! Creates a square force decomposition of processors into row and column
+  !! communicators.
  subroutine make_Force_Grid(thisComponentPlan,status)
    type (mpiComponentPlan) :: thisComponentPlan
    integer, intent(out) :: status !! status returns -1 if error
-    integer :: nColumnsMax !! Maximum number of columns 
-    integer :: nWorldProcessors !! Total number of processors in World comm. 
+    integer :: nColumnsMax !! Maximum number of columns
+    integer :: nWorldProcessors !! Total number of processors in World comm.
    integer :: rowIndex !! Row for this processor.
    integer :: columnIndex !! Column for this processor.
    integer :: nRows !! Total number of rows.
@@ -355,8 +366,8 @@ contains
    if (.not. ComponentPlanSet) return
    status = 0
 
-!! We make a dangerous assumption here that if numberProcessors is
-!! zero, then we need to get the information from MPI.
+    !! We make a dangerous assumption here that if numberProcessors is
+    !! zero, then we need to get the information from MPI.
    if (thisComponentPlan%nProcessors == 0 ) then
       call mpi_comm_size( MPI_COMM_WORLD, nWorldProcessors,mpiErrors)
       if ( mpiErrors /= 0 ) then
@@ -368,29 +379,29 @@ contains
          status = -1
          return
       endif
-       
+
    else
       nWorldProcessors = thisComponentPlan%nProcessors
       myWorldRank = thisComponentPlan%myNode
    endif
-    
+
    nColumnsMax = nint(sqrt(real(nWorldProcessors,kind=dp)))
-    
+
    do i = 1, nColumnsMax
       if (mod(nWorldProcessors,i) == 0) nColumns = i
    end do
-    
+
    nRows = nWorldProcessors/nColumns
-    
+
    rowIndex = myWorldRank/nColumns
-    
+
    call mpi_comm_split(mpi_comm_world,rowIndex,0,rowCommunicator,mpiErrors)
    if ( mpiErrors /= 0 ) then
       write(default_error,*) "MPI comm split failed at row communicator"
       status = -1
       return
    endif
-    
+
    columnIndex = mod(myWorldRank,nColumns)
    call mpi_comm_split(mpi_comm_world,columnIndex,0,columnCommunicator,mpiErrors)
    if ( mpiErrors /= 0 ) then
@@ -398,8 +409,8 @@ contains
       status = -1
       return
    endif
-    
-! Set appropriate components of thisComponentPlan
+
+    ! Set appropriate components of thisComponentPlan
    thisComponentPlan%rowComm = rowCommunicator
    thisComponentPlan%columnComm = columnCommunicator
    thisComponentPlan%rowIndex = rowIndex
@@ -408,7 +419,7 @@ contains
    thisComponentPlan%nColumns = nColumns
 
  end subroutine make_Force_Grid
-  
+
  !! initalizes a gather scatter plan
  subroutine plan_gather_scatter( nDim, nObjects, thisComponentPlan, &
       thisComm, this_plan, status)
@@ -706,6 +717,54 @@ contains
    call gather(tags, AtomColToGlobal, plan_atom_col)
 
  end subroutine setAtomTags
+
+  subroutine setGroupTags(tags, status)
+    integer, dimension(:) :: tags
+    integer :: status
+
+    integer :: alloc_stat
+
+    integer :: nGroupsInCol
+    integer :: nGroupsInRow
+
+    status = 0
+
+    nGroupsInRow = getNgroupsInRow(plan_group_row)
+    nGroupsInCol = getNgroupsInCol(plan_group_col)
+
+    if(allocated(GroupLocalToGlobal)) then
+       deallocate(GroupLocalToGlobal)
+    endif
+    allocate(GroupLocalToGlobal(size(tags)),STAT=alloc_stat)
+    if (alloc_stat /= 0 ) then
+       status = -1
+       return
+    endif
+
+    GroupLocalToGlobal = tags
+
+    if(allocated(GroupRowToGlobal)) then
+       deallocate(GroupRowToGlobal)
+    endif
+    allocate(GroupRowToGlobal(nGroupsInRow),STAT=alloc_stat)
+    if (alloc_stat /= 0 ) then
+       status = -1
+       return
+    endif
+
+    if(allocated(GroupColToGlobal)) then
+       deallocate(GroupColToGlobal)
+    endif
+    allocate(GroupColToGlobal(nGroupsInCol),STAT=alloc_stat)
+    if (alloc_stat /= 0 ) then
+       status = -1
+       return
+    endif
+
+    call gather(tags, GroupRowToGlobal, plan_group_row)
+    call gather(tags, GroupColToGlobal, plan_group_col)
+
+  end subroutine setGroupTags
 
  function getNatomsInCol(thisplan) result(nInCol)
    type (gs_plan), intent(in) :: thisplan
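
The make_Force_Grid hunks above are whitespace and comment cleanups; the decomposition itself is unchanged: nColumns is the largest divisor of nWorldProcessors that does not exceed its square root, each rank is assigned rowIndex = rank/nColumns and columnIndex = mod(rank, nColumns), and mpi_comm_split then builds one communicator per row and one per column. For reference, the standalone program below is a minimal sketch of that same factorization (the program name is illustrative, and the module's status handling is omitted):

   program force_grid_sketch
      use mpi
      implicit none
      integer :: nWorldProcessors, myWorldRank, mpiErrors, i
      integer :: nColumns, nRows, rowIndex, columnIndex
      integer :: rowCommunicator, columnCommunicator

      call mpi_init(mpiErrors)
      call mpi_comm_size(MPI_COMM_WORLD, nWorldProcessors, mpiErrors)
      call mpi_comm_rank(MPI_COMM_WORLD, myWorldRank, mpiErrors)

      ! largest divisor of the world size that is <= its square root
      nColumns = 1
      do i = 1, nint(sqrt(real(nWorldProcessors)))
         if (mod(nWorldProcessors, i) == 0) nColumns = i
      end do
      nRows = nWorldProcessors / nColumns

      ! ranks sharing a color land in the same communicator;
      ! key 0 keeps them ordered by world rank, as in the module
      rowIndex    = myWorldRank / nColumns
      columnIndex = mod(myWorldRank, nColumns)
      call mpi_comm_split(MPI_COMM_WORLD, rowIndex, 0, rowCommunicator, mpiErrors)
      call mpi_comm_split(MPI_COMM_WORLD, columnIndex, 0, columnCommunicator, mpiErrors)

      write(*,*) 'rank', myWorldRank, 'sits at (', rowIndex, ',', columnIndex, &
           ') in a', nRows, 'x', nColumns, 'grid'
      call mpi_finalize(mpiErrors)
   end program force_grid_sketch

Because nColumns divides nWorldProcessors exactly, every row holds nColumns ranks and every column nRows ranks, which is what lets the gather/scatter plans assume rectangular row and column blocks.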
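
The new setGroupTags mirrors the existing setAtomTags: it (re)allocates GroupLocalToGlobal, GroupRowToGlobal, and GroupColToGlobal, then gathers each rank's local group tags along the row and column plans so that row- and column-local group indices can be mapped back to global tags on every rank. The module's gather works from offsets precomputed in a gs_plan; the sketch below (module and routine names are illustrative, not part of OOPSE) reproduces the effect with plain MPI collectives instead, rebuilding the counts and displacements that a gs_plan would cache:

   module tag_replication
      use mpi
      implicit none
   contains
      ! Replicate every rank's local tags across one communicator
      ! (a row or column of the force grid), analogous to one
      ! gather(tags, GroupRowToGlobal, plan_group_row) call.
      subroutine replicate_tags(localTags, comm, allTags)
         integer, intent(in)               :: localTags(:), comm
         integer, allocatable, intent(out) :: allTags(:)
         integer :: nProcs, nLocal, ierr, i
         integer, allocatable :: counts(:), displs(:)

         call mpi_comm_size(comm, nProcs, ierr)
         nLocal = size(localTags)

         ! share how many tags each rank holds ...
         allocate(counts(nProcs), displs(nProcs))
         call mpi_allgather(nLocal, 1, MPI_INTEGER, &
                            counts, 1, MPI_INTEGER, comm, ierr)

         ! ... then build displacements and gather the tags themselves
         displs(1) = 0
         do i = 2, nProcs
            displs(i) = displs(i-1) + counts(i-1)
         end do
         allocate(allTags(sum(counts)))
         call mpi_allgatherv(localTags, nLocal, MPI_INTEGER, &
                             allTags, counts, displs, MPI_INTEGER, comm, ierr)
      end subroutine replicate_tags
   end module tag_replication

Run over the row communicator, this should yield the same contents as GroupRowToGlobal; the gs_plan machinery exists precisely so that these counts and displacements are computed once per replanning rather than on every gather.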