--- trunk/OOPSE/libmdtools/do_Forces.F90	2003/03/21 22:11:50	388
+++ trunk/OOPSE/libmdtools/do_Forces.F90	2003/07/02 21:26:55	572
@@ -4,7 +4,7 @@
 !! @author Charles F. Vardeman II
 !! @author Matthew Meineke
-!! @version $Id: do_Forces.F90,v 1.2 2003-03-21 22:11:50 chuckv Exp $, $Date: 2003-03-21 22:11:50 $, $Name: not supported by cvs2svn $, $Revision: 1.2 $
+!! @version $Id: do_Forces.F90,v 1.17 2003-07-02 21:26:55 mmeineke Exp $, $Date: 2003-07-02 21:26:55 $, $Name: not supported by cvs2svn $, $Revision: 1.17 $
 module do_Forces
 use force_globals
@@ -140,8 +140,16 @@ contains
     if (FF_uses_GB .and. FF_uses_LJ) then
     endif
+    if (.not. do_forces_initialized) then
+       !! Create neighbor lists
+       call expandNeighborList(getNlocal(), my_status)
+       if (my_Status /= 0) then
+          write(default_error,*) "SimSetup: ExpandNeighborList returned error."
+          thisStat = -1
+          return
+       endif
+    endif
-    
     do_forces_initialized = .true.
 
   end subroutine init_FF
@@ -167,7 +175,7 @@ contains
     logical ( kind = 2) :: do_pot_c, do_stress_c
     logical :: do_pot
     logical :: do_stress
-#ifdef IS_MPI 
+#ifdef IS_MPI
     real( kind = DP ) :: pot_local
     integer :: nrow
     integer :: ncol
@@ -189,6 +197,7 @@ contains
     !! initialize local variables
 #ifdef IS_MPI
+    pot_local = 0.0_dp
     nlocal = getNlocal()
     nrow = getNrow(plan_row)
     ncol = getNcol(plan_col)
@@ -196,7 +205,7 @@ contains
     nlocal = getNlocal()
     natoms = nlocal
 #endif
-    
+
     call getRcut(rcut,rc2=rcutsq)
     call getRlist(rlist,rlistsq)
@@ -245,7 +254,7 @@ contains
        !! save current configuration, construct neighbor list,
        !! and calculate forces
-       call saveNeighborList(q)
+       call saveNeighborList(nlocal, q)
 
        neighborListSize = size(list)
       nlist = 0
@@ -277,7 +286,7 @@ contains
              if (rijsq < rcutsq) then
                 call do_pair(i, j, rijsq, d, do_pot, do_stress, &
-                     u_l, A, f, t,pot)
+                     u_l, A, f, t, pot_local)
              endif
           endif
        enddo inner
@@ -299,7 +308,7 @@ contains
           call get_interatomic_vector(q_Row(:,i), q_Col(:,j), d, rijsq)
           call do_pair(i, j, rijsq, d, do_pot, do_stress, &
-               u_l, A, f, t,pot)
+               u_l, A, f, t, pot_local)
        enddo
     endif
@@ -312,7 +321,7 @@ contains
        ! save current configuration, construct neighbor list,
        ! and calculate forces
-       call saveNeighborList(q)
+       call saveNeighborList(natoms, q)
 
        neighborListSize = size(list)
@@ -346,7 +355,7 @@ contains
              if (rijsq < rcutsq) then
                 call do_pair(i, j, rijsq, d, do_pot, do_stress, &
-                     u_l, A, f, t,pot)
+                     u_l, A, f, t, pot)
              endif
           endif
        enddo inner
@@ -368,7 +377,7 @@ contains
           call get_interatomic_vector(q(:,i), q(:,j), d, rijsq)
           call do_pair(i, j, rijsq, d, do_pot, do_stress, &
-               u_l, A, f, t,pot)
+               u_l, A, f, t, pot)
        enddo
     endif
@@ -381,15 +390,26 @@ contains
 #ifdef IS_MPI
     !!distribute forces
-    
-    call scatter(f_Row,f,plan_row3d)
+
+    f_temp = 0.0_dp
+    call scatter(f_Row,f_temp,plan_row3d)
+    do i = 1,nlocal
+       f(1:3,i) = f(1:3,i) + f_temp(1:3,i)
+    end do
+
+    f_temp = 0.0_dp
     call scatter(f_Col,f_temp,plan_col3d)
     do i = 1,nlocal
        f(1:3,i) = f(1:3,i) + f_temp(1:3,i)
     end do
 
     if (FF_UsesDirectionalAtoms() .and. SimUsesDirectionalAtoms()) then
-       call scatter(t_Row,t,plan_row3d)
+       t_temp = 0.0_dp
+       call scatter(t_Row,t_temp,plan_row3d)
+       do i = 1,nlocal
+          t(1:3,i) = t(1:3,i) + t_temp(1:3,i)
+       end do
+       t_temp = 0.0_dp
        call scatter(t_Col,t_temp,plan_col3d)
 
        do i = 1,nlocal
@@ -400,20 +420,20 @@ contains
     if (do_pot) then
        ! scatter/gather pot_row into the members of my column
        call scatter(pot_Row, pot_Temp, plan_row)
-      
+
        ! scatter/gather pot_local into all other procs
        ! add resultant to get total pot
        do i = 1, nlocal
          pot_local = pot_local + pot_Temp(i)
        enddo
+
+       pot_Temp = 0.0_DP
-       pot_Temp = 0.0_DP
-
        call scatter(pot_Col, pot_Temp, plan_col)
        do i = 1, nlocal
          pot_local = pot_local + pot_Temp(i)
        enddo
-      
+
     endif
 #endif
@@ -461,15 +481,15 @@ contains
 #ifdef IS_MPI
     if (do_pot) then
-       pot = pot_local
+       pot = pot + pot_local
        !! we assume the c code will do the allreduce to get the total potential
        !! we could do it right here if we needed to...
     endif
 
     if (do_stress) then
-       call mpi_allreduce(tau, tau_Temp,9,mpi_double_precision,mpi_sum, &
+       call mpi_allreduce(tau_Temp, tau, 9,mpi_double_precision,mpi_sum, &
            mpi_comm_world,mpi_err)
-       call mpi_allreduce(virial, virial_Temp,1,mpi_double_precision,mpi_sum, &
+       call mpi_allreduce(virial_Temp, virial,1,mpi_double_precision,mpi_sum, &
            mpi_comm_world,mpi_err)
     endif
@@ -484,13 +504,13 @@ contains
 
   end subroutine do_force_loop
 
-  subroutine do_pair(i, j, rijsq, d, do_pot, do_stress, u_l, A, f, t,pot)
+  subroutine do_pair(i, j, rijsq, d, do_pot, do_stress, u_l, A, f, t, pot)
 
     real( kind = dp ) :: pot
-    real( kind = dp ), dimension(:,:) :: u_l
-    real (kind=dp), dimension(:,:) :: A
-    real (kind=dp), dimension(:,:) :: f
-    real (kind=dp), dimension(:,:) :: t
+    real( kind = dp ), dimension(3,getNlocal()) :: u_l
+    real (kind=dp), dimension(9,getNlocal()) :: A
+    real (kind=dp), dimension(3,getNlocal()) :: f
+    real (kind=dp), dimension(3,getNlocal()) :: t
 
     logical, intent(inout) :: do_pot, do_stress
     integer, intent(in) :: i, j
@@ -505,7 +525,12 @@ contains
     r = sqrt(rijsq)
+
+
 #ifdef IS_MPI
+    if (tagRow(i) .eq. tagColumn(j)) then
+       write(0,*) 'do_pair is doing', i , j, tagRow(i), tagColumn(j)
+    endif
 
     me_i = atid_row(i)
     me_j = atid_col(j)
@@ -572,21 +597,52 @@ contains
     real (kind = dp), dimension(3) :: q_i
     real (kind = dp), dimension(3) :: q_j
     real ( kind = dp ), intent(out) :: r_sq
-    real( kind = dp ) :: d(3)
-    real( kind = dp ) :: d_old(3)
-    d(1:3) = q_i(1:3) - q_j(1:3)
-    d_old = d
+    real( kind = dp ) :: d(3), scaled(3)
+    integer i
+
+    d(1:3) = q_j(1:3) - q_i(1:3)
 
+    ! Wrap back into periodic box if necessary
     if ( SimUsesPBC() ) then
+
+       if( .not.boxIsOrthorhombic ) then
+          ! calc the scaled coordinates.
+
+          scaled = matmul(HmatInv, d)
+
+          ! wrap the scaled coordinates
 
-       d(1:3) = d(1:3) - box(1:3) * sign(1.0_dp,d(1:3)) * &
-            int(abs(d(1:3)/box(1:3)) + 0.5_dp)
+          scaled = scaled - anint(scaled)
+
+          ! calc the wrapped real coordinates from the wrapped scaled
+          ! coordinates
+
+          d = matmul(Hmat,scaled)
+
+       else
+          ! calc the scaled coordinates.
+
+          do i = 1, 3
+             scaled(i) = d(i) * HmatInv(i,i)
+
+             ! wrap the scaled coordinates
+
+             scaled(i) = scaled(i) - anint(scaled(i))
+
+             ! calc the wrapped real coordinates from the wrapped scaled
+             ! coordinates
+
+             d(i) = scaled(i)*Hmat(i,i)
+          enddo
+       endif
+
     endif
+
     r_sq = dot_product(d,d)
-    
+
   end subroutine get_interatomic_vector
-  
+
   subroutine check_initialization(error)
     integer, intent(out) :: error
@@ -643,7 +699,6 @@ contains
     rf = 0.0_dp
     tau_Temp = 0.0_dp
     virial_Temp = 0.0_dp
-    
   end subroutine zero_work_arrays
 
   function skipThisPair(atom1, atom2) result(skip_it)
@@ -687,7 +742,7 @@ contains
 #else
     unique_id_2 = atom2
 #endif
-    
+
 #ifdef IS_MPI
     !! this situation should only arise in MPI simulations
     if (unique_id_1 == unique_id_2) then
@@ -697,14 +752,18 @@ contains
     !! this prevents us from doing the pair on multiple processors
     if (unique_id_1 < unique_id_2) then
-       if (mod(unique_id_1 + unique_id_2,2) == 0) skip_it = .true.
-       return
+       if (mod(unique_id_1 + unique_id_2,2) == 0) then
+          skip_it = .true.
+          return
+       endif
     else
-       if (mod(unique_id_1 + unique_id_2,2) == 1) skip_it = .true.
-       return
+       if (mod(unique_id_1 + unique_id_2,2) == 1) then
+          skip_it = .true.
+          return
+       endif
     endif
 #endif
-    
+
     !! the rest of these situations can happen in all simulations:
     do i = 1, nExcludes_global
        if ((excludesGlobal(i) == unique_id_1) .or. &
            (excludesGlobal(i) == unique_id_2) ) then
          skip_it = .true.
          return
       endif
    enddo
-    
+
    do i = 1, nExcludes_local
       if (excludesLocal(1,i) == unique_id_1) then
          if (excludesLocal(2,i) == unique_id_2) then
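
A note on the mpi_allreduce change in do_force_loop above: MPI's reduction
takes the send buffer first and the receive buffer second, so the
per-processor accumulators (tau_Temp, virial_Temp) must be the first
argument and the full tensors (tau, virial) the second; the old calls had
them reversed. A minimal standalone sketch of the corrected call pattern
(the program and the virial_local/virial_total names are illustrative, not
part of OOPSE):

program allreduce_demo
  implicit none
  include 'mpif.h'
  integer, parameter :: dp = kind(1.0d0)
  real(kind=dp) :: virial_local, virial_total
  integer :: rank, mpi_err

  call mpi_init(mpi_err)
  call mpi_comm_rank(mpi_comm_world, rank, mpi_err)

  ! stand-in for this processor's partial virial
  virial_local = real(rank + 1, dp)

  ! send buffer first, receive buffer second -- the ordering the diff fixes
  call mpi_allreduce(virial_local, virial_total, 1, mpi_double_precision, &
       mpi_sum, mpi_comm_world, mpi_err)

  if (rank == 0) write(*,*) 'total virial =', virial_total
  call mpi_finalize(mpi_err)
end program allreduce_demo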
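
The rewritten get_interatomic_vector wraps the separation vector through
scaled (fractional) coordinates: multiply by HmatInv, pull each component
back into [-0.5, 0.5] with anint(), and map back through Hmat; for an
orthorhombic box only the diagonal elements matter, which is what the else
branch exploits. A self-contained sketch of the general (triclinic) path,
using a made-up sheared cell whose inverse is written out by hand -- only
the Hmat/HmatInv/anint pattern mirrors the code above:

program wrap_demo
  implicit none
  integer, parameter :: dp = selected_real_kind(15)
  real(kind=dp), dimension(3,3) :: Hmat, HmatInv
  real(kind=dp), dimension(3)   :: d, scaled
  real(kind=dp) :: r_sq

  ! columns of Hmat are the box vectors of a sheared (triclinic) cell
  Hmat = reshape( (/ 10.0_dp, 0.0_dp, 0.0_dp,   &
                      2.0_dp, 8.0_dp, 0.0_dp,   &
                      0.0_dp, 0.0_dp, 6.0_dp /), (/3,3/) )

  ! inverse worked out by hand for this upper-triangular cell
  HmatInv = reshape( (/ 0.1_dp,   0.0_dp,   0.0_dp,          &
                       -0.025_dp, 0.125_dp, 0.0_dp,          &
                        0.0_dp,   0.0_dp,   1.0_dp/6.0_dp /), (/3,3/) )

  d = (/ 9.0_dp, 7.5_dp, -4.0_dp /)  ! raw separation q_j - q_i

  scaled = matmul(HmatInv, d)        ! to fractional coordinates
  scaled = scaled - anint(scaled)    ! minimum image: components in [-0.5, 0.5]
  d      = matmul(Hmat, scaled)      ! back to Cartesian coordinates

  r_sq = dot_product(d,d)
  write(*,*) 'wrapped separation:', d   ! (-3.0, -0.5, 2.0) for this cell
  write(*,*) 'r_sq =', r_sq             ! 13.25
end program wrap_demo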
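
The restructured skipThisPair blocks above also make the parity rule easier
to see: under the MPI row/column decomposition a pair can show up on two
processors, once as (i,j) and once as (j,i), and skipping even id sums when
unique_id_1 < unique_id_2 but odd sums when unique_id_1 > unique_id_2 lets
exactly one of the two orderings through. A small sketch that checks this
property for a few ids (the demo program and its skip_pair helper are
illustrative, not part of the library):

program parity_demo
  implicit none
  integer :: id1, id2, kept

  do id1 = 1, 4
     do id2 = id1 + 1, 4
        ! count how many of the two orderings survive the parity test;
        ! the answer should always be exactly one
        kept = 0
        if (.not. skip_pair(id1, id2)) kept = kept + 1
        if (.not. skip_pair(id2, id1)) kept = kept + 1
        write(*,'(A,I2,A,I2,A,I2)') ' pair ', id1, ',', id2, ' kept: ', kept
     end do
  end do

contains

  ! same parity rule as skipThisPair, minus the exclusion lists
  logical function skip_pair(u1, u2)
    integer, intent(in) :: u1, u2
    skip_pair = .false.
    if (u1 < u2) then
       if (mod(u1 + u2, 2) == 0) skip_pair = .true.
    else
       if (mod(u1 + u2, 2) == 1) skip_pair = .true.
    end if
  end function skip_pair

end program parity_demo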