root/group/trunk/mdtools/md_code/mpiSimulation.F90
Revision: 207
Committed: Fri Dec 13 16:51:23 2002 UTC by chuckv
File size: 12251 bytes
Log Message:
Changed name of mpi_force_module to mpiSimulation to match the C++ code.

!! MPI support for long range forces using force decomposition
!! on a square grid of processors.
!! Corresponds to mpiSimulation.cpp for C++.
!! This module also contains a private interface for MPI F90 routines.
!!
!! @author Charles F. Vardeman II
!! @author Matthew Meineke
!! @version $Id: mpiSimulation.F90,v 1.1 2002-12-13 16:51:23 chuckv Exp $, $Date: 2002-12-13 16:51:23 $, $Name: not supported by cvs2svn $, $Revision: 1.1 $


module mpiSimulation
#ifdef MPI
  use mpi
  implicit none
  PRIVATE

  !! PUBLIC subroutines contained in this module.
  !! gather and scatter are a generic interface
  !! to the gather and scatter routines.
  public :: gather, scatter
  public :: setupSimParallel
  !! PUBLIC subroutines contained in the MPI module.
  public :: mpi_bcast
  public :: mpi_allreduce
  public :: mpi_reduce
  public :: mpi_send
  public :: mpi_recv
  public :: mpi_get_processor_name
  public :: mpi_finalize

  !! PUBLIC mpi variables
  public :: mpi_comm_world
  public :: mpi_character
  public :: mpi_integer
  public :: mpi_double_precision
  public :: mpi_sum
  public :: mpi_max
  public :: mpi_status_size
  public :: mpi_any_source

  !! Safety logical to prevent access to the ComponentPlan until
  !! it is set by C++.
  logical :: ComponentPlanSet = .false.

  !! Generic MPI error variable.
  integer, public :: mpi_err

  !! Include the mpiComponentPlan type. mpiComponentPlan.h is a
  !! dual header file for both C and Fortran.
#define __FORTRAN90
#include "mpiComponentPlan.h"
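  !! (Assumption: the header above also supplies the DP real kind
  !! parameter used for the double precision declarations below.)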

  !! gs_plan contains plans for the gather and scatter routines.
  type, public :: gs_plan
     private
     type (mpiComponentPlan), pointer :: gsComponentPlan
     integer :: me, nprocs, n_datum, full_size !! n_datum = # of datums on the local proc
     integer, dimension(:), pointer :: displs
     integer, dimension(:), pointer :: counts
     integer :: planComm !! Communicator for this plan
  end type gs_plan

  ! Plans for the different decompositions.
  type (gs_plan), public :: plan_row
  type (gs_plan), public :: plan_row3d
  type (gs_plan), public :: plan_col
  type (gs_plan), public :: plan_col3d

  type (mpiComponentPlan), pointer :: simComponentPlan

  ! Interfaces for the gather/scatter routines.
  !! Generic interface for gather.
  !! Gathers a local array into a row or column array.
  !! Interfaces are provided for integer, double, and rank-2
  !! double arrays.
  interface gather
     module procedure gather_integer
     module procedure gather_double
     module procedure gather_double_2d
  end interface

  !! Generic interface for scatter.
  !! Scatters a row or column array, adding components
  !! and reducing them to a local nComponent array.
  !! Interfaces are provided for double and rank-2 double arrays.
  interface scatter
     module procedure scatter_double
     module procedure scatter_double_2d
  end interface
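
  !! A minimal usage sketch (illustrative only: q, f, qRow, and fRow are
  !! hypothetical local and row-sized arrays, not names from this module):
  !!
  !!   call gather(q, qRow, plan_row3d)    ! replicate local data along the row
  !!   ! ... accumulate row force contributions into fRow ...
  !!   call scatter(fRow, f, plan_row3d)   ! sum-reduce back onto local atoms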


contains

  !! Sets up the mpiComponentPlan with the structure passed from C++.
  subroutine setupSimParallel(nDim, thisComponentPlan)
    ! Passed arguments
    integer, intent(inout) :: nDim !! Number of dimensions
    !! mpiComponentPlan struct from C
    type (mpiComponentPlan), intent(inout) :: thisComponentPlan
    integer :: status

    if (componentPlanSet) return
    componentPlanSet = .true.

    ! The force grid must exist before the gather/scatter plans are
    ! built, since the plans duplicate the row and column communicators
    ! that make_Force_Grid creates.
    call make_Force_Grid(thisComponentPlan, status)
    call updateGridComponents(thisComponentPlan, status)

    call plan_gather_scatter(1,    thisComponentPlan, thisComponentPlan%rowComm,    plan_row)
    call plan_gather_scatter(nDim, thisComponentPlan, thisComponentPlan%rowComm,    plan_row3d)
    call plan_gather_scatter(1,    thisComponentPlan, thisComponentPlan%columnComm, plan_col)
    call plan_gather_scatter(nDim, thisComponentPlan, thisComponentPlan%columnComm, plan_col3d)

  end subroutine setupSimParallel

  !! Updates the number of row and column components for long range forces.
  subroutine updateGridComponents(thisComponentPlan, status)
    type (mpiComponentPlan) :: thisComponentPlan !! mpiComponentPlan

    !! Status return
    !! -  0 Success
    !! - -1 Failure
    integer, intent(out) :: status
    integer :: nComponentsLocal
    integer :: nComponentsRow = 0
    integer :: nComponentsColumn = 0
    integer :: mpiErrors

    status = 0
    if (.not. componentPlanSet) return

    if (thisComponentPlan%myNlocal == 0) then
       status = -1
       return
    endif

    nComponentsLocal = thisComponentPlan%myNlocal

    call mpi_allreduce(nComponentsLocal, nComponentsRow, 1, mpi_integer, &
         mpi_sum, thisComponentPlan%rowComm, mpiErrors)
    if (mpiErrors /= 0) then
       status = -1
       return
    endif

    call mpi_allreduce(nComponentsLocal, nComponentsColumn, 1, mpi_integer, &
         mpi_sum, thisComponentPlan%columnComm, mpiErrors)
    if (mpiErrors /= 0) then
       status = -1
       return
    endif

    thisComponentPlan%nComponentsRow = nComponentsRow
    thisComponentPlan%nComponentsColumn = nComponentsColumn

  end subroutine updateGridComponents

  !! Creates a square force decomposition of processors into row and
  !! column communicators.
  subroutine make_Force_Grid(thisComponentPlan, status)
    type (mpiComponentPlan) :: thisComponentPlan
    integer, intent(out) :: status !! status returns -1 on error
    integer :: nColumnsMax !! Maximum number of columns
    integer :: nWorldProcessors !! Total number of processors in world comm.
    integer :: rowIndex !! Row for this processor.
    integer :: columnIndex !! Column for this processor.
    integer :: nRows !! Total number of rows.
    integer :: nColumns !! Total number of columns.
    integer :: mpiErrors !! MPI errors.
    integer :: rowCommunicator !! MPI row communicator.
    integer :: columnCommunicator
    integer :: myWorldRank
    integer :: i

    if (.not. ComponentPlanSet) return
    status = 0

    !! We make a dangerous assumption here: if numberProcessors is
    !! zero, then we need to get the information from MPI.
    if (thisComponentPlan%numberProcessors == 0) then
       call mpi_comm_size(MPI_COMM_WORLD, nWorldProcessors, mpiErrors)
       if (mpiErrors /= 0) then
          status = -1
          return
       endif
       call mpi_comm_rank(MPI_COMM_WORLD, myWorldRank, mpiErrors)
       if (mpiErrors /= 0) then
          status = -1
          return
       endif
    else
       nWorldProcessors = thisComponentPlan%numberProcessors
       myWorldRank = thisComponentPlan%myRank
    endif

    ! Find the largest factor of nWorldProcessors that is at most
    ! sqrt(nWorldProcessors); this gives the most nearly square grid.
    nColumnsMax = nint(sqrt(real(nWorldProcessors, kind=dp)))

    do i = 1, nColumnsMax
       if (mod(nWorldProcessors, i) == 0) nColumns = i
    end do
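    ! Worked example: with 6 processors, nColumnsMax = nint(sqrt(6.0)) = 2;
    ! the loop leaves nColumns = 2, so nRows = 6/2 = 3 below, and world
    ! rank r lands at grid position (row, column) = (r/2, mod(r,2)).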

    nRows = nWorldProcessors/nColumns

    ! Processors with the same rowIndex share a row communicator; those
    ! with the same columnIndex share a column communicator.
    rowIndex = myWorldRank/nColumns
    call mpi_comm_split(mpi_comm_world, rowIndex, 0, rowCommunicator, mpiErrors)
    if (mpiErrors /= 0) then
       status = -1
       return
    endif

    columnIndex = mod(myWorldRank, nColumns)
    call mpi_comm_split(mpi_comm_world, columnIndex, 0, columnCommunicator, mpiErrors)
    if (mpiErrors /= 0) then
       status = -1
       return
    endif

    ! Set the appropriate components of thisComponentPlan.
    thisComponentPlan%rowComm = rowCommunicator
    thisComponentPlan%columnComm = columnCommunicator
    thisComponentPlan%rowIndex = rowIndex
    thisComponentPlan%columnIndex = columnIndex
    thisComponentPlan%numberRows = nRows
    thisComponentPlan%numberColumns = nColumns

  end subroutine make_Force_Grid


  !! Initializes a gather/scatter plan on the given communicator.
  subroutine plan_gather_scatter(nDim, thisComponentPlan, thisComm, this_plan)
    integer, intent(in) :: nDim !! Number of dimensions for the gather/scatter plan
    type (mpiComponentPlan), intent(in) :: thisComponentPlan
    type (gs_plan), intent(out) :: this_plan !! The plan to build
    integer, intent(in) :: thisComm !! Communicator (row or column) for this plan
    integer :: ierror
    integer :: comm
    integer :: me
    integer :: comm_procs
    integer :: i
    integer :: local_number
    integer :: number_of_particles

    number_of_particles = 0

    ! n_datum counts scalar elements, so a rank-2 (nDim, n) array is
    ! handled by the same plan with nDim elements per component.
    ! (Assumes myNlocal holds the per-process component count set by C++.)
    local_number = nDim * thisComponentPlan%myNlocal

    call mpi_comm_dup(thisComm, comm, mpi_err)
    call mpi_comm_rank(comm, me, mpi_err)
    call mpi_comm_size(comm, comm_procs, mpi_err)

    allocate(this_plan%counts(0:comm_procs-1), STAT=ierror)
    if (ierror /= 0) then
       write(*,*) "plan_gather_scatter: could not allocate counts"
       return
    end if

    allocate(this_plan%displs(0:comm_procs-1), STAT=ierror)
    if (ierror /= 0) then
       write(*,*) "plan_gather_scatter: could not allocate displs"
       return
    end if

    ! counts(i) is the number of elements owned by rank i on this communicator.
    call mpi_allgather(local_number, 1, mpi_integer, this_plan%counts, &
         1, mpi_integer, comm, mpi_err)

    !! Figure out the total number of elements in this plan.
    number_of_particles = sum(this_plan%counts)

    ! Initialize the plan: displs is the exclusive prefix sum of counts.
    this_plan%displs(0) = 0
    do i = 1, comm_procs - 1
       this_plan%displs(i) = this_plan%displs(i-1) + this_plan%counts(i-1)
    end do
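    ! Worked example: counts = (/ 3, 5, 2 /) yields displs = (/ 0, 3, 8 /),
    ! so rank 1's five elements occupy offsets 3..7 of the gathered array.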

    this_plan%me = me
    this_plan%nprocs = comm_procs
    this_plan%full_size = number_of_particles
    this_plan%planComm = comm
    this_plan%n_datum = local_number

  end subroutine plan_gather_scatter


  !! Frees the resources held by a gather/scatter plan.
  subroutine unplan_gather_scatter(this_plan)

    type (gs_plan), intent(inout) :: this_plan

    call mpi_comm_free(this_plan%planComm, mpi_err)

    deallocate(this_plan%counts)
    deallocate(this_plan%displs)

  end subroutine unplan_gather_scatter
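
  !! In the gather routines below, each rank contributes its n_datum local
  !! elements and mpi_allgatherv concatenates the contributions in rank
  !! order (via counts and displs), so every rank ends up holding the
  !! complete row or column array of full_size elements.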
  subroutine gather_integer(sbuffer, rbuffer, this_plan, status)

    type (gs_plan), intent(in) :: this_plan
    integer, dimension(:), intent(in) :: sbuffer
    integer, dimension(:), intent(inout) :: rbuffer
    integer :: noffset
    integer, intent(out), optional :: status

    if (present(status)) status = 0
    noffset = this_plan%displs(this_plan%me)

    call mpi_allgatherv(sbuffer, this_plan%n_datum, mpi_integer, &
         rbuffer, this_plan%counts, this_plan%displs, mpi_integer, &
         this_plan%planComm, mpi_err)

    if (mpi_err /= 0) then
       if (present(status)) status = -1
    endif

  end subroutine gather_integer

  subroutine gather_double(sbuffer, rbuffer, this_plan, status)

    type (gs_plan), intent(in) :: this_plan
    real( kind = DP ), dimension(:), intent(in) :: sbuffer
    real( kind = DP ), dimension(:), intent(inout) :: rbuffer
    integer :: noffset
    integer, intent(out), optional :: status

    if (present(status)) status = 0
    noffset = this_plan%displs(this_plan%me)

    call mpi_allgatherv(sbuffer, this_plan%n_datum, mpi_double_precision, &
         rbuffer, this_plan%counts, this_plan%displs, mpi_double_precision, &
         this_plan%planComm, mpi_err)

    if (mpi_err /= 0) then
       if (present(status)) status = -1
    endif

  end subroutine gather_double

  subroutine gather_double_2d(sbuffer, rbuffer, this_plan, status)

    type (gs_plan), intent(in) :: this_plan
    real( kind = DP ), dimension(:,:), intent(in) :: sbuffer
    real( kind = DP ), dimension(:,:), intent(inout) :: rbuffer
    integer, intent(out), optional :: status

    if (present(status)) status = 0

    call mpi_allgatherv(sbuffer, this_plan%n_datum, mpi_double_precision, &
         rbuffer, this_plan%counts, this_plan%displs, mpi_double_precision, &
         this_plan%planComm, mpi_err)

    if (mpi_err /= 0) then
       if (present(status)) status = -1
    endif

  end subroutine gather_double_2d
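
  !! In the scatter routines below, mpi_reduce_scatter first sums sbuffer
  !! element-wise across all ranks of the plan's communicator, then
  !! scatters the result so that rank i receives only the counts(i) summed
  !! elements corresponding to its own local components.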
  subroutine scatter_double(sbuffer, rbuffer, this_plan, status)

    type (gs_plan), intent(in) :: this_plan
    real( kind = DP ), dimension(:), intent(in) :: sbuffer
    real( kind = DP ), dimension(:), intent(inout) :: rbuffer
    integer, intent(out), optional :: status

    if (present(status)) status = 0

    call mpi_reduce_scatter(sbuffer, rbuffer, this_plan%counts, &
         mpi_double_precision, MPI_SUM, this_plan%planComm, mpi_err)

    if (mpi_err /= 0) then
       if (present(status)) status = -1
    endif

  end subroutine scatter_double

  subroutine scatter_double_2d(sbuffer, rbuffer, this_plan, status)

    type (gs_plan), intent(in) :: this_plan
    real( kind = DP ), dimension(:,:), intent(in) :: sbuffer
    real( kind = DP ), dimension(:,:), intent(inout) :: rbuffer
    integer, intent(out), optional :: status

    if (present(status)) status = 0

    call mpi_reduce_scatter(sbuffer, rbuffer, this_plan%counts, &
         mpi_double_precision, MPI_SUM, this_plan%planComm, mpi_err)

    if (mpi_err /= 0) then
       if (present(status)) status = -1
    endif

  end subroutine scatter_double_2d


#endif
end module mpiSimulation