root/group/trunk/mdtools/md_code/mpi_force_module.F90
Revision: 168
Committed: Fri Nov 8 15:01:43 2002 UTC by chuckv
File size: 11229 byte(s)
Log Message:
Initial commit of mpi routines for force decomposition.

File Contents

module mpi_module
#ifdef MPI
  use mpi
  implicit none
  PRIVATE

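  !! NOTE (assumption): this module references several entities that are not
  !! declared here -- e.g. nlocal, natoms, nprocs, ident, tag_local,
  !! ident_row/tag_row, ident_col/tag_col, the DP real kind, and the
  !! allocate_mpi_row_arrays/allocate_mpi_col_arrays routines. They are
  !! presumably supplied by other modules of the md code; with implicit none
  !! in effect, this file will not compile on its own without them.
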

  !! Public subroutines contained in this module
  public :: gather, scatter
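
  !! NOTE (assumption): no entities named gather or scatter are declared in
  !! this revision; the contained routines are gather_double, gather_double_dim,
  !! scatter_double and scatter_double_dim. A generic interface along the
  !! following lines would give the public names above a target:
  interface gather
     module procedure gather_double
     module procedure gather_double_dim
  end interface

  interface scatter
     module procedure scatter_double
     module procedure scatter_double_dim
  end interface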

  !! Public subroutines re-exported from the mpi module
  public :: mpi_bcast
  public :: mpi_allreduce
  public :: mpi_reduce
  public :: mpi_send
  public :: mpi_recv
  public :: mpi_get_processor_name
  public :: mpi_finalize

  !! Public MPI named constants
  public :: mpi_comm_world
  public :: mpi_character
  public :: mpi_integer
  public :: mpi_double_precision
  public :: mpi_sum
  public :: mpi_max
  public :: mpi_status_size
  ! public :: mpi_recv_char
  public :: mpi_any_source

  !! public variables
  integer, public :: mpi_err
  integer, public :: node
  integer, public :: numberProcessors
  integer, public :: row_comm, col_comm
  integer, public :: nstart, nend, nrow, ncol
  integer, public :: nmol_start, nmol_end
  integer, public :: max_local, max_row, max_col
  integer, public :: maxmol_local
  integer, public :: number_of_cols, number_of_rows

  integer, public, allocatable, dimension(:,:) :: node_atype_index

  type, public :: gs_plan
     ! private
     integer :: me, nprocs, n_datum, full_size   ! n_datum = # of data items on the local proc
     integer, dimension(:), pointer :: displs
     integer, dimension(:), pointer :: counts
     integer :: comm
  end type gs_plan

  type (gs_plan), public :: plan_row
  type (gs_plan), public :: plan_row3
  type (gs_plan), public :: plan_col
  type (gs_plan), public :: plan_col3

contains

  subroutine setup_parallel_mpi(status)

    integer, intent(out), optional :: status
    integer :: i, junk, junk1
    integer :: numcolmax, row_indx, col_indx
    character(len=80) :: msg

    if (present(status)) status = 0

    !! natoms is the sum of the local atom counts over all processors
    call mpi_allreduce(nlocal, natoms, 1, mpi_integer, &
         mpi_sum, mpi_comm_world, mpi_err)
    if (mpi_err /= 0) then
       if (present(status)) status = -1
    endif

    ! nlocal = nend - nstart + 1
    nend = nlocal + nstart - 1

    !! The force decomposition gives each processor roughly
    !! natoms/sqrt(nprocs) atoms, arranged on a (nearly) square
    !! processor grid of side sqrt(nprocs).
    numcolmax = nint(dsqrt(dble(nprocs)))
    do i = 1, numcolmax
       if (mod(nprocs,i) .eq. 0) number_of_cols = i
    end do

    number_of_rows = nprocs/number_of_cols
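
    !! Worked example (illustrative): for nprocs = 6, numcolmax = nint(sqrt(6.)) = 2,
    !! so number_of_cols = 2 (the largest divisor of 6 that is <= 2) and
    !! number_of_rows = 6/2 = 3; the 6 processes form a 3 x 2 grid.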

    !! split the world communicator into row and column communicators
    row_indx = node/number_of_cols
    call mpi_comm_split(mpi_comm_world, row_indx, 0, row_comm, mpi_err)

    col_indx = mod(node, number_of_cols)
    call mpi_comm_split(mpi_comm_world, col_indx, 0, col_comm, mpi_err)

    call mpi_comm_size(row_comm, junk, mpi_err)
    call mpi_comm_size(col_comm, junk1, mpi_err)

    !! figure out nrow, the number of atoms owned by my row
    !! figure out ncol, the number of atoms owned by my column
    call mpi_allreduce(nlocal, nrow, 1, mpi_integer, mpi_sum, row_comm, mpi_err)
    if (mpi_err /= 0) then
       if (present(status)) status = -1
    endif

    call mpi_allreduce(nlocal, ncol, 1, mpi_integer, mpi_sum, col_comm, mpi_err)
    if (mpi_err /= 0) then
       if (present(status)) status = -1
    endif

    ! Initialize gather/scatter plans a la Plimpton...
    call plan_gather_scatter(nlocal,   row_comm, plan_row)
    call plan_gather_scatter(3*nlocal, row_comm, plan_row3)
    call plan_gather_scatter(nlocal,   col_comm, plan_col)
    call plan_gather_scatter(3*nlocal, col_comm, plan_col3)
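
    !! NOTE (assumption): the *3 plans presumably handle per-atom 3-vectors
    !! (e.g. positions or forces with x, y, z components), while plan_row and
    !! plan_col handle one datum per atom.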

    ! compute current bounds; mpi_max returns the largest value across all processes
    call mpi_allreduce(nlocal, max_local, 1, mpi_integer, mpi_max, &
         mpi_comm_world, mpi_err)
    if (mpi_err /= 0) then
       if (present(status)) status = -1
    endif

    call mpi_allreduce(nrow, max_row, 1, mpi_integer, mpi_max, &
         mpi_comm_world, mpi_err)
    if (mpi_err /= 0) then
       if (present(status)) status = -1
    endif

    call mpi_allreduce(ncol, max_col, 1, mpi_integer, mpi_max, &
         mpi_comm_world, mpi_err)
    if (mpi_err /= 0) then
       if (present(status)) status = -1
    endif

    !! allocate mpi row and col arrays...
    ! call allocate_mpi_row_arrays(max_row)
    ! call allocate_mpi_col_arrays(max_col)
    call allocate_mpi_row_arrays(nrow)
    call allocate_mpi_col_arrays(ncol)

  end subroutine setup_parallel_mpi
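
  !! Worked example (illustrative): with 6 processes and number_of_cols = 2,
  !! ranks 0-5 get row_indx = node/2 = {0,0,1,1,2,2} and
  !! col_indx = mod(node,2) = {0,1,0,1,0,1}, so mpi_comm_split groups
  !! ranks {0,1}, {2,3}, {4,5} into row communicators and {0,2,4}, {1,3,5}
  !! into column communicators.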

  !! subroutine to distribute column and row atom type info...
  subroutine mpi_handle_atypes(status)
    integer :: row_proc_size, column_proc_size
    integer :: i, ntmp
    integer, allocatable, dimension(:) :: ident_row_displs, ident_row_counts
    integer, allocatable, dimension(:) :: ident_column_displs, ident_column_counts
    ! integer :: max_alloc
    integer, intent(out), optional :: status
    ! max_alloc = max(max_row,max_col)

    !! setup the tag_local array
    ntmp = 0
    if (present(status)) status = 0
    do i = 1, natoms
       if (i >= nstart .AND. i <= nend) then
          ntmp = i - nstart + 1
          tag_local(ntmp) = i
       end if
    end do
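
    !! tag_local(1:nlocal) now holds the global indices of the locally owned
    !! atoms (nstart through nend), i.e. nend - nstart + 1 = nlocal entries.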

    !! do row idents and tags
    call mpi_comm_size(row_comm, row_proc_size, mpi_err)

    call mpi_barrier(mpi_comm_world, mpi_err)

    allocate(ident_row_displs(row_proc_size))
    allocate(ident_row_counts(row_proc_size))

    call mpi_allgather(nlocal, 1, mpi_integer, ident_row_counts, 1, mpi_integer, &
         row_comm, mpi_err)
    if (mpi_err /= 0) then
       if (present(status)) status = -1
    endif

    ident_row_displs(1) = 0
    do i = 2, row_proc_size
       ident_row_displs(i) = ident_row_displs(i-1) + ident_row_counts(i-1)
    enddo
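
    !! Worked example (illustrative): if the processes in this row own
    !! counts = (/ 4, 2, 3 /) atoms, the running sum gives
    !! displs = (/ 0, 4, 6 /), i.e. the offset at which each process's
    !! block starts in the gathered array.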

    call mpi_allgatherv(ident, nlocal, mpi_integer, &
         ident_row, ident_row_counts, ident_row_displs, mpi_integer, row_comm, mpi_err)
    if (mpi_err /= 0) then
       if (present(status)) status = -1
    endif

    call mpi_allgatherv(tag_local, nlocal, mpi_integer, &
         tag_row, ident_row_counts, ident_row_displs, mpi_integer, row_comm, mpi_err)
    if (mpi_err /= 0) then
       if (present(status)) status = -1
    endif

    deallocate(ident_row_displs)
    deallocate(ident_row_counts)

    !! do col idents and tags
    call mpi_comm_size(col_comm, column_proc_size, mpi_err)

    allocate(ident_column_displs(column_proc_size))
    allocate(ident_column_counts(column_proc_size))

    call mpi_allgather(nlocal, 1, mpi_integer, ident_column_counts, 1, mpi_integer, &
         col_comm, mpi_err)
    if (mpi_err /= 0) then
       if (present(status)) status = -1
    endif

    ident_column_displs(1) = 0
    do i = 2, column_proc_size
       ident_column_displs(i) = ident_column_displs(i-1) + ident_column_counts(i-1)
    enddo

    call mpi_allgatherv(ident, nlocal, mpi_integer, &
         ident_col, ident_column_counts, ident_column_displs, mpi_integer, col_comm, mpi_err)
    if (mpi_err /= 0) then
       if (present(status)) status = -1
    endif

    call mpi_allgatherv(tag_local, nlocal, mpi_integer, &
         tag_col, ident_column_counts, ident_column_displs, mpi_integer, col_comm, mpi_err)
    if (mpi_err /= 0) then
       if (present(status)) status = -1
    endif

    deallocate(ident_column_displs)
    deallocate(ident_column_counts)

  end subroutine mpi_handle_atypes

  !! initializes a gather/scatter plan
  subroutine plan_gather_scatter(local_number, orig_comm, this_plan)

    type (gs_plan), intent(out) :: this_plan
    integer, intent(in) :: local_number
    integer, intent(in) :: orig_comm
    integer :: sizeof_int
    integer :: ierror
    integer :: comm
    integer :: me
    integer :: comm_procs
    integer :: i, junk
    integer :: number_of_particles

    number_of_particles = 0
    call mpi_comm_dup(orig_comm, comm, mpi_err)
    call mpi_comm_rank(comm, me, mpi_err)
    call mpi_comm_size(comm, comm_procs, mpi_err)

    ! note: selected_int_kind returns a kind parameter, not a byte size;
    ! sizeof_int is not used below
    sizeof_int = selected_int_kind(4)

    allocate(this_plan%counts(0:comm_procs-1), STAT=ierror)
    if (ierror /= 0) then
       ! allocation failure is currently ignored
    end if

    allocate(this_plan%displs(0:comm_procs-1), STAT=ierror)
    if (ierror /= 0) then
       ! allocation failure is currently ignored
    end if

    !! every process contributes its local count
    call mpi_allgather(local_number, 1, mpi_integer, this_plan%counts, &
         1, mpi_integer, comm, mpi_err)

    !! figure out the total number of particles in this plan
    number_of_particles = sum(this_plan%counts)

    ! initialize plan: displacements are a running sum of the counts
    this_plan%displs(0) = 0
    do i = 1, comm_procs - 1, 1
       this_plan%displs(i) = this_plan%displs(i-1) + this_plan%counts(i-1)
    end do

    this_plan%me = me
    this_plan%nprocs = comm_procs
    this_plan%full_size = number_of_particles
    this_plan%comm = comm
    this_plan%n_datum = local_number

  end subroutine plan_gather_scatter
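
  !! Usage sketch (illustrative, not a routine of this revision): a plan built
  !! on a row communicator collects a per-atom array from every process in the
  !! row, e.g.
  !!
  !!   call plan_gather_scatter(nlocal, row_comm, plan_row)
  !!   call gather_double(q_local, q_row, plan_row)
  !!
  !! where q_local(1:nlocal) holds this process's data and
  !! q_row(1:plan_row%full_size) receives the concatenated data from the whole
  !! row; q_local and q_row are hypothetical names used only for illustration.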

  !! releases the resources held by a gather/scatter plan
  subroutine unplan_gather_scatter(this_plan)

    type (gs_plan), intent(inout) :: this_plan

    call mpi_comm_free(this_plan%comm, mpi_err)

    deallocate(this_plan%counts)
    deallocate(this_plan%displs)

  end subroutine unplan_gather_scatter

  subroutine gather_double(sbuffer, rbuffer, this_plan, status)

    type (gs_plan), intent(in) :: this_plan
    real( kind = DP ), dimension(:), intent(in) :: sbuffer
    real( kind = DP ), dimension(:), intent(inout) :: rbuffer   ! receive buffer is written to
    integer :: noffset
    integer, intent(out), optional :: status

    if (present(status)) status = 0
    noffset = this_plan%displs(this_plan%me)

    call mpi_allgatherv(sbuffer, this_plan%n_datum, mpi_double_precision, &
         rbuffer, this_plan%counts, this_plan%displs, mpi_double_precision, &
         this_plan%comm, mpi_err)

    if (mpi_err /= 0) then
       if (present(status)) status = -1
    endif

  end subroutine gather_double

  subroutine gather_double_dim(sbuffer, rbuffer, this_plan, status)

    type (gs_plan), intent(in) :: this_plan
    real( kind = DP ), dimension(:,:), intent(in) :: sbuffer
    real( kind = DP ), dimension(:,:), intent(inout) :: rbuffer   ! receive buffer is written to
    integer :: noffset, i, ierror
    integer, intent(out), optional :: status

    external mpi_allgatherv

    if (present(status)) status = 0

    ! noffset = this_plan%displs(this_plan%me)

    call mpi_allgatherv(sbuffer, this_plan%n_datum, mpi_double_precision, &
         rbuffer, this_plan%counts, this_plan%displs, mpi_double_precision, &
         this_plan%comm, mpi_err)

    if (mpi_err /= 0) then
       if (present(status)) status = -1
    endif

  end subroutine gather_double_dim

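  !! The scatter routines below are the inverse of the gathers: mpi_reduce_scatter
  !! sums the contributions from every process in the plan's communicator and
  !! then hands each process the block of this_plan%counts(me) values it owns.
  !! Presumably this is how per-atom force contributions computed on different
  !! processes are combined back onto the owning process in the force
  !! decomposition.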
  subroutine scatter_double(sbuffer, rbuffer, this_plan, status)

    type (gs_plan), intent(in) :: this_plan
    real( kind = DP ), dimension(:), intent(in) :: sbuffer
    real( kind = DP ), dimension(:), intent(inout) :: rbuffer   ! receive buffer is written to
    integer, intent(out), optional :: status
    external mpi_reduce_scatter

    if (present(status)) status = 0

    call mpi_reduce_scatter(sbuffer, rbuffer, this_plan%counts, &
         mpi_double_precision, MPI_SUM, this_plan%comm, mpi_err)

    if (mpi_err /= 0) then
       if (present(status)) status = -1
    endif

  end subroutine scatter_double

  subroutine scatter_double_dim(sbuffer, rbuffer, this_plan, status)

    type (gs_plan), intent(in) :: this_plan
    real( kind = DP ), dimension(:,:), intent(in) :: sbuffer
    real( kind = DP ), dimension(:,:), intent(inout) :: rbuffer   ! receive buffer is written to
    integer, intent(out), optional :: status
    external mpi_reduce_scatter

    if (present(status)) status = 0
    call mpi_reduce_scatter(sbuffer, rbuffer, this_plan%counts, &
         mpi_double_precision, MPI_SUM, this_plan%comm, mpi_err)

    if (mpi_err /= 0) then
       if (present(status)) status = -1
    endif

  end subroutine scatter_double_dim

#endif
end module mpi_module