root/group/trunk/mdtools/md_code/mpi_force_module.F90
Revision: 169
Committed: Fri Nov 8 20:05:44 2002 UTC by chuckv
File size: 12515 byte(s)
Log Message:
MPI Force rewrite in progress...

File Contents

module mpi_module
#ifdef MPI
  use mpi
  implicit none
  PRIVATE

  !!PUBLIC Subroutines contained in this module
  public :: gather, scatter

  !!PUBLIC Subroutines contained in MPI module
  public :: mpi_bcast
  public :: mpi_allreduce
  public :: mpi_reduce
  public :: mpi_send
  public :: mpi_recv
  public :: mpi_get_processor_name
  public :: mpi_finalize

  !!PUBLIC mpi variables
  public :: mpi_comm_world
  public :: mpi_character
  public :: mpi_integer
  public :: mpi_double_precision
  public :: mpi_sum
  public :: mpi_max
  public :: mpi_status_size
  public :: mpi_any_source

  !! public variables

  !! Double precision kind used for the gather/scatter buffers below. It is
  !! defined here so the module is self-contained; in the full code base this
  !! kind is presumably supplied by a shared definitions module.
  integer, parameter, public :: dp = selected_real_kind(15)

  integer :: nColumns = 0          ! number of columns in processor grid
  integer :: nRows = 0             ! number of rows in processor grid
  integer :: nWorldProcessors = 0  ! number of world processors
  integer :: myWorldRank = 0       ! world communicator processor rank
  integer :: myRowRank = 0         ! column communicator processor rank
  integer :: myColRank = 0         ! row communicator processor rank

  integer :: rowCommunicator     ! MPI row communicator
  integer :: columnCommunicator  ! MPI column communicator

  integer :: componentLocalStart ! local component start index
  integer :: componentLocalEnd   ! local component end index

  ! Number of components in long range forces
  integer, public :: nComponentsGlobal
  integer, public :: nComponentsLocal

  integer, public :: nComponentsRow ! number of row components
  integer, public :: nComponentsCol ! number of column components

  integer, public :: mpi_err
  integer, public :: node
  integer, public :: numberProcessors
  integer, public :: row_comm, col_comm
  integer, public :: nstart, nend, nrow, ncol
  integer, public :: nmol_start, nmol_end
  integer, public :: max_local, max_row, max_col
  integer, public :: maxmol_local
  integer, public :: number_of_cols, number_of_rows

  integer, public, allocatable, dimension(:,:) :: node_atype_index
  type, public :: gs_plan
!    private
    integer :: me, nprocs, n_datum, full_size  ! n_datum = # of data items on the local proc
    integer, dimension(:), pointer :: displs
    integer, dimension(:), pointer :: counts
    integer :: comm
  end type gs_plan
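
  ! A gs_plan caches the bookkeeping the MPI gather/scatter calls need on one
  ! communicator: counts(i) is the number of items owned by rank i, displs(i)
  ! is the offset of rank i's block in the assembled buffer, n_datum is this
  ! rank's own count, and full_size is the communicator-wide total (the length
  ! a gathered buffer must have).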

  ! plans for different decompositions
  type (gs_plan), public :: plan_row
  type (gs_plan), public :: plan_row3
  type (gs_plan), public :: plan_col
  type (gs_plan), public :: plan_col3

  ! interface for gather scatter routines
  interface gather
    module procedure gather_double
    module procedure gather_double_dim
  end interface

  interface scatter
    module procedure scatter_double
    module procedure scatter_double_dim
  end interface
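
  ! Intended use (sketch; nLocalComponents, tag_local, ident_local, q_local,
  ! q_row, f_row and f_local are hypothetical caller-side variables, with the
  ! gathered buffers sized to plan_row%full_size):
  !
  !   call setup_parallel_lr_force(nLocalComponents, tag_local, ident_local)
  !   call gather(q_local, q_row, plan_row)    ! replicate local data across the row
  !   ! ... accumulate partial forces into f_row ...
  !   call scatter(f_row, f_local, plan_row)   ! sum row contributions back to the owners
  !
  ! The generic names dispatch on array rank: 1-D buffers resolve to the
  ! _double routines, 2-D buffers (e.g. 3 x N coordinates) to _double_dim.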

contains

  subroutine setup_parallel_lr_force(nComponents, tag_local, ident_local)
    ! Passed Arguments
    integer :: nComponents                          ! number of local long range components
    integer, dimension(nComponents) :: tag_local    ! component numbers
    integer, dimension(nComponents) :: ident_local  ! identities

    integer :: status

    nComponentsLocal = nComponents

    call make_Force_Grid(status)

    call get_Grid_Components(nComponentsRow, nComponentsCol, status)

    call plan_gather_scatter(nComponentsLocal,   rowCommunicator,    plan_row)
    call plan_gather_scatter(3*nComponentsLocal, rowCommunicator,    plan_row3)
    call plan_gather_scatter(nComponentsLocal,   columnCommunicator, plan_col)
    call plan_gather_scatter(3*nComponentsLocal, columnCommunicator, plan_col3)
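
    ! Four plans are built above: scalar (one value per component) and
    ! 3-vector variants, once for the row communicator and once for the
    ! column communicator.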

  end subroutine setup_parallel_lr_force

  subroutine get_Grid_Components(rowComponentNumber, columnComponentNumber, status)
    integer, intent(out) :: rowComponentNumber
    integer, intent(out) :: columnComponentNumber
    integer, intent(out) :: status

    integer :: mpiErrors

    status = 0
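    ! Sum the per-processor component counts over the row communicator and
    ! then over the column communicator; each processor ends up with the total
    ! number of components in its row and in its column of the force grid.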
    call mpi_allreduce(nComponentsLocal, rowComponentNumber, 1, mpi_integer, &
         mpi_sum, rowCommunicator, mpiErrors)
    if (mpiErrors /= 0) then
       status = -1
       return
    endif

    call mpi_allreduce(nComponentsLocal, columnComponentNumber, 1, mpi_integer, &
         mpi_sum, columnCommunicator, mpiErrors)
    if (mpiErrors /= 0) then
       status = -1
       return
    endif

  end subroutine get_Grid_Components

  ! Creates a square force decomposition of processors
  subroutine make_Force_Grid(status)
    integer, intent(out) :: status  ! status returns -1 if error
    integer :: nWorldProcs
    integer :: nColumnsMax

    integer :: rowIndex
    integer :: columnIndex

    integer :: mpiErrors
    integer :: i

    status = 0

    if (nWorldProcessors == 0) then
       call mpi_comm_size(MPI_COMM_WORLD, nWorldProcessors, mpiErrors)
       if (mpiErrors /= 0) then
          status = -1
          return
       endif
    endif

    if (myWorldRank == 0) then
       call mpi_comm_rank(MPI_COMM_WORLD, myWorldRank, mpiErrors)
       if (mpiErrors /= 0) then
          status = -1
          return
       endif
    endif

    nColumnsMax = nint(sqrt(real(nWorldProcessors, kind=dp)))

    do i = 1, nColumnsMax
       if (mod(nWorldProcessors, i) == 0) nColumns = i
    end do

    nRows = nWorldProcessors/nColumns
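
    ! The loop above leaves nColumns equal to the largest divisor of
    ! nWorldProcessors that does not exceed sqrt(nWorldProcessors), so the grid
    ! is as close to square as the processor count allows. For example, with
    ! 12 world processors: nColumnsMax = 3, nColumns = 3, nRows = 4, and rank 7
    ! falls in rowIndex 2, columnIndex 1 below.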

    rowIndex = myWorldRank/nColumns
    call mpi_comm_split(mpi_comm_world, rowIndex, 0, rowCommunicator, mpiErrors)
    if (mpiErrors /= 0) then
       status = -1
       return
    endif

    columnIndex = mod(myWorldRank, nColumns)
    call mpi_comm_split(mpi_comm_world, columnIndex, 0, columnCommunicator, mpiErrors)
    if (mpiErrors /= 0) then
       status = -1
       return
    endif

  end subroutine make_Force_Grid
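
  ! Note on make_Force_Grid: mpi_comm_split groups processors by its color
  ! argument, so processors sharing a rowIndex end up in the same row
  ! communicator and processors sharing a columnIndex end up in the same
  ! column communicator; every processor belongs to exactly one of each.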


  !! subroutine to distribute column and row atom type info...
  subroutine mpi_handle_atypes(status)
    integer :: row_proc_size, column_proc_size
    integer :: i, ntmp
    integer, allocatable, dimension(:) :: ident_row_displs, ident_row_counts
    integer, allocatable, dimension(:) :: ident_column_displs, ident_column_counts
!   integer :: max_alloc
    integer, intent(out), optional :: status
!   max_alloc = max(max_row,max_col)

    !! setup tag_local arrays
    ntmp = 0
    if (present(status)) status = 0
    do i = 1, natoms
       if (i >= nstart .AND. i <= nend) then
          ntmp = i - nstart + 1
          tag_local(ntmp) = i
       end if
    end do

    !! do row idents and tags
    call mpi_comm_size(row_comm, row_proc_size, mpi_err)

    call mpi_barrier(mpi_comm_world, mpi_err)

    allocate(ident_row_displs(row_proc_size))
    allocate(ident_row_counts(row_proc_size))

    call mpi_allgather(nlocal, 1, mpi_integer, ident_row_counts, 1, mpi_integer, &
         row_comm, mpi_err)
    if (mpi_err /= 0) then
       if (present(status)) status = -1
    endif

    ident_row_displs(1) = 0
    do i = 2, row_proc_size
       ident_row_displs(i) = ident_row_displs(i-1) + ident_row_counts(i-1)
    enddo

    call mpi_allgatherv(ident, nlocal, mpi_integer, &
         ident_row, ident_row_counts, ident_row_displs, mpi_integer, row_comm, mpi_err)
    if (mpi_err /= 0) then
       if (present(status)) status = -1
    endif

    call mpi_allgatherv(tag_local, nlocal, mpi_integer, &
         tag_row, ident_row_counts, ident_row_displs, mpi_integer, row_comm, mpi_err)
    if (mpi_err /= 0) then
       if (present(status)) status = -1
    endif

    deallocate(ident_row_displs)
    deallocate(ident_row_counts)

    !! do col idents
    call mpi_comm_size(col_comm, column_proc_size, mpi_err)

    allocate(ident_column_displs(column_proc_size))
    allocate(ident_column_counts(column_proc_size))

    call mpi_allgather(nlocal, 1, mpi_integer, ident_column_counts, 1, mpi_integer, &
         col_comm, mpi_err)
    if (mpi_err /= 0) then
       if (present(status)) status = -1
    endif

    ident_column_displs(1) = 0
    do i = 2, column_proc_size
       ident_column_displs(i) = ident_column_displs(i-1) + ident_column_counts(i-1)
    enddo

    call mpi_allgatherv(ident, nlocal, mpi_integer, &
         ident_col, ident_column_counts, ident_column_displs, mpi_integer, col_comm, mpi_err)
    if (mpi_err /= 0) then
       if (present(status)) status = -1
    endif

    call mpi_allgatherv(tag_local, nlocal, mpi_integer, &
         tag_col, ident_column_counts, ident_column_displs, mpi_integer, col_comm, mpi_err)
    if (mpi_err /= 0) then
       if (present(status)) status = -1
    endif

    deallocate(ident_column_displs)
    deallocate(ident_column_counts)

  end subroutine mpi_handle_atypes

  !! initializes a gather scatter plan
  subroutine plan_gather_scatter(local_number, orig_comm, this_plan)

    type (gs_plan), intent(out) :: this_plan
    integer, intent(in) :: local_number
    integer, intent(in) :: orig_comm
    integer :: sizeof_int
    integer :: ierror
    integer :: comm
    integer :: me
    integer :: comm_procs
    integer :: i, junk
    integer :: number_of_particles

    number_of_particles = 0
    call mpi_comm_dup(orig_comm, comm, mpi_err)
    call mpi_comm_rank(comm, me, mpi_err)
    call mpi_comm_size(comm, comm_procs, mpi_err)

    sizeof_int = selected_int_kind(4)

    allocate (this_plan%counts(0:comm_procs-1), STAT=ierror)
    if (ierror /= 0) then
    end if

    allocate (this_plan%displs(0:comm_procs-1), STAT=ierror)
    if (ierror /= 0) then
    end if

    call mpi_allgather(local_number, 1, mpi_integer, this_plan%counts, &
         1, mpi_integer, comm, mpi_err)

    !! figure out the total number of particles in this plan
    number_of_particles = sum(this_plan%counts)

    ! initialize plan
    this_plan%displs(0) = 0
    do i = 1, comm_procs - 1, 1
       this_plan%displs(i) = this_plan%displs(i-1) + this_plan%counts(i-1)
    end do
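
    ! displs is the exclusive prefix sum of counts: for counts = (/ 3, 5, 2 /)
    ! this gives displs = (/ 0, 3, 8 /) and number_of_particles = 10, which is
    ! exactly the layout mpi_allgatherv expects in the gather routines below.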

    this_plan%me = me
    this_plan%nprocs = comm_procs
    this_plan%full_size = number_of_particles
    this_plan%comm = comm
    this_plan%n_datum = local_number

  end subroutine plan_gather_scatter

  subroutine unplan_gather_scatter(this_plan)

    type (gs_plan), intent(inout) :: this_plan

    call mpi_comm_free(this_plan%comm, mpi_err)

    deallocate(this_plan%counts)
    deallocate(this_plan%displs)

  end subroutine unplan_gather_scatter

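  ! gather_double / gather_double_dim wrap mpi_allgatherv: each rank's n_datum
  ! entries land at offset displs(rank) in rbuffer, so after the call every
  ! processor in the plan's communicator holds the same full_size-long buffer.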
  subroutine gather_double(sbuffer, rbuffer, this_plan, status)

    type (gs_plan), intent(in) :: this_plan
    real( kind = DP ), dimension(:), intent(in) :: sbuffer
    real( kind = DP ), dimension(:), intent(inout) :: rbuffer
    integer :: noffset
    integer, intent(out), optional :: status

    if (present(status)) status = 0
    noffset = this_plan%displs(this_plan%me)

    call mpi_allgatherv(sbuffer, this_plan%n_datum, mpi_double_precision, &
         rbuffer, this_plan%counts, this_plan%displs, mpi_double_precision, &
         this_plan%comm, mpi_err)

    if (mpi_err /= 0) then
       if (present(status)) status = -1
    endif

  end subroutine gather_double

  subroutine gather_double_dim(sbuffer, rbuffer, this_plan, status)

    type (gs_plan), intent(in) :: this_plan
    real( kind = DP ), dimension(:,:), intent(in) :: sbuffer
    real( kind = DP ), dimension(:,:), intent(inout) :: rbuffer
    integer :: noffset, i, ierror
    integer, intent(out), optional :: status

    if (present(status)) status = 0

!   noffset = this_plan%displs(this_plan%me)

    call mpi_allgatherv(sbuffer, this_plan%n_datum, mpi_double_precision, &
         rbuffer, this_plan%counts, this_plan%displs, mpi_double_precision, &
         this_plan%comm, mpi_err)

    if (mpi_err /= 0) then
       if (present(status)) status = -1
    endif

  end subroutine gather_double_dim

  subroutine scatter_double(sbuffer, rbuffer, this_plan, status)

    type (gs_plan), intent(in) :: this_plan
    real( kind = DP ), dimension(:), intent(in) :: sbuffer
    real( kind = DP ), dimension(:), intent(inout) :: rbuffer
    integer, intent(out), optional :: status

    if (present(status)) status = 0

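    ! mpi_reduce_scatter sums sbuffer element-wise across the communicator and
    ! then hands each rank the counts(rank) elements of its own block, so every
    ! processor receives only the (summed) contributions it owns.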
    call mpi_reduce_scatter(sbuffer, rbuffer, this_plan%counts, &
         mpi_double_precision, MPI_SUM, this_plan%comm, mpi_err)

    if (mpi_err /= 0) then
       if (present(status)) status = -1
    endif

  end subroutine scatter_double

  subroutine scatter_double_dim(sbuffer, rbuffer, this_plan, status)

    type (gs_plan), intent(in) :: this_plan
    real( kind = DP ), dimension(:,:), intent(in) :: sbuffer
    real( kind = DP ), dimension(:,:), intent(inout) :: rbuffer
    integer, intent(out), optional :: status

    if (present(status)) status = 0
    call mpi_reduce_scatter(sbuffer, rbuffer, this_plan%counts, &
         mpi_double_precision, MPI_SUM, this_plan%comm, mpi_err)

    if (mpi_err /= 0) then
       if (present(status)) status = -1
    endif

  end subroutine scatter_double_dim

#endif
end module mpi_module