root/group/trunk/mdtools/md_code/mpi_force_module.F90
Revision: 174
Committed: Mon Nov 11 22:35:41 2002 UTC (21 years, 8 months ago) by chuckv
File size: 13269 byte(s)
Log Message:
*** empty log message ***

File Contents

module mpi_module
#ifdef MPI
  use mpi
  implicit none
  PRIVATE

  !! Double precision kind parameter. This revision uses DP below without
  !! defining it (it presumably came from a shared definitions module in
  !! the original build), so it is defined here to keep the module
  !! self-contained.
  integer, parameter, public :: dp = selected_real_kind(15)

  !! PUBLIC subroutines contained in this module
  public :: gather, scatter

  !! PUBLIC subroutines contained in the MPI module
  public :: mpi_bcast
  public :: mpi_allreduce
  public :: mpi_reduce
  public :: mpi_send
  public :: mpi_recv
  public :: mpi_get_processor_name
  public :: mpi_finalize

  !! PUBLIC MPI variables
  public :: mpi_comm_world
  public :: mpi_character
  public :: mpi_integer
  public :: mpi_double_precision
  public :: mpi_sum
  public :: mpi_max
  public :: mpi_status_size
  public :: mpi_any_source

  !! public variables

  integer :: nColumns = 0         ! number of columns in the processor grid
  integer :: nRows = 0            ! number of rows in the processor grid
  integer :: nWorldProcessors = 0 ! number of world processors
  integer :: myWorldRank = 0      ! processor rank in the world communicator
  integer :: myRowRank = 0        ! processor rank in the row communicator
  integer :: myColRank = 0        ! processor rank in the column communicator

  integer :: rowCommunicator      ! MPI row communicator
  integer :: columnCommunicator   ! MPI column communicator

  integer :: componentLocalStart  ! local component start index
  integer :: componentLocalEnd    ! local component end index

  ! Number of components in long range forces
  integer, public :: nComponentsGlobal
  integer, public :: nComponentsLocal

  integer, public :: nComponentsRow ! number of row components
  integer, public :: nComponentsCol ! number of column components

  integer,public :: mpi_err
  integer,public :: node
  integer,public :: numberProcessors
  integer,public :: row_comm,col_comm
  integer,public :: nstart,nend,nrow,ncol
  integer,public :: nmol_start,nmol_end
  integer,public :: max_local,max_row,max_col
  integer,public :: maxmol_local
  integer,public :: number_of_cols,number_of_rows

  !! The following are referenced below but were not declared in this
  !! revision; they are assumed to be module-level data set and allocated
  !! elsewhere in the build.
  integer,public :: nlocal, natoms               ! local / global atom counts
  integer,public, allocatable, dimension(:) :: tag_local, ident
  integer,public, allocatable, dimension(:) :: tag_row, tag_col
  integer,public, allocatable, dimension(:) :: ident_row, ident_col

  integer,public, allocatable, dimension(:,:) :: node_atype_index
  type, public :: gs_plan
     ! private
     integer :: me, nprocs, n_datum, full_size ! n_datum = # of data items on the local process
     integer, dimension(:), pointer :: displs
     integer, dimension(:), pointer :: counts
     integer :: comm
  end type gs_plan
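
  !! Worked example (illustrative): on a 3-process communicator where the
  !! local counts are 4, 2, and 3 data items, plan_gather_scatter below
  !! produces counts = (/4,2,3/) and displs = (/0,4,6/), so process p
  !! writes its slice at offset displs(p) of the gathered buffer and
  !! full_size = sum(counts) = 9.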

  ! plans for different decompositions
  type (gs_plan), public :: plan_row
  type (gs_plan), public :: plan_row3
  type (gs_plan), public :: plan_col
  type (gs_plan), public :: plan_col3

  ! generic interfaces for the gather/scatter routines

  interface gather
     module procedure gather_integer
     module procedure gather_double
     module procedure gather_double_dim
  end interface

  interface scatter
     module procedure scatter_double
     module procedure scatter_double_dim
  end interface
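
  !! Example of generic resolution (q_local and q_row are hypothetical
  !! arrays, not part of this module): for real(kind=dp), dimension(:)
  !! arguments,
  !!   call gather(q_local, q_row, plan_row, status)
  !! resolves to gather_double; integer arguments select gather_integer,
  !! and rank-2 real arguments select gather_double_dim.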

contains

  subroutine setup_parallel_lr_force(nComponents,tag_local,ident_local)
    ! Passed Arguments
    integer, intent(in) :: nComponents                       ! number of local long range components
    integer, dimension(nComponents), intent(in) :: tag_local ! component numbers
    integer, dimension(nComponents), intent(in) :: ident_local ! identities

    integer :: status

    nComponentsLocal = nComponents

    call make_Force_Grid(status)

    call get_Grid_Components(nComponentsRow,nComponentsCol,status)

    ! build the plans on the communicators created by make_Force_Grid
    call plan_gather_scatter(nComponentsLocal,rowCommunicator,plan_row)
    call plan_gather_scatter(3*nComponentsLocal,rowCommunicator,plan_row3)
    call plan_gather_scatter(nComponentsLocal,columnCommunicator,plan_col)
    call plan_gather_scatter(3*nComponentsLocal,columnCommunicator,plan_col3)

  end subroutine setup_parallel_lr_force
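
  !! Call-sequence sketch (illustrative; nLongRange, tags, and idents are
  !! hypothetical caller data, not part of this module):
  !!   call setup_parallel_lr_force(nLongRange, tags, idents)
  !! builds the processor grid, sums the component counts across each row
  !! and column, and creates the four gather/scatter plans before the
  !! first long range force evaluation.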

  subroutine get_Grid_Components(rowComponentNumber,columnComponentNumber,status)
    integer, intent(out) :: rowComponentNumber
    integer, intent(out) :: columnComponentNumber
    integer, intent(out) :: status

    integer :: mpiErrors
    status = 0

    call mpi_allreduce(nComponentsLocal,rowComponentNumber,1,mpi_integer,&
         mpi_sum,rowCommunicator,mpiErrors)
    if (mpiErrors /= 0) then
       status = -1
       return
    endif

    call mpi_allreduce(nComponentsLocal,columnComponentNumber,1,mpi_integer, &
         mpi_sum,columnCommunicator,mpiErrors)
    if (mpiErrors /= 0) then
       status = -1
       return
    endif

  end subroutine get_Grid_Components

  ! Creates a (nearly) square force decomposition of processors
  subroutine make_Force_Grid(status)
    integer, intent(out) :: status ! status returns -1 on error
    integer :: nColumnsMax         ! maximum number of columns

    integer :: rowIndex
    integer :: columnIndex

    integer :: mpiErrors
    integer :: i

    status = 0

    if (nWorldProcessors == 0) then
       call mpi_comm_size(MPI_COMM_WORLD,nWorldProcessors,mpiErrors)
       if (mpiErrors /= 0) then
          status = -1
          return
       endif
    endif

    if (myWorldRank == 0) then
       call mpi_comm_rank(MPI_COMM_WORLD,myWorldRank,mpiErrors)
       if (mpiErrors /= 0) then
          status = -1
          return
       endif
    endif

    ! pick the largest divisor of nWorldProcessors not exceeding its
    ! square root, so the grid is as close to square as possible
    nColumnsMax = nint(sqrt(real(nWorldProcessors,kind=dp)))

    do i = 1, nColumnsMax
       if (mod(nWorldProcessors,i) == 0) nColumns = i
    end do

    nRows = nWorldProcessors/nColumns

    rowIndex = myWorldRank/nColumns
    call mpi_comm_split(mpi_comm_world,rowIndex,0,rowCommunicator,mpiErrors)
    if (mpiErrors /= 0) then
       status = -1
       return
    endif

    columnIndex = mod(myWorldRank,nColumns)
    call mpi_comm_split(mpi_comm_world,columnIndex,0,columnCommunicator,mpiErrors)
    if (mpiErrors /= 0) then
       status = -1
       return
    endif
  end subroutine make_Force_Grid
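
  !! Worked example: with nWorldProcessors = 12, nColumnsMax =
  !! nint(sqrt(12.0)) = 3 and the divisor loop leaves nColumns = 3, so
  !! nRows = 4. Rank 7 then has rowIndex = 7/3 = 2 (sharing its row
  !! communicator with ranks 6 and 8) and columnIndex = mod(7,3) = 1
  !! (sharing its column communicator with ranks 1, 4, and 10).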


  subroutine mpi_init_Forces()

    ! placeholder: force initialization is not yet implemented in this revision

  end subroutine mpi_init_Forces

  !! subroutine to distribute column and row atom type info...

  subroutine mpi_handle_atypes(status)
    integer :: row_proc_size, column_proc_size
    integer :: i,ntmp
    integer, allocatable, dimension(:) :: ident_row_displs, ident_row_counts
    integer, allocatable, dimension(:) :: ident_column_displs, ident_column_counts
    ! integer :: max_alloc
    integer, intent(out), optional :: status
    ! max_alloc = max(max_row,max_col)

    !! set up tag_local arrays
    ntmp = 0
    if (present(status)) status = 0
    do i = 1,natoms
       if (i >= nstart .AND. i <= nend) then
          ntmp = i - nstart + 1
          tag_local(ntmp) = i
       end if
    end do

    !! do row idents and tags
    call mpi_comm_size(row_comm,row_proc_size,mpi_err)

    call mpi_barrier(mpi_comm_world,mpi_err)

    allocate(ident_row_displs(row_proc_size))
    allocate(ident_row_counts(row_proc_size))

    call mpi_allgather(nlocal,1,mpi_integer,ident_row_counts,1,mpi_integer, &
         row_comm,mpi_err)
    if (mpi_err /= 0) then
       if (present(status)) status = -1
    endif

    ident_row_displs(1) = 0
    do i = 2, row_proc_size
       ident_row_displs(i) = ident_row_displs(i-1) + ident_row_counts(i-1)
    enddo

    call mpi_allgatherv(ident,nlocal,mpi_integer, &
         ident_row,ident_row_counts,ident_row_displs,mpi_integer,row_comm,mpi_err)
    if (mpi_err /= 0) then
       if (present(status)) status = -1
    endif

    call mpi_allgatherv(tag_local,nlocal,mpi_integer, &
         tag_row,ident_row_counts,ident_row_displs,mpi_integer,row_comm,mpi_err)
    if (mpi_err /= 0) then
       if (present(status)) status = -1
    endif

    deallocate(ident_row_displs)
    deallocate(ident_row_counts)

    !! do column idents and tags
    call mpi_comm_size(col_comm,column_proc_size,mpi_err)

    allocate(ident_column_displs(column_proc_size))
    allocate(ident_column_counts(column_proc_size))

    call mpi_allgather(nlocal,1,mpi_integer,ident_column_counts,1,mpi_integer, &
         col_comm,mpi_err)
    if (mpi_err /= 0) then
       if (present(status)) status = -1
    endif

    ident_column_displs(1) = 0
    do i = 2, column_proc_size
       ident_column_displs(i) = ident_column_displs(i-1) + ident_column_counts(i-1)
    enddo

    call mpi_allgatherv(ident,nlocal,mpi_integer, &
         ident_col,ident_column_counts,ident_column_displs,mpi_integer,col_comm,mpi_err)
    if (mpi_err /= 0) then
       if (present(status)) status = -1
    endif

    call mpi_allgatherv(tag_local,nlocal,mpi_integer, &
         tag_col,ident_column_counts,ident_column_displs,mpi_integer,col_comm,mpi_err)
    if (mpi_err /= 0) then
       if (present(status)) status = -1
    endif

    deallocate(ident_column_displs)
    deallocate(ident_column_counts)

  end subroutine mpi_handle_atypes
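
  !! Worked example: with natoms = 10, nstart = 4, and nend = 6, the tag
  !! loop above stores tag_local = (/4,5,6/), i.e. the global indices of
  !! the three atoms owned by this process.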

  !! initializes a gather/scatter plan
  subroutine plan_gather_scatter(local_number, orig_comm, this_plan)

    type (gs_plan), intent(out) :: this_plan
    integer, intent(in) :: local_number
    integer, intent(in) :: orig_comm
    integer :: ierror
    integer :: comm
    integer :: me
    integer :: comm_procs
    integer :: i
    integer :: number_of_particles

    number_of_particles = 0
    call mpi_comm_dup(orig_comm,comm,mpi_err)
    call mpi_comm_rank(comm,me,mpi_err)
    call mpi_comm_size(comm,comm_procs,mpi_err)

    allocate (this_plan%counts(0:comm_procs-1),STAT=ierror)
    if (ierror /= 0) then
       write(*,*) 'plan_gather_scatter: could not allocate counts'
       return
    end if

    allocate (this_plan%displs(0:comm_procs-1),STAT=ierror)
    if (ierror /= 0) then
       write(*,*) 'plan_gather_scatter: could not allocate displs'
       return
    end if

    ! every process contributes its local count
    call mpi_allgather(local_number,1,mpi_integer,this_plan%counts, &
         1,mpi_integer,comm,mpi_err)

    !! figure out the total number of particles in this plan
    number_of_particles = sum(this_plan%counts)

    ! initialize plan: displacements are the running sum of the counts
    this_plan%displs(0) = 0
    do i = 1, comm_procs - 1
       this_plan%displs(i) = this_plan%displs(i-1) + this_plan%counts(i-1)
    end do

    this_plan%me = me
    this_plan%nprocs = comm_procs
    this_plan%full_size = number_of_particles
    this_plan%comm = comm
    this_plan%n_datum = local_number

  end subroutine plan_gather_scatter
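
  !! Usage sketch (q_local and q_row are hypothetical arrays sized
  !! nComponentsLocal and plan_row%full_size respectively):
  !!   call plan_gather_scatter(nComponentsLocal, rowCommunicator, plan_row)
  !!   call gather(q_local, q_row, plan_row, status)
  !!   ...
  !!   call unplan_gather_scatter(plan_row)
  !! The final call frees the duplicated communicator and the plan arrays.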

  subroutine unplan_gather_scatter(this_plan)

    type (gs_plan), intent(inout) :: this_plan

    call mpi_comm_free(this_plan%comm,mpi_err)

    deallocate(this_plan%counts)
    deallocate(this_plan%displs)

  end subroutine unplan_gather_scatter

  subroutine gather_integer( sbuffer, rbuffer, this_plan, status)

    type (gs_plan), intent(in) :: this_plan
    integer, dimension(:), intent(in) :: sbuffer
    integer, dimension(:), intent(inout) :: rbuffer
    integer :: noffset
    integer, intent(out), optional :: status

    if (present(status)) status = 0
    noffset = this_plan%displs(this_plan%me)

    call mpi_allgatherv(sbuffer,this_plan%n_datum, mpi_integer, &
         rbuffer,this_plan%counts,this_plan%displs,mpi_integer, &
         this_plan%comm, mpi_err)

    if (mpi_err /= 0) then
       if (present(status)) status = -1
    endif

  end subroutine gather_integer

  subroutine gather_double( sbuffer, rbuffer, this_plan, status)

    type (gs_plan), intent(in) :: this_plan
    real( kind = dp ), dimension(:), intent(in) :: sbuffer
    real( kind = dp ), dimension(:), intent(inout) :: rbuffer
    integer :: noffset
    integer, intent(out), optional :: status

    if (present(status)) status = 0
    noffset = this_plan%displs(this_plan%me)

    call mpi_allgatherv(sbuffer,this_plan%n_datum, mpi_double_precision, &
         rbuffer,this_plan%counts,this_plan%displs,mpi_double_precision, &
         this_plan%comm, mpi_err)

    if (mpi_err /= 0) then
       if (present(status)) status = -1
    endif

  end subroutine gather_double

  subroutine gather_double_dim( sbuffer, rbuffer, this_plan, status)

    type (gs_plan), intent(in) :: this_plan
    real( kind = dp ), dimension(:,:), intent(in) :: sbuffer
    real( kind = dp ), dimension(:,:), intent(inout) :: rbuffer
    integer :: noffset,i,ierror
    integer, intent(out), optional :: status

    if (present(status)) status = 0

    ! noffset = this_plan%displs(this_plan%me)

    call mpi_allgatherv(sbuffer,this_plan%n_datum, mpi_double_precision, &
         rbuffer,this_plan%counts,this_plan%displs,mpi_double_precision, &
         this_plan%comm, mpi_err)

    if (mpi_err /= 0) then
       if (present(status)) status = -1
    endif

  end subroutine gather_double_dim

  subroutine scatter_double( sbuffer, rbuffer, this_plan, status)

    type (gs_plan), intent(in) :: this_plan
    real( kind = dp ), dimension(:), intent(in) :: sbuffer
    real( kind = dp ), dimension(:), intent(inout) :: rbuffer
    integer, intent(out), optional :: status

    if (present(status)) status = 0

    call mpi_reduce_scatter(sbuffer,rbuffer, this_plan%counts, &
         mpi_double_precision, MPI_SUM, this_plan%comm, mpi_err)

    if (mpi_err /= 0) then
       if (present(status)) status = -1
    endif

  end subroutine scatter_double
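
  !! Note: mpi_reduce_scatter fuses an element-wise MPI_SUM over the
  !! communicator with a scatter, so each process keeps only its
  !! counts-sized slice of the summed buffer. For two processes with
  !! counts = (/2,1/) and sbuffer = (/1,2,3/) on both, process 0
  !! receives rbuffer = (/2,4/) and process 1 receives rbuffer = (/6/).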

  subroutine scatter_double_dim( sbuffer, rbuffer, this_plan, status)

    type (gs_plan), intent(in) :: this_plan
    real( kind = dp ), dimension(:,:), intent(in) :: sbuffer
    real( kind = dp ), dimension(:,:), intent(inout) :: rbuffer
    integer, intent(out), optional :: status

    if (present(status)) status = 0
    call mpi_reduce_scatter(sbuffer,rbuffer, this_plan%counts, &
         mpi_double_precision, MPI_SUM, this_plan%comm, mpi_err)

    if (mpi_err /= 0) then
       if (present(status)) status = -1
    endif

  end subroutine scatter_double_dim

#endif
end module mpi_module