root/group/trunk/OOPSE_old/src/mdtools/libmdCode/mpiSimulation_module.F90
Revision: 274
Committed: Mon Feb 17 21:27:47 2003 UTC by mmeineke
File size: 20436 byte(s)
Log Message:
added mpi for libmdCode (still working out the kinks)

File Contents

!! MPI support for long range forces using force decomposition
!! on a square grid of processors.
!! Corresponds to mpiSimulation.cpp for C++.
!! The module also contains a private interface to the MPI F90 routines.
!!
!! @author Charles F. Vardeman II
!! @author Matthew Meineke
!! @version $Id: mpiSimulation_module.F90,v 1.1 2003-02-17 21:27:47 mmeineke Exp $, $Date: 2003-02-17 21:27:47 $, $Name: not supported by cvs2svn $, $Revision: 1.1 $

module mpiSimulation
  use definitions
  use mpi
  implicit none
  PRIVATE

  !! PUBLIC subroutines contained in this module.
  !! gather and scatter are a generic interface
  !! to the gather and scatter routines.
  public :: gather, scatter
  public :: setupSimParallel
  public :: replanSimParallel
  public :: getNcol
  public :: getNrow
  public :: isMPISimSet
  public :: printComponentPlan
  public :: getMyNode

  !! PUBLIC subroutines re-exported from the MPI module.
  public :: mpi_bcast
  public :: mpi_allreduce
  public :: mpi_reduce
  public :: mpi_send
  public :: mpi_recv
  public :: mpi_get_processor_name
  public :: mpi_finalize

  !! PUBLIC MPI variables.
  public :: mpi_comm_world
  public :: mpi_character
  public :: mpi_integer
  public :: mpi_double_precision
  public :: mpi_sum
  public :: mpi_max
  public :: mpi_status_size
  public :: mpi_any_source

  !! Safety logical to prevent access to the mpiComponentPlan until
  !! it has been set by C++.
  logical :: ComponentPlanSet = .false.

  !! Generic MPI error declaration.
  integer, public :: mpi_err

  !! Include the mpiComponentPlan type. mpiComponentPlan.h is a
  !! dual header file for both C and Fortran.
#define __FORTRAN90
#include "mpiComponentPlan.h"

  !! Tags used during the force loop for parallel simulation.
  integer, allocatable, dimension(:) :: tagLocal
  integer, public, allocatable, dimension(:) :: tagRow
  integer, public, allocatable, dimension(:) :: tagColumn

  !! Logical set true once mpiSimulation has been initialized.
  logical :: isSimSet = .false.

  type (mpiComponentPlan) :: mpiSim

  !! gs_plan contains the plans for the gather and scatter routines.
  type, public :: gs_plan
     private
     type (mpiComponentPlan), pointer :: gsComponentPlan => NULL()
     integer :: gsPlanSize     !! size of this plan (nDim*nComponents)
     integer :: globalPlanSize !! size of all components in the plan
     integer, dimension(:), pointer :: displs !! Displacements array for MPI, indexed from 0.
     integer, dimension(:), pointer :: counts !! Counts array for MPI, indexed from 0.
     integer :: myPlanComm  !! My communicator for this plan
     integer :: myPlanRank  !! My rank in this plan
     integer :: planNprocs  !! Number of processors in this plan
  end type gs_plan

  ! Plans for the different decompositions.
  type (gs_plan), public :: plan_row
  type (gs_plan), public :: plan_row3d
  type (gs_plan), public :: plan_col
  type (gs_plan), public :: plan_col3d

  type (mpiComponentPlan), pointer :: simComponentPlan

  ! Interfaces for the gather / scatter routines.
  !! Generic interface for gather.
  !! Gathers a local array into a row or column array.
  !! Interfaces provided for integer, double, and double
  !! rank 2 arrays.
  interface gather
     module procedure gather_integer
     module procedure gather_double
     module procedure gather_double_2d
  end interface
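
  !! A minimal usage sketch for gather (hypothetical variable names):
  !! collect this processor's particle data into the row-decomposed
  !! array used by the force loop. Here qLocal would hold nDim*myNlocal
  !! entries and qRow nDim*nComponentsRow entries.
  !!
  !!   call gather(qLocal, qRow, plan_row3d)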

  !! Generic interface for scatter.
  !! Scatters a row or column array, adding components
  !! and reducing them to a local nComponent array.
  !! Interfaces provided for double and double rank 2 arrays.
  interface scatter
     module procedure scatter_double
     module procedure scatter_double_2d
  end interface
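
  !! A matching sketch for scatter (hypothetical variable names): reduce
  !! the row-decomposed forces back onto the particles this processor
  !! owns; each local entry receives the sum of the contributions from
  !! every processor in the row communicator.
  !!
  !!   call scatter(fRow, fLocal, plan_row3d)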

contains

  !! Sets up the mpiComponentPlan with the structure passed from C++.
  subroutine setupSimParallel(thisComponentPlan, ntags, tags, status)
    ! Passed Arguments
    ! integer, intent(inout) :: nDim !! Number of dimensions
    !! mpiComponentPlan struct from C
    type (mpiComponentPlan), intent(inout) :: thisComponentPlan
    !! Number of tags passed, nlocal
    integer, intent(in) :: ntags
    !! Result status: 0 = normal, -1 = error
    integer, intent(out) :: status
    integer :: localStatus
    !! Global reference tags for the local particles
    integer, dimension(ntags), intent(inout) :: tags

    status = 0
    if (componentPlanSet) then
       return
    endif
    componentPlanSet = .true.

    !! Copy the C component plan to Fortran.
    mpiSim = thisComponentPlan
    write(*,*) "Setting up simParallel"

    call make_Force_Grid(mpiSim, localStatus)
    if (localStatus /= 0) then
       write(default_error,*) "Error creating force grid"
       status = -1
       return
    endif

    call updateGridComponents(mpiSim, localStatus)
    if (localStatus /= 0) then
       write(default_error,*) "Error updating grid components"
       status = -1
       return
    endif

    !! Initialize the gather and scatter plans used in this simulation.
    call plan_gather_scatter(1, mpiSim%myNlocal, &
         mpiSim, mpiSim%rowComm, plan_row)
    call plan_gather_scatter(nDim, mpiSim%myNlocal, &
         mpiSim, mpiSim%rowComm, plan_row3d)
    call plan_gather_scatter(1, mpiSim%myNlocal, &
         mpiSim, mpiSim%columnComm, plan_col)
    call plan_gather_scatter(nDim, mpiSim%myNlocal, &
         mpiSim, mpiSim%columnComm, plan_col3d)

    ! Initialize the tags.
    call setTags(tags, localStatus)
    if (localStatus /= 0) then
       status = -1
       return
    endif
    isSimSet = .true.

    ! call printComponentPlan(mpiSim,0)
  end subroutine setupSimParallel

  subroutine replanSimParallel(thisComponentPlan, status)
    ! Passed Arguments
    !! mpiComponentPlan struct from C
    type (mpiComponentPlan), intent(inout) :: thisComponentPlan
    integer, intent(out) :: status
    integer :: localStatus
    status = 0

    call updateGridComponents(thisComponentPlan, localStatus)
    if (localStatus /= 0) then
       status = -1
       return
    endif

    !! Free the old gather / scatter plans.
    call unplan_gather_scatter(plan_row)
    call unplan_gather_scatter(plan_row3d)
    call unplan_gather_scatter(plan_col)
    call unplan_gather_scatter(plan_col3d)

    !! Reinitialize the gather and scatter plans used in this simulation.
    call plan_gather_scatter(1, thisComponentPlan%myNlocal, &
         thisComponentPlan, thisComponentPlan%rowComm, plan_row)
    call plan_gather_scatter(nDim, thisComponentPlan%myNlocal, &
         thisComponentPlan, thisComponentPlan%rowComm, plan_row3d)
    call plan_gather_scatter(1, thisComponentPlan%myNlocal, &
         thisComponentPlan, thisComponentPlan%columnComm, plan_col)
    call plan_gather_scatter(nDim, thisComponentPlan%myNlocal, &
         thisComponentPlan, thisComponentPlan%columnComm, plan_col3d)

  end subroutine replanSimParallel

  !! Updates the number of row and column components for the long range forces.
  subroutine updateGridComponents(thisComponentPlan, status)
    type (mpiComponentPlan) :: thisComponentPlan !! mpiComponentPlan

    !! Status return:
    !! -  0 success
    !! - -1 failure
    integer, intent(out) :: status
    integer :: nComponentsLocal
    integer :: nComponentsRow
    integer :: nComponentsColumn
    integer :: mpiErrors

    status = 0
    nComponentsRow = 0
    nComponentsColumn = 0
    if (.not. componentPlanSet) return

    if (thisComponentPlan%myNlocal == 0) then
       status = -1
       return
    endif

    nComponentsLocal = thisComponentPlan%myNlocal

    !! Sum the local component counts across the row and column communicators.
    call mpi_allreduce(nComponentsLocal, nComponentsRow, 1, mpi_integer, &
         mpi_sum, thisComponentPlan%rowComm, mpiErrors)
    if (mpiErrors /= 0) then
       status = -1
       return
    endif

    call mpi_allreduce(nComponentsLocal, nComponentsColumn, 1, mpi_integer, &
         mpi_sum, thisComponentPlan%columnComm, mpiErrors)
    if (mpiErrors /= 0) then
       status = -1
       return
    endif

    thisComponentPlan%nComponentsRow = nComponentsRow
    thisComponentPlan%nComponentsColumn = nComponentsColumn

  end subroutine updateGridComponents
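
  !! An illustrative example of the reductions above: on a 2x2 grid
  !! where the four processors own 3, 4, 5, and 6 particles, the row
  !! sums come out as 7 and 11 and the column sums as 8 and 10, and
  !! every processor in a given row or column agrees on its total.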

  !! Creates a square force decomposition of the processors into row and
  !! column communicators.
  subroutine make_Force_Grid(thisComponentPlan, status)
    type (mpiComponentPlan) :: thisComponentPlan
    integer, intent(out) :: status !! status returns -1 on error
    integer :: nColumnsMax        !! Maximum number of columns
    integer :: nWorldProcessors   !! Total number of processors in the world comm.
    integer :: rowIndex           !! Row for this processor.
    integer :: columnIndex        !! Column for this processor.
    integer :: nRows              !! Total number of rows.
    integer :: nColumns           !! Total number of columns.
    integer :: mpiErrors          !! MPI errors.
    integer :: rowCommunicator    !! MPI row communicator.
    integer :: columnCommunicator !! MPI column communicator.
    integer :: myWorldRank
    integer :: i

    if (.not. ComponentPlanSet) return
    status = 0

    !! We make a dangerous assumption here: if numberProcessors is
    !! zero, then we need to get the information from MPI.
    if (thisComponentPlan%numberProcessors == 0) then
       call mpi_comm_size(MPI_COMM_WORLD, nWorldProcessors, mpiErrors)
       if (mpiErrors /= 0) then
          status = -1
          return
       endif
       call mpi_comm_rank(MPI_COMM_WORLD, myWorldRank, mpiErrors)
       if (mpiErrors /= 0) then
          status = -1
          return
       endif
    else
       nWorldProcessors = thisComponentPlan%numberProcessors
       myWorldRank = thisComponentPlan%myNode
    endif

    !! Choose nColumns as the largest divisor of nWorldProcessors that does
    !! not exceed its square root, giving the most nearly square grid.
    nColumnsMax = nint(sqrt(real(nWorldProcessors, kind=dp)))

    do i = 1, nColumnsMax
       if (mod(nWorldProcessors, i) == 0) nColumns = i
    end do

    nRows = nWorldProcessors/nColumns
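
    !! A worked example of the choice above: with 12 processors,
    !! nColumnsMax = nint(sqrt(12.0)) = 3, and the loop leaves
    !! nColumns = 3 (the largest divisor of 12 not exceeding 3), so
    !! nRows = 4; world rank 7 then lands at rowIndex = 2, columnIndex = 1.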

    rowIndex = myWorldRank/nColumns

    call mpi_comm_split(mpi_comm_world, rowIndex, 0, rowCommunicator, mpiErrors)
    if (mpiErrors /= 0) then
       write(default_error,*) "MPI comm split failed at row communicator"
       status = -1
       return
    endif

    columnIndex = mod(myWorldRank, nColumns)
    call mpi_comm_split(mpi_comm_world, columnIndex, 0, columnCommunicator, mpiErrors)
    if (mpiErrors /= 0) then
       write(default_error,*) "MPI comm split failed at column communicator"
       status = -1
       return
    endif

    ! Set the appropriate components of thisComponentPlan.
    thisComponentPlan%rowComm = rowCommunicator
    thisComponentPlan%columnComm = columnCommunicator
    thisComponentPlan%rowIndex = rowIndex
    thisComponentPlan%columnIndex = columnIndex
    thisComponentPlan%numberRows = nRows
    thisComponentPlan%numberColumns = nColumns

  end subroutine make_Force_Grid

  !! Initializes a gather / scatter plan.
  subroutine plan_gather_scatter(nDim, nComponents, thisComponentPlan, &
       thisComm, this_plan, status)
    integer, intent(in) :: nDim !! Number of dimensions for the gather / scatter plan
    integer, intent(in) :: nComponents
    type (mpiComponentPlan), intent(in), target :: thisComponentPlan
    type (gs_plan), intent(out) :: this_plan !! MPI Component Plan
    integer, intent(in) :: thisComm !! MPI communicator for this plan

    integer, intent(out), optional :: status
    integer :: ierror
    integer :: i

    if (present(status)) status = 0

    !! Set the gsComponentPlan pointer
    !! to the component plan we want to use for this gather / scatter plan.
    !! WARNING: this could be dangerous, since thisComponentPlan was originally
    !! allocated in C++ and there is a significant difference between C and
    !! F95 pointers....
    this_plan%gsComponentPlan => thisComponentPlan

    ! Set this plan's size for the displs array.
    this_plan%gsPlanSize = nDim * nComponents

    ! Duplicate the communicator for this plan.
    call mpi_comm_dup(thisComm, this_plan%myPlanComm, mpi_err)
    if (mpi_err /= 0) then
       if (present(status)) status = -1
       return
    end if
    call mpi_comm_rank(this_plan%myPlanComm, this_plan%myPlanRank, mpi_err)
    if (mpi_err /= 0) then
       if (present(status)) status = -1
       return
    end if

    call mpi_comm_size(this_plan%myPlanComm, this_plan%planNprocs, mpi_err)
    if (mpi_err /= 0) then
       if (present(status)) status = -1
       return
    end if

    !! The counts and displacements arrays are indexed from 0 to be compatible
    !! with MPI arrays.
    allocate(this_plan%counts(0:this_plan%planNprocs-1), STAT=ierror)
    if (ierror /= 0) then
       if (present(status)) status = -1
       return
    end if

    allocate(this_plan%displs(0:this_plan%planNprocs-1), STAT=ierror)
    if (ierror /= 0) then
       if (present(status)) status = -1
       return
    end if

    !! Gather all the local sizes into a counts array of length planNprocs.
    call mpi_allgather(this_plan%gsPlanSize, 1, mpi_integer, this_plan%counts, &
         1, mpi_integer, thisComm, mpi_err)
    if (mpi_err /= 0) then
       if (present(status)) status = -1
       return
    end if

    !! Figure out the total number of entries in this plan.
    this_plan%globalPlanSize = sum(this_plan%counts)

    !! Initialize the plan displacements as a running sum of the counts.
    this_plan%displs(0) = 0
    do i = 1, this_plan%planNprocs - 1, 1
       this_plan%displs(i) = this_plan%displs(i-1) + this_plan%counts(i-1)
    end do
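
    !! For example: if the gathered counts are (/ 4, 2, 3 /), the loop
    !! above yields displs = (/ 0, 4, 6 /) and globalPlanSize = 9,
    !! exactly the layout mpi_allgatherv expects.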

  end subroutine plan_gather_scatter

  subroutine unplan_gather_scatter(this_plan)
    type (gs_plan), intent(inout) :: this_plan

    this_plan%gsComponentPlan => null()
    call mpi_comm_free(this_plan%myPlanComm, mpi_err)

    deallocate(this_plan%counts)
    deallocate(this_plan%displs)

  end subroutine unplan_gather_scatter

  subroutine gather_integer(sbuffer, rbuffer, this_plan, status)

    type (gs_plan), intent(in) :: this_plan
    integer, dimension(:), intent(in) :: sbuffer
    integer, dimension(:), intent(inout) :: rbuffer !! receives the gathered array
    integer :: noffset
    integer, intent(out), optional :: status

    if (present(status)) status = 0
    noffset = this_plan%displs(this_plan%myPlanRank)

    ! if (getMyNode() == 1) then
    !    write(*,*) "Node 0 printing allgatherv vars"
    !    write(*,*) "Noffset: ", noffset
    !    write(*,*) "PlanSize: ", this_plan%gsPlanSize
    !    write(*,*) "PlanComm: ", this_plan%myPlanComm
    ! end if

    call mpi_allgatherv(sbuffer, this_plan%gsPlanSize, mpi_integer, &
         rbuffer, this_plan%counts, this_plan%displs, mpi_integer, &
         this_plan%myPlanComm, mpi_err)

    if (mpi_err /= 0) then
       if (present(status)) status = -1
    endif

  end subroutine gather_integer

  subroutine gather_double(sbuffer, rbuffer, this_plan, status)

    type (gs_plan), intent(in) :: this_plan
    real( kind = DP ), dimension(:), intent(in) :: sbuffer
    real( kind = DP ), dimension(:), intent(inout) :: rbuffer !! receives the gathered array
    integer :: noffset
    integer, intent(out), optional :: status

    if (present(status)) status = 0
    noffset = this_plan%displs(this_plan%myPlanRank)

    call mpi_allgatherv(sbuffer, this_plan%gsPlanSize, mpi_double_precision, &
         rbuffer, this_plan%counts, this_plan%displs, mpi_double_precision, &
         this_plan%myPlanComm, mpi_err)

    if (mpi_err /= 0) then
       if (present(status)) status = -1
    endif

  end subroutine gather_double

  subroutine gather_double_2d(sbuffer, rbuffer, this_plan, status)

    type (gs_plan), intent(in) :: this_plan
    real( kind = DP ), dimension(:,:), intent(in) :: sbuffer
    real( kind = DP ), dimension(:,:), intent(inout) :: rbuffer !! receives the gathered array
    integer, intent(out), optional :: status

    external mpi_allgatherv

    if (present(status)) status = 0

    ! noffset = this_plan%displs(this_plan%me)

    call mpi_allgatherv(sbuffer, this_plan%gsPlanSize, mpi_double_precision, &
         rbuffer, this_plan%counts, this_plan%displs, mpi_double_precision, &
         this_plan%myPlanComm, mpi_err)

    if (mpi_err /= 0) then
       if (present(status)) status = -1
    endif

  end subroutine gather_double_2d

  subroutine scatter_double(sbuffer, rbuffer, this_plan, status)

    type (gs_plan), intent(in) :: this_plan
    real( kind = DP ), dimension(:), intent(in) :: sbuffer
    real( kind = DP ), dimension(:), intent(inout) :: rbuffer !! receives the reduced local array
    integer, intent(out), optional :: status
    external mpi_reduce_scatter

    if (present(status)) status = 0

    call mpi_reduce_scatter(sbuffer, rbuffer, this_plan%counts, &
         mpi_double_precision, MPI_SUM, this_plan%myPlanComm, mpi_err)

    if (mpi_err /= 0) then
       if (present(status)) status = -1
    endif

  end subroutine scatter_double
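
  !! Note on the reduce-scatter semantics used above: mpi_reduce_scatter
  !! element-wise sums the full row/column buffer across the plan's
  !! communicator, then hands rank i only the segment of length counts(i).
  !! For example, with counts = (/ 4, 2 /), rank 0 receives the summed
  !! first four elements and rank 1 the summed last two.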

  subroutine scatter_double_2d(sbuffer, rbuffer, this_plan, status)

    type (gs_plan), intent(in) :: this_plan
    real( kind = DP ), dimension(:,:), intent(in) :: sbuffer
    real( kind = DP ), dimension(:,:), intent(inout) :: rbuffer !! receives the reduced local array
    integer, intent(out), optional :: status
    external mpi_reduce_scatter

    if (present(status)) status = 0
    call mpi_reduce_scatter(sbuffer, rbuffer, this_plan%counts, &
         mpi_double_precision, MPI_SUM, this_plan%myPlanComm, mpi_err)

    if (mpi_err /= 0) then
       if (present(status)) status = -1
    endif

  end subroutine scatter_double_2d

  subroutine setTags(tags, status)
    integer, dimension(:) :: tags
    integer :: status

    integer :: alloc_stat

    integer :: ncol
    integer :: nrow

    status = 0
    ! Allocate the row arrays.
    nrow = getNrow(plan_row)
    ncol = getNcol(plan_col)

    if (.not. allocated(tagRow)) then
       allocate(tagRow(nrow), STAT=alloc_stat)
       if (alloc_stat /= 0) then
          status = -1
          return
       endif
    else
       deallocate(tagRow)
       allocate(tagRow(nrow), STAT=alloc_stat)
       if (alloc_stat /= 0) then
          status = -1
          return
       endif
    endif

    ! Allocate the column arrays.
    if (.not. allocated(tagColumn)) then
       allocate(tagColumn(ncol), STAT=alloc_stat)
       if (alloc_stat /= 0) then
          status = -1
          return
       endif
    else
       deallocate(tagColumn)
       allocate(tagColumn(ncol), STAT=alloc_stat)
       if (alloc_stat /= 0) then
          status = -1
          return
       endif
    endif

    call gather(tags, tagRow, plan_row)
    call gather(tags, tagColumn, plan_col)

  end subroutine setTags
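
  !! After setTags, tagRow(i) holds the global tag of the i-th entry of
  !! the row-decomposed arrays (and tagColumn likewise for columns), so
  !! a processor can map row or column slots back to global particles.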

  pure function getNcol(thisplan) result(ncol)
    type (gs_plan), intent(in) :: thisplan
    integer :: ncol
    ncol = thisplan%gsComponentPlan%nComponentsColumn
  end function getNcol

  pure function getNrow(thisplan) result(nrow)
    type (gs_plan), intent(in) :: thisplan
    integer :: nrow
    nrow = thisplan%gsComponentPlan%nComponentsRow
  end function getNrow

  function isMPISimSet() result(isthisSimSet)
    logical :: isthisSimSet
    isthisSimSet = isSimSet
  end function isMPISimSet

  subroutine printComponentPlan(this_plan, printNode)

    type (mpiComponentPlan), intent(in) :: this_plan
    integer, optional :: printNode
    logical :: print_me

    print_me = .false.
    if (present(printNode)) then
       if (printNode == mpiSim%myNode) print_me = .true.
    else
       print_me = .true.
    endif

    if (print_me) then
       write(default_error,*) "SetupSimParallel: writing component plan"

       write(default_error,*) "nMolGlobal: ", this_plan%nMolGlobal
       write(default_error,*) "nAtomsGlobal: ", this_plan%nAtomsGlobal
       write(default_error,*) "nBondsGlobal: ", this_plan%nBondsGlobal
       write(default_error,*) "nTorsionsGlobal: ", this_plan%nTorsionsGlobal
       write(default_error,*) "nSRIGlobal: ", this_plan%nSRIGlobal
       write(default_error,*) "myMolStart: ", this_plan%myMolStart
       write(default_error,*) "myMolEnd: ", this_plan%myMolEnd
       write(default_error,*) "myMol: ", this_plan%myMol
       write(default_error,*) "myNlocal: ", this_plan%myNlocal
       write(default_error,*) "myNode: ", this_plan%myNode
       write(default_error,*) "numberProcessors: ", this_plan%numberProcessors
       write(default_error,*) "rowComm: ", this_plan%rowComm
       write(default_error,*) "columnComm: ", this_plan%columnComm
       write(default_error,*) "numberRows: ", this_plan%numberRows
       write(default_error,*) "numberColumns: ", this_plan%numberColumns
       write(default_error,*) "nComponentsRow: ", this_plan%nComponentsRow
       write(default_error,*) "nComponentsColumn: ", this_plan%nComponentsColumn
       write(default_error,*) "rowIndex: ", this_plan%rowIndex
       write(default_error,*) "columnIndex: ", this_plan%columnIndex
    endif
  end subroutine printComponentPlan

  function getMyNode() result(myNode)
    integer :: myNode
    myNode = mpiSim%myNode
  end function getMyNode

end module mpiSimulation