ViewVC Help
View File | Revision Log | Show Annotations | View Changeset | Root Listing
root/group/trunk/OOPSE/libmdtools/NLModel0.cpp
Revision: 1000
Committed: Fri Jan 30 21:47:22 2004 UTC (20 years, 5 months ago) by tim
File size: 6212 byte(s)
Log Message:
using class Minimizer1D derived from MinimizerBase instead of a functor to do line search

File Contents

# User Rev Content
1 tim 996 #include "NLModel.hpp"
2    
3     /**
4     * calculate gradient using backward finite difference
5     * df(Xk)/dXi = (f(Xk) - f(Xk - h*ei)) /h
6     * where h is a small positive scalar and ei is the ith unit vector (ith column of the identity Matrix)
7     * h can be used for all paritial derivatives, but in some cases, it is essential to use a different value
8     * for each partial derivative
9     */
10     vector<double> NLModel0::BackwardGrad(const vector<double>& x, double& fx, vector<double>& grad, const vector<double>& h){
11     vector<double> tempX = x;
12     vector<double> partialGrad;
13     double fminus;
14     double hi;
15    
16     for(int i = 0; i < ndim; i++){
17    
18     #ifndef IS_MPI
19     hi = copysign(h[i], tempX[i]);
20    
21     //tempX[i] = x[i] + hi;
22     tempX[i] -= hi;
23    
24     fminus = (*objfunc)(tempX);
25    
26     partialGrad[i] = (fx - fminus) / hi;
27    
28     //restore tempX to its original value
29     tempX[i] += hi;
30     #else
31    
32     if(procMappingArray[i] == myRank){
33    
34     hi = copysign(h[i], tempX[i]);
35    
36     tempX[i] -= hi;
37     }
38    
39     fminus = (*objfunc)(tempX);
40    
41     if(procMappingArray[i] == myRank){
42     partialGrad[i] = (fx - fminus) / hi;
43    
44     //restore tempX to its original value
45     tempX[i] += hi;
46     }
47    
48     #endif
49     }
50    
51     return partialGrad;
52     }
53    
54     /**
55     * calculate gradient using forward finite difference
56     * df(Xk)/dXi = (f(Xk+h*ei) - f(Xk)) /h
57     * where h is a small positive scalar and ei is the ith unit vector (ith column of the identity Matrix)
58     * h can be used for all paritial derivatives, but in some cases, it is essential to use a different value
59     * for each partial derivative
60     */
61     vector<double> NLModel0::ForwardGrad(const vector<double>& x, double& fx, vector<double>& grad, const vector<double>& h){
62     vector<double> tempX = x;
63     vector<double> partialGrad;
64     double fplus;
65     double hi;
66    
67     for(int i = 0; i < ndim; i++){
68    
69     #ifndef IS_MPI
70     hi = copysign(h[i], tempX[i]);
71    
72     //tempX[i] = x[i] + hi;
73     tempX[i] += hi;
74    
75     fplus = (*objfunc)(tempX);
76    
77     partialGrad[i] = (fplus - fx) / hi;
78    
79     //restore tempX to its original value
80     tempX[i] -= hi;
81     #else
82    
83     if(procMappingArray[i] == myRank){
84    
85     hi = copysign(h[i], tempX[i]);
86    
87     //tempX[i] = x[i] + hi;
88     tempX[i] += hi;
89     }
90    
91     fminus = (*objfunc)(tempX);
92    
93     if(procMappingArray[i] == myRank){
94     partialGrad[i] = (fx - fminus) / hi;
95    
96     //restore tempX to its original value
97     tempX[i] -= hi;
98     }
99    
100     #endif
101     }
102    
103     return partialGrad;
104     }
105    
106     /**
107     * calculate gradient using central finite difference
108     * df(Xk)/dXi = (f(Xk+h*ei) - f(Xk - h*ei )) /h
109     * where h is a small positive scalar and ei is the ith unit vector (ith column of the identity Matrix)
110     * h can be used for all paritial derivatives, but in some cases, it is essential to use a different value
111     * for each partial derivative
112     */
113     vector<double> NLModel0::CentralGrad(const vector<double>& x, double& fx, vector<double>& grad, const vector<double>& h){
114     vector<double> tempX = x;
115     vector<double> partialGrad;
116     double fplus, fminus;
117     double hi;
118    
119     for(int i = 0; i < ndim; i++){
120    
121     #ifndef IS_MPI
122     hi = copysign(h[i], tempX[i]);
123    
124     //tempX[i] = x[i] + hi
125     tempX[i] += hi;
126    
127     fplus = (*objfunc)(tempX);
128    
129     //tempX[i] = x[i] -hi
130     tempX[i] -= 2*hi;
131     fminus = (*objfunc)(tempX);
132    
133     partialGrad[i] = (fplus + fminus) / (2*hi);
134    
135     //restore tempX to its original value
136     tempX[i] += hi;
137     #else
138    
139     if(procMappingArray[i] == myRank){
140    
141     hi = copysign(h[i], tempX[i]);
142    
143     //tempX[i] = x[i] + hi;
144     tempX[i] += hi;
145     }
146    
147     fplus = (*objfunc)(tempX);
148    
149     if(procMappingArray[i] == myRank){
150     partialGrad[i] = (fx - fminus) / hi;
151    
152     //restore tempX to its original value
153     tempX[i] -= 2*hi;
154     }
155    
156     fminus = (*objfunc)(tempX);
157    
158     if(procMappingArray[i] == myRank){
159     partialGrad[i] = (fx - fminus) / (2*hi);
160    
161     //restore tempX to its original value
162     tempX[i] -= hi;
163     }
164     #endif
165     }
166    
167     return partialGrad;
168     }
169    
170     /**
171     * calculate hessian using finite difference
172     * d2f(Xk)/dxidxj = (df(Xk+h*ej)/dXi + df(Xk - h*ej)/dXi) /2h
173     * where h is a small positive scalar and ei is the ith unit vector (ith column of the identity Matrix)
174 tim 1000 *
175 tim 996 */
176 tim 1000 SymMatrix NLModel0::FiniteHessian(vector<double>& x, double fx, vector<double>& h){
177 tim 996 SymMatrix H(ndim);
178 tim 1000 vector<double> tempX;
179     vector<double> fi(ndim);
180     vector<double> hi;
181     double fij;
182    
183     tempX = x;
184 tim 996
185 tim 1000 //calculate f(X+ h*ei)
186 tim 996 for(int i = 0; i < ndim; i++){
187    
188 tim 1000 #ifndef IS_MPI
189    
190     hi[i] = copysign(h[i],tempX[i]);
191    
192     tempX[i] += hi[i];
193    
194     fi[i] = calcF(tempX);
195    
196     tempX[i] -= hi;
197    
198     #else
199    
200     if(procMappingArray[i] == myRank){
201    
202     hi[i] = copysign(h[i],tempX[i]);
203    
204     tempX[i] += hi[i];
205     }
206    
207     fi[i] = calcF(tempX);
208    
209     if(procMappingArray[i] == myRank){
210     tempX[i] -= hi[i];
211     }
212    
213     #endif
214     }
215    
216     #ifndef IS_MPI
217    
218     for(int i = 0; i < ndim; i++){
219    
220     tempX[i] += hi[i] * 2;
221    
222     fii = calcF(tempX);
223    
224     H(i,j) = ((fx - fi[i]) + (fii - fi[i])) / (hi[i]*hi[i]);
225    
226     tempX[i] -= hi[i];
227    
228 tim 996 for(int j = i + 1; j < ndim; j++){
229    
230 tim 1000 tempX[j] += hi[j];
231    
232     fij = calcF(tempX);
233    
234     H(i,j) = ((f - fi[i]) + (fij - fi[j])) / (hi[i]*hi[j]);
235    
236     tempX[j] -= hi[j];
237 tim 996 }
238 tim 1000
239     tempX[i] -= hi[i];
240 tim 996 }
241 tim 1000
242     #else
243     for(int i = 0; i < ndim; i++){
244    
245     if(procMappingArray[i] == myRank){
246     tempX[i] += hi[i] * 2;
247     }
248    
249     fii = calcF(tempX);
250    
251     H(i,j) = ((fx - fi[i]) + (fii - fi[i])) / (hi[i]*hi[i]);
252    
253     if(procMappingArray[i] == myRank){
254     tempX[i] -= hi[i];
255     }
256    
257     for(int j = i + 1; j < ndim; j++){
258    
259     if(procMappingArray[j] == myRank){
260     tempX[j] += hi[j];
261     }
262    
263     fij = calcF(tempX);
264     H(i,j) = ((f - fi[i]) + (fij - fi[j])) / (hi[i]*hi[j]);
265    
266     if(procMappingArray[j] == myRank){
267     tempX[j] -= hi[j];
268     }
269    
270     }
271    
272     if(procMappingArray[i] == myRank){
273     tempX[i] -= hi[i];
274     }
275    
276     }
277    
278     #endif
279    
280     return H;
281 tim 996 }

Properties

Name Value
svn:executable *