1 |
/* Nonlinear Optimization using the algorithm of Hooke and Jeeves */ |
/* Nonlinear Optimization using the algorithm of Hooke and Jeeves */ |
2 |
/* 12 February 1994 author: Mark G. Johnson */ |
/* 12 February 1994 author: Mark G. Johnson */ |
3 |
|
// |
4 |
/* Find a point X where the nonlinear function f(X) has a local */ |
/* Find a point X where the nonlinear function f(X) has a local */ |
5 |
/* minimum. X is an n-vector and f(X) is a scalar. In mathe- */ |
/* minimum. X is an n-vector and f(X) is a scalar. In mathe- */ |
6 |
/* matical notation f: R^n -> R^1. The objective function f() */ |
/* matical notation f: R^n -> R^1. The objective function f() */ |
7 |
/* is not required to be continuous. Nor does f() need to be */ |
/* is not required to be continuous. Nor does f() need to be */ |
8 |
/* differentiable. The program does not use or require */ |
/* differentiable. The program does not use or require */ |
9 |
/* derivatives of f(). */ |
/* derivatives of f(). */ |
10 |
|
// |
11 |
/* The software user supplies three things: a subroutine that */ |
/* The software user supplies three things: a subroutine that */ |
12 |
/* computes f(X), an initial "starting guess" of the minimum point */ |
/* computes f(X), an initial "starting guess" of the minimum point */ |
13 |
/* X, and values for the algorithm convergence parameters. Then */ |
/* X, and values for the algorithm convergence parameters. Then */ |
14 |
/* the program searches for a local minimum, beginning from the */ |
/* the program searches for a local minimum, beginning from the */ |
15 |
/* starting guess, using the Direct Search algorithm of Hooke and */ |
/* starting guess, using the Direct Search algorithm of Hooke and */ |
16 |
/* Jeeves. */ |
/* Jeeves. */ |
17 |
|
// |
18 |
/* This C program is adapted from the Algol pseudocode found in */ |
/* This C program is adapted from the Algol pseudocode found in */ |
19 |
/* "Algorithm 178: Direct Search" by Arthur F. Kaupe Jr., Commun- */ |
/* "Algorithm 178: Direct Search" by Arthur F. Kaupe Jr., Commun- */ |
20 |
/* ications of the ACM, Vol 6. p.313 (June 1963). It includes the */ |
/* ications of the ACM, Vol 6. p.313 (June 1963). It includes the */ |
24 |
/* highly as the one by A. Kaupe, is: R. Hooke and T. A. Jeeves, */ |
/* highly as the one by A. Kaupe, is: R. Hooke and T. A. Jeeves, */ |
25 |
/* "Direct Search Solution of Numerical and Statistical Problems", */ |
/* "Direct Search Solution of Numerical and Statistical Problems", */ |
26 |
/* Journal of the ACM, Vol. 8, April 1961, pp. 212-229. */ |
/* Journal of the ACM, Vol. 8, April 1961, pp. 212-229. */ |
27 |
|
// |
28 |
/* Calling sequence: */ |
/* Calling sequence: */ |
29 |
/* int hooke(nvars, startpt, endpt, rho, epsilon, itermax) */ |
/* int hooke(nvars, startpt, endpt, rho, epsilon, itermax) */ |
30 |
/* */ |
/* */ |
59 |
/* itermax {an integer} A second, rarely used, halting */ |
/* itermax {an integer} A second, rarely used, halting */ |
60 |
/* criterion. If the algorithm uses >= itermax */ |
/* criterion. If the algorithm uses >= itermax */ |
61 |
/* iterations, halt. */ |
/* iterations, halt. */ |
62 |
|
// |
63 |
/* The user-supplied objective function f(x,n) should return a C */ |
/* The user-supplied objective function f(x,n) should return a C */ |
64 |
/* "double". Its arguments are x -- an array of doubles, and */ |
/* "double". Its arguments are x -- an array of doubles, and */ |
65 |
/* n -- an integer. x is the point at which f(x) should be */ |
/* n -- an integer. x is the point at which f(x) should be */ |
66 |
/* evaluated, and n is the number of coordinates of x. That is, */ |
/* evaluated, and n is the number of coordinates of x. That is, */ |
67 |
/* n is the number of coefficients being fitted. */ |
/* n is the number of coefficients being fitted. */ |
68 |
|
// |
69 |
/* rho, the algorithm convergence control */ |
/* rho, the algorithm convergence control */ |
70 |
|
// |
71 |
/* The algorithm works by taking "steps" from one estimate of */ |
/* The algorithm works by taking "steps" from one estimate of */ |
72 |
/* a minimum, to another (hopefully better) estimate. Taking */ |
/* a minimum, to another (hopefully better) estimate. Taking */ |
73 |
/* big steps gets to the minimum more quickly, at the risk of */ |
/* big steps gets to the minimum more quickly, at the risk of */ |
95 |
/* again with a larger value of rho such as 0.85, using the */ |
/* again with a larger value of rho such as 0.85, using the */ |
96 |
/* result of the first minimization as the starting guess to */ |
/* result of the first minimization as the starting guess to */ |
97 |
/* begin the second minimization. */ |
/* begin the second minimization. */ |
98 |
|
// |
99 |
/* Normal use: */ |
/* Normal use: */ |
100 |
/* (1) Code your function f() in the C language */ |
/* (1) Code your function f() in the C language */ |
101 |
/* (2) Install your starting guess {or read it in} */ |
/* (2) Install your starting guess {or read it in} */ |
102 |
/* (3) Run the program */ |
/* (3) Run the program */ |
103 |
/* (4) {for the skeptical}: Use the computed minimum */ |
/* (4) {for the skeptical}: Use the computed minimum */ |
104 |
/* as the starting point for another run */ |
/* as the starting point for another run */ |
105 |
|
// |
106 |
/* Data Fitting: */ |
/* Data Fitting: */ |
107 |
/* Code your function f() to be the sum of the squares of the */ |
/* Code your function f() to be the sum of the squares of the */ |
108 |
/* errors (differences) between the computed values and the */ |
/* errors (differences) between the computed values and the */ |
113 |
/* f(x) = SUM (measured_y[i] - ((A*t[i]*t[i]) + (B*exp(t[i])) */ |
/* f(x) = SUM (measured_y[i] - ((A*t[i]*t[i]) + (B*exp(t[i])) */ |
114 |
/* + (C*tan(t[i]))))^2 */ |
/* + (C*tan(t[i]))))^2 */ |
115 |
/* where x[] is a 3-vector consisting of {A, B, C}. */ |
/* where x[] is a 3-vector consisting of {A, B, C}. */ |
116 |
|
// |
117 |
/* The author of this software is M.G. Johnson. */ |
/* The author of this software is M.G. Johnson. */ |
118 |
/* Permission to use, copy, modify, and distribute this software */ |
/* Permission to use, copy, modify, and distribute this software */ |
119 |
/* for any purpose without fee is hereby granted, provided that */ |
/* for any purpose without fee is hereby granted, provided that */ |
125 |
/* AUTHOR NOR AT&T MAKE ANY REPRESENTATION OR WARRANTY OF ANY */ |
/* AUTHOR NOR AT&T MAKE ANY REPRESENTATION OR WARRANTY OF ANY */ |
126 |
/* KIND CONCERNING THE MERCHANTABILITY OF THIS SOFTWARE OR ITS */ |
/* KIND CONCERNING THE MERCHANTABILITY OF THIS SOFTWARE OR ITS */ |
127 |
/* FITNESS FOR ANY PARTICULAR PURPOSE. */ |
/* FITNESS FOR ANY PARTICULAR PURPOSE. */ |
128 |
|
// |
129 |
/* JMB this has been modified to work with the gadget object structure */ |
/* JMB this has been modified to work with the gadget object structure */ |
130 |
/* This means that the function has been replaced by a call to ecosystem */ |
/* This means that the function has been replaced by a call to ecosystem */ |
131 |
/* object, and we can use the vector objects that have been defined */ |
/* object, and we can use the vector objects that have been defined */ |
139 |
#include "ecosystem.h" |
#include "ecosystem.h" |
140 |
#include "global.h" |
#include "global.h" |
141 |
|
|
142 |
extern Ecosystem* EcoSystem; |
#ifndef NO_OPENMP |
143 |
|
#include "omp.h" |
144 |
|
#endif |
145 |
|
|
146 |
|
extern Ecosystem* EcoSystem; |
147 |
|
#ifndef NO_OPENMP |
148 |
|
extern Ecosystem** EcoSystems; |
149 |
|
#endif |
150 |
|
|
151 |
/* given a point, look for a better one nearby, one coord at a time */ |
/* given a point, look for a better one nearby, one coord at a time */ |
152 |
double OptInfoHooke::bestNearby(DoubleVector& delta, DoubleVector& point, double prevbest, IntVector& param) { |
double OptInfoHooke::bestNearby(DoubleVector& delta, DoubleVector& point, double prevbest, IntVector& param) { |
156 |
DoubleVector z(point); |
DoubleVector z(point); |
157 |
|
|
158 |
minf = prevbest; |
minf = prevbest; |
159 |
|
// for (int k=0;k<point.Size(); k++) |
160 |
|
// cout << z[k] ; |
161 |
|
// cout << endl; |
162 |
for (i = 0; i < point.Size(); i++) { |
for (i = 0; i < point.Size(); i++) { |
163 |
|
|
164 |
|
// for (int k=0;k<point.Size(); k++) |
165 |
|
// cout << z[k] << " " ; |
166 |
|
//cout << endl; |
167 |
z[param[i]] = point[param[i]] + delta[param[i]]; |
z[param[i]] = point[param[i]] + delta[param[i]]; |
168 |
ftmp = EcoSystem->SimulateAndUpdate(z); |
ftmp = EcoSystem->SimulateAndUpdate(z); |
169 |
|
// cout << i <<"-z["<< param[i]<<"]:" <<z[param[i]] << " - " << ftmp << endl; |
170 |
if (ftmp < minf) { |
if (ftmp < minf) { |
171 |
minf = ftmp; |
minf = ftmp; |
172 |
} else { |
} else { |
178 |
else |
else |
179 |
z[param[i]] = point[param[i]]; |
z[param[i]] = point[param[i]]; |
180 |
} |
} |
181 |
|
// cout << i <<"-z["<< param[i]<<"]:" <<z[param[i]] << " - " << ftmp <<" - " << prevbest << endl; |
182 |
} |
} |
183 |
|
|
184 |
for (i = 0; i < point.Size(); i++) |
for (i = 0; i < point.Size(); i++) |
186 |
return minf; |
return minf; |
187 |
} |
} |
188 |
|
|
189 |
|
/* given a point, look for a better one nearby, one coord at a time */ |
190 |
|
#ifndef NO_OPENMP |
191 |
|
double OptInfoHooke::bestNearbyOMP(DoubleVector& delta, DoubleVector& point, double prevbest, IntVector& param) { |
192 |
|
//cout << "beastNearbyINI" << endl; |
193 |
|
double minf;//, ftmp; |
194 |
|
int i, j, k; |
195 |
|
DoubleVector z(point); |
196 |
|
|
197 |
|
struct Storage { |
198 |
|
DoubleVector z; |
199 |
|
DoubleVector delta; |
200 |
|
double ftmp; |
201 |
|
int iters; |
202 |
|
}; |
203 |
|
|
204 |
|
minf = prevbest; |
205 |
|
i = 0; |
206 |
|
|
207 |
|
int paral_tokens, numThr, nvars = point.Size(); |
208 |
|
numThr = omp_get_max_threads ( ); |
209 |
|
|
210 |
|
Storage* storage = new Storage[numThr]; |
211 |
|
if ((numThr % 2) == 0) |
212 |
|
paral_tokens = numThr / 2; |
213 |
|
else { |
214 |
|
return -1; |
215 |
|
} |
216 |
|
|
217 |
|
while ( i < nvars) { |
218 |
|
// for (int k=0;k<point.Size(); k++) |
219 |
|
// cout << z[k] << " " ; |
220 |
|
// cout << endl; |
221 |
|
if ((i + paral_tokens -1) >= nvars) |
222 |
|
paral_tokens = nvars - i; |
223 |
|
//cout << "i:" << i << endl; |
224 |
|
omp_set_dynamic(0); |
225 |
|
omp_set_nested(1); |
226 |
|
#pragma omp parallel for num_threads(paral_tokens) private(k) |
227 |
|
for (j = 0; j < paral_tokens; ++j) { |
228 |
|
// cout << "thr_for:" << omp_get_num_threads()<<endl; |
229 |
|
// cout << "J:" << j << endl; |
230 |
|
storage[j].z = z; |
231 |
|
storage[j].delta = delta; |
232 |
|
DoubleVector v1(z); |
233 |
|
DoubleVector v2(z); |
234 |
|
k = param[i+j]; |
235 |
|
v1[k] += delta[k]; |
236 |
|
v2[k] -= delta[k]; |
237 |
|
|
238 |
|
#pragma omp parallel sections num_threads(2) |
239 |
|
{ |
240 |
|
// cout << "thr_sec:" << omp_get_num_threads()<<endl; |
241 |
|
#pragma omp section |
242 |
|
{ |
243 |
|
storage[j].ftmp = EcoSystems[j]->SimulateAndUpdate(v1); |
244 |
|
// cout << "->" << j << endl; |
245 |
|
} |
246 |
|
#pragma omp section |
247 |
|
{ |
248 |
|
// cout << "->" << j+paral_tokens << endl; |
249 |
|
storage[j+paral_tokens].ftmp = EcoSystems[j+paral_tokens]->SimulateAndUpdate(v2); |
250 |
|
} |
251 |
|
} |
252 |
|
//cout << "-----" << endl; |
253 |
|
// cout <<i+j<< "-z["<< param[i+j]<<"]:" << (storage[j].z)[param[i+j]] << " - " << storage[j].ftmp << endl; |
254 |
|
|
255 |
|
if (storage[j].ftmp < minf) { |
256 |
|
storage[j].iters = 1; |
257 |
|
storage[j].z[k] = v1[k]; |
258 |
|
// storage[j].delta[k] += delta[k]; |
259 |
|
// minf = ftmp; |
260 |
|
} else { |
261 |
|
storage[j].iters = 2; |
262 |
|
storage[j].delta[k] = 0.0 - delta[k]; |
263 |
|
if (storage[j+paral_tokens].ftmp < minf) { |
264 |
|
// minf = ftmp; |
265 |
|
storage[j].ftmp = storage[j+paral_tokens].ftmp; |
266 |
|
storage[j].z[k] = v2[k]; |
267 |
|
} |
268 |
|
// else |
269 |
|
// storage[j].z[k] = point[k]; |
270 |
|
} |
271 |
|
} |
272 |
|
|
273 |
|
// cout << i <<"-z["<< param[i]<<"]:" <<z[param[i]] << " - " ; |
274 |
|
for (j = 0; j < paral_tokens; ++j) { |
275 |
|
i++; |
276 |
|
iters += storage[j].iters; |
277 |
|
if (storage[j].ftmp < minf) { |
278 |
|
minf = storage[j].ftmp; |
279 |
|
z = storage[j].z; |
280 |
|
delta = storage[j].delta; |
281 |
|
// cout << storage[j].ftmp; |
282 |
|
break; |
283 |
|
} |
284 |
|
} |
285 |
|
} |
286 |
|
|
287 |
|
// omp_set_num_threads(numThr); |
288 |
|
|
289 |
|
for (i = 0; i < nvars; ++i) |
290 |
|
point[i] = z[i]; |
291 |
|
return minf; |
292 |
|
} |
293 |
|
#endif |
294 |
|
|
295 |
void OptInfoHooke::OptimiseLikelihood() { |
void OptInfoHooke::OptimiseLikelihood() { |
296 |
|
|
297 |
double oldf, newf, bestf, steplength, tmp; |
double oldf, newf, bestf, steplength, tmp; |
314 |
IntVector trapped(nvars, 0); |
IntVector trapped(nvars, 0); |
315 |
|
|
316 |
EcoSystem->scaleVariables(); |
EcoSystem->scaleVariables(); |
317 |
|
#ifndef NO_OPENMP |
318 |
|
int numThr = omp_get_max_threads ( ); |
319 |
|
for (i = 0; i < numThr; i++) |
320 |
|
EcoSystems[i]->scaleVariables(); |
321 |
|
#endif |
322 |
EcoSystem->getOptScaledValues(x); |
EcoSystem->getOptScaledValues(x); |
323 |
EcoSystem->getOptLowerBounds(lowerb); |
EcoSystem->getOptLowerBounds(lowerb); |
324 |
EcoSystem->getOptUpperBounds(upperb); |
EcoSystem->getOptUpperBounds(upperb); |
337 |
bestx[i] = x[i]; |
bestx[i] = x[i]; |
338 |
trialx[i] = x[i]; |
trialx[i] = x[i]; |
339 |
param[i] = i; |
param[i] = i; |
340 |
|
//FIXME rand_r |
341 |
delta[i] = ((2 * (rand() % 2)) - 1) * rho; //JMB - randomise the sign |
delta[i] = ((2 * (rand() % 2)) - 1) * rho; //JMB - randomise the sign |
342 |
} |
} |
343 |
|
|
356 |
if (isZero(steplength)) |
if (isZero(steplength)) |
357 |
steplength = rho; |
steplength = rho; |
358 |
|
|
359 |
|
iters = 0; |
360 |
|
|
361 |
while (1) { |
while (1) { |
362 |
if (isZero(bestf)) { |
if (isZero(bestf)) { |
363 |
|
#ifdef NO_OPENMP |
364 |
iters = EcoSystem->getFuncEval() - offset; |
iters = EcoSystem->getFuncEval() - offset; |
365 |
|
#endif |
366 |
handle.logMessage(LOGINFO, "Error in Hooke & Jeeves optimisation after", iters, "function evaluations, f(x) = 0"); |
handle.logMessage(LOGINFO, "Error in Hooke & Jeeves optimisation after", iters, "function evaluations, f(x) = 0"); |
367 |
converge = -1; |
converge = -1; |
368 |
return; |
return; |
371 |
/* randomize the order of the parameters once in a while */ |
/* randomize the order of the parameters once in a while */ |
372 |
rchange = 0; |
rchange = 0; |
373 |
while (rchange < nvars) { |
while (rchange < nvars) { |
374 |
|
//FIXME rand_r |
375 |
rnumber = rand() % nvars; |
rnumber = rand() % nvars; |
376 |
rcheck = 1; |
rcheck = 1; |
377 |
for (i = 0; i < rchange; i++) |
for (i = 0; i < rchange; i++) |
386 |
/* find best new point, one coord at a time */ |
/* find best new point, one coord at a time */ |
387 |
for (i = 0; i < nvars; i++) |
for (i = 0; i < nvars; i++) |
388 |
trialx[i] = x[i]; |
trialx[i] = x[i]; |
389 |
|
#ifndef NO_OPENMP |
390 |
|
newf = this->bestNearbyOMP(delta, trialx, bestf, param); |
391 |
|
if (newf == -1) { |
392 |
|
handle.logMessage(LOGINFO, "\nStopping Hooke & Jeeves optimisation algorithm\n"); |
393 |
|
handle.logMessage(LOGINFO, "\nThe number of threads must be a multiple of 2\n"); |
394 |
|
return; |
395 |
|
} |
396 |
|
#else |
397 |
|
newf = this->bestNearby(delta, trialx, bestf, param); |
398 |
|
#endif |
399 |
|
/* if too many function evaluations occur, terminate the algorithm */ |
400 |
|
|
401 |
|
#ifdef NO_OPENMP |
402 |
|
iters = EcoSystem->getFuncEval() - offset; |
403 |
|
#endif |
404 |
|
// cout << "newf:" << newf << "-" << iters<<endl; |
405 |
|
if (iters > hookeiter) { |
406 |
|
handle.logMessage(LOGINFO, "\nStopping Hooke & Jeeves optimisation algorithm\n"); |
407 |
|
handle.logMessage(LOGINFO, "The optimisation stopped after", iters, "function evaluations"); |
408 |
|
handle.logMessage(LOGINFO, "The steplength was reduced to", steplength); |
409 |
|
handle.logMessage(LOGINFO, "The optimisation stopped because the maximum number of function evaluations"); |
410 |
|
handle.logMessage(LOGINFO, "was reached and NOT because an optimum was found for this run"); |
411 |
|
|
412 |
|
score = EcoSystem->SimulateAndUpdate(trialx); |
413 |
|
handle.logMessage(LOGINFO, "\nHooke & Jeeves finished with a likelihood score of", score); |
414 |
|
for (i = 0; i < nvars; i++) |
415 |
|
bestx[i] = trialx[i] * init[i]; |
416 |
|
EcoSystem->storeVariables(score, bestx); |
417 |
|
return; |
418 |
|
} |
419 |
|
|
420 |
|
/* if we made some improvements, pursue that direction */ |
421 |
|
while (newf < bestf) { |
422 |
|
for (i = 0; i < nvars; i++) { |
423 |
|
/* if it has been trapped but f has now gotten better (bndcheck) */ |
424 |
|
/* we assume that we are out of the trap, reset the counters */ |
425 |
|
/* and go back to the stepsize we had when we got trapped */ |
426 |
|
if ((trapped[i]) && (newf < oldf * bndcheck)) { |
427 |
|
trapped[i] = 0; |
428 |
|
lbound[i] = 0; |
429 |
|
rbounds[i] = 0; |
430 |
|
delta[i] = initialstep[i]; |
431 |
|
|
432 |
|
} else if (trialx[i] < (lowerb[i] + verysmall)) { |
433 |
|
lbound[i]++; |
434 |
|
trialx[i] = lowerb[i]; |
435 |
|
if (!trapped[i]) { |
436 |
|
initialstep[i] = delta[i]; |
437 |
|
trapped[i] = 1; |
438 |
|
} |
439 |
|
/* if it has hit the bounds 2 times then increase the stepsize */ |
440 |
|
if (lbound[i] >= 2) |
441 |
|
delta[i] /= rho; |
442 |
|
|
443 |
|
} else if (trialx[i] > (upperb[i] - verysmall)) { |
444 |
|
rbounds[i]++; |
445 |
|
trialx[i] = upperb[i]; |
446 |
|
if (!trapped[i]) { |
447 |
|
initialstep[i] = delta[i]; |
448 |
|
trapped[i] = 1; |
449 |
|
} |
450 |
|
/* if it has hit the bounds 2 times then increase the stepsize */ |
451 |
|
if (rbounds[i] >= 2) |
452 |
|
delta[i] /= rho; |
453 |
|
} |
454 |
|
} |
455 |
|
|
456 |
|
for (i = 0; i < nvars; i++) { |
457 |
|
/* firstly, arrange the sign of delta[] */ |
458 |
|
if (trialx[i] < x[i]) |
459 |
|
delta[i] = 0.0 - fabs(delta[i]); |
460 |
|
else |
461 |
|
delta[i] = fabs(delta[i]); |
462 |
|
|
463 |
|
/* now, move further in this direction */ |
464 |
|
tmp = x[i]; |
465 |
|
x[i] = trialx[i]; |
466 |
|
trialx[i] = trialx[i] + trialx[i] - tmp; |
467 |
|
} |
468 |
|
|
469 |
|
/* only move forward if this is really an improvement */ |
470 |
|
oldf = newf; |
471 |
|
newf = EcoSystem->SimulateAndUpdate(trialx); |
472 |
|
#ifndef NO_OPENMP |
473 |
|
iters++; |
474 |
|
#endif |
475 |
|
if ((isEqual(newf, oldf)) || (newf > oldf)) { |
476 |
|
newf = oldf; //JMB no improvement, so reset the value of newf |
477 |
|
break; |
478 |
|
} |
479 |
|
|
480 |
|
/* OK, it's better, so update variables and look around */ |
481 |
|
bestf = newf; |
482 |
|
for (i = 0; i < nvars; i++) |
483 |
|
x[i] = trialx[i]; |
484 |
|
|
485 |
|
#ifndef NO_OPENMP |
486 |
|
newf = this->bestNearbyOMP(delta, trialx, bestf, param); |
487 |
|
if (newf == -1) { |
488 |
|
handle.logMessage(LOGINFO, "\nStopping Hooke & Jeeves optimisation algorithm\n"); |
489 |
|
handle.logMessage(LOGINFO, "\nThe number of threads must be a multiple of 2\n"); |
490 |
|
return; |
491 |
|
} |
492 |
|
#else |
493 |
newf = this->bestNearby(delta, trialx, bestf, param); |
newf = this->bestNearby(delta, trialx, bestf, param); |
494 |
|
#endif |
495 |
|
if (isEqual(newf, bestf)) |
496 |
|
break; |
497 |
|
|
498 |
/* if too many function evaluations occur, terminate the algorithm */ |
/* if too many function evaluations occur, terminate the algorithm */ |
499 |
|
#ifdef NO_OPENMP |
500 |
iters = EcoSystem->getFuncEval() - offset; |
iters = EcoSystem->getFuncEval() - offset; |
501 |
|
#endif |
502 |
|
// cout << "newf:" << newf << "-" << iters<<endl; |
503 |
if (iters > hookeiter) { |
if (iters > hookeiter) { |
504 |
handle.logMessage(LOGINFO, "\nStopping Hooke & Jeeves optimisation algorithm\n"); |
handle.logMessage(LOGINFO, "\nStopping Hooke & Jeeves optimisation algorithm\n"); |
505 |
handle.logMessage(LOGINFO, "The optimisation stopped after", iters, "function evaluations"); |
handle.logMessage(LOGINFO, "The optimisation stopped after", iters, "function evaluations"); |
514 |
EcoSystem->storeVariables(score, bestx); |
EcoSystem->storeVariables(score, bestx); |
515 |
return; |
return; |
516 |
} |
} |
517 |
|
} |
518 |
|
|
519 |
|
#ifdef NO_OPENMP |
520 |
|
iters = EcoSystem->getFuncEval() - offset; |
521 |
|
#endif |
522 |
|
if (newf < bestf) { |
523 |
|
for (i = 0; i < nvars; i++) |
524 |
|
bestx[i] = x[i] * init[i]; |
525 |
|
bestf = newf; |
526 |
|
handle.logMessage(LOGINFO, "\nNew optimum found after", iters, "function evaluations"); |
527 |
|
handle.logMessage(LOGINFO, "The likelihood score is", bestf, "at the point"); |
528 |
|
EcoSystem->storeVariables(bestf, bestx); |
529 |
|
EcoSystem->writeBestValues(); |
530 |
|
|
531 |
|
} else |
532 |
|
handle.logMessage(LOGINFO, "Checking convergence criteria after", iters, "function evaluations ..."); |
533 |
|
|
534 |
|
/* if the step length is less than hookeeps, terminate the algorithm */ |
535 |
|
if (steplength < hookeeps) { |
536 |
|
handle.logMessage(LOGINFO, "\nStopping Hooke & Jeeves optimisation algorithm\n"); |
537 |
|
handle.logMessage(LOGINFO, "The optimisation stopped after", iters, "function evaluations"); |
538 |
|
handle.logMessage(LOGINFO, "The steplength was reduced to", steplength); |
539 |
|
handle.logMessage(LOGINFO, "The optimisation stopped because an optimum was found for this run"); |
540 |
|
|
541 |
|
converge = 1; |
542 |
|
score = bestf; |
543 |
|
handle.logMessage(LOGINFO, "\nHooke & Jeeves finished with a likelihood score of", score); |
544 |
|
EcoSystem->storeVariables(bestf, bestx); |
545 |
|
return; |
546 |
|
} |
547 |
|
|
548 |
|
steplength *= rho; |
549 |
|
handle.logMessage(LOGINFO, "Reducing the steplength to", steplength); |
550 |
|
for (i = 0; i < nvars; i++) |
551 |
|
delta[i] *= rho; |
552 |
|
} |
553 |
|
} |
554 |
|
|
555 |
|
#ifdef GADGET_OPENMP
/* Reproducible parallel variant of bestNearby: like bestNearbyOMP it
 * evaluates paral_tokens coordinates per sweep (+delta / -delta in a pair
 * of parallel sections on separate EcoSystems[] replicas), but instead of
 * accepting the first improvement it picks the single best trial of the
 * whole batch, and repeats the full sweep paral_tokens times so the result
 * does not depend on scheduling order.
 *
 * delta    in/out: per-parameter step sizes (sign flipped when -delta wins)
 * point    in/out: current best point; overwritten with the best point found
 * prevbest in:     objective value at `point` on entry
 * param    in:     randomised ordering of the parameter indices
 *
 * Returns the best (lowest) objective value found, or -1 if the number of
 * OpenMP threads is odd (the pairing scheme needs an even count).
 */
double OptInfoHooke::bestNearbyOMP2(DoubleVector& delta, DoubleVector& point, double prevbest, IntVector& param) {
  double minf;
  int i, j, k, ii;
  DoubleVector z(point);
  int bestId = 0;
  /* one scratch slot per thread; slot j and slot j+paral_tokens hold the
   * +delta and -delta trial results for the same coordinate */
  struct Storage {
    DoubleVector z;
    DoubleVector delta;
    double ftmp;
    int iters;
  };

  minf = prevbest;

  int paral_tokens, numThr, nvars = point.Size();
  numThr = omp_get_max_threads ( );

  Storage* storage = new Storage[numThr];
  if ((numThr % 2) == 0)
    paral_tokens = numThr / 2;
  else {
    delete[] storage;  // was leaked on this early-exit path (normal path frees it below)
    return -1;
  }

  /* NOTE(review): paral_tokens may be shrunk by the tail check inside the
   * while loop and is not restored for later ii iterations — confirm this
   * is the intended sweep count before changing it */
  for (ii=0; ii< paral_tokens; ii++) {
    i = 0;
    while ( i < nvars) {
      /* shrink the batch at the tail so we never index past param[nvars-1] */
      if ((i + paral_tokens -1) >= nvars)
        paral_tokens = nvars - i;
      omp_set_dynamic(0);
      omp_set_nested(1);  // nested: each loop iteration opens its own sections team
#pragma omp parallel for num_threads(paral_tokens) private(k)
      for (j = 0; j < paral_tokens; ++j) {
        storage[j].z = z;
        storage[j].delta = delta;
        DoubleVector v1(z);
        DoubleVector v2(z);
        k = param[i+j];
        v1[k] += delta[k];
        v2[k] -= delta[k];

#pragma omp parallel sections num_threads(2)
        {
#pragma omp section
          {
            storage[j].ftmp = EcoSystems[j]->SimulateAndUpdate(v1);
          }
#pragma omp section
          {
            storage[j+paral_tokens].ftmp = EcoSystems[j+paral_tokens]->SimulateAndUpdate(v2);
          }
        }

        if (storage[j].ftmp < minf) {
          storage[j].iters = 1;
          storage[j].z[k] = v1[k];
        } else {
          storage[j].iters = 2;
          storage[j].delta[k] = 0.0 - delta[k];  // remember the flipped direction
          if (storage[j+paral_tokens].ftmp < minf) {
            storage[j].ftmp = storage[j+paral_tokens].ftmp;
            storage[j].z[k] = v2[k];
          }
          /* NOTE(review): this updates the shared member `iters` from inside
           * the parallel for without atomics — looks like a benign counter
           * race; confirm before relying on exact evaluation counts */
          else iters += 2;
        }
      }

      /* serial reduction: take the best trial of the whole batch (order
       * independent, unlike bestNearbyOMP which takes the first improver) */
      bestId = 0;
      for (j = 1; j < paral_tokens; ++j) {
        if (storage[j].ftmp < storage[bestId].ftmp)
          bestId = j;
      }
      if (storage[bestId].ftmp < minf) {
        iters += storage[bestId].iters;
        minf = storage[bestId].ftmp;
        z = storage[bestId].z;
        delta = storage[bestId].delta;
      }

      i += paral_tokens;
    }
  }

  delete[] storage;
  for (i = 0; i < nvars; ++i)
    point[i] = z[i];

  return minf;
}
667 |
|
|
668 |
|
void OptInfoHooke::OptimiseLikelihoodOMP() { |
669 |
|
double oldf, newf, bestf, steplength, tmp; |
670 |
|
int i, offset; |
671 |
|
int rchange, rcheck, rnumber; //Used to randomise the order of the parameters |
672 |
|
|
673 |
|
handle.logMessage(LOGINFO, "\nStarting Hooke & Jeeves optimisation algorithm\n"); |
674 |
|
int nvars = EcoSystem->numOptVariables(); |
675 |
|
DoubleVector x(nvars); |
676 |
|
DoubleVector trialx(nvars); |
677 |
|
DoubleVector bestx(nvars); |
678 |
|
DoubleVector lowerb(nvars); |
679 |
|
DoubleVector upperb(nvars); |
680 |
|
DoubleVector init(nvars); |
681 |
|
DoubleVector initialstep(nvars, rho); |
682 |
|
DoubleVector delta(nvars); |
683 |
|
IntVector param(nvars, 0); |
684 |
|
IntVector lbound(nvars, 0); |
685 |
|
IntVector rbounds(nvars, 0); |
686 |
|
IntVector trapped(nvars, 0); |
687 |
|
|
688 |
|
EcoSystem->scaleVariables(); |
689 |
|
#ifndef NO_OPENMP |
690 |
|
int numThr = omp_get_max_threads ( ); |
691 |
|
for (i = 0; i < numThr; i++) |
692 |
|
EcoSystems[i]->scaleVariables(); |
693 |
|
#endif |
694 |
|
EcoSystem->getOptScaledValues(x); |
695 |
|
EcoSystem->getOptLowerBounds(lowerb); |
696 |
|
EcoSystem->getOptUpperBounds(upperb); |
697 |
|
EcoSystem->getOptInitialValues(init); |
698 |
|
|
699 |
|
for (i = 0; i < nvars; i++) { |
700 |
|
// Scaling the bounds, because the parameters are scaled |
701 |
|
lowerb[i] = lowerb[i] / init[i]; |
702 |
|
upperb[i] = upperb[i] / init[i]; |
703 |
|
if (lowerb[i] > upperb[i]) { |
704 |
|
tmp = lowerb[i]; |
705 |
|
lowerb[i] = upperb[i]; |
706 |
|
upperb[i] = tmp; |
707 |
|
} |
708 |
|
|
709 |
|
bestx[i] = x[i]; |
710 |
|
trialx[i] = x[i]; |
711 |
|
param[i] = i; |
712 |
|
//FIXME rand_r |
713 |
|
delta[i] = ((2 * (rand() % 2)) - 1) * rho; //JMB - randomise the sign |
714 |
|
} |
715 |
|
|
716 |
|
bestf = EcoSystem->SimulateAndUpdate(trialx); |
717 |
|
if (bestf != bestf) { //check for NaN |
718 |
|
handle.logMessage(LOGINFO, "Error starting Hooke & Jeeves optimisation with f(x) = infinity"); |
719 |
|
converge = -1; |
720 |
|
iters = 1; |
721 |
|
return; |
722 |
|
} |
723 |
|
|
724 |
|
offset = EcoSystem->getFuncEval(); //number of function evaluations done before loop |
725 |
|
newf = bestf; |
726 |
|
oldf = bestf; |
727 |
|
steplength = lambda; |
728 |
|
if (isZero(steplength)) |
729 |
|
steplength = rho; |
730 |
|
|
731 |
|
iters = 0; |
732 |
|
|
733 |
|
while (1) { |
734 |
|
if (isZero(bestf)) { |
735 |
|
#ifdef NO_OPENMP |
736 |
|
iters = EcoSystem->getFuncEval() - offset; |
737 |
|
#endif |
738 |
|
handle.logMessage(LOGINFO, "Error in Hooke & Jeeves optimisation after", iters, "function evaluations, f(x) = 0"); |
739 |
|
converge = -1; |
740 |
|
return; |
741 |
|
} |
742 |
|
|
743 |
|
/* randomize the order of the parameters once in a while */ |
744 |
|
rchange = 0; |
745 |
|
while (rchange < nvars) { |
746 |
|
//FIXME rand_r |
747 |
|
rnumber = rand() % nvars; |
748 |
|
rcheck = 1; |
749 |
|
for (i = 0; i < rchange; i++) |
750 |
|
if (param[i] == rnumber) |
751 |
|
rcheck = 0; |
752 |
|
if (rcheck) { |
753 |
|
param[rchange] = rnumber; |
754 |
|
rchange++; |
755 |
|
} |
756 |
|
} |
757 |
|
|
758 |
|
/* find best new point, one coord at a time */ |
759 |
|
for (i = 0; i < nvars; i++) |
760 |
|
trialx[i] = x[i]; |
761 |
|
#ifndef NO_OPENMP |
762 |
|
newf = this->bestNearbyOMP2(delta, trialx, bestf, param); |
763 |
|
if (newf == -1) { |
764 |
|
handle.logMessage(LOGINFO, "\nStopping Hooke & Jeeves optimisation algorithm\n"); |
765 |
|
handle.logMessage(LOGINFO, "\nThe number of threads must be a multiple of 2\n"); |
766 |
|
return; |
767 |
|
} |
768 |
|
#else |
769 |
|
newf = this->bestNearby(delta, trialx, bestf, param); |
770 |
|
#endif |
771 |
|
/* if too many function evaluations occur, terminate the algorithm */ |
772 |
|
|
773 |
|
#ifdef NO_OPENMP |
774 |
|
iters = EcoSystem->getFuncEval() - offset; |
775 |
|
#endif |
776 |
|
// cout << "newf:" << newf << "-" << iters<<endl; |
777 |
|
if (iters > hookeiter) { |
778 |
|
handle.logMessage(LOGINFO, "\nStopping Hooke & Jeeves optimisation algorithm\n"); |
779 |
|
handle.logMessage(LOGINFO, "The optimisation stopped after", iters, "function evaluations"); |
780 |
|
handle.logMessage(LOGINFO, "The steplength was reduced to", steplength); |
781 |
|
handle.logMessage(LOGINFO, "The optimisation stopped because the maximum number of function evaluations"); |
782 |
|
handle.logMessage(LOGINFO, "was reached and NOT because an optimum was found for this run"); |
783 |
|
|
784 |
|
score = EcoSystem->SimulateAndUpdate(trialx); |
785 |
|
handle.logMessage(LOGINFO, "\nHooke & Jeeves finished with a likelihood score of", score); |
786 |
|
for (i = 0; i < nvars; i++) |
787 |
|
bestx[i] = trialx[i] * init[i]; |
788 |
|
EcoSystem->storeVariables(score, bestx); |
789 |
|
// EcoSystem->writeOptValuesOMP(); |
790 |
|
return; |
791 |
|
} |
792 |
|
|
793 |
/* if we made some improvements, pursue that direction */ |
/* if we made some improvements, pursue that direction */ |
794 |
while (newf < bestf) { |
while (newf < bestf) { |
842 |
/* only move forward if this is really an improvement */ |
/* only move forward if this is really an improvement */ |
843 |
oldf = newf; |
oldf = newf; |
844 |
newf = EcoSystem->SimulateAndUpdate(trialx); |
newf = EcoSystem->SimulateAndUpdate(trialx); |
845 |
|
#ifndef NO_OPENMP |
846 |
|
iters++; |
847 |
|
#endif |
848 |
if ((isEqual(newf, oldf)) || (newf > oldf)) { |
if ((isEqual(newf, oldf)) || (newf > oldf)) { |
849 |
newf = oldf; //JMB no improvement, so reset the value of newf |
newf = oldf; //JMB no improvement, so reset the value of newf |
850 |
break; |
break; |
854 |
bestf = newf; |
bestf = newf; |
855 |
for (i = 0; i < nvars; i++) |
for (i = 0; i < nvars; i++) |
856 |
x[i] = trialx[i]; |
x[i] = trialx[i]; |
857 |
|
|
858 |
|
#ifndef NO_OPENMP |
859 |
|
newf = this->bestNearbyOMP2(delta, trialx, bestf, param); |
860 |
|
if (newf == -1) { |
861 |
|
handle.logMessage(LOGINFO, "\nStopping Hooke & Jeeves optimisation algorithm\n"); |
862 |
|
handle.logMessage(LOGINFO, "\nThe number of threads must be a multiple of 2\n"); |
863 |
|
return; |
864 |
|
} |
865 |
|
#else |
866 |
newf = this->bestNearby(delta, trialx, bestf, param); |
newf = this->bestNearby(delta, trialx, bestf, param); |
867 |
|
#endif |
868 |
if (isEqual(newf, bestf)) |
if (isEqual(newf, bestf)) |
869 |
break; |
break; |
870 |
|
|
871 |
/* if too many function evaluations occur, terminate the algorithm */ |
/* if too many function evaluations occur, terminate the algorithm */ |
872 |
|
#ifdef NO_OPENMP |
873 |
iters = EcoSystem->getFuncEval() - offset; |
iters = EcoSystem->getFuncEval() - offset; |
874 |
|
#endif |
875 |
|
// cout << "newf:" << newf << "-" << iters<<endl; |
876 |
if (iters > hookeiter) { |
if (iters > hookeiter) { |
877 |
handle.logMessage(LOGINFO, "\nStopping Hooke & Jeeves optimisation algorithm\n"); |
handle.logMessage(LOGINFO, "\nStopping Hooke & Jeeves optimisation algorithm\n"); |
878 |
handle.logMessage(LOGINFO, "The optimisation stopped after", iters, "function evaluations"); |
handle.logMessage(LOGINFO, "The optimisation stopped after", iters, "function evaluations"); |
885 |
for (i = 0; i < nvars; i++) |
for (i = 0; i < nvars; i++) |
886 |
bestx[i] = trialx[i] * init[i]; |
bestx[i] = trialx[i] * init[i]; |
887 |
EcoSystem->storeVariables(score, bestx); |
EcoSystem->storeVariables(score, bestx); |
888 |
|
// EcoSystem->writeOptValuesOMP(); |
889 |
return; |
return; |
890 |
} |
} |
891 |
} |
} |
892 |
|
|
893 |
|
#ifdef NO_OPENMP |
894 |
iters = EcoSystem->getFuncEval() - offset; |
iters = EcoSystem->getFuncEval() - offset; |
895 |
|
#endif |
896 |
if (newf < bestf) { |
if (newf < bestf) { |
897 |
for (i = 0; i < nvars; i++) |
for (i = 0; i < nvars; i++) |
898 |
bestx[i] = x[i] * init[i]; |
bestx[i] = x[i] * init[i]; |
916 |
score = bestf; |
score = bestf; |
917 |
handle.logMessage(LOGINFO, "\nHooke & Jeeves finished with a likelihood score of", score); |
handle.logMessage(LOGINFO, "\nHooke & Jeeves finished with a likelihood score of", score); |
918 |
EcoSystem->storeVariables(bestf, bestx); |
EcoSystem->storeVariables(bestf, bestx); |
919 |
|
// EcoSystem->writeOptValuesOMP(); |
920 |
return; |
return; |
921 |
} |
} |
922 |
|
|
926 |
delta[i] *= rho; |
delta[i] *= rho; |
927 |
} |
} |
928 |
} |
} |
929 |
|
#endif |