13 |
/* can be used as a local optimizer for difficult functions. */ |
/* can be used as a local optimizer for difficult functions. */ |
14 |
/* */ |
/* */ |
15 |
/* The author can be contacted at h2zr1001@vm.cis.smu.edu */ |
/* The author can be contacted at h2zr1001@vm.cis.smu.edu */ |
16 |
|
// |
17 |
/* This file is a translation of a fortran code, which is an example of the*/ |
/* This file is a translation of a fortran code, which is an example of the*/ |
18 |
/* Corana et al. simulated annealing algorithm for multimodal and robust */ |
/* Corana et al. simulated annealing algorithm for multimodal and robust */ |
19 |
/* optimization as implemented and modified by Goffe et al.    */ |
/* optimization as implemented and modified by Goffe et al.    */ |
46 |
/* parameters one should adjust to modify the runtime of the */ |
/* parameters one should adjust to modify the runtime of the */ |
47 |
/* algorithm and its robustness. */ |
/* algorithm and its robustness. */ |
48 |
/* 5. Try constraining the algorithm with either LB or UB. */ |
/* 5. Try constraining the algorithm with either LB or UB. */ |
49 |
|
// |
50 |
/* Synopsis: */ |
/* Synopsis: */ |
51 |
/* This routine implements the continuous simulated annealing global */ |
/* This routine implements the continuous simulated annealing global */ |
52 |
/* optimization algorithm described in Corana et al.'s article */ |
/* optimization algorithm described in Corana et al.'s article */ |
54 |
/* "Simulated Annealing" Algorithm" in the September 1987 (vol. 13, */ |
/* "Simulated Annealing" Algorithm" in the September 1987 (vol. 13, */ |
55 |
/* no. 3, pp. 262-280) issue of the ACM Transactions on Mathematical */ |
/* no. 3, pp. 262-280) issue of the ACM Transactions on Mathematical */ |
56 |
/* Software. */ |
/* Software. */ |
57 |
|
// |
58 |
/* A very quick (perhaps too quick) overview of SA: */ |
/* A very quick (perhaps too quick) overview of SA: */ |
59 |
/* SA tries to find the global optimum of an N dimensional function. */ |
/* SA tries to find the global optimum of an N dimensional function. */ |
60 |
/* It moves both up and downhill and as the optimization process */ |
/* It moves both up and downhill and as the optimization process */ |
79 |
/* rejections rise. Given the scheme for the selection for VM, VM falls. */ |
/* rejections rise. Given the scheme for the selection for VM, VM falls. */ |
80 |
/* Thus, as T declines, VM falls and SA focuses upon the most promising */ |
/* Thus, as T declines, VM falls and SA focuses upon the most promising */ |
81 |
/* area for optimization. */ |
/* area for optimization. */ |
82 |
|
// |
83 |
/* The importance of the parameter T: */ |
/* The importance of the parameter T: */ |
84 |
/* The parameter T is crucial in using SA successfully. It influences */ |
/* The parameter T is crucial in using SA successfully. It influences */ |
85 |
/* VM, the step length over which the algorithm searches for optima. For */ |
/* VM, the step length over which the algorithm searches for optima. For */ |
93 |
/* optimizing a function, it is worthwhile to run a trial run first. Set */ |
/* optimizing a function, it is worthwhile to run a trial run first. Set */ |
94 |
/* RT = 1.5 and T = 1.0. With RT > 1.0, the temperature increases and VM */ |
/* RT = 1.5 and T = 1.0. With RT > 1.0, the temperature increases and VM */ |
95 |
/* rises as well. Then select the T that produces a large enough VM. */ |
/* rises as well. Then select the T that produces a large enough VM. */ |
96 |
|
// |
97 |
/* For modifications to the algorithm and many details on its use, */ |
/* For modifications to the algorithm and many details on its use, */ |
98 |
/* (particularly for econometric applications) see Goffe, Ferrier */ |
/* (particularly for econometric applications) see Goffe, Ferrier */ |
99 |
/* and Rogers, "Global Optimization of Statistical Functions with */ |
/* and Rogers, "Global Optimization of Statistical Functions with */ |
105 |
/* Dallas, TX 75275 */ |
/* Dallas, TX 75275 */ |
106 |
/* h2zr1001 @ smuvm1 (Bitnet) */ |
/* h2zr1001 @ smuvm1 (Bitnet) */ |
107 |
/* h2zr1001 @ vm.cis.smu.edu (Internet) */ |
/* h2zr1001 @ vm.cis.smu.edu (Internet) */ |
108 |
|
// |
109 |
/* As far as possible, the parameters here have the same name as in */ |
/* As far as possible, the parameters here have the same name as in */ |
110 |
/* the description of the algorithm on pp. 266-8 of Corana et al. */ |
/* the description of the algorithm on pp. 266-8 of Corana et al. */ |
111 |
|
// |
112 |
/* Input Parameters: */ |
/* Input Parameters: */ |
113 |
/* Note: The suggested values generally come from Corana et al. To */ |
/* Note: The suggested values generally come from Corana et al. To */ |
114 |
/* drastically reduce runtime, see Goffe et al., pp. 17-8 for */ |
/* drastically reduce runtime, see Goffe et al., pp. 17-8 for */ |
160 |
/* of all points are accepted, the input value is not very */ |
/* of all points are accepted, the input value is not very */ |
161 |
/* important (i.e. if the value is off, SA adjusts VM to the */ |
/* important (i.e. if the value is off, SA adjusts VM to the */ |
162 |
/* correct value). (DP(N)) */ |
/* correct value). (DP(N)) */ |
163 |
|
// |
164 |
/* Output Parameters: */ |
/* Output Parameters: */ |
165 |
/* xopt - The variables that optimize the function. (DP(N)) */ |
/* xopt - The variables that optimize the function. (DP(N)) */ |
166 |
/* fopt - The optimal value of the function. (DP) */ |
/* fopt - The optimal value of the function. (DP) */ |
167 |
|
// |
168 |
/* JMB this has been modified to work with the gadget object structure */ |
/* JMB this has been modified to work with the gadget object structure */ |
169 |
/* This means that the function has been replaced by a call to ecosystem */ |
/* This means that the function has been replaced by a call to ecosystem */ |
170 |
/* object, and we can use the vector objects that have been defined */ |
/* object, and we can use the vector objects that have been defined */ |
177 |
#include "errorhandler.h" |
#include "errorhandler.h" |
178 |
#include "ecosystem.h" |
#include "ecosystem.h" |
179 |
#include "global.h" |
#include "global.h" |
180 |
|
#include "seq_optimize_template.h" |
181 |
|
#ifdef GADGET_OPENMP |
182 |
|
#include <omp.h> |
183 |
|
#endif |
184 |
|
|
185 |
extern Ecosystem* EcoSystem; |
extern Ecosystem* EcoSystem; |
186 |
|
#ifndef NO_OPENMP |
187 |
|
extern Ecosystem** EcoSystems; |
188 |
|
#endif |
189 |
|
//extern StochasticData* data; |
190 |
|
|
191 |
|
|
192 |
|
//void OptInfoSimann::OptimiseLikelihood() { |
193 |
|
// |
194 |
|
// //set initial values |
195 |
|
// int nacc = 0; //The number of accepted function evaluations |
196 |
|
// int nrej = 0; //The number of rejected function evaluations |
197 |
|
// int naccmet = 0; //The number of metropolis accepted function evaluations |
198 |
|
// |
199 |
|
// double tmp, p, pp, ratio, nsdiv; |
200 |
|
// double fopt, funcval, trialf; |
201 |
|
// int a, i, j, k, l, offset, quit; |
202 |
|
// int rchange, rcheck, rnumber; //Used to randomise the order of the parameters |
203 |
|
// |
204 |
|
// handle.logMessage(LOGINFO, "\nStarting Simulated Annealing optimisation algorithm\n"); |
205 |
|
// int nvars = EcoSystem->numOptVariables(); |
206 |
|
// DoubleVector x(nvars); |
207 |
|
// DoubleVector init(nvars); |
208 |
|
// DoubleVector trialx(nvars, 0.0); |
209 |
|
// DoubleVector bestx(nvars); |
210 |
|
// DoubleVector scalex(nvars); |
211 |
|
// DoubleVector lowerb(nvars); |
212 |
|
// DoubleVector upperb(nvars); |
213 |
|
// DoubleVector fstar(tempcheck); |
214 |
|
// DoubleVector vm(nvars, vminit); |
215 |
|
// IntVector param(nvars, 0); |
216 |
|
// IntVector nacp(nvars, 0); |
217 |
|
// |
218 |
|
// EcoSystem->resetVariables(); //JMB need to reset variables in case they have been scaled |
219 |
|
// if (scale) |
220 |
|
// EcoSystem->scaleVariables(); |
221 |
|
// EcoSystem->getOptScaledValues(x); |
222 |
|
// EcoSystem->getOptLowerBounds(lowerb); |
223 |
|
// EcoSystem->getOptUpperBounds(upperb); |
224 |
|
// EcoSystem->getOptInitialValues(init); |
225 |
|
// |
226 |
|
// for (i = 0; i < nvars; i++) { |
227 |
|
// bestx[i] = x[i]; |
228 |
|
// param[i] = i; |
229 |
|
// } |
230 |
|
// |
231 |
|
// if (scale) { |
232 |
|
// for (i = 0; i < nvars; i++) { |
233 |
|
// scalex[i] = x[i]; |
234 |
|
// // Scaling the bounds, because the parameters are scaled |
235 |
|
// lowerb[i] = lowerb[i] / init[i]; |
236 |
|
// upperb[i] = upperb[i] / init[i]; |
237 |
|
// if (lowerb[i] > upperb[i]) { |
238 |
|
// tmp = lowerb[i]; |
239 |
|
// lowerb[i] = upperb[i]; |
240 |
|
// upperb[i] = tmp; |
241 |
|
// } |
242 |
|
// } |
243 |
|
// } |
244 |
|
// |
245 |
|
// //funcval is the function value at x |
246 |
|
// funcval = EcoSystem->SimulateAndUpdate(x); |
247 |
|
// if (funcval != funcval) { //check for NaN |
248 |
|
// handle.logMessage(LOGINFO, "Error starting Simulated Annealing optimisation with f(x) = infinity"); |
249 |
|
// converge = -1; |
250 |
|
// iters = 1; |
251 |
|
// return; |
252 |
|
// } |
253 |
|
// |
254 |
|
// //the function is to be minimised so switch the sign of funcval (and trialf) |
255 |
|
// funcval = -funcval; |
256 |
|
// offset = EcoSystem->getFuncEval(); //number of function evaluations done before loop |
257 |
|
// nacc++; |
258 |
|
// cs /= lratio; //JMB save processing time |
259 |
|
// nsdiv = 1.0 / ns; |
260 |
|
// fopt = funcval; |
261 |
|
// for (i = 0; i < tempcheck; i++) |
262 |
|
// fstar[i] = funcval; |
263 |
|
// |
264 |
|
// //Start the main loop. Note that it terminates if |
265 |
|
// //(i) the algorithm succesfully optimises the function or |
266 |
|
// //(ii) there are too many function evaluations |
267 |
|
// while (1) { |
268 |
|
// for (a = 0; a < nt; a++) { |
269 |
|
// //Randomize the order of the parameters once in a while, to avoid |
270 |
|
// //the order having an influence on which changes are accepted |
271 |
|
// rchange = 0; |
272 |
|
// while (rchange < nvars) { |
273 |
|
// rnumber = rand_r(&seedP) % nvars; |
274 |
|
// rcheck = 1; |
275 |
|
// for (i = 0; i < rchange; i++) |
276 |
|
// if (param[i] == rnumber) |
277 |
|
// rcheck = 0; |
278 |
|
// if (rcheck) { |
279 |
|
// param[rchange] = rnumber; |
280 |
|
// rchange++; |
281 |
|
// } |
282 |
|
// } |
283 |
|
// |
284 |
|
// for (j = 0; j < ns; j++) { |
285 |
|
// for (l = 0; l < nvars; l++) { |
286 |
|
// //Generate trialx, the trial value of x |
287 |
|
// newValue(nvars, l, param, trialx, x, lowerb, upperb, vm); |
288 |
|
//// for (i = 0; i < nvars; i++) { |
289 |
|
//// if (i == param[l]) { |
290 |
|
//// trialx[i] = x[i] + ((randomNumber() * 2.0) - 1.0) * vm[i]; |
291 |
|
//// |
292 |
|
//// //If trialx is out of bounds, try again until we find a point that is OK |
293 |
|
//// if ((trialx[i] < lowerb[i]) || (trialx[i] > upperb[i])) { |
294 |
|
//// //JMB - this used to just select a random point between the bounds |
295 |
|
//// k = 0; |
296 |
|
//// while ((trialx[i] < lowerb[i]) || (trialx[i] > upperb[i])) { |
297 |
|
//// trialx[i] = x[i] + ((randomNumber() * 2.0) - 1.0) * vm[i]; |
298 |
|
//// k++; |
299 |
|
//// if (k > 10) //we've had 10 tries to find a point neatly, so give up |
300 |
|
//// trialx[i] = lowerb[i] + (upperb[i] - lowerb[i]) * randomNumber(); |
301 |
|
//// } |
302 |
|
//// } |
303 |
|
//// |
304 |
|
//// } else |
305 |
|
//// trialx[i] = x[i]; |
306 |
|
//// } |
307 |
|
// |
308 |
|
//// cout << "param:" << param[l] << "-" << trialx[param[l]] << endl; |
309 |
|
// |
310 |
|
// //Evaluate the function with the trial point trialx and return as -trialf |
311 |
|
// trialf = EcoSystem->SimulateAndUpdate(trialx); |
312 |
|
// trialf = -trialf; |
313 |
|
// |
314 |
|
// //If too many function evaluations occur, terminate the algorithm |
315 |
|
// iters = EcoSystem->getFuncEval() - offset; |
316 |
|
// if (iters > simanniter) { |
317 |
|
// handle.logMessage(LOGINFO, "\nStopping Simulated Annealing optimisation algorithm\n"); |
318 |
|
// handle.logMessage(LOGINFO, "The optimisation stopped after", iters, "function evaluations"); |
319 |
|
// handle.logMessage(LOGINFO, "The temperature was reduced to", t); |
320 |
|
// handle.logMessage(LOGINFO, "The optimisation stopped because the maximum number of function evaluations"); |
321 |
|
// handle.logMessage(LOGINFO, "was reached and NOT because an optimum was found for this run"); |
322 |
|
// handle.logMessage(LOGINFO, "Number of directly accepted points", nacc); |
323 |
|
// handle.logMessage(LOGINFO, "Number of metropolis accepted points", naccmet); |
324 |
|
// handle.logMessage(LOGINFO, "Number of rejected points", nrej); |
325 |
|
// |
326 |
|
// score = EcoSystem->SimulateAndUpdate(bestx); |
327 |
|
// handle.logMessage(LOGINFO, "\nSimulated Annealing finished with a likelihood score of", score); |
328 |
|
// return; |
329 |
|
// } |
330 |
|
//// cout << "f:" << trialf << endl; |
331 |
|
// //Accept the new point if the new function value better |
332 |
|
//// cout << "mustAccept:" << trialf << "|" << funcval<< endl; |
333 |
|
// if ((trialf - funcval) > verysmall) { |
334 |
|
// for (i = 0; i < nvars; i++) |
335 |
|
// x[i] = trialx[i]; |
336 |
|
// funcval = trialf; |
337 |
|
// nacc++; |
338 |
|
// nacp[param[l]]++; //JMB - not sure about this ... |
339 |
|
// |
340 |
|
// } else { |
341 |
|
// //Accept according to metropolis condition |
342 |
|
// p = expRep((trialf - funcval) / t); |
343 |
|
// pp = randomNumber(&seedM); |
344 |
|
// if (pp < p) { |
345 |
|
// //Accept point |
346 |
|
// for (i = 0; i < nvars; i++) |
347 |
|
// x[i] = trialx[i]; |
348 |
|
// funcval = trialf; |
349 |
|
// naccmet++; |
350 |
|
// nacp[param[l]]++; |
351 |
|
// } else { |
352 |
|
// //Reject point |
353 |
|
// nrej++; |
354 |
|
// } |
355 |
|
// } |
356 |
|
//// cout << "goog VALUE:" << funcval << endl; |
357 |
|
// // JMB added check for really silly values |
358 |
|
// if (isZero(trialf)) { |
359 |
|
// handle.logMessage(LOGINFO, "Error in Simulated Annealing optimisation after", iters, "function evaluations, f(x) = 0"); |
360 |
|
// converge = -1; |
361 |
|
// return; |
362 |
|
// } |
363 |
|
// |
364 |
|
// //If greater than any other point, record as new optimum |
365 |
|
// if ((trialf > fopt) && (trialf == trialf)) { |
366 |
|
// for (i = 0; i < nvars; i++) |
367 |
|
// bestx[i] = trialx[i]; |
368 |
|
// fopt = trialf; |
369 |
|
// |
370 |
|
// if (scale) { |
371 |
|
// for (i = 0; i < nvars; i++) |
372 |
|
// scalex[i] = bestx[i] * init[i]; |
373 |
|
// EcoSystem->storeVariables(-fopt, scalex); |
374 |
|
// } else |
375 |
|
// EcoSystem->storeVariables(-fopt, bestx); |
376 |
|
// |
377 |
|
// handle.logMessage(LOGINFO, "\nNew optimum found after", iters, "function evaluations"); |
378 |
|
// handle.logMessage(LOGINFO, "The likelihood score is", -fopt, "at the point"); |
379 |
|
// EcoSystem->writeBestValues(); |
380 |
|
// } |
381 |
|
// } |
382 |
|
// } |
383 |
|
// |
384 |
|
// //Adjust vm so that approximately half of all evaluations are accepted |
385 |
|
// for (i = 0; i < nvars; i++) { |
386 |
|
// ratio = nsdiv * nacp[i]; |
387 |
|
// nacp[i] = 0; |
388 |
|
// if (ratio > uratio) { |
389 |
|
// vm[i] = vm[i] * (1.0 + cs * (ratio - uratio)); |
390 |
|
// } else if (ratio < lratio) { |
391 |
|
// vm[i] = vm[i] / (1.0 + cs * (lratio - ratio)); |
392 |
|
// } |
393 |
|
// |
394 |
|
// if (vm[i] < rathersmall) |
395 |
|
// vm[i] = rathersmall; |
396 |
|
// if (vm[i] > (upperb[i] - lowerb[i])) |
397 |
|
// vm[i] = upperb[i] - lowerb[i]; |
398 |
|
// } |
399 |
|
// } |
400 |
|
// |
401 |
|
// //Check termination criteria |
402 |
|
// for (i = tempcheck - 1; i > 0; i--) |
403 |
|
// fstar[i] = fstar[i - 1]; |
404 |
|
// fstar[0] = funcval; |
405 |
|
// |
406 |
|
// quit = 0; |
407 |
|
// if (fabs(fopt - funcval) < simanneps) { |
408 |
|
// quit = 1; |
409 |
|
// for (i = 0; i < tempcheck - 1; i++) |
410 |
|
// if (fabs(fstar[i + 1] - fstar[i]) > simanneps) |
411 |
|
// quit = 0; |
412 |
|
// } |
413 |
|
// |
414 |
|
// handle.logMessage(LOGINFO, "Checking convergence criteria after", iters, "function evaluations ..."); |
415 |
|
// |
416 |
|
// //Terminate SA if appropriate |
417 |
|
// if (quit) { |
418 |
|
// handle.logMessage(LOGINFO, "\nStopping Simulated Annealing optimisation algorithm\n"); |
419 |
|
// handle.logMessage(LOGINFO, "The optimisation stopped after", iters, "function evaluations"); |
420 |
|
// handle.logMessage(LOGINFO, "The temperature was reduced to", t); |
421 |
|
// handle.logMessage(LOGINFO, "The optimisation stopped because an optimum was found for this run"); |
422 |
|
// handle.logMessage(LOGINFO, "Number of directly accepted points", nacc); |
423 |
|
// handle.logMessage(LOGINFO, "Number of metropolis accepted points", naccmet); |
424 |
|
// handle.logMessage(LOGINFO, "Number of rejected points", nrej); |
425 |
|
// |
426 |
|
// converge = 1; |
427 |
|
// score = EcoSystem->SimulateAndUpdate(bestx); |
428 |
|
// handle.logMessage(LOGINFO, "\nSimulated Annealing finished with a likelihood score of", score); |
429 |
|
// return; |
430 |
|
// } |
431 |
|
// |
432 |
|
// //If termination criteria is not met, prepare for another loop. |
433 |
|
// t *= rt; |
434 |
|
// if (t < rathersmall) |
435 |
|
// t = rathersmall; //JMB make sure temperature doesnt get too small |
436 |
|
// |
437 |
|
// handle.logMessage(LOGINFO, "Reducing the temperature to", t); |
438 |
|
// funcval = fopt; |
439 |
|
// for (i = 0; i < nvars; i++) |
440 |
|
// x[i] = bestx[i]; |
441 |
|
// } |
442 |
|
//} |
443 |
|
|
444 |
|
|
445 |
void OptInfoSimann::OptimiseLikelihood() { |
#ifdef GADGET_OPENMP |
446 |
|
void OptInfoSimann::OptimiseLikelihoodOMP() { |
447 |
|
|
448 |
//set initial values |
//set initial values |
449 |
int nacc = 0; //The number of accepted function evaluations |
int nacc = 0; //The number of accepted function evaluations |
455 |
int a, i, j, k, l, offset, quit; |
int a, i, j, k, l, offset, quit; |
456 |
int rchange, rcheck, rnumber; //Used to randomise the order of the parameters |
int rchange, rcheck, rnumber; //Used to randomise the order of the parameters |
457 |
|
|
458 |
|
struct Storage { |
459 |
|
DoubleVector trialx; |
460 |
|
double newLikelihood; |
461 |
|
}; |
462 |
|
|
463 |
handle.logMessage(LOGINFO, "\nStarting Simulated Annealing optimisation algorithm\n"); |
handle.logMessage(LOGINFO, "\nStarting Simulated Annealing optimisation algorithm\n"); |
464 |
int nvars = EcoSystem->numOptVariables(); |
int nvars = EcoSystem->numOptVariables(); |
465 |
DoubleVector x(nvars); |
DoubleVector x(nvars); |
482 |
EcoSystem->getOptUpperBounds(upperb); |
EcoSystem->getOptUpperBounds(upperb); |
483 |
EcoSystem->getOptInitialValues(init); |
EcoSystem->getOptInitialValues(init); |
484 |
|
|
485 |
for (i = 0; i < nvars; i++) { |
for (i = 0; i < nvars; ++i) { |
486 |
bestx[i] = x[i]; |
bestx[i] = x[i]; |
487 |
param[i] = i; |
param[i] = i; |
488 |
} |
} |
489 |
|
|
490 |
if (scale) { |
if (scale) { |
491 |
for (i = 0; i < nvars; i++) { |
for (i = 0; i < nvars; ++i) { |
492 |
scalex[i] = x[i]; |
scalex[i] = x[i]; |
493 |
// Scaling the bounds, because the parameters are scaled |
// Scaling the bounds, because the parameters are scaled |
494 |
lowerb[i] = lowerb[i] / init[i]; |
lowerb[i] = lowerb[i] / init[i]; |
507 |
handle.logMessage(LOGINFO, "Error starting Simulated Annealing optimisation with f(x) = infinity"); |
handle.logMessage(LOGINFO, "Error starting Simulated Annealing optimisation with f(x) = infinity"); |
508 |
converge = -1; |
converge = -1; |
509 |
iters = 1; |
iters = 1; |
510 |
|
// EcoSystem->writeOptValuesOMP(); |
511 |
return; |
return; |
512 |
} |
} |
513 |
|
|
518 |
cs /= lratio; //JMB save processing time |
cs /= lratio; //JMB save processing time |
519 |
nsdiv = 1.0 / ns; |
nsdiv = 1.0 / ns; |
520 |
fopt = funcval; |
fopt = funcval; |
521 |
for (i = 0; i < tempcheck; i++) |
for (i = 0; i < tempcheck; ++i) |
522 |
fstar[i] = funcval; |
fstar[i] = funcval; |
523 |
|
|
524 |
|
|
525 |
|
|
526 |
|
int numThr = omp_get_max_threads ( ); |
527 |
|
int bestId=0; |
528 |
|
int ini=0; |
529 |
|
|
530 |
|
Storage* storage = new Storage[numThr]; |
531 |
|
|
532 |
|
for (i=0; i<numThr; ++i) |
533 |
|
storage[i].trialx = trialx; |
534 |
|
|
535 |
//Start the main loop. Note that it terminates if |
//Start the main loop. Note that it terminates if |
536 |
//(i) the algorithm succesfully optimises the function or |
//(i) the algorithm succesfully optimises the function or |
537 |
//(ii) there are too many function evaluations |
//(ii) there are too many function evaluations |
538 |
|
int aux; |
539 |
|
|
540 |
|
DoubleVector vns(nvars, 0); |
541 |
|
int ns_ = ceil(numThr/2.); |
542 |
|
double res; |
543 |
|
aux=0; |
544 |
while (1) { |
while (1) { |
545 |
for (a = 0; a < nt; a++) { |
for (a = 0; a < nt; ++a) { |
546 |
//Randomize the order of the parameters once in a while, to avoid |
//Randomize the order of the parameters once in a while, to avoid |
547 |
//the order having an influence on which changes are accepted |
//the order having an influence on which changes are accepted |
548 |
rchange = 0; |
rchange = 0; |
549 |
while (rchange < nvars) { |
while (rchange < nvars) { |
550 |
rnumber = rand() % nvars; |
rnumber = rand_r(&seedP) % nvars; |
551 |
rcheck = 1; |
rcheck = 1; |
552 |
for (i = 0; i < rchange; i++) |
for (i = 0; i < rchange; ++i) |
553 |
if (param[i] == rnumber) |
if (param[i] == rnumber) |
554 |
rcheck = 0; |
rcheck = 0; |
555 |
if (rcheck) { |
if (rcheck) { |
558 |
} |
} |
559 |
} |
} |
560 |
|
|
|
for (j = 0; j < ns; j++) { |
|
|
for (l = 0; l < nvars; l++) { |
|
|
//Generate trialx, the trial value of x |
|
|
for (i = 0; i < nvars; i++) { |
|
|
if (i == param[l]) { |
|
|
trialx[i] = x[i] + ((randomNumber() * 2.0) - 1.0) * vm[i]; |
|
561 |
|
|
562 |
//If trialx is out of bounds, try again until we find a point that is OK |
for (j = 0; j < (ns*ns_); ++j) { |
563 |
if ((trialx[i] < lowerb[i]) || (trialx[i] > upperb[i])) { |
for (l = ini; l < nvars; l+=numThr) { |
564 |
//JMB - this used to just select a random point between the bounds |
for (i=0; i<numThr;++i) |
565 |
k = 0; |
{ |
566 |
while ((trialx[i] < lowerb[i]) || (trialx[i] > upperb[i])) { |
if ((l+i) < nvars) |
567 |
trialx[i] = x[i] + ((randomNumber() * 2.0) - 1.0) * vm[i]; |
newValue(nvars, l+i, param, storage[i].trialx, x, lowerb, upperb, vm); |
568 |
k++; |
else { |
569 |
if (k > 10) //we've had 10 tries to find a point neatly, so give up |
newValue(nvars, ini, param, storage[i].trialx, x, lowerb, upperb, vm); |
570 |
trialx[i] = lowerb[i] + (upperb[i] - lowerb[i]) * randomNumber(); |
ini++; |
571 |
|
if (ini >= nvars) |
572 |
|
ini=0; |
573 |
} |
} |
574 |
} |
} |
575 |
|
|
576 |
} else |
# pragma omp parallel private(res) |
577 |
trialx[i] = x[i]; |
{ |
|
} |
|
|
|
|
578 |
//Evaluate the function with the trial point trialx and return as -trialf |
//Evaluate the function with the trial point trialx and return as -trialf |
579 |
trialf = EcoSystem->SimulateAndUpdate(trialx); |
int id = omp_get_thread_num (); |
580 |
trialf = -trialf; |
res = EcoSystems[id]->SimulateAndUpdate(storage[id].trialx); |
581 |
|
storage[id].newLikelihood = -res; |
582 |
|
} |
583 |
|
//best value from omp |
584 |
|
trialf = storage[0].newLikelihood; |
585 |
|
bestId=0; |
586 |
|
for (i=0;i<numThr;++i) |
587 |
|
{ |
588 |
|
if (storage[i].newLikelihood > trialf) |
589 |
|
{ |
590 |
|
trialf=storage[i].newLikelihood; |
591 |
|
bestId=i; |
592 |
|
} |
593 |
|
k = param[(l+i)%nvars]; |
594 |
|
|
595 |
|
if ((storage[i].newLikelihood - funcval) > verysmall) |
596 |
|
{ |
597 |
|
nacp[k]++; |
598 |
|
aux++; |
599 |
|
vns[k]++; |
600 |
|
} |
601 |
|
else { |
602 |
|
//Accept according to metropolis condition |
603 |
|
p = expRep((storage[i].newLikelihood - funcval) / t); |
604 |
|
pp = randomNumber(&seedM); |
605 |
|
if (pp < p) |
606 |
|
aux++; |
607 |
|
else { |
608 |
|
vns[k]++; |
609 |
|
nrej++; |
610 |
|
} |
611 |
|
} |
612 |
|
|
613 |
//If too many function evaluations occur, terminate the algorithm |
if (vns[k] >= ns) { |
614 |
iters = EcoSystem->getFuncEval() - offset; |
ratio = nsdiv * nacp[k]; |
615 |
|
nacp[k] = 0; |
616 |
|
if (ratio > uratio) { |
617 |
|
vm[k] = vm[k] * (1.0 + cs * (ratio - uratio)); |
618 |
|
} else if (ratio < lratio) { |
619 |
|
vm[k] = vm[k] / (1.0 + cs * (lratio - ratio)); |
620 |
|
} |
621 |
|
if (vm[k] < rathersmall){ |
622 |
|
vm[k] = rathersmall; |
623 |
|
} |
624 |
|
if (vm[k] > (upperb[k] - lowerb[k])) |
625 |
|
{ |
626 |
|
vm[k] = upperb[k] - lowerb[k]; |
627 |
|
} |
628 |
|
vns[k]=0; |
629 |
|
} |
630 |
|
} |
631 |
|
aux--; |
632 |
|
iters = (EcoSystems[bestId]->getFuncEval() * numThr) -aux; |
633 |
if (iters > simanniter) { |
if (iters > simanniter) { |
634 |
handle.logMessage(LOGINFO, "\nStopping Simulated Annealing optimisation algorithm\n"); |
handle.logMessage(LOGINFO, "\nStopping Simulated Annealing optimisation algorithm\n"); |
635 |
handle.logMessage(LOGINFO, "The optimisation stopped after", iters, "function evaluations"); |
handle.logMessage(LOGINFO, "The optimisation stopped after", iters, "function evaluations"); |
642 |
|
|
643 |
score = EcoSystem->SimulateAndUpdate(bestx); |
score = EcoSystem->SimulateAndUpdate(bestx); |
644 |
handle.logMessage(LOGINFO, "\nSimulated Annealing finished with a likelihood score of", score); |
handle.logMessage(LOGINFO, "\nSimulated Annealing finished with a likelihood score of", score); |
645 |
|
delete[] storage; |
646 |
return; |
return; |
647 |
} |
} |
648 |
|
|
649 |
//Accept the new point if the new function value better |
//Accept the new point if the new function value better |
650 |
if ((trialf - funcval) > verysmall) { |
if ((trialf - funcval) > verysmall) { |
651 |
for (i = 0; i < nvars; i++) |
for (i = 0; i < nvars; ++i) |
652 |
x[i] = trialx[i]; |
x[i] = storage[bestId].trialx[i]; |
653 |
funcval = trialf; |
funcval = trialf; |
654 |
nacc++; |
nacc++; |
655 |
nacp[param[l]]++; //JMB - not sure about this ... |
// vns[param[l+bestId]]++; |
656 |
|
//nacp[param[l+bestId]]+=numThr; //JMB - not sure about this ... |
657 |
|
// for (i = 0; i < numThr; ++i) |
658 |
|
// nacp[param[l+i]]++; |
659 |
|
// nacp[param[l+bestId]]++; |
660 |
|
|
661 |
} else { |
} else { |
662 |
//Accept according to metropolis condition |
//Accept according to metropolis condition |
663 |
p = expRep((trialf - funcval) / t); |
p = expRep((trialf - funcval) / t); |
664 |
pp = randomNumber(); |
pp = randomNumber(&seedP); |
665 |
if (pp < p) { |
if (pp < p) { |
666 |
//Accept point |
//Accept point |
667 |
for (i = 0; i < nvars; i++) |
for (i = 0; i < nvars; ++i) |
668 |
x[i] = trialx[i]; |
x[i] = storage[bestId].trialx[i]; |
669 |
funcval = trialf; |
funcval = trialf; |
670 |
naccmet++; |
naccmet++; |
671 |
nacp[param[l]]++; |
nacp[param[(l+bestId)%nvars]]++; |
672 |
|
// vns[param[l+bestId]]++; |
673 |
|
// nacp[param[l+bestId]]+=numThr; |
674 |
|
// for (i = 0; i < numThr; ++i) |
675 |
|
// nacp[param[l+i]]++; |
676 |
|
// nacp[param[l+bestId]]++; |
677 |
} else { |
} else { |
678 |
//Reject point |
//Reject point |
679 |
nrej++; |
nrej++; |
680 |
} |
} |
681 |
} |
} |
|
|
|
682 |
// JMB added check for really silly values |
// JMB added check for really silly values |
683 |
if (isZero(trialf)) { |
if (isZero(trialf)) { |
684 |
handle.logMessage(LOGINFO, "Error in Simulated Annealing optimisation after", iters, "function evaluations, f(x) = 0"); |
handle.logMessage(LOGINFO, "Error in Simulated Annealing optimisation after", iters, "function evaluations, f(x) = 0"); |
685 |
converge = -1; |
converge = -1; |
686 |
|
// EcoSystem->writeOptValuesOMP(); |
687 |
|
delete[] storage; |
688 |
return; |
return; |
689 |
} |
} |
690 |
|
|
691 |
//If greater than any other point, record as new optimum |
//If greater than any other point, record as new optimum |
692 |
if ((trialf > fopt) && (trialf == trialf)) { |
if ((trialf > fopt) && (trialf == trialf)) { |
693 |
for (i = 0; i < nvars; i++) |
for (i = 0; i < nvars; ++i) |
694 |
bestx[i] = trialx[i]; |
bestx[i] = storage[bestId].trialx[i]; |
695 |
fopt = trialf; |
fopt = trialf; |
696 |
|
|
697 |
if (scale) { |
if (scale) { |
698 |
for (i = 0; i < nvars; i++) |
for (i = 0; i < nvars; ++i) |
699 |
scalex[i] = bestx[i] * init[i]; |
scalex[i] = bestx[i] * init[i]; |
700 |
|
//FIXME EcoSystems[]-> |
701 |
EcoSystem->storeVariables(-fopt, scalex); |
EcoSystem->storeVariables(-fopt, scalex); |
702 |
} else |
} else |
703 |
EcoSystem->storeVariables(-fopt, bestx); |
EcoSystem->storeVariables(-fopt, bestx); |
710 |
} |
} |
711 |
|
|
712 |
//Adjust vm so that approximately half of all evaluations are accepted |
//Adjust vm so that approximately half of all evaluations are accepted |
713 |
for (i = 0; i < nvars; i++) { |
// for (i = 0; i < nvars; ++i) { |
714 |
ratio = nsdiv * nacp[i]; |
//// cout << "nacp["<<i<<"]:"<<nacp[i]<<endl; |
715 |
nacp[i] = 0; |
// ratio = nsdiv * nacp[i]; |
716 |
if (ratio > uratio) { |
// nacp[i] = 0; |
717 |
vm[i] = vm[i] * (1.0 + cs * (ratio - uratio)); |
// if (ratio > uratio) { |
718 |
} else if (ratio < lratio) { |
// vm[i] = vm[i] * (1.0 + cs * (ratio - uratio)); |
719 |
vm[i] = vm[i] / (1.0 + cs * (lratio - ratio)); |
// } else if (ratio < lratio) { |
720 |
} |
// vm[i] = vm[i] / (1.0 + cs * (lratio - ratio)); |
721 |
|
// } |
722 |
if (vm[i] < rathersmall) |
// |
723 |
vm[i] = rathersmall; |
// if (vm[i] < rathersmall) |
724 |
if (vm[i] > (upperb[i] - lowerb[i])) |
// vm[i] = rathersmall; |
725 |
vm[i] = upperb[i] - lowerb[i]; |
// if (vm[i] > (upperb[i] - lowerb[i])) |
726 |
} |
// vm[i] = upperb[i] - lowerb[i]; |
727 |
|
// |
728 |
|
//// cout << "vm["<<i<<"]:"<<vm[i]<<endl; |
729 |
|
// |
730 |
|
// } |
731 |
} |
} |
732 |
|
|
733 |
//Check termination criteria |
//Check termination criteria |
738 |
quit = 0; |
quit = 0; |
739 |
if (fabs(fopt - funcval) < simanneps) { |
if (fabs(fopt - funcval) < simanneps) { |
740 |
quit = 1; |
quit = 1; |
741 |
for (i = 0; i < tempcheck - 1; i++) |
for (i = 0; i < tempcheck - 1; ++i) |
742 |
if (fabs(fstar[i + 1] - fstar[i]) > simanneps) |
if (fabs(fstar[i + 1] - fstar[i]) > simanneps) |
743 |
quit = 0; |
quit = 0; |
744 |
} |
} |
758 |
converge = 1; |
converge = 1; |
759 |
score = EcoSystem->SimulateAndUpdate(bestx); |
score = EcoSystem->SimulateAndUpdate(bestx); |
760 |
handle.logMessage(LOGINFO, "\nSimulated Annealing finished with a likelihood score of", score); |
handle.logMessage(LOGINFO, "\nSimulated Annealing finished with a likelihood score of", score); |
761 |
|
// EcoSystem->writeOptValuesOMP(); |
762 |
|
delete[] storage; |
763 |
return; |
return; |
764 |
} |
} |
765 |
|
|
770 |
|
|
771 |
handle.logMessage(LOGINFO, "Reducing the temperature to", t); |
handle.logMessage(LOGINFO, "Reducing the temperature to", t); |
772 |
funcval = fopt; |
funcval = fopt; |
773 |
for (i = 0; i < nvars; i++) |
for (i = 0; i < nvars; ++i) |
774 |
x[i] = bestx[i]; |
x[i] = bestx[i]; |
775 |
} |
} |
776 |
|
// EcoSystem->writeOptValuesOMP(); |
777 |
|
} |
778 |
|
#endif |
779 |
|
|
780 |
|
void OptInfoSimann::newValue(int nvars, int l, IntVector& param, DoubleVector& trialx, |
781 |
|
DoubleVector& x, DoubleVector& lowerb, DoubleVector& upperb, DoubleVector& vm) |
782 |
|
{ |
783 |
|
int i, k; |
784 |
|
// double old = trialx[param[l]]; |
785 |
|
// cout << "[" << endl; |
786 |
|
for (i = 0; i < nvars; ++i) { |
787 |
|
if (i == param[l]) { |
788 |
|
trialx[i] = x[i] + ((randomNumber(&seed) * 2.0) - 1.0) * vm[i]; |
789 |
|
|
790 |
|
//If trialx is out of bounds, try again until we find a point that is OK |
791 |
|
if ((trialx[i] < lowerb[i]) || (trialx[i] > upperb[i])) { |
792 |
|
//JMB - this used to just select a random point between the bounds |
793 |
|
k = 0; |
794 |
|
while ((trialx[i] < lowerb[i]) || (trialx[i] > upperb[i])) { |
795 |
|
trialx[i] = x[i] + ((randomNumber(&seed) * 2.0) - 1.0) * vm[i]; |
796 |
|
k++; |
797 |
|
if (k > 10) //we've had 10 tries to find a point neatly, so give up |
798 |
|
trialx[i] = lowerb[i] + (upperb[i] - lowerb[i]) * randomNumber(&seed); |
799 |
|
} |
800 |
|
} |
801 |
|
} else |
802 |
|
trialx[i] = x[i]; |
803 |
|
// cout <<" " << trialx[i]; |
804 |
|
} |
805 |
|
|
806 |
|
// cout << endl<< "old[" <<param[l] << "]:" << old <<"-" << trialx[param[l]]<< " - " << vm[param[l]]<<endl; |
807 |
|
} |
808 |
|
|
809 |
|
|
810 |
|
//Generate trialx, the trial value of x |
811 |
|
void newValue(int nvars, int l, IntVector& param, DoubleVector& trialx, |
812 |
|
DoubleVector& x, DoubleVector& lowerb, DoubleVector& upperb, DoubleVector& vm, unsigned* seed) |
813 |
|
{ |
814 |
|
int i, k; |
815 |
|
// double old = trialx[param[l]]; |
816 |
|
// cout << "[" << endl; |
817 |
|
for (i = 0; i < nvars; ++i) { |
818 |
|
if (i == param[l]) { |
819 |
|
trialx[i] = x[i] + ((randomNumber(&*seed) * 2.0) - 1.0) * vm[i]; |
820 |
|
|
821 |
|
//If trialx is out of bounds, try again until we find a point that is OK |
822 |
|
if ((trialx[i] < lowerb[i]) || (trialx[i] > upperb[i])) { |
823 |
|
//JMB - this used to just select a random point between the bounds |
824 |
|
k = 0; |
825 |
|
while ((trialx[i] < lowerb[i]) || (trialx[i] > upperb[i])) { |
826 |
|
trialx[i] = x[i] + ((randomNumber(&*seed) * 2.0) - 1.0) * vm[i]; |
827 |
|
k++; |
828 |
|
if (k > 10) //we've had 10 tries to find a point neatly, so give up |
829 |
|
trialx[i] = lowerb[i] + (upperb[i] - lowerb[i]) * randomNumber(&*seed); |
830 |
|
} |
831 |
|
} |
832 |
|
} else |
833 |
|
trialx[i] = x[i]; |
834 |
|
// cout << trialx[i]<<" "; |
835 |
|
} |
836 |
|
|
837 |
|
// cout << endl <<l<< "-old[" <<param[l] << "]:" << old <<"-" << trialx[param[l]]<< " - " << vm[param[l]]<<endl; |
838 |
|
} |
839 |
|
|
840 |
|
/// Adapter used by the reproducible-search template: generates a new trial
/// parameter vector from the search state carried inside the Siman object
/// by forwarding everything to the free newValue() overload.
void buildNewParams_f(Siman& seed, DoubleVector& params) {
  newValue(seed.getNvars(), seed.getL(), seed.getParam(), params,
           seed.getX(), seed.getLowerb(), seed.getUpperb(),
           seed.getVm(), seed.getSeed());
}
846 |
|
|
847 |
|
/// Represents the function that computes how good the parameters are |
848 |
|
double evaluate_f(const DoubleVector& params) { |
849 |
|
double trialf; |
850 |
|
#ifndef NO_OPENMP |
851 |
|
int id = omp_get_thread_num (); |
852 |
|
trialf = EcoSystems[id]->SimulateAndUpdate(params); |
853 |
|
// cout << id << "-"; |
854 |
|
#else |
855 |
|
trialf = EcoSystem->SimulateAndUpdate(params); |
856 |
|
#endif |
857 |
|
// cout << "trailf:" << trialf<< endl; |
858 |
|
return -trialf; |
859 |
|
} |
860 |
|
|
861 |
|
/// Control policy for the reproducible simulated-annealing search: step
/// length adjustment, temperature schedule, optimum bookkeeping, the
/// Metropolis acceptance test and the termination check.
struct ControlClass {

  /// Rescales each element of vm (the per-variable maximum step length)
  /// so that roughly half of the trial moves are accepted, following the
  /// Corana et al. update rule, then clamps vm into [rathersmall, range].
  void adjustVm(Siman& seed) {
    //Adjust vm so that approximately half of all evaluations are accepted
    int i;
    double ratio, nsdiv = seed.getNsdiv();

    // Local copies; the adjusted vm is written back via setVm() below
    DoubleVector vm = seed.getVm();
    DoubleVector upperb = seed.getUpperb();
    DoubleVector lowerb = seed.getLowerb();

    double uratio = seed.getUratio();
    double lratio = seed.getLratio();

    for (i = 0; i < seed.getNvars(); i++) {
      // Fraction of accepted moves for variable i over the last cycle
      ratio = nsdiv * seed.getNacp(i);
      seed.setNacp(i,0);
      if (ratio > uratio) {
        // Too many acceptances: widen the step
        (vm)[i] = (vm)[i] * (1.0 + seed.getCs() * (ratio - seed.getUratio()));
      } else if (ratio < lratio) {
        // Too few acceptances: narrow the step
        (vm)[i] = (vm)[i] / (1.0 + seed.getCs() * (seed.getLratio() - ratio));
      }

      // Keep the step length positive and no wider than the feasible range
      if ((vm)[i] < rathersmall)
        (vm)[i] = rathersmall;
      if ((vm)[i] > (upperb[i] - lowerb[i]))
        (vm)[i] = upperb[i] - lowerb[i];
    }
    seed.setVm(vm);
  }

  /// Reduces the annealing temperature by the factor rt (floored at
  /// rathersmall) and restarts the search from the best point found so far.
  void temperature(Siman& seed, DoubleVector& x){
    int i;
    double t = seed.getT();
    t *= seed.getRt();
    if (t < rathersmall)
      t = rathersmall; //JMB make sure temperature doesnt get too small

    handle.logMessage(LOGINFO, "Reducing the temperature to", t);
    seed.setT(t);

    DoubleVector* bestx = seed.getBestx();

    //funcval = fopt;
    // Restart from the best point found so far
    for (i = 0; i < seed.getNvars(); i++)
      x[i] = (*bestx)[i];
  }

  /// Records trialx as the new optimum when trialf improves on fopt.
  /// The (trialf == trialf) clause rejects NaN values.
  /// NOTE(review): trialx, init and siman are taken by value, which copies
  /// them on every call — presumably intentional for the parallel search;
  /// confirm before changing to references.
  void optimum(double trialf, double &fopt, int iters, DoubleVector trialx,
      DoubleVector init, Siman siman){
    //If greater than any other point, record as new optimum
    int i, nvars = siman.getNvars();
    DoubleVector scalex(nvars);
    DoubleVector* bestx = siman.getBestx();

    if ((trialf > fopt) && (trialf == trialf)) {
      for (i = 0; i < nvars; i++)
        (*bestx)[i] = trialx[i];
      fopt = trialf;

      if (siman.getScale()) {
        // Undo the scaling before storing, so reported values are physical
        for (i = 0; i < nvars; i++)
          scalex[i] = (*bestx)[i] * init[i];
        EcoSystem->storeVariables(-fopt, scalex);
      } else
        EcoSystem->storeVariables(-fopt, (*bestx));

      handle.logMessage(LOGINFO, "\nNew optimum found after", iters, "function evaluations");
      handle.logMessage(LOGINFO, "The likelihood score is", -fopt, "at the point");
      EcoSystem->writeBestValues();
    }
  }

  /**
   @brief Decides wheter the current item evaluated must be chosen as new optimum
   @param funcval value for old optimum
   @param trialf value for current item evaluated
   Accepts outright improvements; otherwise applies the Metropolis
   criterion at the current temperature. Also advances the per-cycle
   counters (L, NS, NT) and triggers the vm adjustment when a cycle ends.
   Returns false when the point is rejected or trialf is exactly zero
   (treated as an error and flagged via setConverge(-1)).
   */
  bool mustAccept(double funcval, double trialf, Siman &siman, int iters) {
    //Accept the new point if the new function value better
    bool ret = true;
    int i;  // NOTE(review): unused local, kept as-is
    // Index of the variable that was perturbed for this trial
    int aux = siman.getParam()[siman.getL()];
    if ((trialf - funcval) > verysmall) {
      siman.incrementNacp(aux);
      siman.incrementNacc();
      //JMB - not sure about this ...
    } else {
      double p, pp;
      //Accept according to metropolis condition
      p = expRep((trialf - funcval) / siman.getT());
      pp = randomNumber(siman.getSeedM());
      if (pp < p) {
        siman.incrementNacp(aux);
        siman.incrementNaccmet();
      } else {
        //Reject point
        ret = false;
        siman.incrementNrej();
      }
    }
    // JMB added check for really silly values
    if (isZero(trialf)) {
      handle.logMessage(LOGINFO, "Error in Simulated Annealing optimisation after",
          iters, "function evaluations, f(x) = 0");

      siman.setConverge(-1);
      return false;
    }

    // Advance the loop counters; L wraps to 0 after a full sweep of the
    // parameters, NS counts sweeps, NT counts step-adjustment cycles
    siman.incrementL();
    if (siman.getL() == 0)
      siman.incrementNS();
    if (siman.getNS() >= siman.getNs()){

      siman.setNS(0);

      siman.incrementNT();
      adjustVm(siman);
      siman.randomizeParams();
      // if (seed.getNT() >= seed.getNt()){
      //
      // }

    }

    return ret;

  }

  /**
   @brief Decides whether the search must stop.
   It does not take into account the number of iterations, which is already considered by the template
   @param prev old optimum
   @param funcval new/current optimum
   Convergence requires the change from prev to funcval AND the spread of
   the last tempcheck optima (fstar) to be within simanneps. Whether or
   not the search stops, the temperature is reduced and x is reset to the
   best point found so far.
   */
  bool mustTerminate(double prev, double& funcval, Siman &siman, int iters) {
    bool quit = false;
    if (siman.getNT() >= siman.getNt())
    {
      //Check termination criteria
      int i;
      // NOTE(review): this is a local copy — the shifted fstar history
      // below is never written back to siman; confirm this is intended.
      DoubleVector fstar = siman.getFstar();

      siman.setNT(0);

      // Shift the history of optima and record the current value
      for (i = siman.getTempcheck() - 1; i > 0; i--)
        fstar[i] = fstar[i - 1];
      fstar[0] = funcval;

      if (fabs(prev - funcval) < siman.getSimanneps()) {
        quit = true;
        for (i = 0; i < siman.getTempcheck() - 1; i++)
          if (fabs(fstar[i + 1] - fstar[i]) > siman.getSimanneps())
            quit = false;
      }

      handle.logMessage(LOGINFO, "Checking convergence criteria after", iters,
          "function evaluations ...");

      // Reduce the temperature and restart from the best point so far
      temperature(siman, siman.getX());

      // NOTE(review): resets the current value to the previous optimum;
      // the sequential code used funcval = fopt here — verify.
      funcval = prev;
    }
    return quit;
  }

  /// Logs the final state of the search: stop reason, acceptance counters,
  /// and the likelihood score recomputed at the best point. Sets the
  /// converge flag when the run stopped because convergence was reached.
  void printResult(bool quit, Siman siman, int iters)
  {
    double * score = siman.getScore();
    DoubleVector * bestX = siman.getBestx();

    handle.logMessage(LOGINFO, "\nStopping Simulated Annealing optimisation algorithm\n");
    handle.logMessage(LOGINFO, "The optimisation stopped after", iters, "function evaluations");
    handle.logMessage(LOGINFO, "The temperature was reduced to", siman.getT());
    if (quit) {
      int* converge = siman.getConverge();
      handle.logMessage(LOGINFO, "The optimisation stopped because an optimum was found for this run");

      *converge = 1;
    }
    else {
      handle.logMessage(LOGINFO, "The optimisation stopped because the maximum number of function evaluations");
      handle.logMessage(LOGINFO, "was reached and NOT because an optimum was found for this run");
    }
    handle.logMessage(LOGINFO, "Number of directly accepted points", siman.getNacc());
    handle.logMessage(LOGINFO, "Number of metropolis accepted points", siman.getNaccmet());
    handle.logMessage(LOGINFO, "Number of rejected points", siman.getNrej());
    // Re-simulate at the best point so the reported score is authoritative
    *score = EcoSystem->SimulateAndUpdate(*bestX);
    handle.logMessage(LOGINFO, "\nSimulated Annealing finished with a likelihood score of", *score);
    // EcoSystem->writeOptValues();
  }
};
1073 |
|
|
1074 |
|
// Required |
1075 |
|
std::ostream &operator<<(std::ostream &os, const DoubleVector &p) |
1076 |
|
{ |
1077 |
|
// os << "[ AUCH"; |
1078 |
|
// for (int i = 0; i< NPARAMS; ++i) { |
1079 |
|
// std::cout << p[i] << ' '; |
1080 |
|
// } |
1081 |
|
// std::cout << "]"; |
1082 |
|
os << ""; |
1083 |
|
return os; |
1084 |
|
} |
1085 |
|
|
1086 |
|
|
1087 |
|
void OptInfoSimann::OptimiseLikelihood() { |
1088 |
|
|
1089 |
|
//set initial values |
1090 |
|
|
1091 |
|
double tmp, p, pp; |
1092 |
|
double fopt, funcval, trialf; |
1093 |
|
int a, i, j, k, l, quit; |
1094 |
|
int rchange, rcheck, rnumber; //Used to randomise the order of the parameters |
1095 |
|
|
1096 |
|
handle.logMessage(LOGINFO, |
1097 |
|
"\nStarting Simulated Annealing optimisation algorithm----\n"); |
1098 |
|
int nvars = EcoSystem->numOptVariables(); |
1099 |
|
DoubleVector x(nvars); |
1100 |
|
DoubleVector init(nvars); |
1101 |
|
DoubleVector trialx(nvars, 0.0); |
1102 |
|
DoubleVector bestx(nvars); |
1103 |
|
DoubleVector scalex(nvars); |
1104 |
|
DoubleVector lowerb(nvars); |
1105 |
|
DoubleVector upperb(nvars); |
1106 |
|
DoubleVector fstar(tempcheck); |
1107 |
|
DoubleVector vm(nvars, vminit); |
1108 |
|
IntVector param(nvars, 0); |
1109 |
|
|
1110 |
|
EcoSystem->resetVariables(); //JMB need to reset variables in case they have been scaled |
1111 |
|
if (scale) { |
1112 |
|
EcoSystem->scaleVariables(); |
1113 |
|
#ifndef NO_OPENMP |
1114 |
|
int numThr = omp_get_max_threads ( ); |
1115 |
|
for(i = 0; i < numThr; i++) |
1116 |
|
EcoSystems[i]->scaleVariables(); |
1117 |
|
#endif |
1118 |
|
} |
1119 |
|
EcoSystem->getOptScaledValues(x); |
1120 |
|
EcoSystem->getOptLowerBounds(lowerb); |
1121 |
|
EcoSystem->getOptUpperBounds(upperb); |
1122 |
|
EcoSystem->getOptInitialValues(init); |
1123 |
|
|
1124 |
|
for (i = 0; i < nvars; i++) { |
1125 |
|
bestx[i] = x[i]; |
1126 |
|
param[i] = i; |
1127 |
|
} |
1128 |
|
|
1129 |
|
if (scale) { |
1130 |
|
for (i = 0; i < nvars; i++) { |
1131 |
|
scalex[i] = x[i]; |
1132 |
|
// Scaling the bounds, because the parameters are scaled |
1133 |
|
lowerb[i] = lowerb[i] / init[i]; |
1134 |
|
upperb[i] = upperb[i] / init[i]; |
1135 |
|
if (lowerb[i] > upperb[i]) { |
1136 |
|
tmp = lowerb[i]; |
1137 |
|
lowerb[i] = upperb[i]; |
1138 |
|
upperb[i] = tmp; |
1139 |
|
} |
1140 |
|
} |
1141 |
|
} |
1142 |
|
|
1143 |
|
//funcval is the function value at x |
1144 |
|
funcval = EcoSystem->SimulateAndUpdate(x); |
1145 |
|
if (funcval != funcval) { //check for NaN |
1146 |
|
handle.logMessage(LOGINFO, |
1147 |
|
"Error starting Simulated Annealing optimisation with f(x) = infinity"); |
1148 |
|
converge = -1; |
1149 |
|
iters = 1; |
1150 |
|
return; |
1151 |
|
} |
1152 |
|
|
1153 |
|
//the function is to be minimised so switch the sign of funcval (and trialf) |
1154 |
|
funcval = -funcval; |
1155 |
|
// offset = EcoSystem->getFuncEval(); //number of function evaluations done before loop |
1156 |
|
// nacc++; |
1157 |
|
cs /= lratio; //JMB save processing time |
1158 |
|
// nsdiv = 1.0 / ns; |
1159 |
|
fopt = funcval; |
1160 |
|
for (i = 0; i < tempcheck; i++) |
1161 |
|
fstar[i] = funcval; |
1162 |
|
|
1163 |
|
//unsigned seed = 1234; |
1164 |
|
|
1165 |
|
Siman s(seed, seedM, seedP, nvars, nt, ns, param, &x, &lowerb, &upperb, vm, t, rt, (1.0 / ns), |
1166 |
|
tempcheck, simanneps, fstar, lratio, uratio, cs, &bestx, scale, &converge, &score); |
1167 |
|
|
1168 |
|
ReproducibleSearch<Siman, DoubleVector, ControlClass, evaluate_f, buildNewParams_f> |
1169 |
|
pa(s, x, simanniter); |
1170 |
|
|
1171 |
|
// cout << "Sequential run" << endl; |
1172 |
|
// pa.seq_opt(); |
1173 |
|
|
1174 |
|
#ifndef NO_OPENMP |
1175 |
|
int numThr = omp_get_max_threads ( ); |
1176 |
|
pa.paral_opt_omp(numThr,numThr); |
1177 |
|
#else |
1178 |
|
pa.seq_opt(); |
1179 |
|
#endif |
1180 |
|
|
1181 |
|
} |
1182 |
|
|
1183 |
|
|
1184 |
|
|
1185 |
|
|