[mareframe] /trunk/gadget/catchdistribution.cc (revision 1)

#include "catchdistribution.h"
#include "readfunc.h"
#include "readword.h"
#include "readaggregation.h"
#include "errorhandler.h"
#include "areatime.h"
#include "fleet.h"
#include "stock.h"
#include "multinomial.h"
#include "mathfunc.h"
#include "stockprey.h"
#include "ludecomposition.h"
#include "gadget.h"
#include "global.h"

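//CatchDistribution is the likelihood component that compares the modelled
//catch-at-age and catch-at-length distributions from the fleets with the
//observed distribution data, using the likelihood function selected in the
//input file.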
CatchDistribution::CatchDistribution(CommentStream& infile, const AreaClass* const Area,
  const TimeClass* const TimeInfo, Keeper* const keeper, double weight, const char* name)
  : Likelihood(CATCHDISTRIBUTIONLIKELIHOOD, weight, name), alptr(0) {

  int i, j;
  char text[MaxStrLength];
  strncpy(text, "", MaxStrLength);
  int numarea = 0, numage = 0, numlen = 0;

  char datafilename[MaxStrLength];
  char aggfilename[MaxStrLength];
  strncpy(datafilename, "", MaxStrLength);
  strncpy(aggfilename, "", MaxStrLength);
  ifstream datafile;
  CommentStream subdata(datafile);

  timeindex = 0;
  yearly = 0;
  functionname = new char[MaxStrLength];
  strncpy(functionname, "", MaxStrLength);

  readWordAndValue(infile, "datafile", datafilename);
  readWordAndValue(infile, "function", functionname);

  functionnumber = 0;
  if (strcasecmp(functionname, "multinomial") == 0) {
    MN = Multinomial();
    functionnumber = 1;
  } else if (strcasecmp(functionname, "pearson") == 0) {
    functionnumber = 2;
  } else if (strcasecmp(functionname, "gamma") == 0) {
    functionnumber = 3;
  } else if (strcasecmp(functionname, "sumofsquares") == 0) {
    functionnumber = 4;
  } else if (strcasecmp(functionname, "mvn") == 0) {
    functionnumber = 5;

    readWordAndVariable(infile, "lag", lag);
    readWordAndVariable(infile, "sigma", sigma);
    sigma.Inform(keeper);

    params.resize(lag, keeper);
    for (i = 0; i < lag; i++)
      readWordAndVariable(infile, "param", params[i]);
    params.Inform(keeper);

  } else if (strcasecmp(functionname, "mvlogistic") == 0) {
    functionnumber = 6;

    readWordAndVariable(infile, "sigma", sigma);
    sigma.Inform(keeper);

  } else if (strcasecmp(functionname, "log") == 0) {
    //JMB moved the logcatch function to here instead of it being a separate class
    functionnumber = 7;

  } else if (strcasecmp(functionname, "stratified") == 0) {
    //JMB experimental version of the sum of squares function for stratified samples
    functionnumber = 8;

  } else
    handle.logFileMessage(LOGFAIL, "\nError in catchdistribution - unrecognised function", functionname);

  infile >> ws;
  char c = infile.peek();
  if ((c == 'a') || (c == 'A')) {
    //we have found either aggregationlevel or areaaggfile ...
    streampos pos = infile.tellg();

    infile >> text >> ws;
    if ((strcasecmp(text, "aggregation_level") == 0) || (strcasecmp(text, "aggregationlevel") == 0))
      infile >> yearly >> ws;
    else if (strcasecmp(text, "areaaggfile") == 0)
      infile.seekg(pos);
    else
      handle.logFileUnexpected(LOGFAIL, "areaaggfile", text);

    //JMB - peek at the next char
    c = infile.peek();

    if (yearly != 0 && yearly != 1)
      handle.logFileMessage(LOGFAIL, "\nError in catchdistribution - aggregationlevel must be 0 or 1");
  }

  //JMB - changed to make the reading of overconsumption optional
  if ((c == 'o') || (c == 'O')) {
    readWordAndVariable(infile, "overconsumption", overconsumption);
    infile >> ws;
    c = infile.peek();
  } else
    overconsumption = 0;

  if (overconsumption != 0 && overconsumption != 1)
    handle.logFileMessage(LOGFAIL, "\nError in catchdistribution - overconsumption must be 0 or 1");

  //JMB - changed to make the reading of minimum probability optional
  if ((c == 'm') || (c == 'M'))
    readWordAndVariable(infile, "minimumprobability", epsilon);
  else if ((c == 'e') || (c == 'E'))
    readWordAndVariable(infile, "epsilon", epsilon);
  else
    epsilon = 10.0;

  if (epsilon < verysmall) {
    handle.logFileMessage(LOGWARN, "epsilon should be a positive integer - set to default value 10");
    epsilon = 10.0;
  }

  //read in area aggregation from file
  readWordAndValue(infile, "areaaggfile", aggfilename);
  datafile.open(aggfilename, ios::in);
  handle.checkIfFailure(datafile, aggfilename);
  handle.Open(aggfilename);
  numarea = readAggregation(subdata, areas, areaindex);
  handle.Close();
  datafile.close();
  datafile.clear();

  //read in age aggregation from file
  readWordAndValue(infile, "ageaggfile", aggfilename);
  datafile.open(aggfilename, ios::in);
  handle.checkIfFailure(datafile, aggfilename);
  handle.Open(aggfilename);
  numage = readAggregation(subdata, ages, ageindex);
  handle.Close();
  datafile.close();
  datafile.clear();

  //read in length aggregation from file
  readWordAndValue(infile, "lenaggfile", aggfilename);
  datafile.open(aggfilename, ios::in);
  handle.checkIfFailure(datafile, aggfilename);
  handle.Open(aggfilename);
  numlen = readLengthAggregation(subdata, lengths, lenindex);
  handle.Close();
  datafile.close();
  datafile.clear();

  LgrpDiv = new LengthGroupDivision(lengths);
  if (LgrpDiv->Error())
    handle.logMessage(LOGFAIL, "Error in catchdistribution - failed to create length group");

  //Must change from outer areas to inner areas.
  for (i = 0; i < areas.Nrow(); i++)
    for (j = 0; j < areas.Ncol(i); j++)
      areas[i][j] = Area->getInnerArea(areas[i][j]);

  //read in the fleetnames
  i = 0;
  infile >> text >> ws;
  if (strcasecmp(text, "fleetnames") != 0)
    handle.logFileUnexpected(LOGFAIL, "fleetnames", text);
  infile >> text >> ws;
  while (!infile.eof() && (strcasecmp(text, "stocknames") != 0)) {
    fleetnames.resize(new char[strlen(text) + 1]);
    strcpy(fleetnames[i++], text);
    infile >> text >> ws;
  }
  if (fleetnames.Size() == 0)
    handle.logFileMessage(LOGFAIL, "\nError in catchdistribution - failed to read fleets");
  handle.logMessage(LOGMESSAGE, "Read fleet data - number of fleets", fleetnames.Size());

  //read in the stocknames
  i = 0;
  if (strcasecmp(text, "stocknames") != 0)
    handle.logFileUnexpected(LOGFAIL, "stocknames", text);
  infile >> text;
  while (!infile.eof() && (strcasecmp(text, "[component]") != 0)) {
    infile >> ws;
    stocknames.resize(new char[strlen(text) + 1]);
    strcpy(stocknames[i++], text);
    infile >> text;
  }
  if (stocknames.Size() == 0)
    handle.logFileMessage(LOGFAIL, "\nError in catchdistribution - failed to read stocks");
  handle.logMessage(LOGMESSAGE, "Read stock data - number of stocks", stocknames.Size());

  //We have now read in all the data from the main likelihood file
  //But we have to read in the statistics data from datafilename
  datafile.open(datafilename, ios::in);
  handle.checkIfFailure(datafile, datafilename);
  handle.Open(datafilename);
  readDistributionData(subdata, TimeInfo, numarea, numage, numlen);
  handle.Close();
  datafile.close();
  datafile.clear();

  switch (functionnumber) {
    case 2:
    case 3:
    case 4:
    case 7:
    case 8:
      for (i = 0; i < numarea; i++) {
        modelYearData.resize(new DoubleMatrix(numage, numlen, 0.0));
        obsYearData.resize(new DoubleMatrix(numage, numlen, 0.0));
      }
      break;
    case 1:
    case 5:
    case 6:
      if (yearly)
        handle.logMessage(LOGWARN, "Warning in catchdistribution - yearly aggregation is ignored for function", functionname);
      yearly = 0;
      break;
    default:
      handle.logMessage(LOGWARN, "Warning in catchdistribution - unrecognised function", functionname);
      break;
  }
}

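//Read the observed catch distribution data; each line of the data file is
//expected to contain six columns: year, step, area label, age label,
//length label and the observed number.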
void CatchDistribution::readDistributionData(CommentStream& infile,
  const TimeClass* TimeInfo, int numarea, int numage, int numlen) {

  int i, year, step;
  double tmpnumber;
  char tmparea[MaxStrLength], tmpage[MaxStrLength], tmplen[MaxStrLength];
  strncpy(tmparea, "", MaxStrLength);
  strncpy(tmpage, "", MaxStrLength);
  strncpy(tmplen, "", MaxStrLength);
  int keepdata, timeid, ageid, areaid, lenid, count, reject;

  //Check the number of columns in the input file
  infile >> ws;
  if (countColumns(infile) != 6)
    handle.logFileMessage(LOGFAIL, "wrong number of columns in inputfile - should be 6");

  year = step = count = reject = 0;
  while (!infile.eof()) {
    keepdata = 1;
    infile >> year >> step >> tmparea >> tmpage >> tmplen >> tmpnumber >> ws;

    //crude check to see if something has gone wrong and avoid infinite loops
    if (strlen(tmparea) == 0)
      handle.logFileMessage(LOGFAIL, "failed to read data from file");

    //if tmparea is in areaindex find areaid, else don't keep the data
    areaid = -1;
    for (i = 0; i < areaindex.Size(); i++)
      if (strcasecmp(areaindex[i], tmparea) == 0)
        areaid = i;

    if (areaid == -1)
      keepdata = 0;

    //if tmpage is in ageindex find ageid, else don't keep the data
    ageid = -1;
    for (i = 0; i < ageindex.Size(); i++)
      if (strcasecmp(ageindex[i], tmpage) == 0)
        ageid = i;

    if (ageid == -1)
      keepdata = 0;

    //if tmplen is in lenindex find lenid, else don't keep the data
    lenid = -1;
    for (i = 0; i < lenindex.Size(); i++)
      if (strcasecmp(lenindex[i], tmplen) == 0)
        lenid = i;

    if (lenid == -1)
      keepdata = 0;

    //check if the year and step are in the simulation
    timeid = -1;
    if ((TimeInfo->isWithinPeriod(year, step)) && (keepdata == 1)) {
      //if this is a new timestep, resize to store the data
      for (i = 0; i < Years.Size(); i++)
        if ((Years[i] == year) && (Steps[i] == step))
          timeid = i;

      if (timeid == -1) {
        Years.resize(1, year);
        Steps.resize(1, step);
        timeid = (Years.Size() - 1);

        obsDistribution.resize();
        modelDistribution.resize();
        likelihoodValues.AddRows(1, numarea, 0.0);
        for (i = 0; i < numarea; i++) {
          obsDistribution[timeid].resize(new DoubleMatrix(numage, numlen, 0.0));
          modelDistribution[timeid].resize(new DoubleMatrix(numage, numlen, 0.0));
        }
      }

    } else
      keepdata = 0;

    if (keepdata == 1) {
      //distribution data is required, so store it
      count++;
      (*obsDistribution[timeid][areaid])[ageid][lenid] = tmpnumber;
    } else
      reject++;  //count number of rejected data points read from file
  }

  AAT.addActions(Years, Steps, TimeInfo);
  if (count == 0)
    handle.logMessage(LOGWARN, "Warning in catchdistribution - found no data in the data file for", this->getName());
  if (reject != 0)
    handle.logMessage(LOGMESSAGE, "Discarded invalid catchdistribution data - number of invalid entries", reject);
  handle.logMessage(LOGMESSAGE, "Read catchdistribution data file - number of entries", count);
}

CatchDistribution::~CatchDistribution() {
  int i, j;
  for (i = 0; i < stocknames.Size(); i++)
    delete[] stocknames[i];
  for (i = 0; i < fleetnames.Size(); i++)
    delete[] fleetnames[i];
  for (i = 0; i < areaindex.Size(); i++)
    delete[] areaindex[i];
  for (i = 0; i < ageindex.Size(); i++)
    delete[] ageindex[i];
  for (i = 0; i < lenindex.Size(); i++)
    delete[] lenindex[i];
  for (i = 0; i < obsDistribution.Nrow(); i++)
    for (j = 0; j < obsDistribution.Ncol(i); j++) {
      delete obsDistribution[i][j];
      delete modelDistribution[i][j];
    }
  for (i = 0; i < modelYearData.Size(); i++) {
    delete modelYearData[i];
    delete obsYearData[i];
  }
  delete aggregator;
  delete LgrpDiv;
  delete[] functionname;
}

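//Reset the likelihood information, zeroing the stored modelled distributions
//and, for the multivariate normal function, recalculating the correlation matrix.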
void CatchDistribution::Reset(const Keeper* const keeper) {
  Likelihood::Reset(keeper);
  if (isZero(weight))
    handle.logMessage(LOGWARN, "Warning in catchdistribution - zero weight for", this->getName());

  int i, j;
  for (i = 0; i < modelDistribution.Nrow(); i++)
    for (j = 0; j < modelDistribution.Ncol(i); j++)
      (*modelDistribution[i][j]).setToZero();
  if (yearly)
    for (i = 0; i < modelYearData.Size(); i++) {
      (*modelYearData[i]).setToZero();
      (*obsYearData[i]).setToZero();
    }

  switch (functionnumber) {
    case 2:
    case 3:
    case 4:
    case 6:
    case 7:
    case 8:
      break;
    case 1:
      MN.setValue(epsilon);
      break;
    case 5:
      illegal = 0;
      this->calcCorrelation();
      if ((illegal) || (LU.isIllegal()))
        handle.logMessage(LOGWARN, "Warning in catchdistribution - multivariate normal out of bounds");
      break;
    default:
      handle.logMessage(LOGWARN, "Warning in catchdistribution - unrecognised function", functionname);
      break;
  }

  if (handle.getLogLevel() >= LOGMESSAGE)
    handle.logMessage(LOGMESSAGE, "Reset catchdistribution component", this->getName());
}

void CatchDistribution::Print(ofstream& outfile) const {

  int i;
  outfile << "\nCatch Distribution " << this->getName() << " - likelihood value " << likelihood
    << "\n\tFunction " << functionname << "\n\tStock names:";
  for (i = 0; i < stocknames.Size(); i++)
    outfile << sep << stocknames[i];
  outfile << "\n\tFleet names:";
  for (i = 0; i < fleetnames.Size(); i++)
    outfile << sep << fleetnames[i];
  outfile << endl;

  switch (functionnumber) {
    case 1:
    case 2:
    case 3:
    case 4:
    case 7:
    case 8:
      break;
    case 5:
      outfile << "\tMultivariate normal distribution parameters: sigma " << sigma;
      for (i = 0; i < lag; i++)
        outfile << " param" << i + 1 << " " << params[i];
      outfile << endl;
      break;
    case 6:
      outfile << "\tMultivariate logistic distribution parameter: sigma " << sigma << endl;
      break;
    default:
      handle.logMessage(LOGWARN, "Warning in catchdistribution - unrecognised function", functionname);
      break;
  }

  aggregator->Print(outfile);
  outfile.flush();
}

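//Print the modelled catch distribution for the current timestep in the
//standard column format: year, step, area, age, length and number.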
void CatchDistribution::printLikelihood(ofstream& outfile, const TimeClass* const TimeInfo) {

  if (!AAT.atCurrentTime(TimeInfo))
    return;

  int i, area, age, len;
  timeindex = -1;
  for (i = 0; i < Years.Size(); i++)
    if ((Years[i] == TimeInfo->getYear()) && (Steps[i] == TimeInfo->getStep()))
      timeindex = i;
  if (timeindex == -1)
    handle.logMessage(LOGFAIL, "Error in catchdistribution - invalid timestep");

  for (area = 0; area < modelDistribution.Ncol(timeindex); area++) {
    for (age = 0; age < modelDistribution[timeindex][area]->Nrow(); age++) {
      for (len = 0; len < modelDistribution[timeindex][area]->Ncol(age); len++) {
        outfile << setw(lowwidth) << Years[timeindex] << sep << setw(lowwidth)
          << Steps[timeindex] << sep << setw(printwidth) << areaindex[area] << sep
          << setw(printwidth) << ageindex[age] << sep << setw(printwidth)
          << lenindex[len] << sep << setprecision(largeprecision) << setw(largewidth);

        //JMB crude filter to remove the 'silly' values from the output
        if ((*modelDistribution[timeindex][area])[age][len] < rathersmall)
          outfile << 0 << endl;
        else
          outfile << (*modelDistribution[timeindex][area])[age][len] << endl;
      }
    }
  }
}

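//Match the fleet and stock names read from the input file against the fleets
//and stocks in the model, check that the areas, ages and lengths are consistent,
//and create the FleetPreyAggregator used to sum the modelled catches.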
void CatchDistribution::setFleetsAndStocks(FleetPtrVector& Fleets, StockPtrVector& Stocks) {
  int i, j, k, found, minage, maxage;
  FleetPtrVector fleets;
  StockPtrVector stocks;

  for (i = 0; i < fleetnames.Size(); i++) {
    found = 0;
    for (j = 0; j < Fleets.Size(); j++) {
      if (strcasecmp(fleetnames[i], Fleets[j]->getName()) == 0) {
        found++;
        fleets.resize(Fleets[j]);
      }
    }
    if (found == 0)
      handle.logMessage(LOGFAIL, "Error in catchdistribution - unrecognised fleet", fleetnames[i]);
  }

  for (i = 0; i < fleets.Size(); i++)
    for (j = 0; j < fleets.Size(); j++)
      if ((strcasecmp(fleets[i]->getName(), fleets[j]->getName()) == 0) && (i != j))
        handle.logMessage(LOGFAIL, "Error in catchdistribution - repeated fleet", fleets[i]->getName());

  for (i = 0; i < stocknames.Size(); i++) {
    found = 0;
    for (j = 0; j < Stocks.Size(); j++) {
      if (Stocks[j]->isEaten()) {
        if (strcasecmp(stocknames[i], Stocks[j]->getName()) == 0) {
          found++;
          stocks.resize(Stocks[j]);
        }
      }
    }
    if (found == 0)
      handle.logMessage(LOGFAIL, "Error in catchdistribution - unrecognised stock", stocknames[i]);
  }

  for (i = 0; i < stocks.Size(); i++)
    for (j = 0; j < stocks.Size(); j++)
      if ((strcasecmp(stocks[i]->getName(), stocks[j]->getName()) == 0) && (i != j))
        handle.logMessage(LOGFAIL, "Error in catchdistribution - repeated stock", stocks[i]->getName());

  //check fleet areas and stock areas, ages and lengths
  if (handle.getLogLevel() >= LOGWARN) {
    for (j = 0; j < areas.Nrow(); j++) {
      found = 0;
      for (i = 0; i < fleets.Size(); i++)
        for (k = 0; k < areas.Ncol(j); k++)
          if (fleets[i]->isInArea(areas[j][k]))
            found++;
      if (found == 0)
        handle.logMessage(LOGWARN, "Warning in catchdistribution - fleet not defined on all areas");
    }

    for (j = 0; j < areas.Nrow(); j++) {
      found = 0;
      for (i = 0; i < stocks.Size(); i++)
        for (k = 0; k < areas.Ncol(j); k++)
          if (stocks[i]->isInArea(areas[j][k]))
            found++;
      if (found == 0)
        handle.logMessage(LOGWARN, "Warning in catchdistribution - stock not defined on all areas");
    }

    minage = 9999;
    maxage = 0;
    for (i = 0; i < ages.Nrow(); i++) {
      for (j = 0; j < ages.Ncol(i); j++) {
        minage = min(ages[i][j], minage);
        maxage = max(ages[i][j], maxage);
      }
    }

    found = 0;
    for (i = 0; i < stocks.Size(); i++)
      if (minage >= stocks[i]->minAge())
        found++;
    if (found == 0)
      handle.logMessage(LOGWARN, "Warning in catchdistribution - minimum age less than stock age");

    found = 0;
    for (i = 0; i < stocks.Size(); i++)
      if (maxage <= stocks[i]->maxAge())
        found++;
    if (found == 0)
      handle.logMessage(LOGWARN, "Warning in catchdistribution - maximum age greater than stock age");

    found = 0;
    for (i = 0; i < stocks.Size(); i++)
      if (LgrpDiv->maxLength(0) > stocks[i]->getLengthGroupDiv()->minLength())
        found++;
    if (found == 0)
      handle.logMessage(LOGWARN, "Warning in catchdistribution - minimum length group less than stock length");

    found = 0;
    for (i = 0; i < stocks.Size(); i++)
      if (LgrpDiv->minLength(LgrpDiv->numLengthGroups()) < stocks[i]->getLengthGroupDiv()->maxLength())
        found++;
    if (found == 0)
      handle.logMessage(LOGWARN, "Warning in catchdistribution - maximum length group greater than stock length");
  }

  aggregator = new FleetPreyAggregator(fleets, stocks, LgrpDiv, areas, ages, overconsumption);
}

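//Calculate the likelihood contribution for the current timestep by summing the
//modelled catches with the aggregator and dispatching to the selected function.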
void CatchDistribution::addLikelihood(const TimeClass* const TimeInfo) {

  if ((!(AAT.atCurrentTime(TimeInfo))) || (isZero(weight)))
    return;

  if ((handle.getLogLevel() >= LOGMESSAGE) && ((!yearly) || (TimeInfo->getStep() == TimeInfo->numSteps())))
    handle.logMessage(LOGMESSAGE, "Calculating likelihood score for catchdistribution component", this->getName());

  int i;
  timeindex = -1;
  for (i = 0; i < Years.Size(); i++)
    if ((Years[i] == TimeInfo->getYear()) && (Steps[i] == TimeInfo->getStep()))
      timeindex = i;
  if (timeindex == -1)
    handle.logMessage(LOGFAIL, "Error in catchdistribution - invalid timestep");

  aggregator->Sum();
  if ((handle.getLogLevel() >= LOGWARN) && (aggregator->checkCatchData()))
    handle.logMessage(LOGWARN, "Warning in catchdistribution - zero catch found");
  alptr = &aggregator->getSum();

  double l = 0.0;
  switch (functionnumber) {
    case 1:
      l = calcLikMultinomial();
      break;
    case 2:
      l = calcLikPearson(TimeInfo);
      break;
    case 3:
      l = calcLikGamma(TimeInfo);
      break;
    case 4:
      l = calcLikSumSquares(TimeInfo);
      break;
    case 5:
      l = calcLikMVNormal();
      break;
    case 6:
      l = calcLikMVLogistic();
      break;
    case 7:
      l = calcLikLog(TimeInfo);
      break;
    case 8:
      l = calcLikStratified(TimeInfo);
      break;
    default:
      handle.logMessage(LOGWARN, "Warning in catchdistribution - unrecognised function", functionname);
      break;
  }

  if ((!yearly) || (TimeInfo->getStep() == TimeInfo->numSteps())) {
    likelihood += l;
    if (handle.getLogLevel() >= LOGMESSAGE)
      handle.logMessage(LOGMESSAGE, "The likelihood score for this component on this timestep is", l);
  }
}

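//Multinomial likelihood - the Multinomial object accumulates the log-likelihood,
//either over the length distribution (when there is a single age group) or over
//the age distribution within each length group.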
double CatchDistribution::calcLikMultinomial() {
  int area, age, len;
  int numage = ages.Nrow();
  int numlen = LgrpDiv->numLengthGroups();
  DoubleVector dist(numage, 0.0);
  DoubleVector data(numage, 0.0);

  MN.Reset();
  //the object MN does most of the work, accumulating likelihood
  for (area = 0; area < areas.Nrow(); area++) {
    likelihoodValues[timeindex][area] = 0.0;
    for (age = (*alptr)[area].minAge(); age <= (*alptr)[area].maxAge(); age++)
      for (len = (*alptr)[area].minLength(age); len < (*alptr)[area].maxLength(age); len++)
        (*modelDistribution[timeindex][area])[age][len] = ((*alptr)[area][age][len]).N;

    if (numage == 1) {
      //only one age-group, so calculate multinomial based on length distribution
      likelihoodValues[timeindex][area] +=
        MN.calcLogLikelihood((*obsDistribution[timeindex][area])[0],
          (*modelDistribution[timeindex][area])[0]);

    } else {
      //many age-groups, so calculate multinomial based on age distribution per length group
      for (len = 0; len < numlen; len++) {
        for (age = 0; age < numage; age++) {
          dist[age] = (*modelDistribution[timeindex][area])[age][len];
          data[age] = (*obsDistribution[timeindex][area])[age][len];
        }
        likelihoodValues[timeindex][area] += MN.calcLogLikelihood(data, dist);
      }
    }
  }
  return MN.getLogLikelihood();
}

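//Pearson likelihood - the contribution from each area is the sum over ages and
//lengths of (model - obs)^2 / |model + epsilon|, calculated either on every
//timestep or accumulated over the year when yearly aggregation is requested.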
double CatchDistribution::calcLikPearson(const TimeClass* const TimeInfo) {
  /* written by Hoskuldur Bjornsson 29/8 98
   * corrected by kgf 16/9 98
   * modified by kgf 11/11 98 to make it possible to sum up catches
   * and calculated catches on a yearly basis.
   * Modified 3/5 99 by kgf to check the age intervals of the stock
   * and the catch data, and make use of the ages that are common
   * to the stock and the catch data. */

  double totallikelihood = 0.0;
  int age, len, area;

  for (area = 0; area < areas.Nrow(); area++) {
    likelihoodValues[timeindex][area] = 0.0;

    //JMB - changed to remove the need to store minrow and mincol stuff ...
    for (age = (*alptr)[area].minAge(); age <= (*alptr)[area].maxAge(); age++)
      for (len = (*alptr)[area].minLength(age); len < (*alptr)[area].maxLength(age); len++)
        (*modelDistribution[timeindex][area])[age][len] = (*alptr)[area][age][len].N;

    if (!yearly) { //calculate likelihood on all steps
      for (age = (*alptr)[area].minAge(); age <= (*alptr)[area].maxAge(); age++) {
        for (len = (*alptr)[area].minLength(age); len < (*alptr)[area].maxLength(age); len++) {
          likelihoodValues[timeindex][area] +=
            ((*modelDistribution[timeindex][area])[age][len] -
              (*obsDistribution[timeindex][area])[age][len]) *
            ((*modelDistribution[timeindex][area])[age][len] -
              (*obsDistribution[timeindex][area])[age][len]) /
            fabs(((*modelDistribution[timeindex][area])[age][len] + epsilon));
        }
      }
      totallikelihood += likelihoodValues[timeindex][area];

    } else { //calculate likelihood on year basis

      if (TimeInfo->getStep() == 1) { //start of a new year
        (*modelYearData[area]).setToZero();
        (*obsYearData[area]).setToZero();
      }

      for (age = (*alptr)[area].minAge(); age <= (*alptr)[area].maxAge(); age++) {
        for (len = (*alptr)[area].minLength(age); len < (*alptr)[area].maxLength(age); len++) {
          (*modelYearData[area])[age][len] += (*modelDistribution[timeindex][area])[age][len];
          (*obsYearData[area])[age][len] += (*obsDistribution[timeindex][area])[age][len];
        }
      }

      if (TimeInfo->getStep() < TimeInfo->numSteps())
        likelihoodValues[timeindex][area] = 0.0;
      else { //last step in year, so need to calc likelihood contribution
        for (age = (*alptr)[area].minAge(); age <= (*alptr)[area].maxAge(); age++) {
          for (len = (*alptr)[area].minLength(age); len < (*alptr)[area].maxLength(age); len++) {
            likelihoodValues[timeindex][area] +=
              ((*modelYearData[area])[age][len] - (*obsYearData[area])[age][len]) *
              ((*modelYearData[area])[age][len] - (*obsYearData[area])[age][len]) /
              fabs(((*modelYearData[area])[age][len] + epsilon));
          }
        }
        totallikelihood += likelihoodValues[timeindex][area];
      }
    }
  }
  return totallikelihood;
}

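//Gamma likelihood - the contribution from each cell is
//obs / (model + epsilon) + log(model + epsilon), which makes the
//function independent of the scale of the data.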
double CatchDistribution::calcLikGamma(const TimeClass* const TimeInfo) {
  //written by kgf 24/5 00
  //Formula by Hans J Skaug 15/3 00. No weighting at present.
  //This function is scale independent.

  double totallikelihood = 0.0;
  int age, len, area;

  for (area = 0; area < areas.Nrow(); area++) {
    likelihoodValues[timeindex][area] = 0.0;

    //JMB - changed to remove the need to store minrow and mincol stuff ...
    for (age = (*alptr)[area].minAge(); age <= (*alptr)[area].maxAge(); age++)
      for (len = (*alptr)[area].minLength(age); len < (*alptr)[area].maxLength(age); len++)
        (*modelDistribution[timeindex][area])[age][len] = (*alptr)[area][age][len].N;

    if (!yearly) { //calculate likelihood on all steps
      for (age = (*alptr)[area].minAge(); age <= (*alptr)[area].maxAge(); age++) {
        for (len = (*alptr)[area].minLength(age); len < (*alptr)[area].maxLength(age); len++) {
          likelihoodValues[timeindex][area] +=
            (*obsDistribution[timeindex][area])[age][len] /
            ((*modelDistribution[timeindex][area])[age][len] + epsilon) +
            log((*modelDistribution[timeindex][area])[age][len] + epsilon);
        }
      }
      totallikelihood += likelihoodValues[timeindex][area];

    } else { //calculate likelihood on year basis

      if (TimeInfo->getStep() == 1) { //start of a new year
        (*modelYearData[area]).setToZero();
        (*obsYearData[area]).setToZero();
      }

      for (age = (*alptr)[area].minAge(); age <= (*alptr)[area].maxAge(); age++) {
        for (len = (*alptr)[area].minLength(age); len < (*alptr)[area].maxLength(age); len++) {
          (*modelYearData[area])[age][len] += (*modelDistribution[timeindex][area])[age][len];
          (*obsYearData[area])[age][len] += (*obsDistribution[timeindex][area])[age][len];
        }
      }

      if (TimeInfo->getStep() < TimeInfo->numSteps())
        likelihoodValues[timeindex][area] = 0.0;
      else { //last step in year, so need to calc likelihood contribution
        for (age = (*alptr)[area].minAge(); age <= (*alptr)[area].maxAge(); age++) {
          for (len = (*alptr)[area].minLength(age); len < (*alptr)[area].maxLength(age); len++) {
            likelihoodValues[timeindex][area] +=
              (*obsYearData[area])[age][len] / ((*modelYearData[area])[age][len] + epsilon) +
              log((*modelYearData[area])[age][len] + epsilon);
          }
        }
        totallikelihood += likelihoodValues[timeindex][area];
      }
    }
  }
  return totallikelihood;
}

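//Log likelihood - the modelled and observed catches are summed over the area
//(and optionally over the year) before taking the ratio, and the contribution
//is the square of log(total observed / total modelled).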
double CatchDistribution::calcLikLog(const TimeClass* const TimeInfo) {
  //written by kgf 23/11 98 to get a better scaling of the stocks.
  //modified by kgf 27/11 98 to sum first and then take the logarithm

  double totallikelihood = 0.0;
  int area, age, len;
  double totalmodel, totaldata, ratio;

  for (area = 0; area < areas.Nrow(); area++) {
    likelihoodValues[timeindex][area] = 0.0;
    totalmodel = 0.0;
    totaldata = 0.0;

    //JMB - changed to remove the need to store minrow and mincol stuff ...
    for (age = (*alptr)[area].minAge(); age <= (*alptr)[area].maxAge(); age++)
      for (len = (*alptr)[area].minLength(age); len < (*alptr)[area].maxLength(age); len++)
        (*modelDistribution[timeindex][area])[age][len] = (*alptr)[area][age][len].N;

    if (!yearly) { //calculate likelihood on all steps
      for (age = (*alptr)[area].minAge(); age <= (*alptr)[area].maxAge(); age++) {
        for (len = (*alptr)[area].minLength(age); len < (*alptr)[area].maxLength(age); len++) {
          totalmodel += (*modelDistribution[timeindex][area])[age][len];
          totaldata += (*obsDistribution[timeindex][area])[age][len];
        }
      }
      ratio = log(totaldata / totalmodel);
      likelihoodValues[timeindex][area] += (ratio * ratio);

    } else { //calculate likelihood on year basis

      if (TimeInfo->getStep() == 1) { //start of a new year
        (*modelYearData[area]).setToZero();
        (*obsYearData[area]).setToZero();
      }

      for (age = (*alptr)[area].minAge(); age <= (*alptr)[area].maxAge(); age++) {
        for (len = (*alptr)[area].minLength(age); len < (*alptr)[area].maxLength(age); len++) {
          (*modelYearData[area])[age][len] += (*modelDistribution[timeindex][area])[age][len];
          (*obsYearData[area])[age][len] += (*obsDistribution[timeindex][area])[age][len];
        }
      }

      if (TimeInfo->getStep() < TimeInfo->numSteps())
        likelihoodValues[timeindex][area] = 0.0;
      else { //last step in year, so need to calculate likelihood contribution
        for (age = (*alptr)[area].minAge(); age <= (*alptr)[area].maxAge(); age++) {
          for (len = (*alptr)[area].minLength(age); len < (*alptr)[area].maxLength(age); len++) {
            totalmodel += (*modelYearData[area])[age][len];
            totaldata += (*obsYearData[area])[age][len];
          }
        }
        ratio = log(totaldata / totalmodel);
        likelihoodValues[timeindex][area] += (ratio * ratio);
      }
    }
    totallikelihood += likelihoodValues[timeindex][area];
  }
  return totallikelihood;
}

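//Sum-of-squares likelihood - both the modelled and the observed distributions
//are scaled to proportions over the area before the squared differences are summed.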
double CatchDistribution::calcLikSumSquares(const TimeClass* const TimeInfo) {

  double temp, totallikelihood, totalmodel, totaldata;
  int age, len, area;

  totallikelihood = 0.0;
  for (area = 0; area < areas.Nrow(); area++) {
    likelihoodValues[timeindex][area] = 0.0;

    for (age = (*alptr)[area].minAge(); age <= (*alptr)[area].maxAge(); age++)
      for (len = (*alptr)[area].minLength(age); len < (*alptr)[area].maxLength(age); len++)
        (*modelDistribution[timeindex][area])[age][len] = ((*alptr)[area][age][len]).N;

    totalmodel = 0.0;
    totaldata = 0.0;
    if (!yearly) { //calculate likelihood on all steps
      for (age = (*alptr)[area].minAge(); age <= (*alptr)[area].maxAge(); age++) {
        for (len = (*alptr)[area].minLength(age); len < (*alptr)[area].maxLength(age); len++) {
          totalmodel += (*modelDistribution[timeindex][area])[age][len];
          totaldata += (*obsDistribution[timeindex][area])[age][len];
        }
      }

      if (!(isZero(totalmodel)))
        totalmodel = 1.0 / totalmodel;
      if (!(isZero(totaldata)))
        totaldata = 1.0 / totaldata;

      for (age = (*alptr)[area].minAge(); age <= (*alptr)[area].maxAge(); age++) {
        for (len = (*alptr)[area].minLength(age); len < (*alptr)[area].maxLength(age); len++) {
          temp = (((*obsDistribution[timeindex][area])[age][len] * totaldata)
            - ((*modelDistribution[timeindex][area])[age][len] * totalmodel));
          likelihoodValues[timeindex][area] += (temp * temp);
        }
      }
      totallikelihood += likelihoodValues[timeindex][area];

    } else { //calculate likelihood on year basis

      if (TimeInfo->getStep() == 1) { //start of a new year
        (*modelYearData[area]).setToZero();
        (*obsYearData[area]).setToZero();
      }

      for (age = (*alptr)[area].minAge(); age <= (*alptr)[area].maxAge(); age++) {
        for (len = (*alptr)[area].minLength(age); len < (*alptr)[area].maxLength(age); len++) {
          (*modelYearData[area])[age][len] += (*modelDistribution[timeindex][area])[age][len];
          (*obsYearData[area])[age][len] += (*obsDistribution[timeindex][area])[age][len];
        }
      }

      if (TimeInfo->getStep() < TimeInfo->numSteps())
        likelihoodValues[timeindex][area] = 0.0;
      else { //last step in year, so need to calculate likelihood contribution
        for (age = (*alptr)[area].minAge(); age <= (*alptr)[area].maxAge(); age++) {
          for (len = (*alptr)[area].minLength(age); len < (*alptr)[area].maxLength(age); len++) {
            totalmodel += (*modelYearData[area])[age][len];
            totaldata += (*obsYearData[area])[age][len];
          }
        }

        if (!(isZero(totalmodel)))
          totalmodel = 1.0 / totalmodel;
        if (!(isZero(totaldata)))
          totaldata = 1.0 / totaldata;

        for (age = (*alptr)[area].minAge(); age <= (*alptr)[area].maxAge(); age++) {
          for (len = (*alptr)[area].minLength(age); len < (*alptr)[area].maxLength(age); len++) {
            temp = (((*obsYearData[area])[age][len] * totaldata)
              - ((*modelYearData[area])[age][len] * totalmodel));
            likelihoodValues[timeindex][area] += (temp * temp);
          }
        }
        totallikelihood += likelihoodValues[timeindex][area];
      }
    }
  }
  return totallikelihood;
}

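//Stratified sum-of-squares - the proportions are calculated separately for the
//age distribution within each length group before the squared differences are summed.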
double CatchDistribution::calcLikStratified(const TimeClass* const TimeInfo) {

  int numage = ages.Nrow();
  int numlen = LgrpDiv->numLengthGroups();
  double temp, totallikelihood, totalmodel, totaldata;
  int age, len, area;

  totallikelihood = 0.0;
  for (area = 0; area < areas.Nrow(); area++) {
    likelihoodValues[timeindex][area] = 0.0;

    for (age = (*alptr)[area].minAge(); age <= (*alptr)[area].maxAge(); age++)
      for (len = (*alptr)[area].minLength(age); len < (*alptr)[area].maxLength(age); len++)
        (*modelDistribution[timeindex][area])[age][len] = ((*alptr)[area][age][len]).N;

    if (!yearly) { //calculate likelihood on all steps
      //calculate an age distribution for each length class
      for (len = 0; len < numlen; len++) {
        totalmodel = 0.0;
        totaldata = 0.0;
        for (age = 0; age < numage; age++) {
          totalmodel += (*modelDistribution[timeindex][area])[age][len];
          totaldata += (*obsDistribution[timeindex][area])[age][len];
        }

        if (!(isZero(totalmodel)))
          totalmodel = 1.0 / totalmodel;
        if (!(isZero(totaldata)))
          totaldata = 1.0 / totaldata;

        for (age = 0; age < numage; age++) {
          temp = (((*obsDistribution[timeindex][area])[age][len] * totaldata)
            - ((*modelDistribution[timeindex][area])[age][len] * totalmodel));
          likelihoodValues[timeindex][area] += (temp * temp);
        }
      }
      totallikelihood += likelihoodValues[timeindex][area];

    } else { //calculate likelihood on year basis

      if (TimeInfo->getStep() == 1) { //start of a new year
        (*modelYearData[area]).setToZero();
        (*obsYearData[area]).setToZero();
      }

      for (age = (*alptr)[area].minAge(); age <= (*alptr)[area].maxAge(); age++) {
        for (len = (*alptr)[area].minLength(age); len < (*alptr)[area].maxLength(age); len++) {
          (*modelYearData[area])[age][len] += (*modelDistribution[timeindex][area])[age][len];
          (*obsYearData[area])[age][len] += (*obsDistribution[timeindex][area])[age][len];
        }
      }

      if (TimeInfo->getStep() < TimeInfo->numSteps())
        likelihoodValues[timeindex][area] = 0.0;
      else { //last step in year, so need to calculate likelihood contribution
        //calculate an age distribution for each length class
        for (len = 0; len < numlen; len++) {
          totalmodel = 0.0;
          totaldata = 0.0;
          for (age = 0; age < numage; age++) {
            totalmodel += (*modelYearData[area])[age][len];
            totaldata += (*obsYearData[area])[age][len];
          }

          if (!(isZero(totalmodel)))
            totalmodel = 1.0 / totalmodel;
          if (!(isZero(totaldata)))
            totaldata = 1.0 / totaldata;

          for (age = 0; age < numage; age++) {
            temp = (((*obsYearData[area])[age][len] * totaldata)
              - ((*modelYearData[area])[age][len] * totalmodel));
            likelihoodValues[timeindex][area] += (temp * temp);
          }
        }
        totallikelihood += likelihoodValues[timeindex][area];
      }
    }
  }
  return totallikelihood;
}

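//Build the correlation matrix used by the multivariate normal function from the
//lag parameters and sigma, and store its LU decomposition; the parameters are
//flagged as illegal if any (param - 1) lies outside [-1, 1].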
void CatchDistribution::calcCorrelation() {
  int i, j, l, p;
  p = LgrpDiv->numLengthGroups();
  DoubleMatrix correlation(p, p, 0.0);

  for (i = 0; i < lag; i++)
    if (fabs(params[i] - 1.0) > 1.0)
      illegal = 1;

  if (!illegal) {
    for (i = 0; i < p; i++) {
      for (j = 0; j <= i; j++) {
        for (l = 1; l <= lag; l++) {
          if ((i - l) >= 0) {
            correlation[i][j] += (params[l - 1] - 1.0) * correlation[i - l][j];
            correlation[j][i] += (params[l - 1] - 1.0) * correlation[i - l][j];
          }
        }
      }
      correlation[i][i] += sigma * sigma;
    }
    LU = LUDecomposition(correlation);
  }
}

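//Multivariate normal likelihood - for each age group the vector of differences
//between the observed and modelled proportions is weighted by the inverse of the
//correlation matrix (solved through the LU decomposition), and the log-determinant
//term is added once for each area.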
double CatchDistribution::calcLikMVNormal() {

  double totallikelihood = 0.0;
  double sumdata, sumdist;
  int age, len, area;

  if ((illegal) || (LU.isIllegal()) || isZero(sigma))
    return verybig;

  DoubleVector diff(LgrpDiv->numLengthGroups(), 0.0);
  for (area = 0; area < areas.Nrow(); area++) {
    sumdata = 0.0;
    sumdist = 0.0;
    likelihoodValues[timeindex][area] = 0.0;
    for (age = (*alptr)[area].minAge(); age <= (*alptr)[area].maxAge(); age++) {
      for (len = (*alptr)[area].minLength(age); len < (*alptr)[area].maxLength(age); len++) {
        (*modelDistribution[timeindex][area])[age][len] = ((*alptr)[area][age][len]).N;
        sumdata += (*obsDistribution[timeindex][area])[age][len];
        sumdist += (*modelDistribution[timeindex][area])[age][len];
      }
    }

    if (isZero(sumdata))
      sumdata = verybig;
    else
      sumdata = 1.0 / sumdata;
    if (isZero(sumdist))
      sumdist = verybig;
    else
      sumdist = 1.0 / sumdist;

    for (age = (*alptr)[area].minAge(); age <= (*alptr)[area].maxAge(); age++) {
      diff.setToZero();
      for (len = (*alptr)[area].minLength(age); len < (*alptr)[area].maxLength(age); len++)
        diff[len] = ((*obsDistribution[timeindex][area])[age][len] * sumdata)
          - ((*modelDistribution[timeindex][area])[age][len] * sumdist);

      likelihoodValues[timeindex][area] += diff * LU.Solve(diff);
    }
    totallikelihood += likelihoodValues[timeindex][area];
  }

  totallikelihood += LU.getLogDet() * alptr->Size();
  return totallikelihood;
}

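//Multivariate logistic likelihood - the log-ratios of the observed and modelled
//proportions are centred within each age group, their squared deviations summed,
//and the total scaled by sigma.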
double CatchDistribution::calcLikMVLogistic() {

  double totallikelihood = 0.0;
  double sumdata = 0.0, sumdist = 0.0, sumnu = 0.0;
  int age, len, area, p;

  p = LgrpDiv->numLengthGroups();
  DoubleVector nu(p, 0.0);

  for (area = 0; area < areas.Nrow(); area++) {
    likelihoodValues[timeindex][area] = 0.0;
    for (age = (*alptr)[area].minAge(); age <= (*alptr)[area].maxAge(); age++) {
      sumdata = 0.0;
      sumdist = 0.0;
      for (len = (*alptr)[area].minLength(age); len < (*alptr)[area].maxLength(age); len++) {
        (*modelDistribution[timeindex][area])[age][len] = ((*alptr)[area][age][len]).N;
        sumdata += (*obsDistribution[timeindex][area])[age][len];
        sumdist += (*modelDistribution[timeindex][area])[age][len];
      }

      if (isZero(sumdata))
        sumdata = verybig;
      else
        sumdata = 1.0 / sumdata;
      if (isZero(sumdist))
        sumdist = verybig;
      else
        sumdist = 1.0 / sumdist;

      sumnu = 0.0;
      nu.setToZero();
      for (len = (*alptr)[area].minLength(age); len < (*alptr)[area].maxLength(age); len++) {
        nu[len] = log(((*obsDistribution[timeindex][area])[age][len] * sumdata) + verysmall)
          - log(((*modelDistribution[timeindex][area])[age][len] * sumdist) + verysmall);

        sumnu += nu[len];
      }
      sumnu = sumnu / p;

      for (len = (*alptr)[area].minLength(age); len < (*alptr)[area].maxLength(age); len++)
        likelihoodValues[timeindex][area] += (nu[len] - sumnu) * (nu[len] - sumnu);
    }
    totallikelihood += likelihoodValues[timeindex][area];
  }

  if (isZero(sigma)) {
    handle.logMessage(LOGWARN, "Warning in catchdistribution - multivariate logistic sigma is zero");
    return verybig;
  }

  totallikelihood = (totallikelihood / (sigma * sigma)) + (log(sigma) * (p - 1));
  return totallikelihood;
}

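//Print a one-line summary of the likelihood score for each year and area; when
//the component is aggregated yearly, only the value from the last step of the
//year is reported.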
void CatchDistribution::printSummary(ofstream& outfile) {
  int year, area;

  for (year = 0; year < likelihoodValues.Nrow(); year++) {
    for (area = 0; area < likelihoodValues.Ncol(year); area++) {
      if (!yearly) {
        outfile << setw(lowwidth) << Years[year] << sep << setw(lowwidth)
          << Steps[year] << sep << setw(printwidth) << areaindex[area] << sep
          << setw(largewidth) << this->getName() << sep << setw(smallwidth) << weight
          << sep << setprecision(largeprecision) << setw(largewidth)
          << likelihoodValues[year][area] << endl;
      } else {
        if (isZero(likelihoodValues[year][area])) {
          //assume that this isn't the last step for that year and ignore it
        } else {
          outfile << setw(lowwidth) << Years[year] << " all "
            << setw(printwidth) << areaindex[area] << sep
            << setw(largewidth) << this->getName() << sep << setprecision(smallprecision)
            << setw(smallwidth) << weight << sep << setprecision(largeprecision)
            << setw(largewidth) << likelihoodValues[year][area] << endl;
        }
      }
    }
  }
  outfile.flush();
}
