1 : |
agomez |
1 |
#include "areatime.h" |
2 : |
|
|
#include "recapture.h" |
3 : |
|
|
#include "fleet.h" |
4 : |
|
|
#include "stock.h" |
5 : |
|
|
#include "stockprey.h" |
6 : |
|
|
#include "errorhandler.h" |
7 : |
|
|
#include "mathfunc.h" |
8 : |
|
|
#include "readfunc.h" |
9 : |
|
|
#include "readword.h" |
10 : |
|
|
#include "readaggregation.h" |
11 : |
|
|
#include "gadget.h" |
12 : |
|
|
#include "global.h" |
13 : |
|
|
|
14 : |
|
|
// Recaptures constructor.  Reads the specification of this likelihood
// component from the main likelihood file: the name of the data file, the
// likelihood function ("poisson" is the only recognised function), the area
// and length aggregation files and the list of fleet names.  It then reads
// the recapture observations from the data file and finally matches each
// tag name found in the data against the tagging experiments supplied in
// Tag, storing the matched experiments in tagvec.
// Any failure is reported through the global error handler (LOGFAIL aborts).
Recaptures::Recaptures(CommentStream& infile, const AreaClass* const Area,
  const TimeClass* const TimeInfo, double weight, TagPtrVector Tag, const char* name)
  : Likelihood(TAGLIKELIHOOD, weight, name), alptr(0) {

  aggregator = 0;
  char text[MaxStrLength];
  strncpy(text, "", MaxStrLength);
  int i, j, check;
  int numarea = 0, numlen = 0;

  char datafilename[MaxStrLength];
  char aggfilename[MaxStrLength];
  strncpy(datafilename, "", MaxStrLength);
  strncpy(aggfilename, "", MaxStrLength);
  ifstream datafile;
  CommentStream subdata(datafile);

  functionname = new char[MaxStrLength];
  strncpy(functionname, "", MaxStrLength);

  readWordAndValue(infile, "datafile", datafilename);
  readWordAndValue(infile, "function", functionname);
  // only the poisson likelihood function is implemented for this component
  if (strcasecmp(functionname, "poisson") == 0)
    functionnumber = 1;
  else
    handle.logFileMessage(LOGFAIL, "\nError in recaptures - unrecognised function", functionname);

  //read in area aggregation from file
  readWordAndValue(infile, "areaaggfile", aggfilename);
  datafile.open(aggfilename, ios::in);
  handle.checkIfFailure(datafile, aggfilename);
  handle.Open(aggfilename);
  numarea = readAggregation(subdata, areas, areaindex);
  handle.Close();
  datafile.close();
  datafile.clear();

  //Must change from outer areas to inner areas.
  for (i = 0; i < areas.Nrow(); i++)
    for (j = 0; j < areas.Ncol(i); j++)
      areas[i][j] = Area->getInnerArea(areas[i][j]);

  //read in length aggregation from file
  readWordAndValue(infile, "lenaggfile", aggfilename);
  datafile.open(aggfilename, ios::in);
  handle.checkIfFailure(datafile, aggfilename);
  handle.Open(aggfilename);
  numlen = readLengthAggregation(subdata, lengths, lenindex);
  handle.Close();
  datafile.close();
  datafile.clear();

  //read in the fleetnames
  i = 0;
  infile >> text >> ws;
  if (strcasecmp(text, "fleetnames") != 0)
    handle.logFileUnexpected(LOGFAIL, "fleetnames", text);
  infile >> text;
  // keep reading fleet names until the next likelihood component starts
  while (!infile.eof() && (strcasecmp(text, "[component]") != 0)) {
    fleetnames.resize(new char[strlen(text) + 1]);
    strcpy(fleetnames[i++], text);
    infile >> text >> ws;
  }
  if (fleetnames.Size() == 0)
    handle.logFileMessage(LOGFAIL, "\nError in recaptures - failed to read fleets");
  handle.logMessage(LOGMESSAGE, "Read fleet data - number of fleets", fleetnames.Size());

  //We have now read in all the data from the main likelihood file
  //But we have to read in the recapture data from datafilename
  datafile.open(datafilename, ios::in);
  handle.checkIfFailure(datafile, datafilename);
  handle.Open(datafilename);
  readRecaptureData(subdata, TimeInfo, numarea, numlen);
  handle.Close();
  datafile.close();
  datafile.clear();

  // match every tag name read from the data file to a tagging experiment
  for (j = 0; j < tagnames.Size(); j++) {
    check = 0;
    for (i = 0; i < Tag.Size(); i++) {
      if (strcasecmp(tagnames[j], Tag[i]->getName()) == 0) {
        check++;
        tagvec.resize(Tag[i]);
        //likelihoodValues.AddRows(1, Tag[i]->getNumTagTimeSteps(), 0.0);
      }
    }
    if (check == 0)
      handle.logMessage(LOGFAIL, "Error in recaptures - failed to match tag", tagnames[j]);
  }
}
|
104 : |
|
|
|
105 : |
|
|
void Recaptures::readRecaptureData(CommentStream& infile,
|
106 : |
|
|
const TimeClass* const TimeInfo, int numarea, int numlen) {
|
107 : |
|
|
|
108 : |
|
|
int i, j, k;
|
109 : |
|
|
double tmpnumber;
|
110 : |
|
|
char tmparea[MaxStrLength], tmplength[MaxStrLength], tmptagid[MaxStrLength];
|
111 : |
|
|
strncpy(tmparea, "", MaxStrLength);
|
112 : |
|
|
strncpy(tmplength, "", MaxStrLength);
|
113 : |
|
|
strncpy(tmptagid, "", MaxStrLength);
|
114 : |
|
|
int keepdata, timeid, areaid, lenid, tid;
|
115 : |
|
|
int year, step, count, reject;
|
116 : |
|
|
char* tagName;
|
117 : |
|
|
|
118 : |
|
|
infile >> ws;
|
119 : |
|
|
//Check the number of columns in the inputfile
|
120 : |
|
|
if (countColumns(infile) != 6)
|
121 : |
|
|
handle.logFileMessage(LOGFAIL, "wrong number of columns in inputfile - should be 6");
|
122 : |
|
|
|
123 : |
|
|
year = step = count = reject = 0;
|
124 : |
|
|
while (!infile.eof()) {
|
125 : |
|
|
keepdata = 1;
|
126 : |
|
|
infile >> tmptagid >> year >> step >> tmparea >> tmplength >> tmpnumber >> ws;
|
127 : |
|
|
|
128 : |
|
|
//crude check to see if something has gone wrong and avoid infinite loops
|
129 : |
|
|
if (strlen(tmparea) == 0)
|
130 : |
|
|
handle.logFileMessage(LOGFAIL, "failed to read data from file");
|
131 : |
|
|
|
132 : |
|
|
//if tmparea is in areaindex find areaid, else dont keep the data
|
133 : |
|
|
areaid = -1;
|
134 : |
|
|
for (i = 0; i < areaindex.Size(); i++)
|
135 : |
|
|
if (strcasecmp(areaindex[i], tmparea) == 0)
|
136 : |
|
|
areaid = i;
|
137 : |
|
|
|
138 : |
|
|
if (areaid == -1)
|
139 : |
|
|
keepdata = 0;
|
140 : |
|
|
|
141 : |
|
|
//if tmplength is in lenindex find lenid, else dont keep the data
|
142 : |
|
|
lenid = -1;
|
143 : |
|
|
for (i = 0; i < lenindex.Size(); i++)
|
144 : |
|
|
if (strcasecmp(lenindex[i], tmplength) == 0)
|
145 : |
|
|
lenid = i;
|
146 : |
|
|
|
147 : |
|
|
if (lenid == -1)
|
148 : |
|
|
keepdata = 0;
|
149 : |
|
|
|
150 : |
|
|
timeid = -1;
|
151 : |
|
|
tid = -1;
|
152 : |
|
|
if ((TimeInfo->isWithinPeriod(year, step)) && (keepdata == 1)) {
|
153 : |
|
|
//if this is a new tagging exp. then resize
|
154 : |
|
|
for (i = 0; i < tagnames.Size(); i++)
|
155 : |
|
|
if (strcasecmp(tagnames[i], tmptagid) == 0)
|
156 : |
|
|
tid = i;
|
157 : |
|
|
|
158 : |
|
|
if (tid == -1) {
|
159 : |
|
|
//if this is a new tagging experiment, resize to store the data
|
160 : |
|
|
tagName = new char[strlen(tmptagid) + 1];
|
161 : |
|
|
strcpy(tagName, tmptagid);
|
162 : |
|
|
tagnames.resize(tagName);
|
163 : |
|
|
tid = tagnames.Size() - 1;
|
164 : |
|
|
obsYears.AddRows(1, 1, year);
|
165 : |
|
|
obsSteps.AddRows(1, 1, step);
|
166 : |
|
|
timeid = 0;
|
167 : |
|
|
obsDistribution.resize();
|
168 : |
|
|
obsDistribution[tid].resize(new DoubleMatrix(numarea, numlen, 0.0));
|
169 : |
|
|
modelDistribution.resize();
|
170 : |
|
|
modelDistribution[tid].resize(new DoubleMatrix(numarea, numlen, 0.0));
|
171 : |
|
|
//JMB - add objects to allow for timesteps when no recpatures are found
|
172 : |
|
|
modYears.AddRows(1, 0, 0);
|
173 : |
|
|
modSteps.AddRows(1, 0, 0);
|
174 : |
|
|
newDistribution.resize();
|
175 : |
|
|
|
176 : |
|
|
} else {
|
177 : |
|
|
for (i = 0; i < obsYears[tid].Size(); i++)
|
178 : |
|
|
if ((obsYears[tid][i] == year) && (obsSteps[tid][i] == step))
|
179 : |
|
|
timeid = i;
|
180 : |
|
|
|
181 : |
|
|
//if this is a new timestep, resize to store the data
|
182 : |
|
|
if (timeid == -1) {
|
183 : |
|
|
obsYears[tid].resize(1, year);
|
184 : |
|
|
obsSteps[tid].resize(1, step);
|
185 : |
|
|
timeid = obsYears.Ncol(tid) - 1;
|
186 : |
|
|
obsDistribution[tid].resize(new DoubleMatrix(numarea, numlen, 0.0));
|
187 : |
|
|
modelDistribution[tid].resize(new DoubleMatrix(numarea, numlen, 0.0));
|
188 : |
|
|
}
|
189 : |
|
|
}
|
190 : |
|
|
|
191 : |
|
|
//finally store the number of the obsDistribution
|
192 : |
|
|
count++;
|
193 : |
|
|
(*obsDistribution[tid][timeid])[areaid][lenid] = tmpnumber;
|
194 : |
|
|
|
195 : |
|
|
} else
|
196 : |
|
|
reject++; //count number of rejected data points read from file
|
197 : |
|
|
}
|
198 : |
|
|
|
199 : |
|
|
if (count == 0)
|
200 : |
|
|
handle.logMessage(LOGWARN, "Warning in recaptures - found no data in the data file for", this->getName());
|
201 : |
|
|
if (reject != 0)
|
202 : |
|
|
handle.logMessage(LOGMESSAGE, "Discarded invalid recaptures data - number of invalid entries", reject);
|
203 : |
|
|
handle.logMessage(LOGMESSAGE, "Read recaptures data file - number of entries", count);
|
204 : |
|
|
}
|
205 : |
|
|
|
206 : |
|
|
// Destructor.  Releases everything this component allocated: the stored
// name strings, the observed/modelled/new distribution matrices, the
// per-tag aggregators and the function name buffer.
Recaptures::~Recaptures() {
  int r, c;
  for (r = 0; r < fleetnames.Size(); r++)
    delete[] fleetnames[r];
  for (r = 0; r < areaindex.Size(); r++)
    delete[] areaindex[r];
  for (r = 0; r < lenindex.Size(); r++)
    delete[] lenindex[r];
  for (r = 0; r < tagnames.Size(); r++)
    delete[] tagnames[r];
  // obsDistribution and modelDistribution share the same shape, so one
  // nested loop can free both
  for (r = 0; r < obsDistribution.Nrow(); r++) {
    for (c = 0; c < obsDistribution.Ncol(r); c++) {
      delete obsDistribution[r][c];
      delete modelDistribution[r][c];
    }
  }
  for (r = 0; r < newDistribution.Nrow(); r++)
    for (c = 0; c < newDistribution.Ncol(r); c++)
      delete newDistribution[r][c];
  if (aggregator != 0) {
    for (r = 0; r < tagvec.Size(); r++)
      delete aggregator[r];
    delete[] aggregator;
    aggregator = 0;
  }
  delete[] functionname;
}
|
233 : |
|
|
|
234 : |
|
|
// Reset the likelihood component at the start of a new run.  The base class
// resets the likelihood score, the matrices holding modelled recaptures
// with no matching observation are deleted, and the modelled distribution
// is zeroed so scores accumulate from scratch.
void Recaptures::Reset(const Keeper* const keeper) {
  int i, j;
  Likelihood::Reset(keeper);
  // delete the "unobserved recapture" matrices created during the last run
  // NOTE(review): the deleted pointers remain stored in newDistribution and
  // modYears/modSteps keep their entries - verify these rows are cleared
  // elsewhere before calcLikPoisson() next dereferences newDistribution
  for (i = 0; i < newDistribution.Nrow(); i++)
    for (j = 0; j < newDistribution.Ncol(i); j++)
      delete newDistribution[i][j];
  for (i = 0; i < modelDistribution.Nrow(); i++)
    for (j = 0; j < modelDistribution.Ncol(i); j++)
      (*modelDistribution[i][j]).setToZero();
  if (handle.getLogLevel() >= LOGMESSAGE)
    handle.logMessage(LOGMESSAGE, "Reset recaptures component", this->getName());
}
|
246 : |
|
|
|
247 : |
|
|
// Link this component to the model objects.  Matches the fleet names read
// from the input file against Fleets (failing on missing or repeated
// fleets), then for every tagging experiment matches its stock names
// against the eaten stocks in Stocks, checks that each stock is tagged and
// lives on all the areas read in, and finally builds one RecAggregator per
// tagging experiment covering the combined age range of its stocks.
void Recaptures::setFleetsAndStocks(FleetPtrVector& Fleets, StockPtrVector& Stocks) {
  int i, j, k, l, found;
  FleetPtrVector fleets;
  StockPtrVector stocks;
  CharPtrVector stocknames;

  for (i = 0; i < fleetnames.Size(); i++) {
    found = 0;
    for (j = 0; j < Fleets.Size(); j++)
      if (strcasecmp(fleetnames[i], Fleets[j]->getName()) == 0) {
        found++;
        fleets.resize(Fleets[j]);
      }

    if (found == 0)
      handle.logMessage(LOGFAIL, "Error in recaptures - failed to match fleet", fleetnames[i]);
  }

  // guard against the same fleet being listed twice in the input file
  for (i = 0; i < fleets.Size(); i++)
    for (j = 0; j < fleets.Size(); j++)
      if ((strcasecmp(fleets[i]->getName(), fleets[j]->getName()) == 0) && (i != j))
        handle.logMessage(LOGFAIL, "Error in recaptures - repeated fleet", fleets[i]->getName());

  int minage, maxage;   // removed unused locals minlen, maxlen and size

  aggregator = new RecAggregator*[tagvec.Size()];
  for (k = 0; k < tagvec.Size(); k++) {
    stocknames = tagvec[k]->getStockNames();
    for (i = 0; i < stocknames.Size(); i++) {
      found = 0;
      for (j = 0; j < Stocks.Size(); j++) {
        if (Stocks[j]->isEaten()) {
          if (strcasecmp(stocknames[i], Stocks[j]->getName()) == 0) {
            found++;
            stocks.resize(Stocks[j]);
          }
        }
      }
      if (found == 0)
        handle.logMessage(LOGFAIL, "Error in recaptures - failed to match stock", stocknames[i]);
    }

    //Check the stock has been tagged
    for (j = 0; j < stocks.Size(); j++)
      if (!stocks[j]->isTagged())
        handle.logMessage(LOGFAIL, "Error in recaptures - stocks hasnt been tagged", stocks[j]->getName());

    //Check if the stock lives on all the areas that were read in
    for (i = 0; i < areas.Nrow(); i++)
      for (l = 0; l < areas.Ncol(i); l++)
        for (j = 0; j < stocks.Size(); j++)
          if (!stocks[j]->isInArea(areas[i][l]))
            handle.logMessage(LOGFAIL, "Error in recaptures - stocks arent defined on all areas");

    // build a single age group spanning all ages of the matched stocks
    IntMatrix agematrix(1, 0, 0);
    minage = 999;
    maxage = 0;
    for (i = 0; i < stocks.Size(); i++) {
      minage = min(stocks[i]->minAge(), minage);
      maxage = max(stocks[i]->maxAge(), maxage);
    }

    for (i = 0; i <= maxage - minage; i++)
      agematrix[0].resize(1, minage + i);

    LengthGroupDivision* lgrpdiv = new LengthGroupDivision(lengths);
    if (lgrpdiv->Error())
      handle.logMessage(LOGFAIL, "Error in recaptures - failed to create length group");
    aggregator[k] = new RecAggregator(fleets, stocks, lgrpdiv, areas, agematrix, tagvec[k]);

    delete lgrpdiv;
    stocks.Reset();   // reuse the stock list for the next tagging experiment
  }
}
|
322 : |
|
|
|
323 : |
|
|
void Recaptures::addLikelihood(const TimeClass* const TimeInfo) {
|
324 : |
|
|
double l = 0.0;
|
325 : |
|
|
switch (functionnumber) {
|
326 : |
|
|
case 1:
|
327 : |
|
|
l = calcLikPoisson(TimeInfo);
|
328 : |
|
|
break;
|
329 : |
|
|
default:
|
330 : |
|
|
handle.logMessage(LOGWARN, "Warning in recaptures - unrecognised function", functionname);
|
331 : |
|
|
break;
|
332 : |
|
|
}
|
333 : |
|
|
|
334 : |
|
|
if (!(isZero(l))) {
|
335 : |
|
|
likelihood += l;
|
336 : |
|
|
if (handle.getLogLevel() >= LOGMESSAGE)
|
337 : |
|
|
handle.logMessage(LOGMESSAGE, "The likelihood score for this component on this timestep is", l);
|
338 : |
|
|
}
|
339 : |
|
|
}
|
340 : |
|
|
|
341 : |
|
|
// Calculate the poisson likelihood score for the current timestep, summed
// over all tagging experiments that are active on this timestep.  For each
// active experiment the catch is aggregated, the modelled recaptures x are
// compared with the observed recaptures n, and the negative poisson
// log-likelihood -(-x + n*log(x) - log(n!)) is accumulated.  Timesteps with
// modelled but no observed recaptures are recorded in modYears/modSteps and
// newDistribution instead of modelDistribution.
double Recaptures::calcLikPoisson(const TimeClass* const TimeInfo) {
  double x, n, lik, total;
  int t, a, ti, len, timeid, checktag, checktime;
  int year = TimeInfo->getYear();
  int step = TimeInfo->getStep();

  total = 0.0;
  for (t = 0; t < tagvec.Size(); t++) {
    checktag = 0;
    lik = 0.0;
    if (tagvec[t]->isWithinPeriod(year, step)) {

      // NOTE(review): checktag is reset to 0 on every loop iteration, so
      // this guard is always taken and the message is logged once per
      // active tagging experiment - confirm that is the intended behaviour
      if (!checktag) {
        if (handle.getLogLevel() >= LOGMESSAGE)
          handle.logMessage(LOGMESSAGE, "Calculating likelihood score for recaptures component", this->getName());
        checktag++;
      }

      // aggregate the modelled catch for this tagging experiment
      aggregator[t]->Sum();
      alptr = &aggregator[t]->getSum();

      // look for an observation matching the current year and step
      checktime = 0;
      timeid = -1;
      for (ti = 0; ti < obsYears.Ncol(t); ti++)
        if ((obsYears[t][ti] == year) && (obsSteps[t][ti] == step))
          timeid = ti;

      if (timeid == -1) {
        // this is a modelled return that doesnt have a corresponding observed return
        checktime++;
        for (ti = 0; ti < modYears.Ncol(t); ti++)
          if ((modYears[t][ti] == year) && (modSteps[t][ti] == step))
            timeid = ti;

        // resize objects if needed
        if (timeid == -1) {
          modYears[t].resize(1, year);
          modSteps[t].resize(1, step);
          timeid = modYears.Ncol(t) - 1;
          newDistribution[t].resize(new DoubleMatrix(areaindex.Size(), lenindex.Size(), 0.0));
        }
      }

      for (a = 0; a < areas.Nrow(); a++) {
        for (len = 0; len < lengths.Size() - 1; len++) {
          // modelled number of recaptures for this area/length cell
          x = ((*alptr)[a][0][len]).N;

          if (!checktime) {
            // this is a modelled return that has a non-zero observed return
            n = (*obsDistribution[t][timeid])[a][len];
            (*modelDistribution[t][timeid])[a][len] = x;
          } else {
            // this is a modelled return that doesnt have a corresponding observed return
            n = 0.0;
            (*newDistribution[t][timeid])[a][len] = x;
          }

          // poisson negative log-likelihood, with special cases for
          // zero observations and (near) zero modelled recaptures
          if (isZero(n))
            lik += x;
          else if (x < verysmall)
            lik += verybig;
          else
            lik -= -x + (n * log(x)) - logFactorial(n);

        }
      }
    }
    total += lik;
  }
  return total;
}
|
412 : |
|
|
|
413 : |
|
|
// Write a human-readable dump of this component to outfile: the likelihood
// value and function, followed by the observed and modelled recapture
// distributions for every tagging experiment and timestep.
void Recaptures::Print(ofstream& outfile) const {
  int tag, time, row, col;
  outfile << "\nRecaptures Data " << this->getName() << " - likelihood value " << likelihood
    << "\n\tFunction " << functionname << endl;
  for (tag = 0; tag < tagvec.Size(); tag++) {
    outfile << "\tTagging experiment:\t" << tagnames[tag];
    for (time = 0; time < obsYears.Ncol(tag); time++) {
      outfile << "\n\tyear " << obsYears[tag][time] << " and step "
        << obsSteps[tag][time] << "\n\tobserved recaptures";
      for (row = 0; row < (*obsDistribution[tag][time]).Nrow(); row++)
        for (col = 0; col < (*obsDistribution[tag][time]).Ncol(row); col++)
          outfile << TAB << (*obsDistribution[tag][time])[row][col];

      outfile << "\n\tmodelled recaptures";
      for (row = 0; row < (*modelDistribution[tag][time]).Nrow(); row++)
        for (col = 0; col < (*modelDistribution[tag][time]).Ncol(row); col++)
          outfile << TAB << (*modelDistribution[tag][time])[row][col];
    }
    outfile << endl;
  }
  outfile.flush();
}
|
434 : |
|
|
|
435 : |
|
|
// Write a one-line summary of the current likelihood score to outfile.
// Nothing is written while the score is zero.
void Recaptures::printSummary(ofstream& outfile) {
  //JMB to start with just print a summary of all likelihood scores
  //This needs to be changed to print something for each timestep and area
  //but we need to sort out how to get the information in the same format as other components
  if (isZero(likelihood))
    return;
  outfile << "all all all" << sep << setw(largewidth) << this->getName() << sep
    << setprecision(smallprecision) << setw(smallwidth) << weight << sep
    << setprecision(largeprecision) << setw(largewidth) << likelihood << endl;
  outfile.flush();
}
|
446 : |
|
|
|
447 : |
|
|
// Write the modelled recapture numbers for the current timestep to outfile,
// one line per tag/area/length cell.  If the timestep has a matching
// observation the values come from modelDistribution (keyed by obsYears and
// obsSteps); otherwise the values come from newDistribution (keyed by
// modYears and modSteps).  Values below rathersmall are printed as 0.
void Recaptures::printLikelihood(ofstream& outfile, const TimeClass* const TimeInfo) {
  int year = TimeInfo->getYear();
  int step = TimeInfo->getStep();
  int t, ti, timeid, area, len;

  for (t = 0; t < tagvec.Size(); t++) {
    if (tagvec[t]->isWithinPeriod(year, step)) {
      // look for an observed timestep matching the current one
      timeid = -1;
      for (ti = 0; ti < obsYears.Ncol(t); ti++)
        if (obsYears[t][ti] == year && obsSteps[t][ti] == step)
          timeid = ti;

      if (timeid > -1) {
        for (area = 0; area < modelDistribution[t][timeid]->Nrow(); area++) {
          for (len = 0; len < modelDistribution[t][timeid]->Ncol(area); len++) {
            outfile << setw(printwidth) << tagnames[t] << sep << setw(lowwidth)
              << obsYears[t][timeid] << sep << setw(lowwidth) << obsSteps[t][timeid] << sep
              << setw(printwidth) << areaindex[area] << sep << setw(printwidth)
              << lenindex[len] << sep << setprecision(largeprecision) << setw(largewidth);

            //JMB crude filter to remove the 'silly' values from the output
            if ((*modelDistribution[t][timeid])[area][len] < rathersmall)
              outfile << 0 << endl;
            else
              outfile << (*modelDistribution[t][timeid])[area][len] << endl;
          }
        }

      } else {
        // no observation on this timestep - look in the modelled-only records
        for (ti = 0; ti < modYears.Ncol(t); ti++)
          if ((modYears[t][ti] == year) && (modSteps[t][ti] == step))
            timeid = ti;

        if (timeid > -1) {
          for (area = 0; area < newDistribution[t][timeid]->Nrow(); area++) {
            for (len = 0; len < newDistribution[t][timeid]->Ncol(area); len++) {
              outfile << setw(printwidth) << tagnames[t] << sep << setw(lowwidth)
                << modYears[t][timeid] << sep << setw(lowwidth) << modSteps[t][timeid] << sep
                << setw(printwidth) << areaindex[area] << sep << setw(printwidth)
                << lenindex[len] << sep << setprecision(largeprecision) << setw(largewidth);

              //JMB crude filter to remove the 'silly' values from the output
              if ((*newDistribution[t][timeid])[area][len] < rathersmall)
                outfile << 0 << endl;
              else
                outfile << (*newDistribution[t][timeid])[area][len] << endl;
            }
          }
        }
      }
    }
  }
}
|