1 : |
agomez |
1 |
#include "netgradient.h" |
2 : |
|
|
#include "mathfunc.h"
|
3 : |
|
|
|
4 : |
|
|
// Out-of-line body for the base-class destructor. Nothing to release
// here; presumably declared virtual in netgradient.h so deleting a
// NetGradient through a Gradient* is safe — TODO confirm in the header.
Gradient::~Gradient() {
}
|
6 : |
|
|
|
7 : |
|
|
// Prepare a gradient estimator for numVars variables.
//
// Step-size members (used by computeGradient/setXVectors):
//   delta  - initial relative step applied to every variable
//   delta0 - upper bound when a step is enlarged (round-off suspected)
//   delta1 - lower bound when a step is shrunk (boundary suspected)
//   delta2 - assigned but not read in this file; presumably a reserve
//            bound used elsewhere — TODO confirm against the header
NetGradient::NetGradient(int numVars) {
  difficultgrad = 1;     // default mode: central differences
  delta0 = 0.01;
  delta1 = 0.0001;
  delta2 = 0.00001;
  delta = 0.0001;
  numVar = numVars;
  normgrad = -1.0;       // flags "no gradient computed yet"
  diagHess.resize(numVar, 0.0);
  grad.resize(numVar, 0.0);
  deltavec.resize(numVar, delta);
}
|
23 : |
|
|
|
24 : |
|
|
// The vector members (grad, diagHess, deltavec) release their own
// storage, so there is nothing to clean up explicitly.
NetGradient::~NetGradient() {
}
|
26 : |
|
|
|
27 : |
|
|
void NetGradient::initializeDiagonalHessian() {
|
28 : |
|
|
/*int i;
|
29 : |
|
|
for (i = 0; i < numVar; i++)
|
30 : |
|
|
diagHess[i]= -1.0;
|
31 : |
|
|
*/
|
32 : |
|
|
diagHess.Reset();
|
33 : |
|
|
diagHess.resize(numVar, -1.0);
|
34 : |
|
|
}
|
35 : |
|
|
|
36 : |
|
|
// Queue every point at which the objective must be evaluated for one
// gradient computation, starting a fresh data group on netInt.
//
// x is the base point, f its (already known) function value, so the
// pair (x, f) is stored directly and never re-evaluated.
//
// Number of evaluation points depends on the difference scheme chosen
// by difficultgrad:
//   0  - backward differences:  1 extra point per variable
//   1  - central differences:   2 extra points per variable
//   >=2 - five-point stencil:   4 extra points per variable
//
// NOTE(review): the ORDER of the setX calls below is a contract with
// computeGradient, which reads the results back by sequential index
// (getY(1), getY(2), ...). Do not reorder anything in this function
// without changing the read order there to match.
void NetGradient::setXVectors(const DoubleVector& x, double f, NetInterface* netInt) {
  int i;
  double deltai;
  int numberOfx = 0;

  if (difficultgrad == 0)
    numberOfx = numVar + 1;
  else if (difficultgrad == 1)
    numberOfx = numVar * 2 + 1;
  else if (difficultgrad >= 2)
    numberOfx = numVar * 4 + 1;

  // initialize new datagroup for gradient computing and set current point
  netInt->startNewDataGroup(numberOfx);
  netInt->setDataPair(x, f);
  //netInt->setX(x);

  // Perturb one coordinate at a time; tempVec is restored to x before
  // moving on to the next variable.
  DoubleVector tempVec(x);
  for (i = 0; i < numVar; i++) {
    // Relative step: deltavec[i] scaled by the coordinate's magnitude,
    // with the +1.0 keeping the step sensible near zero.
    deltai = deltavec[i] * (1.0 + fabs(tempVec[i]));
    tempVec[i] -= deltai;
    netInt->setX(tempVec);           // x - h  (fx1 in computeGradient)
    if (difficultgrad == 1) {
      tempVec[i] += 2 * deltai;
      netInt->setX(tempVec);         // x + h  (fx4)
      tempVec[i] -= deltai;          // back to x
    } else if (difficultgrad >= 2) {
      tempVec[i] += 0.5 * deltai;
      netInt->setX(tempVec);         // x - h/2  (fx2)
      tempVec[i] += deltai;
      netInt->setX(tempVec);         // x + h/2  (fx3)
      tempVec[i] += 0.5 * deltai;
      netInt->setX(tempVec);         // x + h    (fx4)
      tempVec[i] -= deltai;          // back to x
    }
  }
}
|
74 : |
|
|
|
75 : |
|
|
// Compute a finite-difference gradient (and a diagonal Hessian
// estimate) of the objective at x, where f = f(x) is already known.
// The evaluation points are queued by setXVectors and evaluated
// remotely via net; results are read back by sequential index, so the
// read order here mirrors the setX order there exactly.
//
// difficultgradient selects the scheme:
//   0  - backward differences (cheapest)
//   1  - central differences; also detects a "difficult" point
//        (x appears to be near a local minimum along some axis)
//   >=2 - five-point stencil (most accurate, 4 evaluations/variable)
//
// Side effects: updates grad, diagHess, normgrad, fx0, deltavec and
// may bump difficultgrad by one when a difficult point is detected.
// Returns 1 on success; exits the process on a network failure.
int NetGradient::computeGradient(NetInterface* net, const DoubleVector& x, double f, int difficultgradient) {

  difficult = 0;
  difficultgrad = difficultgradient;

  int i;
  double fx1, fx2, fx3, fx4;
  int SEND_RECEIVE = 0;
  setXVectors(x, f, net);
  SEND_RECEIVE = net->sendAndReceiveAllData();
  if (SEND_RECEIVE == -1) {
    cerr << "Error in netgradient - could not send or receive data\n";
    exit(EXIT_FAILURE);
  }

  int numfx = 1;
  fx0 = f;
  double deltai;
  normgrad = 0.0;

  // Scale for the relative round-off tests below. The original code
  // divided by fx0 directly, which (a) divides by zero when f == 0 and
  // (b) makes the test trivially true for every i when f < 0; use the
  // magnitude, falling back to 1.0 at f == 0 (absolute test).
  double fscale = (fx0 == 0.0 ? 1.0 : fabs(fx0));

  //Calculate f(x + hi * ei) for all i
  for (i = 0; i < numVar; i++) {
    // Must match the step used in setXVectors for this variable.
    deltai = (1.0 + fabs(x[i])) * deltavec[i];
    fx1 = net->getY(numfx);          // f(x - h)
    numfx++;

    if (difficultgrad == 1) {
      fx4 = net->getY(numfx);        // f(x + h)
      numfx++;
      grad[i] = (fx4 - fx1) / (2.0 * deltai);
      diagHess[i] = (fx4 - 2.0 * fx0 + fx1) / (deltai * deltai);

      // Both neighbours above the centre: likely near a minimum along
      // this axis, so a higher-order scheme is warranted next time.
      if ((fx4 > fx0) && (fx1 > fx0))
        difficult = 1;

      if (fabs(fx4 - fx1) / fscale < rathersmall) { // may be running into roundoff
        deltavec[i] = min(delta0, deltavec[i] * 5.0);
        cerr << "Warning in netgradient - possible roundoff errors in gradient\n";
      }

    } else if (difficultgrad >= 2) {
      fx2 = net->getY(numfx);        // f(x - h/2)
      numfx++;
      fx3 = net->getY(numfx);        // f(x + h/2)
      numfx++;
      fx4 = net->getY(numfx);        // f(x + h)
      numfx++;
      // Five-point stencil with spacing d = deltai/2:
      //   f'(x) = (f(x-2d) - 8 f(x-d) + 8 f(x+d) - f(x+2d)) / (12 d)
      // With 12 d = 6 * deltai. The original divided by 12.0 * deltai,
      // which halves the gradient in this mode.
      grad[i] = (fx1 - fx4 + 8.0 * fx3 - 8.0 * fx2) / (6.0 * deltai);
      diagHess[i] = (fx4 - 2.0 * fx0 + fx1) / (deltai * deltai);

      if (fabs(fx4 - fx1) / fscale < rathersmall) { // may be running into roundoff
        deltavec[i] = min(delta0, deltavec[i] * 5.0);
        cerr << "Warning in netgradient - possible roundoff errors in gradient\n";
      }

    } else {
      // Backward difference: f'(x) ~ (f(x) - f(x - h)) / h.
      grad[i] = (fx0 - fx1) / deltai;
      if (fabs(fx0 - fx1) / fscale < rathersmall) { // may be running into roundoff
        deltavec[i] = min(delta0, deltavec[i] * 5.0);
        cerr << "Warning in netgradient - possible roundoff errors in gradient\n";
      }
    }

    // A huge derivative of either sign suggests the stencil stepped
    // outside the feasible region; shrink the step for this variable.
    // (Original tested only grad[i] > verybig, missing the negative case.)
    if (fabs(grad[i]) > verybig) {
      deltavec[i] = max(delta1, deltavec[i] / 5.0);
      cerr << "Warning in netgradient - possible boundary errors in gradient\n";
    }
    normgrad += grad[i] * grad[i];
  }

  normgrad = sqrt(normgrad);
  difficultgrad += difficult;

  // finished computing gradient do not need datagroup anymore
  net->stopUsingDataGroup();
  return 1;
}
|
153 : |
|
|
|
154 : |
|
|
double NetGradient::getBaseFX() {
|
155 : |
|
|
return fx0;
|
156 : |
|
|
}
|
157 : |
|
|
|
158 : |
|
|
// Read-only view of the diagonal Hessian approximation produced by the
// last computeGradient call (entries are -1.0 when uninitialized).
const DoubleVector& NetGradient::getDiagonalHessian() {
  const DoubleVector& hessian = diagHess;
  return hessian;
}
|
161 : |
|
|
|
162 : |
|
|
double NetGradient::getNormGrad() {
|
163 : |
|
|
return normgrad;
|
164 : |
|
|
}
|
165 : |
|
|
|
166 : |
|
|
// Read-only view of the gradient vector from the last computeGradient
// call.
const DoubleVector& NetGradient::getGradient() {
  const DoubleVector& gradient = grad;
  return gradient;
}
|
169 : |
|
|
|
170 : |
|
|
int NetGradient::getDifficultGrad() {
|
171 : |
|
|
return difficultgrad;
|
172 : |
|
|
}
|