Commit b09d8799 authored by Bertram Kopf

changed criterion for double equality

parent cd7761bb

@@ -57,16 +57,19 @@ bool AbsParamHandler::checkRecalculation(std::shared_ptr<AbsPawianParameters> fi
 }

 bool AbsParamHandler::CheckDoubleEquality(double a, double b){
-  float diff = fabs(a - b);
-  if (diff <= 10*DBL_EPSILON)
-    return true;
-  a = fabs(a);
-  b = fabs(b);
-  float largest = (b > a) ? b : a;
-  if (diff <= largest * 2*DBL_EPSILON)
-    return true;
-  return false;
+  //return std::fabs(a - b) <= std::numeric_limits<double>::epsilon();
+  double max1ab = std::max( { 1.0, std::fabs(a) , std::fabs(b) } ) ;
+  return std::fabs(a - b) <= std::numeric_limits<double>::epsilon()*max1ab ;
+  // float diff = fabs(a - b);
+  // if (diff <= 10*DBL_EPSILON)
+  //   return true;
+  // a = fabs(a);
+  // b = fabs(b);
+  // float largest = (b > a) ? b : a;
+  // if (diff <= largest * 2*DBL_EPSILON)
+  //   return true;
+  // return false;
 }
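
For reference, the new criterion accepts two doubles as equal when |a - b| <= epsilon * max(1, |a|, |b|): an absolute epsilon test near zero that turns into a relative test for large magnitudes. A minimal self-contained sketch of the same comparison (the function name nearlyEqual and the example values are illustrative, not part of the commit):

#include <algorithm>
#include <cmath>
#include <iostream>
#include <limits>

// Stand-alone version of the comparison introduced above:
// equal if |a - b| <= eps * max(1, |a|, |b|).
bool nearlyEqual(double a, double b) {
  double scale = std::max({1.0, std::fabs(a), std::fabs(b)});
  return std::fabs(a - b) <= std::numeric_limits<double>::epsilon() * scale;
}

int main() {
  std::cout << std::boolalpha
            << nearlyEqual(0.1 + 0.2, 0.3) << '\n'        // true: rounding error ~5.6e-17 is below eps
            << nearlyEqual(1.0e16, 1.0e16 + 2.0) << '\n'  // true: difference is tiny relative to the magnitude
            << nearlyEqual(1.0, 1.0 + 1.0e-9) << '\n';    // false: 1e-9 is far above eps
}

Scaling by max(1, |a|, |b|) keeps the test meaningful both near zero (an absolute tolerance of one epsilon) and for large values, where a fixed absolute tolerance alone would almost never hold.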
@@ -85,11 +85,11 @@ void AdamMinimizer::updateParameters(std::shared_ptr<AbsPawianParameters> pawian
   for(unsigned int i = 0; i < pawianParams->Params().size(); ++i){
     if (pawianParams->IsFixed(i)) continue;
-    _s.at(i) = beta1 * _s.at(i) + (1.0 - beta1) * gradients.at(i);
-    _v.at(i) = beta2 * _v.at(i) + (1.0 - beta2) * gradients.at(i) * gradients.at(i);
-    double s_hat = _s.at(i) / (1.0 - pow(beta1 , (t + 1)));
-    double v_hat = _v.at(i) / (1.0 - pow(beta2 , (t + 1)));
+    s.at(i) = beta1 * s.at(i) + (1.0 - beta1) * gradients.at(i);
+    v.at(i) = beta2 * v.at(i) + (1.0 - beta2) * gradients.at(i) * gradients.at(i);
+    double s_hat = s.at(i) / (1.0 - pow(beta1 , (t + 1)));
+    double v_hat = v.at(i) / (1.0 - pow(beta2 , (t + 1)));
     double newVal = pawianParams->Value(i) - _learning_rate * s_hat / (std::sqrt(v_hat) + epsilon);
     if(pawianParams->HasLimits(i)){
...
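
For context, the loop above is the per-parameter Adam update: s and v (presumably vectors kept by AdamMinimizer, since they are not declared in the hunk) are exponential moving averages of the gradient and the squared gradient, s_hat and v_hat are their bias-corrected estimates at iteration t, and each parameter moves by learning_rate * s_hat / (sqrt(v_hat) + epsilon). A minimal self-contained sketch of that update rule, assuming typical Adam defaults and a toy quadratic objective (none of the names below are taken from AdamMinimizer):

#include <cmath>
#include <iostream>
#include <vector>

// Stand-alone Adam step mirroring the update rule in the hunk above:
//   s <- beta1*s + (1-beta1)*g,   v <- beta2*v + (1-beta2)*g^2
//   s_hat = s / (1 - beta1^(t+1)), v_hat = v / (1 - beta2^(t+1))
//   x <- x - lr * s_hat / (sqrt(v_hat) + eps)
int main() {
  const double beta1 = 0.9, beta2 = 0.999, lr = 0.1, eps = 1e-8;
  std::vector<double> x = {5.0, -3.0};                 // parameters; minimize f(x) = sum x_i^2
  std::vector<double> s(x.size(), 0.0), v(x.size(), 0.0);

  for (int t = 0; t < 200; ++t) {
    for (unsigned int i = 0; i < x.size(); ++i) {
      double g = 2.0 * x[i];                           // gradient of x_i^2
      s[i] = beta1 * s[i] + (1.0 - beta1) * g;
      v[i] = beta2 * v[i] + (1.0 - beta2) * g * g;
      double s_hat = s[i] / (1.0 - std::pow(beta1, t + 1));
      double v_hat = v[i] / (1.0 - std::pow(beta2, t + 1));
      x[i] -= lr * s_hat / (std::sqrt(v_hat) + eps);
    }
  }
  std::cout << "x = (" << x[0] << ", " << x[1] << ")\n"; // should approach (0, 0)
}

The 1/(1 - beta^(t+1)) factors correct the bias of the moving averages, which start at zero; without them the first updates would be strongly damped.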