diff --git a/FitParams/AbsParamHandler.cc b/FitParams/AbsParamHandler.cc
index 04262c8947f0c4d6a6b68fca9e59ddcdb64f7020..687859d1026750bbf7c8fcb7b47950f937362984 100644
--- a/FitParams/AbsParamHandler.cc
+++ b/FitParams/AbsParamHandler.cc
@@ -57,16 +57,19 @@ bool AbsParamHandler::checkRecalculation(std::shared_ptr<AbsPawianParameters> fi
 }
 
 bool AbsParamHandler::CheckDoubleEquality(double a, double b){
+  //return std::fabs(a - b) <= std::numeric_limits<double>::epsilon();
+  double max1ab = std::max( { 1.0, std::fabs(a) , std::fabs(b) } ) ;
+  return std::fabs(a - b) <= std::numeric_limits<double>::epsilon()*max1ab ;
-  float diff = fabs(a - b);
-  if (diff <= 10*DBL_EPSILON)
-    return true;
+  // float diff = fabs(a - b);
+  // if (diff <= 10*DBL_EPSILON)
+  //   return true;
 
-  a = fabs(a);
-  b = fabs(b);
-  float largest = (b > a) ? b : a;
+  // a = fabs(a);
+  // b = fabs(b);
+  // float largest = (b > a) ? b : a;
 
-  if (diff <= largest * 2*DBL_EPSILON)
-    return true;
-  return false;
+  // if (diff <= largest * 2*DBL_EPSILON)
+  //   return true;
+  // return false;
 }
diff --git a/MinFunctions/AdamMinimizer.cc b/MinFunctions/AdamMinimizer.cc
index 4f1b531edcbbd1fdca9cb367c7ec5e1a45223261..aa093ae065040576194a5361a91e66f86e5cb0f1 100644
--- a/MinFunctions/AdamMinimizer.cc
+++ b/MinFunctions/AdamMinimizer.cc
@@ -85,11 +85,11 @@ void AdamMinimizer::updateParameters(std::shared_ptr<AbsPawianParameters> pawian
   for(unsigned int i = 0; i < pawianParams->Params().size(); ++i){
     if (pawianParams->IsFixed(i)) continue;
 
-    _s.at(i) = beta1 * _s.at(i) + (1.0 - beta1) * gradients.at(i);
-    _v.at(i) = beta2 * _v.at(i) + (1.0 - beta2) * gradients.at(i) * gradients.at(i);
+    s.at(i) = beta1 * s.at(i) + (1.0 - beta1) * gradients.at(i);
+    v.at(i) = beta2 * v.at(i) + (1.0 - beta2) * gradients.at(i) * gradients.at(i);
 
-    double s_hat = _s.at(i) / (1.0 - pow(beta1 , (t + 1)));
-    double v_hat = _v.at(i) / (1.0 - pow(beta2 , (t + 1)));
+    double s_hat = s.at(i) / (1.0 - pow(beta1 , (t + 1)));
+    double v_hat = v.at(i) / (1.0 - pow(beta2 , (t + 1)));
 
     double newVal = pawianParams->Value(i) - _learning_rate * s_hat / (std::sqrt(v_hat) + epsilon);
     if(pawianParams->HasLimits(i)){
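
Reviewer note on the AbsParamHandler hunk: the new CheckDoubleEquality scales machine epsilon by max(1, |a|, |b|), so the test acts as an absolute tolerance near zero and a relative tolerance for large magnitudes, and it also drops the old code's silent double-to-float truncation. Below is a minimal standalone sketch of that comparison strategy; the function name nearlyEqual and the main() driver are my own illustration, not Pawian code.

// Standalone sketch of the epsilon scaling used in the patched
// CheckDoubleEquality: tolerance grows with max(1, |a|, |b|).
#include <algorithm>
#include <cmath>
#include <iostream>
#include <limits>

bool nearlyEqual(double a, double b) {
    const double scale = std::max({1.0, std::fabs(a), std::fabs(b)});
    return std::fabs(a - b) <= std::numeric_limits<double>::epsilon() * scale;
}

int main() {
    std::cout << std::boolalpha;
    // Rounding noise near 1: 0.1 + 0.2 differs from 0.3 by ~5.6e-17,
    // which is within one epsilon of the scale factor 1.0.
    std::cout << nearlyEqual(0.1 + 0.2, 0.3) << '\n';       // true
    // Large magnitudes: at 1e16 the ULP spacing is 2.0, and the scaled
    // tolerance (epsilon * 1e16 ~ 2.2) absorbs it.
    std::cout << nearlyEqual(1.0e16, 1.0e16 + 2.0) << '\n'; // true
    // A genuine difference well above epsilon is still rejected.
    std::cout << nearlyEqual(1.0, 1.0 + 1e-9) << '\n';      // false
}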
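
Reviewer note on the AdamMinimizer hunk: the change only renames the moment buffers from _s/_v to s/v; the update itself is the standard Adam rule, exponential moving averages of the gradient and its square, bias-corrected by 1 - beta^(t+1), then a step of _learning_rate * s_hat / (sqrt(v_hat) + epsilon). For reference, here is a self-contained single-parameter sketch of that same update rule; the quadratic objective, the loop bounds, and all names are my own, with the commonly assumed defaults beta1 = 0.9 and beta2 = 0.999.

// Single-parameter Adam sketch minimizing f(x) = (x - 3)^2.
#include <cmath>
#include <iostream>

int main() {
    const double beta1 = 0.9, beta2 = 0.999, epsilon = 1e-8, lr = 0.1;
    double x = 0.0;  // parameter being optimized
    double s = 0.0;  // first-moment estimate (EMA of gradients)
    double v = 0.0;  // second-moment estimate (EMA of squared gradients)

    for (int t = 0; t < 200; ++t) {
        double grad = 2.0 * (x - 3.0);  // d/dx (x - 3)^2
        s = beta1 * s + (1.0 - beta1) * grad;
        v = beta2 * v + (1.0 - beta2) * grad * grad;
        // Bias correction compensates for s and v starting at zero,
        // matching the pow(beta, t + 1) factors in the patched loop.
        double s_hat = s / (1.0 - std::pow(beta1, t + 1));
        double v_hat = v / (1.0 - std::pow(beta2, t + 1));
        x -= lr * s_hat / (std::sqrt(v_hat) + epsilon);
    }
    std::cout << "x after 200 Adam steps (target 3): " << x << '\n';
}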