\log \left(1 + e^{x}\right) - x \cdot y\log \left(\frac{{1}^{3} + {e}^{\left(3 \cdot x\right)}}{\mathsf{fma}\left(1, 1, e^{x} \cdot \left(e^{x} - 1\right)\right)}\right) - x \cdot ydouble f(double x, double y) {
// Original (input) program from the FPCore below:
// computes log(1 + exp(x)) - x * y as generated straight-line code.
double r125961 = 1.0;
double r125962 = x;
double r125963 = exp(r125962);      // e^x
double r125964 = r125961 + r125963; // 1 + e^x
double r125965 = log(r125964);      // log(1 + e^x)
double r125966 = y;
double r125967 = r125962 * r125966; // x * y
double r125968 = r125965 - r125967; // log(1 + e^x) - x*y
return r125968;
}
double f(double x, double y) {
double r125969 = 1.0;
double r125970 = 3.0;
double r125971 = pow(r125969, r125970);
double r125972 = exp(1.0);
double r125973 = x;
double r125974 = r125970 * r125973;
double r125975 = pow(r125972, r125974);
double r125976 = r125971 + r125975;
double r125977 = exp(r125973);
double r125978 = r125977 - r125969;
double r125979 = r125977 * r125978;
double r125980 = fma(r125969, r125969, r125979);
double r125981 = r125976 / r125980;
double r125982 = log(r125981);
double r125983 = y;
double r125984 = r125973 * r125983;
double r125985 = r125982 - r125984;
return r125985;
}




Bits error versus x




Bits error versus y
| Program | Bits error |
|---|---|
| Original | 0.5 |
| Target | 0.0 |
| Herbie | 0.5 |
Initial program: 0.5
Applied flip3-+: 0.5
Simplified: 0.5
Applied *-un-lft-identity: 0.5
Applied exp-prod: 0.5
Applied pow-pow: 0.5
Simplified: 0.5
Final simplification: 0.5
herbie shell --seed 2019326 +o rules:numerics
;; FPCore input fed to the Herbie shell command above.
;; Body: log(1 + exp(x)) - x*y.
;; The :herbie-target branches on the sign of x: for x > 0 it uses the
;; algebraically equal form log(1 + exp(-x)) - (-x)*(1 - y), which keeps
;; the exp argument non-positive.
(FPCore (x y)
:name "Logistic regression 2"
:precision binary64
:herbie-target
(if (<= x 0.0) (- (log (+ 1 (exp x))) (* x y)) (- (log (+ 1 (exp (- x)))) (* (- x) (- 1 y))))
(- (log (+ 1 (exp x))) (* x y)))