\log \left(1 + e^{x}\right) - x \cdot y

\log \left(1 \cdot 1 + \left(e^{x} \cdot e^{x} - 1 \cdot e^{x}\right)\right) + \left(\log \left(1 + e^{x}\right) - \log \left(\mathsf{fma}\left(1, 1, e^{x} \cdot \left(e^{x} - 1\right)\right)\right)\right) - x \cdot y

double f(double x, double y) {
/* Body of the original program: log(1 + exp(x)) - x*y
 * (logistic-regression loss term; signature is on the preceding line).
 * Register-style code emitted by Herbie — one temporary per operation. */
double r109416 = 1.0;
double r109417 = x;
double r109418 = exp(r109417);      /* e^x */
double r109419 = r109416 + r109418; /* 1 + e^x */
double r109420 = log(r109419);      /* log(1 + e^x) */
double r109421 = y;
double r109422 = r109417 * r109421; /* x*y */
double r109423 = r109420 - r109422; /* log(1 + e^x) - x*y */
return r109423;
}
double f(double x, double y) {
double r109424 = 1.0;
double r109425 = r109424 * r109424;
double r109426 = x;
double r109427 = exp(r109426);
double r109428 = r109427 * r109427;
double r109429 = r109424 * r109427;
double r109430 = r109428 - r109429;
double r109431 = r109425 + r109430;
double r109432 = log(r109431);
double r109433 = r109424 + r109427;
double r109434 = log(r109433);
double r109435 = r109427 - r109424;
double r109436 = r109427 * r109435;
double r109437 = fma(r109424, r109424, r109436);
double r109438 = log(r109437);
double r109439 = r109434 - r109438;
double r109440 = r109432 + r109439;
double r109441 = y;
double r109442 = r109426 * r109441;
double r109443 = r109440 - r109442;
return r109443;
}




Bits error versus x




Bits error versus y
| Original | 0.5 |
|---|---|
| Target | 0.0 |
| Herbie | 0.5 |
Initial program 0.5
Applied flip3-+ 0.5
Applied log-div 0.5
Simplified 0.5
Applied sum-cubes 0.5
Applied log-prod 0.5
Applied associate--l+ 0.5
Final simplification 0.5
herbie shell --seed 2019326 +o rules:numerics
;; FPCore specification of the logistic-regression loss term
;; log(1 + e^x) - x*y, evaluated in IEEE double precision.
(FPCore (x y)
:name "Logistic regression 2"
:precision binary64
;; Reference target: for x > 0 use the identity
;; log(1 + e^x) = x + log(1 + e^-x), i.e.
;; log(1 + e^-x) - (-x)*(1 - y), which avoids overflow in exp(x).
:herbie-target
(if (<= x 0.0) (- (log (+ 1 (exp x))) (* x y)) (- (log (+ 1 (exp (- x)))) (* (- x) (- 1 y))))
;; Input program as written: log(1 + e^x) - x*y.
(- (log (+ 1 (exp x))) (* x y)))