; Original program: softplus(x) - x*y = log(1 + e^x) - x*y, in binary64.
; Naive form: log(1 + exp(x)) loses accuracy when exp(x) is tiny relative to 1.
(FPCore (x y) :precision binary64 (- (log (+ 1.0 (exp x))) (* x y)))
; Herbie's rewrite: log1p(exp(x)) avoids the 1 + exp(x) rounding step,
; and the fma fuses the final multiply-add into a single rounding.
(FPCore (x y) :precision binary64 (fma 1.0 (log1p (exp x)) (* y (- x))))
double code(double x, double y) {
return log((1.0 + exp(x))) - (x * y);
}
double code(double x, double y) {
return fma(1.0, log1p(exp(x)), (y * -x));
}
# Original (naive) form: log(1 + e^x) - x*y.
# The explicit Float64(...) wrappers mirror the generated code's rounding
# points exactly; evaluation order is unchanged.
function code(x, y)
    s = Float64(1.0 + exp(x))   # 1 + e^x, rounded to binary64
    p = Float64(x * y)          # x*y, rounded to binary64
    return Float64(log(s) - p)
end
# Herbie-rewritten form: fma(1, log1p(e^x), y * (-x)).
# log1p skips the explicit 1 + e^x rounding; fma fuses the final
# multiply-add with a single rounding. Operation order is unchanged.
function code(x, y)
    softplus = log1p(exp(x))         # log(1 + e^x), accurately
    t = Float64(y * Float64(-x))     # y * (-x), rounded to binary64
    return fma(1.0, softplus, t)
end
(* Original (naive) form: log(1 + e^x) - x*y, each step rounded to $MachinePrecision. *)
code[x_, y_] := N[(N[Log[N[(1.0 + N[Exp[x], $MachinePrecision]), $MachinePrecision]], $MachinePrecision] - N[(x * y), $MachinePrecision]), $MachinePrecision]
(* Herbie-rewritten form: 1 * log(1 + e^x) + y*(-x); Mathematica has no fma, so the
   FPCore fma is expanded to an explicit multiply-add. *)
code[x_, y_] := N[(1.0 * N[Log[1 + N[Exp[x], $MachinePrecision]], $MachinePrecision] + N[(y * (-x)), $MachinePrecision]), $MachinePrecision]
\log \left(1 + e^{x}\right) - x \cdot y
\mathsf{fma}\left(1, \mathsf{log1p}\left(e^{x}\right), y \cdot \left(-x\right)\right)




Bits error versus x




Bits error versus y
| Original | 0.5 |
|---|---|
| Target | 0.0 |
| Herbie | 0.4 |
Initial program 0.5
Simplified 0.4
Applied egg-rr 15.9
Applied egg-rr 0.4
Final simplification 0.4
herbie shell --seed 2022155
(FPCore (x y)
 :name "Logistic regression 2"
 :precision binary64
 ; Hand-written target: for x > 0, rewrite log(1 + e^x) - x*y as
 ; log(1 + e^(-x)) - (-x)*(1 - y) so the exp never overflows and the
 ; subtraction stays well-conditioned on both half-lines.
 :herbie-target
 (if (<= x 0.0) (- (log (+ 1.0 (exp x))) (* x y)) (- (log (+ 1.0 (exp (- x)))) (* (- x) (- 1.0 y))))
 (- (log (+ 1.0 (exp x))) (* x y)))