; Original program: log(1 + e^x) - x*y, evaluated in IEEE-754 double precision.
(FPCore (x y) :precision binary64 (- (log (+ 1.0 (exp x))) (* x y)))
; Herbie variant: log1p(e^x) keeps accuracy when e^x is tiny (1.0 + exp x would round).
(FPCore (x y) :precision binary64 (- (log1p (exp x)) (* x y)))
double code(double x, double y) {
return log((1.0 + exp(x))) - (x * y);
}
double code(double x, double y) {
return log1p(exp(x)) - (x * y);
}
/**
 * Original expression: log(1 + e^x) - x*y, transcribed directly.
 * NOTE(review): 1.0 + Math.exp(x) rounds away small exp(x); the
 * Math.log1p rendering elsewhere in this report is the accurate form.
 */
public static double code(double x, double y) {
	double expX = Math.exp(x);
	double softplus = Math.log(1.0 + expX);
	double linear = x * y;
	return softplus - linear;
}
/**
 * Herbie-rewritten form: log1p(e^x) - x*y.
 * Math.log1p(t) stays accurate for very small t where log(1 + t) would not.
 */
public static double code(double x, double y) {
	double softplus = Math.log1p(Math.exp(x));
	return softplus - x * y;
}
def code(x, y):
    """Original expression: log(1 + e**x) - x*y, transcribed directly.

    NOTE(review): 1.0 + math.exp(x) rounds away tiny exp(x); the
    math.log1p rendering elsewhere in this report is the accurate form.
    """
    softplus = math.log(1.0 + math.exp(x))
    linear = x * y
    return softplus - linear
def code(x, y):
    """Herbie-rewritten form: log1p(e**x) - x*y.

    math.log1p(t) stays accurate for very small t where log(1 + t) would not.
    """
    softplus = math.log1p(math.exp(x))
    linear = x * y
    return softplus - linear
# Original expression: log(1 + e^x) - x*y, with each intermediate rounded to Float64.
function code(x, y)
	sum1 = Float64(1.0 + exp(x))
	linear = Float64(x * y)
	return Float64(log(sum1) - linear)
end
# Herbie-rewritten form: log1p(e^x) - x*y; log1p keeps accuracy for tiny e^x.
function code(x, y)
	softplus = log1p(exp(x))
	linear = Float64(x * y)
	return Float64(softplus - linear)
end
(* Original expression: log(1 + e^x) - x*y, with every intermediate rounded to $MachinePrecision. *)
code[x_, y_] := N[(N[Log[N[(1.0 + N[Exp[x], $MachinePrecision]), $MachinePrecision]], $MachinePrecision] - N[(x * y), $MachinePrecision]), $MachinePrecision]
(* Herbie variant: the 1 inside Log[1 + ...] is kept exact, so the sum is not pre-rounded.
   NOTE: this definition uses the same pattern as the one above and therefore replaces it. *)
code[x_, y_] := N[(N[Log[1 + N[Exp[x], $MachinePrecision]], $MachinePrecision] - N[(x * y), $MachinePrecision]), $MachinePrecision]
\log \left(1 + e^{x}\right) - x \cdot y
\mathsf{log1p}\left(e^{x}\right) - x \cdot y




Bits error versus x




Bits error versus y
Results
| Original | 0.5 |
|---|---|
| Target | 0.1 |
| Herbie | 0.5 |
Initial program 0.5
Simplified 0.5
Final simplification 0.5
herbie shell --seed 2022170
; Herbie job specification for "Logistic regression 2".
(FPCore (x y)
:name "Logistic regression 2"
:precision binary64
; Target rewrite: for x > 0, use the identity log(1+e^x) = x + log(1+e^-x),
; folded here as log(1+e^-x) - (-x)(1-y), so exp's argument is never positive.
:herbie-target
(if (<= x 0.0) (- (log (+ 1.0 (exp x))) (* x y)) (- (log (+ 1.0 (exp (- x)))) (* (- x) (- 1.0 y))))
; Input program as given to Herbie.
(- (log (+ 1.0 (exp x))) (* x y)))