?

Average Error: 0.5 → 0.5
Time: 11.2s
Precision: binary64
Cost: 13248

?

\[\log \left(1 + e^{x}\right) - x \cdot y \]
\[\log \left(1 + e^{x}\right) - x \cdot y \]
; FPCore spec of the logistic-regression loss log(1 + e^x) - x*y in binary64.
(FPCore (x y) :precision binary64 (- (log (+ 1.0 (exp x))) (* x y)))
(FPCore (x y) :precision binary64 (- (log (+ 1.0 (exp x))) (* x y)))
double code(double x, double y) {
	return log((1.0 + exp(x))) - (x * y);
}
/* Duplicate listing of code(): log(1 + e^x) - x*y in binary64.
 * NOTE(review): exp(x) overflows for x > ~709.8, making the result +inf. */
double code(double x, double y) {
	return log((1.0 + exp(x))) - (x * y);
}
! Logistic-loss term log(1 + e^x) - x*y, computed in the stable softplus
! form max(x, 0) + log(1 + exp(-|x|)) - x*y so exp never overflows:
! the naive log(1 + exp(x)) yields +Infinity for large x.
real(8) function code(x, y)
    real(8), intent (in) :: x
    real(8), intent (in) :: y
    code = max(x, 0.0d0) + log(1.0d0 + exp(-abs(x))) - (x * y)
end function
! Duplicate listing: logistic-loss term log(1 + e^x) - x*y in real(8).
! NOTE(review): exp(x) overflows for large x, yielding +Infinity.
real(8) function code(x, y)
    real(8), intent (in) :: x
    real(8), intent (in) :: y
    code = log((1.0d0 + exp(x))) - (x * y)
end function
/**
 * Logistic-loss term log(1 + e^x) - x*y, computed stably.
 * Uses the identity log(1 + e^x) = max(x, 0) + log1p(e^-|x|) so that
 * Math.exp never overflows: the naive form returns +Infinity for
 * x > ~709.8 even though the true value is approximately x.
 */
public static double code(double x, double y) {
	return Math.max(x, 0.0) + Math.log1p(Math.exp(-Math.abs(x))) - (x * y);
}
// Duplicate listing of code(): log(1 + e^x) - x*y in double precision.
// NOTE(review): Math.exp(x) overflows to +Infinity for x > ~709.8.
public static double code(double x, double y) {
	return Math.log((1.0 + Math.exp(x))) - (x * y);
}
def code(x, y):
	"""Logistic-loss term log(1 + e**x) - x*y, computed stably.

	Uses the identity log(1 + e**x) == max(x, 0) + log1p(e**-|x|) so
	math.exp never overflows: the naive form raises OverflowError (or
	returns inf) for x > ~709.8 even though the true value is ~x.
	"""
	return max(x, 0.0) + math.log1p(math.exp(-abs(x))) - (x * y)
def code(x, y):
	"""Duplicate listing: logistic-loss term log(1 + e**x) - x*y.

	NOTE(review): math.exp(x) overflows for x > ~709.8.
	"""
	return math.log((1.0 + math.exp(x))) - (x * y)
# Logistic-loss term log(1 + e^x) - x*y, computed in the stable softplus
# form max(x, 0) + log1p(exp(-|x|)) - x*y so exp never overflows to Inf
# (the naive log(1.0 + exp(x)) does for x > ~709.8).
function code(x, y)
	return Float64(max(x, 0.0) + log1p(exp(-abs(x))) - x * y)
end
# Duplicate listing: log(1 + e^x) - x*y with each intermediate rounded to Float64.
# NOTE(review): exp(x) overflows to Inf for x > ~709.8.
function code(x, y)
	return Float64(log(Float64(1.0 + exp(x))) - Float64(x * y))
end
% Logistic-loss term log(1 + e^x) - x*y, computed in the stable softplus
% form max(x, 0) + log1p(exp(-|x|)) - x*y so exp never overflows to Inf
% (the naive log(1 + exp(x)) does for x > ~709.8).
function tmp = code(x, y)
	tmp = max(x, 0) + log1p(exp(-abs(x))) - (x * y);
end
% Duplicate listing: logistic-loss term log(1 + e^x) - x*y.
% NOTE(review): exp(x) overflows to Inf for x > ~709.8.
function tmp = code(x, y)
	tmp = log((1.0 + exp(x))) - (x * y);
end
(* Machine-precision evaluation of log(1 + e^x) - x*y; every intermediate
   is explicitly rounded with N[..., $MachinePrecision] to mimic binary64.
   The second (duplicate) definition harmlessly overwrites the first. *)
code[x_, y_] := N[(N[Log[N[(1.0 + N[Exp[x], $MachinePrecision]), $MachinePrecision]], $MachinePrecision] - N[(x * y), $MachinePrecision]), $MachinePrecision]
code[x_, y_] := N[(N[Log[N[(1.0 + N[Exp[x], $MachinePrecision]), $MachinePrecision]], $MachinePrecision] - N[(x * y), $MachinePrecision]), $MachinePrecision]
\log \left(1 + e^{x}\right) - x \cdot y
\log \left(1 + e^{x}\right) - x \cdot y

Error?

Try it out?

Your Program's Arguments

Results

Enter valid numbers for all inputs

Target

Original: 0.5
Target: 0.0
Herbie: 0.5
\[\begin{array}{l} \mathbf{if}\;x \leq 0:\\ \;\;\;\;\log \left(1 + e^{x}\right) - x \cdot y\\ \mathbf{else}:\\ \;\;\;\;\log \left(1 + e^{-x}\right) - \left(-x\right) \cdot \left(1 - y\right)\\ \end{array} \]

Derivation?

  1. Initial program 0.5

    \[\log \left(1 + e^{x}\right) - x \cdot y \]
  2. Final simplification: 0.5

    \[\leadsto \log \left(1 + e^{x}\right) - x \cdot y \]

Alternatives

Alternative 1
Error: 0.5
Cost: 13120
\[\mathsf{log1p}\left(e^{x}\right) - x \cdot y \]
Alternative 2
Error: 1.0
Cost: 6980
\[\begin{array}{l} \mathbf{if}\;x \leq -20000000000000:\\ \;\;\;\;x \cdot \left(-y\right)\\ \mathbf{else}:\\ \;\;\;\;x \cdot \left(0.5 - y\right) + \log 2\\ \end{array} \]
Alternative 3
Error: 1.3
Cost: 6852
\[\begin{array}{l} \mathbf{if}\;x \leq -20000000000000:\\ \;\;\;\;x \cdot \left(-y\right)\\ \mathbf{else}:\\ \;\;\;\;\log 2 - x \cdot y\\ \end{array} \]
Alternative 4
Error: 14.9
Cost: 6728
\[\begin{array}{l} \mathbf{if}\;x \leq -6.5 \cdot 10^{-65}:\\ \;\;\;\;x \cdot \left(-y\right)\\ \mathbf{elif}\;x \leq 4.1 \cdot 10^{-147}:\\ \;\;\;\;\log 2\\ \mathbf{else}:\\ \;\;\;\;x \cdot \left(0.5 - y\right)\\ \end{array} \]
Alternative 5
Error: 34.4
Cost: 256
\[x \cdot \left(-y\right) \]
Alternative 6
Error: 61.7
Cost: 192
\[x \cdot 0.5 \]

Error

Reproduce?

herbie shell --seed 2023073 
; Herbie input: logistic-regression loss log(1 + e^x) - x*y, with a
; hand-written accuracy target that flips to log(1 + e^-x) - (-x)*(1 - y)
; when x > 0 to avoid evaluating exp at large positive arguments.
(FPCore (x y)
  :name "Logistic regression 2"
  :precision binary64

  :herbie-target
  (if (<= x 0.0) (- (log (+ 1.0 (exp x))) (* x y)) (- (log (+ 1.0 (exp (- x)))) (* (- x) (- 1.0 y))))

  (- (log (+ 1.0 (exp x))) (* x y)))