Average Error: 13.8 → 1.4
Time: 8.0s
Precision: binary64
Original program:
\[wj - \frac{wj \cdot e^{wj} - x}{e^{wj} + wj \cdot e^{wj}} \]
Herbie's improved program:
\[x \cdot e^{\left(-wj\right) - \mathsf{log1p}\left(wj\right)} - \left({wj}^{5} + \left({wj}^{3} - \mathsf{fma}\left(wj, wj, {wj}^{4}\right)\right)\right) \]
; Original program
(FPCore (wj x)
 :precision binary64
 (- wj (/ (- (* wj (exp wj)) x) (+ (exp wj) (* wj (exp wj))))))
; Herbie's improved program
(FPCore (wj x)
 :precision binary64
 (-
  (* x (exp (- (- wj) (log1p wj))))
  (+ (pow wj 5.0) (- (pow wj 3.0) (fma wj wj (pow wj 4.0))))))
/* Original program */
double code(double wj, double x) {
	return wj - (((wj * exp(wj)) - x) / (exp(wj) + (wj * exp(wj))));
}
/* Herbie's improved program */
double code(double wj, double x) {
	return (x * exp((-wj - log1p(wj)))) - (pow(wj, 5.0) + (pow(wj, 3.0) - fma(wj, wj, pow(wj, 4.0))));
}
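Both C exports define a function named code, so they cannot be linked into a single program unchanged. The sketch below is not part of the Herbie output: it copies the two bodies under the hypothetical names code_original and code_herbie and prints both at one sample point with small wj, the region the Taylor expansion in the derivation below targets. Compile with a C99 compiler and link against libm.

#include <math.h>
#include <stdio.h>

/* Renamed copies of the two exports above; the names are illustrative. */
double code_original(double wj, double x) {
	return wj - (((wj * exp(wj)) - x) / (exp(wj) + (wj * exp(wj))));
}

double code_herbie(double wj, double x) {
	return (x * exp((-wj - log1p(wj)))) - (pow(wj, 5.0) + (pow(wj, 3.0) - fma(wj, wj, pow(wj, 4.0))));
}

int main(void) {
	double wj = 1e-8, x = 1e-300;	/* sample inputs chosen for illustration */
	printf("original: %.17g\n", code_original(wj, x));
	printf("herbie:   %.17g\n", code_herbie(wj, x));
	return 0;
}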
# Original program
function code(wj, x)
	return Float64(wj - Float64(Float64(Float64(wj * exp(wj)) - x) / Float64(exp(wj) + Float64(wj * exp(wj)))))
end
# Herbie's improved program
function code(wj, x)
	return Float64(Float64(x * exp(Float64(Float64(-wj) - log1p(wj)))) - Float64((wj ^ 5.0) + Float64((wj ^ 3.0) - fma(wj, wj, (wj ^ 4.0)))))
end
code[wj_, x_] := N[(wj - N[(N[(N[(wj * N[Exp[wj], $MachinePrecision]), $MachinePrecision] - x), $MachinePrecision] / N[(N[Exp[wj], $MachinePrecision] + N[(wj * N[Exp[wj], $MachinePrecision]), $MachinePrecision]), $MachinePrecision]), $MachinePrecision]), $MachinePrecision]
code[wj_, x_] := N[(N[(x * N[Exp[N[((-wj) - N[Log[1 + wj], $MachinePrecision]), $MachinePrecision]], $MachinePrecision]), $MachinePrecision] - N[(N[Power[wj, 5.0], $MachinePrecision] + N[(N[Power[wj, 3.0], $MachinePrecision] - N[(wj * wj + N[Power[wj, 4.0], $MachinePrecision]), $MachinePrecision]), $MachinePrecision]), $MachinePrecision]), $MachinePrecision]
wj - \frac{wj \cdot e^{wj} - x}{e^{wj} + wj \cdot e^{wj}}
x \cdot e^{\left(-wj\right) - \mathsf{log1p}\left(wj\right)} - \left({wj}^{5} + \left({wj}^{3} - \mathsf{fma}\left(wj, wj, {wj}^{4}\right)\right)\right)

Error

[Error plots: bits of error versus wj, and bits of error versus x]

Target

Average bits of error: Original 13.8, Target 13.1, Herbie 1.4
\[wj - \left(\frac{wj}{wj + 1} - \frac{x}{e^{wj} + wj \cdot e^{wj}}\right) \]
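In exact arithmetic the target is an algebraic rearrangement of the original step: factoring the exponential out of the denominator and splitting the fraction gives

\[wj - \frac{wj \cdot e^{wj} - x}{e^{wj} + wj \cdot e^{wj}} = wj - \frac{wj}{wj + 1} + \frac{x}{e^{wj} + wj \cdot e^{wj}} = wj - \left(\frac{wj}{wj + 1} - \frac{x}{e^{wj} + wj \cdot e^{wj}}\right) \]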

Derivation

  1. Initial program (error: 13.8 bits)

    \[wj - \frac{wj \cdot e^{wj} - x}{e^{wj} + wj \cdot e^{wj}} \]
  2. Simplified (error: 13.1 bits)

    \[\leadsto \color{blue}{wj + \frac{\frac{x}{e^{wj}} - wj}{wj + 1}} \]
  3. Applied egg-rr (error: 6.9 bits)

    \[\leadsto \color{blue}{\mathsf{fma}\left(\frac{x}{1}, \frac{e^{-wj}}{wj + 1}, -\left(\frac{wj}{wj + 1} - wj\right)\right)} \]
  4. Taylor expanded in wj around 0 (error: 1.2 bits; see the worked expansion after this derivation)

    \[\leadsto \mathsf{fma}\left(\frac{x}{1}, \frac{e^{-wj}}{wj + 1}, -\color{blue}{\left(\left({wj}^{5} + {wj}^{3}\right) - \left({wj}^{4} + {wj}^{2}\right)\right)}\right) \]
  5. Applied egg-rr (error: 1.4 bits)

    \[\leadsto \color{blue}{x \cdot e^{\left(-wj\right) - \mathsf{log1p}\left(wj\right)} + \left(-\left({wj}^{5} + \left({wj}^{3} - \mathsf{fma}\left(wj, wj, {wj}^{4}\right)\right)\right)\right)} \]
  6. Final simplification (error: 1.4 bits)

    \[\leadsto x \cdot e^{\left(-wj\right) - \mathsf{log1p}\left(wj\right)} - \left({wj}^{5} + \left({wj}^{3} - \mathsf{fma}\left(wj, wj, {wj}^{4}\right)\right)\right) \]
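A worked check of the expansion: step 4 is the only rewrite in this derivation that changes the value in exact arithmetic. It replaces the term wj - wj/(wj + 1) with its Taylor series around wj = 0,

\[wj - \frac{wj}{wj + 1} = \frac{{wj}^{2}}{wj + 1} = {wj}^{2} - {wj}^{3} + {wj}^{4} - {wj}^{5} + O\left({wj}^{6}\right), \]

and the final form groups the truncated series with a fused multiply-add, since fma(wj, wj, wj^4) computes wj^2 + wj^4:

\[{wj}^{5} + \left({wj}^{3} - \mathsf{fma}\left(wj, wj, {wj}^{4}\right)\right) = -\left({wj}^{2} - {wj}^{3} + {wj}^{4} - {wj}^{5}\right), \]

so subtracting that group adds the series back. The remaining factor uses the exact identity e^{-wj} / (wj + 1) = e^{-wj - log1p(wj)} (step 5), because log1p(wj) = log(1 + wj).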

Reproduce

herbie shell --seed 2022159 
(FPCore (wj x)
  :name "Jmat.Real.lambertw, newton loop step"
  :precision binary64

  :herbie-target
  (- wj (- (/ wj (+ wj 1.0)) (/ x (+ (exp wj) (* wj (exp wj))))))

  (- wj (/ (- (* wj (exp wj)) x) (+ (exp wj) (* wj (exp wj))))))
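The FPCore's :name, "Jmat.Real.lambertw, newton loop step", indicates that the expression is one Newton update for the Lambert W equation w * e^w = x: with f(w) = w*e^w - x and f'(w) = e^w + w*e^w, the update w - f(w)/f'(w) is exactly the original program above. As a rough illustration of that context (not taken from Jmat or from the Herbie output), a minimal Newton loop built around the original step might look like the sketch below; the starting guess and iteration cap are assumptions.

#include <math.h>

/* Illustrative sketch only: solve w * exp(w) = x for w (principal branch)
 * by Newton's method; each update is the original expression from this
 * report.  The initial guess and the iteration limit are assumptions. */
double lambertw_newton(double x) {
	double w = (x > 1.0) ? log(x) : x;	/* assumed rough starting guess */
	for (int i = 0; i < 50; i++) {
		double next = w - (((w * exp(w)) - x) / (exp(w) + (w * exp(w))));
		if (next == w) break;	/* converged to double precision */
		w = next;
	}
	return w;
}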