Jmat.Real.lambertw, newton loop step

Percentage Accurate: 52.5% → 99.4%
Time: 28.6s
Alternatives: 19
Speedup: 313.0×

Specification

\[\begin{array}{l} t_0 := wj \cdot e^{wj}\\ wj - \frac{t_0 - x}{e^{wj} + t_0} \end{array} \]
(FPCore (wj x)
 :precision binary64
 (let* ((t_0 (* wj (exp wj)))) (- wj (/ (- t_0 x) (+ (exp wj) t_0)))))
double code(double wj, double x) {
	double t_0 = wj * exp(wj);
	return wj - ((t_0 - x) / (exp(wj) + t_0));
}
real(8) function code(wj, x)
    real(8), intent (in) :: wj
    real(8), intent (in) :: x
    real(8) :: t_0
    t_0 = wj * exp(wj)
    code = wj - ((t_0 - x) / (exp(wj) + t_0))
end function
public static double code(double wj, double x) {
	double t_0 = wj * Math.exp(wj);
	return wj - ((t_0 - x) / (Math.exp(wj) + t_0));
}
def code(wj, x):
	t_0 = wj * math.exp(wj)
	return wj - ((t_0 - x) / (math.exp(wj) + t_0))
function code(wj, x)
	t_0 = Float64(wj * exp(wj))
	return Float64(wj - Float64(Float64(t_0 - x) / Float64(exp(wj) + t_0)))
end
function tmp = code(wj, x)
	t_0 = wj * exp(wj);
	tmp = wj - ((t_0 - x) / (exp(wj) + t_0));
end
code[wj_, x_] := Block[{t$95$0 = N[(wj * N[Exp[wj], $MachinePrecision]), $MachinePrecision]}, N[(wj - N[(N[(t$95$0 - x), $MachinePrecision] / N[(N[Exp[wj], $MachinePrecision] + t$95$0), $MachinePrecision]), $MachinePrecision]), $MachinePrecision]]
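The specification is a single Newton-Raphson step for inverting w * e^w = x, i.e. for computing the Lambert W function: with f(w) = w * e^w - x and f'(w) = e^w + w * e^w, the step is w - f(w)/f'(w), which is exactly the expression above. A minimal sketch of how such a step is typically iterated to convergence (the starting guess, tolerance, and iteration cap below are illustrative choices of ours, not taken from Jmat):

import math

def lambertw_newton(x, w0=0.0, tol=1e-15, max_iter=50):
    # Solve w*exp(w) = x by repeating the Newton step from the
    # specification; assumes x >= 0 so the principal branch suffices.
    wj = w0
    for _ in range(max_iter):
        t_0 = wj * math.exp(wj)
        step = wj - (t_0 - x) / (math.exp(wj) + t_0)
        if abs(step - wj) <= tol * max(1.0, abs(step)):
            return step
        wj = step
    return wj

print(lambertw_newton(1.0))  # ~0.567143, the omega constant W(1)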

Sampling outcomes in binary64 precision.

Local Percentage Accuracy

The average percentage accuracy by input value. The horizontal axis shows the value of an input variable (chosen in the plot title); the vertical axis is accuracy, where higher is better. Red represents the original program and blue represents Herbie's suggestion; the two can be toggled with the buttons below the plot. The line is an average, while the dots represent individual samples.

Accuracy vs Speed

Herbie found 19 alternatives:

The accuracy (vertical axis) and speed (horizontal axis) of each alternative. Up and to the right is better. The red square shows the initial program, each blue circle shows an alternative, and the line shows the best available speed-accuracy tradeoffs.

Initial Program: 52.5% accurate, 1.0× speedup

\[\begin{array}{l} t_0 := wj \cdot e^{wj}\\ wj - \frac{t_0 - x}{e^{wj} + t_0} \end{array} \]
(FPCore (wj x)
 :precision binary64
 (let* ((t_0 (* wj (exp wj)))) (- wj (/ (- t_0 x) (+ (exp wj) t_0)))))
double code(double wj, double x) {
	double t_0 = wj * exp(wj);
	return wj - ((t_0 - x) / (exp(wj) + t_0));
}
real(8) function code(wj, x)
    real(8), intent (in) :: wj
    real(8), intent (in) :: x
    real(8) :: t_0
    t_0 = wj * exp(wj)
    code = wj - ((t_0 - x) / (exp(wj) + t_0))
end function
public static double code(double wj, double x) {
	double t_0 = wj * Math.exp(wj);
	return wj - ((t_0 - x) / (Math.exp(wj) + t_0));
}
def code(wj, x):
	t_0 = wj * math.exp(wj)
	return wj - ((t_0 - x) / (math.exp(wj) + t_0))
function code(wj, x)
	t_0 = Float64(wj * exp(wj))
	return Float64(wj - Float64(Float64(t_0 - x) / Float64(exp(wj) + t_0)))
end
function tmp = code(wj, x)
	t_0 = wj * exp(wj);
	tmp = wj - ((t_0 - x) / (exp(wj) + t_0));
end
code[wj_, x_] := Block[{t$95$0 = N[(wj * N[Exp[wj], $MachinePrecision]), $MachinePrecision]}, N[(wj - N[(N[(t$95$0 - x), $MachinePrecision] / N[(N[Exp[wj], $MachinePrecision] + t$95$0), $MachinePrecision]), $MachinePrecision]), $MachinePrecision]]

Alternative 1: 99.4% accurate, 1.4× speedup

\[\begin{array}{l} \mathbf{if}\;wj \leq -2.9 \cdot 10^{-7}:\\ \;\;\;\;wj + \frac{\frac{x}{e^{wj}} - wj}{wj + 1}\\ \mathbf{else}:\\ \;\;\;\;{wj}^{2} \cdot \left(x + \left(x + 1\right)\right) + \left(\left(x + \left(\left(-1 - x\right) - x\right) \cdot {wj}^{3}\right) - wj \cdot \left(x + x\right)\right)\\ \end{array} \]
(FPCore (wj x)
 :precision binary64
 (if (<= wj -2.9e-7)
   (+ wj (/ (- (/ x (exp wj)) wj) (+ wj 1.0)))
   (+
    (* (pow wj 2.0) (+ x (+ x 1.0)))
    (- (+ x (* (- (- -1.0 x) x) (pow wj 3.0))) (* wj (+ x x))))))
double code(double wj, double x) {
	double tmp;
	if (wj <= -2.9e-7) {
		tmp = wj + (((x / exp(wj)) - wj) / (wj + 1.0));
	} else {
		tmp = (pow(wj, 2.0) * (x + (x + 1.0))) + ((x + (((-1.0 - x) - x) * pow(wj, 3.0))) - (wj * (x + x)));
	}
	return tmp;
}
real(8) function code(wj, x)
    real(8), intent (in) :: wj
    real(8), intent (in) :: x
    real(8) :: tmp
    if (wj <= (-2.9d-7)) then
        tmp = wj + (((x / exp(wj)) - wj) / (wj + 1.0d0))
    else
        tmp = ((wj ** 2.0d0) * (x + (x + 1.0d0))) + ((x + ((((-1.0d0) - x) - x) * (wj ** 3.0d0))) - (wj * (x + x)))
    end if
    code = tmp
end function
public static double code(double wj, double x) {
	double tmp;
	if (wj <= -2.9e-7) {
		tmp = wj + (((x / Math.exp(wj)) - wj) / (wj + 1.0));
	} else {
		tmp = (Math.pow(wj, 2.0) * (x + (x + 1.0))) + ((x + (((-1.0 - x) - x) * Math.pow(wj, 3.0))) - (wj * (x + x)));
	}
	return tmp;
}
def code(wj, x):
	tmp = 0
	if wj <= -2.9e-7:
		tmp = wj + (((x / math.exp(wj)) - wj) / (wj + 1.0))
	else:
		tmp = (math.pow(wj, 2.0) * (x + (x + 1.0))) + ((x + (((-1.0 - x) - x) * math.pow(wj, 3.0))) - (wj * (x + x)))
	return tmp
function code(wj, x)
	tmp = 0.0
	if (wj <= -2.9e-7)
		tmp = Float64(wj + Float64(Float64(Float64(x / exp(wj)) - wj) / Float64(wj + 1.0)));
	else
		tmp = Float64(Float64((wj ^ 2.0) * Float64(x + Float64(x + 1.0))) + Float64(Float64(x + Float64(Float64(Float64(-1.0 - x) - x) * (wj ^ 3.0))) - Float64(wj * Float64(x + x))));
	end
	return tmp
end
function tmp_2 = code(wj, x)
	tmp = 0.0;
	if (wj <= -2.9e-7)
		tmp = wj + (((x / exp(wj)) - wj) / (wj + 1.0));
	else
		tmp = ((wj ^ 2.0) * (x + (x + 1.0))) + ((x + (((-1.0 - x) - x) * (wj ^ 3.0))) - (wj * (x + x)));
	end
	tmp_2 = tmp;
end
code[wj_, x_] := If[LessEqual[wj, -2.9e-7], N[(wj + N[(N[(N[(x / N[Exp[wj], $MachinePrecision]), $MachinePrecision] - wj), $MachinePrecision] / N[(wj + 1.0), $MachinePrecision]), $MachinePrecision]), $MachinePrecision], N[(N[(N[Power[wj, 2.0], $MachinePrecision] * N[(x + N[(x + 1.0), $MachinePrecision]), $MachinePrecision]), $MachinePrecision] + N[(N[(x + N[(N[(N[(-1.0 - x), $MachinePrecision] - x), $MachinePrecision] * N[Power[wj, 3.0], $MachinePrecision]), $MachinePrecision]), $MachinePrecision] - N[(wj * N[(x + x), $MachinePrecision]), $MachinePrecision]), $MachinePrecision]), $MachinePrecision]]
Derivation
  1. Split input into 2 regimes
  2. if wj < -2.8999999999999998e-7

    1. Initial program 2.3%

      \[wj - \frac{wj \cdot e^{wj} - x}{e^{wj} + wj \cdot e^{wj}} \]
    2. Step-by-step derivation
      1. sub-neg 2.3%

        \[\leadsto \color{blue}{wj + \left(-\frac{wj \cdot e^{wj} - x}{e^{wj} + wj \cdot e^{wj}}\right)} \]
      2. div-sub 2.3%

        \[\leadsto wj + \left(-\color{blue}{\left(\frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}} - \frac{x}{e^{wj} + wj \cdot e^{wj}}\right)}\right) \]
      3. sub-neg 2.3%

        \[\leadsto wj + \left(-\color{blue}{\left(\frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}} + \left(-\frac{x}{e^{wj} + wj \cdot e^{wj}}\right)\right)}\right) \]
      4. +-commutative 2.3%

        \[\leadsto wj + \left(-\color{blue}{\left(\left(-\frac{x}{e^{wj} + wj \cdot e^{wj}}\right) + \frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}\right)}\right) \]
      5. distribute-neg-in 2.3%

        \[\leadsto wj + \color{blue}{\left(\left(-\left(-\frac{x}{e^{wj} + wj \cdot e^{wj}}\right)\right) + \left(-\frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}\right)\right)} \]
      6. remove-double-neg 2.3%

        \[\leadsto wj + \left(\color{blue}{\frac{x}{e^{wj} + wj \cdot e^{wj}}} + \left(-\frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}\right)\right) \]
      7. sub-neg 2.3%

        \[\leadsto wj + \color{blue}{\left(\frac{x}{e^{wj} + wj \cdot e^{wj}} - \frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}\right)} \]
      8. div-sub 2.3%

        \[\leadsto wj + \color{blue}{\frac{x - wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}} \]
      9. distribute-rgt1-in 100.0%

        \[\leadsto wj + \frac{x - wj \cdot e^{wj}}{\color{blue}{\left(wj + 1\right) \cdot e^{wj}}} \]
      10. associate-/l/ 100.0%

        \[\leadsto wj + \color{blue}{\frac{\frac{x - wj \cdot e^{wj}}{e^{wj}}}{wj + 1}} \]
    3. Simplified 100.0%

      \[\leadsto \color{blue}{wj + \frac{\frac{x}{e^{wj}} - wj}{wj + 1}} \]

    if -2.8999999999999998e-7 < wj

    1. Initial program 81.0%

      \[wj - \frac{wj \cdot e^{wj} - x}{e^{wj} + wj \cdot e^{wj}} \]
    2. Step-by-step derivation
      1. sub-neg 81.0%

        \[\leadsto \color{blue}{wj + \left(-\frac{wj \cdot e^{wj} - x}{e^{wj} + wj \cdot e^{wj}}\right)} \]
      2. div-sub 81.0%

        \[\leadsto wj + \left(-\color{blue}{\left(\frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}} - \frac{x}{e^{wj} + wj \cdot e^{wj}}\right)}\right) \]
      3. sub-neg 81.0%

        \[\leadsto wj + \left(-\color{blue}{\left(\frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}} + \left(-\frac{x}{e^{wj} + wj \cdot e^{wj}}\right)\right)}\right) \]
      4. +-commutative 81.0%

        \[\leadsto wj + \left(-\color{blue}{\left(\left(-\frac{x}{e^{wj} + wj \cdot e^{wj}}\right) + \frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}\right)}\right) \]
      5. distribute-neg-in 81.0%

        \[\leadsto wj + \color{blue}{\left(\left(-\left(-\frac{x}{e^{wj} + wj \cdot e^{wj}}\right)\right) + \left(-\frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}\right)\right)} \]
      6. remove-double-neg 81.0%

        \[\leadsto wj + \left(\color{blue}{\frac{x}{e^{wj} + wj \cdot e^{wj}}} + \left(-\frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}\right)\right) \]
      7. sub-neg 81.0%

        \[\leadsto wj + \color{blue}{\left(\frac{x}{e^{wj} + wj \cdot e^{wj}} - \frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}\right)} \]
      8. div-sub 81.0%

        \[\leadsto wj + \color{blue}{\frac{x - wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}} \]
      9. distribute-rgt1-in 81.0%

        \[\leadsto wj + \frac{x - wj \cdot e^{wj}}{\color{blue}{\left(wj + 1\right) \cdot e^{wj}}} \]
      10. associate-/l/ 81.0%

        \[\leadsto wj + \color{blue}{\frac{\frac{x - wj \cdot e^{wj}}{e^{wj}}}{wj + 1}} \]
    3. Simplified 81.6%

      \[\leadsto \color{blue}{wj + \frac{\frac{x}{e^{wj}} - wj}{wj + 1}} \]
    4. Taylor expanded in wj around 0 80.4%

      \[\leadsto wj + \frac{\color{blue}{\left(-1 \cdot \left(wj \cdot x\right) + x\right)} - wj}{wj + 1} \]
    5. Taylor expanded in wj around 0 98.8%

      \[\leadsto \color{blue}{{wj}^{2} \cdot \left(\left(1 + x\right) - -1 \cdot x\right) + \left(\left(-1 \cdot x - x\right) \cdot wj + \left(\left(-1 \cdot x - \left(1 + x\right)\right) \cdot {wj}^{3} + x\right)\right)} \]
  3. Recombined 2 regimes into one program.
  4. Final simplification 99.2%

    \[\leadsto \begin{array}{l} \mathbf{if}\;wj \leq -2.9 \cdot 10^{-7}:\\ \;\;\;\;wj + \frac{\frac{x}{e^{wj}} - wj}{wj + 1}\\ \mathbf{else}:\\ \;\;\;\;{wj}^{2} \cdot \left(x + \left(x + 1\right)\right) + \left(\left(x + \left(\left(-1 - x\right) - x\right) \cdot {wj}^{3}\right) - wj \cdot \left(x + x\right)\right)\\ \end{array} \]
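To see what this rewrite buys, one can compare the original step and Alternative 1's polynomial branch against a higher-precision evaluation near wj = 0. A minimal check (the probe points and the use of Python's decimal module as the reference are our own choices, not part of the report):

import math
from decimal import Decimal, getcontext

getcontext().prec = 50  # reference precision far beyond binary64

def step_f64(wj, x):
    # the original Newton step, evaluated in binary64
    t_0 = wj * math.exp(wj)
    return wj - (t_0 - x) / (math.exp(wj) + t_0)

def alt1_poly(wj, x):
    # the else-branch of Alternative 1 (intended for wj > -2.9e-7)
    return (wj**2 * (x + (x + 1.0))
            + ((x + ((-1.0 - x) - x) * wj**3) - wj * (x + x)))

def step_ref(wj, x):
    # the same Newton step in 50-digit decimal arithmetic
    wj, x = Decimal(wj), Decimal(x)
    t_0 = wj * wj.exp()
    return wj - (t_0 - x) / (wj.exp() + t_0)

for wj, x in [(1e-9, 2.0), (-1e-10, 0.5), (1e-12, -3.0)]:
    ref = step_ref(wj, x)
    print(wj, x,
          abs(Decimal(step_f64(wj, x)) - ref),   # error of the original
          abs(Decimal(alt1_poly(wj, x)) - ref))  # error of the rewrite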

Alternative 2: 99.2% accurate, 2.6× speedup

\[\begin{array}{l} \mathbf{if}\;wj \leq -6.6 \cdot 10^{-9}:\\ \;\;\;\;wj + \frac{\frac{x}{e^{wj}} - wj}{wj + 1}\\ \mathbf{else}:\\ \;\;\;\;{wj}^{2} \cdot \left(1 - \left(x \cdot -4 + x \cdot 1.5\right)\right) + \left(x + -2 \cdot \left(wj \cdot x\right)\right)\\ \end{array} \]
(FPCore (wj x)
 :precision binary64
 (if (<= wj -6.6e-9)
   (+ wj (/ (- (/ x (exp wj)) wj) (+ wj 1.0)))
   (+
    (* (pow wj 2.0) (- 1.0 (+ (* x -4.0) (* x 1.5))))
    (+ x (* -2.0 (* wj x))))))
double code(double wj, double x) {
	double tmp;
	if (wj <= -6.6e-9) {
		tmp = wj + (((x / exp(wj)) - wj) / (wj + 1.0));
	} else {
		tmp = (pow(wj, 2.0) * (1.0 - ((x * -4.0) + (x * 1.5)))) + (x + (-2.0 * (wj * x)));
	}
	return tmp;
}
real(8) function code(wj, x)
    real(8), intent (in) :: wj
    real(8), intent (in) :: x
    real(8) :: tmp
    if (wj <= (-6.6d-9)) then
        tmp = wj + (((x / exp(wj)) - wj) / (wj + 1.0d0))
    else
        tmp = ((wj ** 2.0d0) * (1.0d0 - ((x * (-4.0d0)) + (x * 1.5d0)))) + (x + ((-2.0d0) * (wj * x)))
    end if
    code = tmp
end function
public static double code(double wj, double x) {
	double tmp;
	if (wj <= -6.6e-9) {
		tmp = wj + (((x / Math.exp(wj)) - wj) / (wj + 1.0));
	} else {
		tmp = (Math.pow(wj, 2.0) * (1.0 - ((x * -4.0) + (x * 1.5)))) + (x + (-2.0 * (wj * x)));
	}
	return tmp;
}
def code(wj, x):
	tmp = 0
	if wj <= -6.6e-9:
		tmp = wj + (((x / math.exp(wj)) - wj) / (wj + 1.0))
	else:
		tmp = (math.pow(wj, 2.0) * (1.0 - ((x * -4.0) + (x * 1.5)))) + (x + (-2.0 * (wj * x)))
	return tmp
function code(wj, x)
	tmp = 0.0
	if (wj <= -6.6e-9)
		tmp = Float64(wj + Float64(Float64(Float64(x / exp(wj)) - wj) / Float64(wj + 1.0)));
	else
		tmp = Float64(Float64((wj ^ 2.0) * Float64(1.0 - Float64(Float64(x * -4.0) + Float64(x * 1.5)))) + Float64(x + Float64(-2.0 * Float64(wj * x))));
	end
	return tmp
end
function tmp_2 = code(wj, x)
	tmp = 0.0;
	if (wj <= -6.6e-9)
		tmp = wj + (((x / exp(wj)) - wj) / (wj + 1.0));
	else
		tmp = ((wj ^ 2.0) * (1.0 - ((x * -4.0) + (x * 1.5)))) + (x + (-2.0 * (wj * x)));
	end
	tmp_2 = tmp;
end
code[wj_, x_] := If[LessEqual[wj, -6.6e-9], N[(wj + N[(N[(N[(x / N[Exp[wj], $MachinePrecision]), $MachinePrecision] - wj), $MachinePrecision] / N[(wj + 1.0), $MachinePrecision]), $MachinePrecision]), $MachinePrecision], N[(N[(N[Power[wj, 2.0], $MachinePrecision] * N[(1.0 - N[(N[(x * -4.0), $MachinePrecision] + N[(x * 1.5), $MachinePrecision]), $MachinePrecision]), $MachinePrecision]), $MachinePrecision] + N[(x + N[(-2.0 * N[(wj * x), $MachinePrecision]), $MachinePrecision]), $MachinePrecision]), $MachinePrecision]]
Derivation
  1. Split input into 2 regimes
  2. if wj < -6.60000000000000037e-9

    1. Initial program 2.3%

      \[wj - \frac{wj \cdot e^{wj} - x}{e^{wj} + wj \cdot e^{wj}} \]
    2. Step-by-step derivation
      1. sub-neg 2.3%

        \[\leadsto \color{blue}{wj + \left(-\frac{wj \cdot e^{wj} - x}{e^{wj} + wj \cdot e^{wj}}\right)} \]
      2. div-sub 2.3%

        \[\leadsto wj + \left(-\color{blue}{\left(\frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}} - \frac{x}{e^{wj} + wj \cdot e^{wj}}\right)}\right) \]
      3. sub-neg 2.3%

        \[\leadsto wj + \left(-\color{blue}{\left(\frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}} + \left(-\frac{x}{e^{wj} + wj \cdot e^{wj}}\right)\right)}\right) \]
      4. +-commutative 2.3%

        \[\leadsto wj + \left(-\color{blue}{\left(\left(-\frac{x}{e^{wj} + wj \cdot e^{wj}}\right) + \frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}\right)}\right) \]
      5. distribute-neg-in 2.3%

        \[\leadsto wj + \color{blue}{\left(\left(-\left(-\frac{x}{e^{wj} + wj \cdot e^{wj}}\right)\right) + \left(-\frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}\right)\right)} \]
      6. remove-double-neg 2.3%

        \[\leadsto wj + \left(\color{blue}{\frac{x}{e^{wj} + wj \cdot e^{wj}}} + \left(-\frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}\right)\right) \]
      7. sub-neg 2.3%

        \[\leadsto wj + \color{blue}{\left(\frac{x}{e^{wj} + wj \cdot e^{wj}} - \frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}\right)} \]
      8. div-sub 2.3%

        \[\leadsto wj + \color{blue}{\frac{x - wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}} \]
      9. distribute-rgt1-in 100.0%

        \[\leadsto wj + \frac{x - wj \cdot e^{wj}}{\color{blue}{\left(wj + 1\right) \cdot e^{wj}}} \]
      10. associate-/l/ 100.0%

        \[\leadsto wj + \color{blue}{\frac{\frac{x - wj \cdot e^{wj}}{e^{wj}}}{wj + 1}} \]
    3. Simplified 100.0%

      \[\leadsto \color{blue}{wj + \frac{\frac{x}{e^{wj}} - wj}{wj + 1}} \]

    if -6.60000000000000037e-9 < wj

    1. Initial program 81.0%

      \[wj - \frac{wj \cdot e^{wj} - x}{e^{wj} + wj \cdot e^{wj}} \]
    2. Step-by-step derivation
      1. sub-neg 81.0%

        \[\leadsto \color{blue}{wj + \left(-\frac{wj \cdot e^{wj} - x}{e^{wj} + wj \cdot e^{wj}}\right)} \]
      2. div-sub 81.0%

        \[\leadsto wj + \left(-\color{blue}{\left(\frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}} - \frac{x}{e^{wj} + wj \cdot e^{wj}}\right)}\right) \]
      3. sub-neg 81.0%

        \[\leadsto wj + \left(-\color{blue}{\left(\frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}} + \left(-\frac{x}{e^{wj} + wj \cdot e^{wj}}\right)\right)}\right) \]
      4. +-commutative 81.0%

        \[\leadsto wj + \left(-\color{blue}{\left(\left(-\frac{x}{e^{wj} + wj \cdot e^{wj}}\right) + \frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}\right)}\right) \]
      5. distribute-neg-in 81.0%

        \[\leadsto wj + \color{blue}{\left(\left(-\left(-\frac{x}{e^{wj} + wj \cdot e^{wj}}\right)\right) + \left(-\frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}\right)\right)} \]
      6. remove-double-neg 81.0%

        \[\leadsto wj + \left(\color{blue}{\frac{x}{e^{wj} + wj \cdot e^{wj}}} + \left(-\frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}\right)\right) \]
      7. sub-neg 81.0%

        \[\leadsto wj + \color{blue}{\left(\frac{x}{e^{wj} + wj \cdot e^{wj}} - \frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}\right)} \]
      8. div-sub 81.0%

        \[\leadsto wj + \color{blue}{\frac{x - wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}} \]
      9. distribute-rgt1-in 81.0%

        \[\leadsto wj + \frac{x - wj \cdot e^{wj}}{\color{blue}{\left(wj + 1\right) \cdot e^{wj}}} \]
      10. associate-/l/ 81.0%

        \[\leadsto wj + \color{blue}{\frac{\frac{x - wj \cdot e^{wj}}{e^{wj}}}{wj + 1}} \]
    3. Simplified 81.6%

      \[\leadsto \color{blue}{wj + \frac{\frac{x}{e^{wj}} - wj}{wj + 1}} \]
    4. Taylor expanded in wj around 0 98.6%

      \[\leadsto \color{blue}{\left(1 - \left(-4 \cdot x + 1.5 \cdot x\right)\right) \cdot {wj}^{2} + \left(-2 \cdot \left(wj \cdot x\right) + x\right)} \]
  3. Recombined 2 regimes into one program.
  4. Final simplification 99.1%

    \[\leadsto \begin{array}{l} \mathbf{if}\;wj \leq -6.6 \cdot 10^{-9}:\\ \;\;\;\;wj + \frac{\frac{x}{e^{wj}} - wj}{wj + 1}\\ \mathbf{else}:\\ \;\;\;\;{wj}^{2} \cdot \left(1 - \left(x \cdot -4 + x \cdot 1.5\right)\right) + \left(x + -2 \cdot \left(wj \cdot x\right)\right)\\ \end{array} \]
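As a sanity check on the polynomial branch (our own derivation, not taken from the report), the exact step can be rewritten and expanded around wj = 0:

\[wj - \frac{wj \cdot e^{wj} - x}{e^{wj} \cdot \left(1 + wj\right)} = \frac{{wj}^{2}}{1 + wj} + \frac{x \cdot e^{-wj}}{1 + wj} = x - 2 \cdot \left(wj \cdot x\right) + \left(1 + 2.5 \cdot x\right) \cdot {wj}^{2} + O\left({wj}^{3}\right) \]

The terms through second order are exactly the else-branch above, since 1 - (x \cdot -4 + x \cdot 1.5) = 1 + 2.5 \cdot x.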

Alternative 3: 99.1% accurate, 2.7× speedup

\[\begin{array}{l} \mathbf{if}\;wj \leq -3.5 \cdot 10^{-9}:\\ \;\;\;\;wj + \frac{\frac{x}{e^{wj}} - wj}{wj + 1}\\ \mathbf{else}:\\ \;\;\;\;\left(x + -2 \cdot \left(wj \cdot x\right)\right) + {wj}^{2} \cdot \left(1 - x \cdot -4\right)\\ \end{array} \]
(FPCore (wj x)
 :precision binary64
 (if (<= wj -3.5e-9)
   (+ wj (/ (- (/ x (exp wj)) wj) (+ wj 1.0)))
   (+ (+ x (* -2.0 (* wj x))) (* (pow wj 2.0) (- 1.0 (* x -4.0))))))
double code(double wj, double x) {
	double tmp;
	if (wj <= -3.5e-9) {
		tmp = wj + (((x / exp(wj)) - wj) / (wj + 1.0));
	} else {
		tmp = (x + (-2.0 * (wj * x))) + (pow(wj, 2.0) * (1.0 - (x * -4.0)));
	}
	return tmp;
}
real(8) function code(wj, x)
    real(8), intent (in) :: wj
    real(8), intent (in) :: x
    real(8) :: tmp
    if (wj <= (-3.5d-9)) then
        tmp = wj + (((x / exp(wj)) - wj) / (wj + 1.0d0))
    else
        tmp = (x + ((-2.0d0) * (wj * x))) + ((wj ** 2.0d0) * (1.0d0 - (x * (-4.0d0))))
    end if
    code = tmp
end function
public static double code(double wj, double x) {
	double tmp;
	if (wj <= -3.5e-9) {
		tmp = wj + (((x / Math.exp(wj)) - wj) / (wj + 1.0));
	} else {
		tmp = (x + (-2.0 * (wj * x))) + (Math.pow(wj, 2.0) * (1.0 - (x * -4.0)));
	}
	return tmp;
}
def code(wj, x):
	tmp = 0
	if wj <= -3.5e-9:
		tmp = wj + (((x / math.exp(wj)) - wj) / (wj + 1.0))
	else:
		tmp = (x + (-2.0 * (wj * x))) + (math.pow(wj, 2.0) * (1.0 - (x * -4.0)))
	return tmp
function code(wj, x)
	tmp = 0.0
	if (wj <= -3.5e-9)
		tmp = Float64(wj + Float64(Float64(Float64(x / exp(wj)) - wj) / Float64(wj + 1.0)));
	else
		tmp = Float64(Float64(x + Float64(-2.0 * Float64(wj * x))) + Float64((wj ^ 2.0) * Float64(1.0 - Float64(x * -4.0))));
	end
	return tmp
end
function tmp_2 = code(wj, x)
	tmp = 0.0;
	if (wj <= -3.5e-9)
		tmp = wj + (((x / exp(wj)) - wj) / (wj + 1.0));
	else
		tmp = (x + (-2.0 * (wj * x))) + ((wj ^ 2.0) * (1.0 - (x * -4.0)));
	end
	tmp_2 = tmp;
end
code[wj_, x_] := If[LessEqual[wj, -3.5e-9], N[(wj + N[(N[(N[(x / N[Exp[wj], $MachinePrecision]), $MachinePrecision] - wj), $MachinePrecision] / N[(wj + 1.0), $MachinePrecision]), $MachinePrecision]), $MachinePrecision], N[(N[(x + N[(-2.0 * N[(wj * x), $MachinePrecision]), $MachinePrecision]), $MachinePrecision] + N[(N[Power[wj, 2.0], $MachinePrecision] * N[(1.0 - N[(x * -4.0), $MachinePrecision]), $MachinePrecision]), $MachinePrecision]), $MachinePrecision]]
Derivation
  1. Split input into 2 regimes
  2. if wj < -3.4999999999999999e-9

    1. Initial program 2.3%

      \[wj - \frac{wj \cdot e^{wj} - x}{e^{wj} + wj \cdot e^{wj}} \]
    2. Step-by-step derivation
      1. sub-neg 2.3%

        \[\leadsto \color{blue}{wj + \left(-\frac{wj \cdot e^{wj} - x}{e^{wj} + wj \cdot e^{wj}}\right)} \]
      2. div-sub 2.3%

        \[\leadsto wj + \left(-\color{blue}{\left(\frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}} - \frac{x}{e^{wj} + wj \cdot e^{wj}}\right)}\right) \]
      3. sub-neg 2.3%

        \[\leadsto wj + \left(-\color{blue}{\left(\frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}} + \left(-\frac{x}{e^{wj} + wj \cdot e^{wj}}\right)\right)}\right) \]
      4. +-commutative 2.3%

        \[\leadsto wj + \left(-\color{blue}{\left(\left(-\frac{x}{e^{wj} + wj \cdot e^{wj}}\right) + \frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}\right)}\right) \]
      5. distribute-neg-in 2.3%

        \[\leadsto wj + \color{blue}{\left(\left(-\left(-\frac{x}{e^{wj} + wj \cdot e^{wj}}\right)\right) + \left(-\frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}\right)\right)} \]
      6. remove-double-neg 2.3%

        \[\leadsto wj + \left(\color{blue}{\frac{x}{e^{wj} + wj \cdot e^{wj}}} + \left(-\frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}\right)\right) \]
      7. sub-neg 2.3%

        \[\leadsto wj + \color{blue}{\left(\frac{x}{e^{wj} + wj \cdot e^{wj}} - \frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}\right)} \]
      8. div-sub 2.3%

        \[\leadsto wj + \color{blue}{\frac{x - wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}} \]
      9. distribute-rgt1-in 100.0%

        \[\leadsto wj + \frac{x - wj \cdot e^{wj}}{\color{blue}{\left(wj + 1\right) \cdot e^{wj}}} \]
      10. associate-/l/ 100.0%

        \[\leadsto wj + \color{blue}{\frac{\frac{x - wj \cdot e^{wj}}{e^{wj}}}{wj + 1}} \]
    3. Simplified 100.0%

      \[\leadsto \color{blue}{wj + \frac{\frac{x}{e^{wj}} - wj}{wj + 1}} \]

    if -3.4999999999999999e-9 < wj

    1. Initial program 81.0%

      \[wj - \frac{wj \cdot e^{wj} - x}{e^{wj} + wj \cdot e^{wj}} \]
    2. Step-by-step derivation
      1. distribute-rgt1-in 81.0%

        \[\leadsto wj - \frac{wj \cdot e^{wj} - x}{\color{blue}{\left(wj + 1\right) \cdot e^{wj}}} \]
    3. Simplified 81.0%

      \[\leadsto \color{blue}{wj - \frac{wj \cdot e^{wj} - x}{\left(wj + 1\right) \cdot e^{wj}}} \]
    4. Taylor expanded in wj around 0 80.5%

      \[\leadsto wj - \frac{wj \cdot e^{wj} - x}{\color{blue}{1 + 2 \cdot wj}} \]
    5. Step-by-step derivation
      1. *-commutative 80.5%

        \[\leadsto wj - \frac{wj \cdot e^{wj} - x}{1 + \color{blue}{wj \cdot 2}} \]
    6. Simplified 80.5%

      \[\leadsto wj - \frac{wj \cdot e^{wj} - x}{\color{blue}{1 + wj \cdot 2}} \]
    7. Taylor expanded in wj around 0 98.6%

      \[\leadsto \color{blue}{\left(1 - -4 \cdot x\right) \cdot {wj}^{2} + \left(-2 \cdot \left(wj \cdot x\right) + x\right)} \]
  3. Recombined 2 regimes into one program.
  4. Final simplification 99.1%

    \[\leadsto \begin{array}{l} \mathbf{if}\;wj \leq -3.5 \cdot 10^{-9}:\\ \;\;\;\;wj + \frac{\frac{x}{e^{wj}} - wj}{wj + 1}\\ \mathbf{else}:\\ \;\;\;\;\left(x + -2 \cdot \left(wj \cdot x\right)\right) + {wj}^{2} \cdot \left(1 - x \cdot -4\right)\\ \end{array} \]

Alternative 4: 99.1% accurate, 2.8× speedup

\[\begin{array}{l} \mathbf{if}\;wj \leq -3.5 \cdot 10^{-9}:\\ \;\;\;\;wj + \frac{\frac{x}{e^{wj}} - wj}{wj + 1}\\ \mathbf{else}:\\ \;\;\;\;\left(x + -2 \cdot \left(wj \cdot x\right)\right) + wj \cdot wj\\ \end{array} \]
(FPCore (wj x)
 :precision binary64
 (if (<= wj -3.5e-9)
   (+ wj (/ (- (/ x (exp wj)) wj) (+ wj 1.0)))
   (+ (+ x (* -2.0 (* wj x))) (* wj wj))))
double code(double wj, double x) {
	double tmp;
	if (wj <= -3.5e-9) {
		tmp = wj + (((x / exp(wj)) - wj) / (wj + 1.0));
	} else {
		tmp = (x + (-2.0 * (wj * x))) + (wj * wj);
	}
	return tmp;
}
real(8) function code(wj, x)
    real(8), intent (in) :: wj
    real(8), intent (in) :: x
    real(8) :: tmp
    if (wj <= (-3.5d-9)) then
        tmp = wj + (((x / exp(wj)) - wj) / (wj + 1.0d0))
    else
        tmp = (x + ((-2.0d0) * (wj * x))) + (wj * wj)
    end if
    code = tmp
end function
public static double code(double wj, double x) {
	double tmp;
	if (wj <= -3.5e-9) {
		tmp = wj + (((x / Math.exp(wj)) - wj) / (wj + 1.0));
	} else {
		tmp = (x + (-2.0 * (wj * x))) + (wj * wj);
	}
	return tmp;
}
def code(wj, x):
	tmp = 0
	if wj <= -3.5e-9:
		tmp = wj + (((x / math.exp(wj)) - wj) / (wj + 1.0))
	else:
		tmp = (x + (-2.0 * (wj * x))) + (wj * wj)
	return tmp
function code(wj, x)
	tmp = 0.0
	if (wj <= -3.5e-9)
		tmp = Float64(wj + Float64(Float64(Float64(x / exp(wj)) - wj) / Float64(wj + 1.0)));
	else
		tmp = Float64(Float64(x + Float64(-2.0 * Float64(wj * x))) + Float64(wj * wj));
	end
	return tmp
end
function tmp_2 = code(wj, x)
	tmp = 0.0;
	if (wj <= -3.5e-9)
		tmp = wj + (((x / exp(wj)) - wj) / (wj + 1.0));
	else
		tmp = (x + (-2.0 * (wj * x))) + (wj * wj);
	end
	tmp_2 = tmp;
end
code[wj_, x_] := If[LessEqual[wj, -3.5e-9], N[(wj + N[(N[(N[(x / N[Exp[wj], $MachinePrecision]), $MachinePrecision] - wj), $MachinePrecision] / N[(wj + 1.0), $MachinePrecision]), $MachinePrecision]), $MachinePrecision], N[(N[(x + N[(-2.0 * N[(wj * x), $MachinePrecision]), $MachinePrecision]), $MachinePrecision] + N[(wj * wj), $MachinePrecision]), $MachinePrecision]]
Derivation
  1. Split input into 2 regimes
  2. if wj < -3.4999999999999999e-9

    1. Initial program 2.3%

      \[wj - \frac{wj \cdot e^{wj} - x}{e^{wj} + wj \cdot e^{wj}} \]
    2. Step-by-step derivation
      1. sub-neg 2.3%

        \[\leadsto \color{blue}{wj + \left(-\frac{wj \cdot e^{wj} - x}{e^{wj} + wj \cdot e^{wj}}\right)} \]
      2. div-sub 2.3%

        \[\leadsto wj + \left(-\color{blue}{\left(\frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}} - \frac{x}{e^{wj} + wj \cdot e^{wj}}\right)}\right) \]
      3. sub-neg 2.3%

        \[\leadsto wj + \left(-\color{blue}{\left(\frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}} + \left(-\frac{x}{e^{wj} + wj \cdot e^{wj}}\right)\right)}\right) \]
      4. +-commutative 2.3%

        \[\leadsto wj + \left(-\color{blue}{\left(\left(-\frac{x}{e^{wj} + wj \cdot e^{wj}}\right) + \frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}\right)}\right) \]
      5. distribute-neg-in 2.3%

        \[\leadsto wj + \color{blue}{\left(\left(-\left(-\frac{x}{e^{wj} + wj \cdot e^{wj}}\right)\right) + \left(-\frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}\right)\right)} \]
      6. remove-double-neg 2.3%

        \[\leadsto wj + \left(\color{blue}{\frac{x}{e^{wj} + wj \cdot e^{wj}}} + \left(-\frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}\right)\right) \]
      7. sub-neg 2.3%

        \[\leadsto wj + \color{blue}{\left(\frac{x}{e^{wj} + wj \cdot e^{wj}} - \frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}\right)} \]
      8. div-sub 2.3%

        \[\leadsto wj + \color{blue}{\frac{x - wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}} \]
      9. distribute-rgt1-in 100.0%

        \[\leadsto wj + \frac{x - wj \cdot e^{wj}}{\color{blue}{\left(wj + 1\right) \cdot e^{wj}}} \]
      10. associate-/l/ 100.0%

        \[\leadsto wj + \color{blue}{\frac{\frac{x - wj \cdot e^{wj}}{e^{wj}}}{wj + 1}} \]
    3. Simplified 100.0%

      \[\leadsto \color{blue}{wj + \frac{\frac{x}{e^{wj}} - wj}{wj + 1}} \]

    if -3.4999999999999999e-9 < wj

    1. Initial program 81.0%

      \[wj - \frac{wj \cdot e^{wj} - x}{e^{wj} + wj \cdot e^{wj}} \]
    2. Step-by-step derivation
      1. distribute-rgt1-in 81.0%

        \[\leadsto wj - \frac{wj \cdot e^{wj} - x}{\color{blue}{\left(wj + 1\right) \cdot e^{wj}}} \]
    3. Simplified 81.0%

      \[\leadsto \color{blue}{wj - \frac{wj \cdot e^{wj} - x}{\left(wj + 1\right) \cdot e^{wj}}} \]
    4. Taylor expanded in wj around 0 80.5%

      \[\leadsto wj - \frac{wj \cdot e^{wj} - x}{\color{blue}{1 + 2 \cdot wj}} \]
    5. Step-by-step derivation
      1. *-commutative 80.5%

        \[\leadsto wj - \frac{wj \cdot e^{wj} - x}{1 + \color{blue}{wj \cdot 2}} \]
    6. Simplified 80.5%

      \[\leadsto wj - \frac{wj \cdot e^{wj} - x}{\color{blue}{1 + wj \cdot 2}} \]
    7. Taylor expanded in wj around 0 98.6%

      \[\leadsto \color{blue}{\left(1 - -4 \cdot x\right) \cdot {wj}^{2} + \left(-2 \cdot \left(wj \cdot x\right) + x\right)} \]
    8. Taylor expanded in x around 0 98.6%

      \[\leadsto \color{blue}{{wj}^{2}} + \left(-2 \cdot \left(wj \cdot x\right) + x\right) \]
    9. Step-by-step derivation
      1. unpow2 98.6%

        \[\leadsto \color{blue}{wj \cdot wj} + \left(-2 \cdot \left(wj \cdot x\right) + x\right) \]
    10. Simplified 98.6%

      \[\leadsto \color{blue}{wj \cdot wj} + \left(-2 \cdot \left(wj \cdot x\right) + x\right) \]
  3. Recombined 2 regimes into one program.
  4. Final simplification 99.1%

    \[\leadsto \begin{array}{l} \mathbf{if}\;wj \leq -3.5 \cdot 10^{-9}:\\ \;\;\;\;wj + \frac{\frac{x}{e^{wj}} - wj}{wj + 1}\\ \mathbf{else}:\\ \;\;\;\;\left(x + -2 \cdot \left(wj \cdot x\right)\right) + wj \cdot wj\\ \end{array} \]
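Alternative 4 differs from Alternative 3 only in its last derivation step, which Taylor-expands the quadratic coefficient in x around 0:

\[\left(1 - x \cdot -4\right) \cdot {wj}^{2} = {wj}^{2} + 4 \cdot x \cdot {wj}^{2} \approx {wj}^{2} \quad \left(\left|x\right| \ll 1\right) \]

so the else-branch degenerates to wj \cdot wj; the dropped 4 \cdot x \cdot {wj}^{2} term is the price paid for the extra speedup.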

Alternative 5: 98.8% accurate, 2.8× speedup

\[\begin{array}{l} \mathbf{if}\;wj \leq -1.7 \cdot 10^{-5}:\\ \;\;\;\;wj + \frac{\frac{x}{e^{wj}}}{wj + 1}\\ \mathbf{else}:\\ \;\;\;\;\left(x + -2 \cdot \left(wj \cdot x\right)\right) + wj \cdot wj\\ \end{array} \]
(FPCore (wj x)
 :precision binary64
 (if (<= wj -1.7e-5)
   (+ wj (/ (/ x (exp wj)) (+ wj 1.0)))
   (+ (+ x (* -2.0 (* wj x))) (* wj wj))))
double code(double wj, double x) {
	double tmp;
	if (wj <= -1.7e-5) {
		tmp = wj + ((x / exp(wj)) / (wj + 1.0));
	} else {
		tmp = (x + (-2.0 * (wj * x))) + (wj * wj);
	}
	return tmp;
}
real(8) function code(wj, x)
    real(8), intent (in) :: wj
    real(8), intent (in) :: x
    real(8) :: tmp
    if (wj <= (-1.7d-5)) then
        tmp = wj + ((x / exp(wj)) / (wj + 1.0d0))
    else
        tmp = (x + ((-2.0d0) * (wj * x))) + (wj * wj)
    end if
    code = tmp
end function
public static double code(double wj, double x) {
	double tmp;
	if (wj <= -1.7e-5) {
		tmp = wj + ((x / Math.exp(wj)) / (wj + 1.0));
	} else {
		tmp = (x + (-2.0 * (wj * x))) + (wj * wj);
	}
	return tmp;
}
def code(wj, x):
	tmp = 0
	if wj <= -1.7e-5:
		tmp = wj + ((x / math.exp(wj)) / (wj + 1.0))
	else:
		tmp = (x + (-2.0 * (wj * x))) + (wj * wj)
	return tmp
function code(wj, x)
	tmp = 0.0
	if (wj <= -1.7e-5)
		tmp = Float64(wj + Float64(Float64(x / exp(wj)) / Float64(wj + 1.0)));
	else
		tmp = Float64(Float64(x + Float64(-2.0 * Float64(wj * x))) + Float64(wj * wj));
	end
	return tmp
end
function tmp_2 = code(wj, x)
	tmp = 0.0;
	if (wj <= -1.7e-5)
		tmp = wj + ((x / exp(wj)) / (wj + 1.0));
	else
		tmp = (x + (-2.0 * (wj * x))) + (wj * wj);
	end
	tmp_2 = tmp;
end
code[wj_, x_] := If[LessEqual[wj, -1.7e-5], N[(wj + N[(N[(x / N[Exp[wj], $MachinePrecision]), $MachinePrecision] / N[(wj + 1.0), $MachinePrecision]), $MachinePrecision]), $MachinePrecision], N[(N[(x + N[(-2.0 * N[(wj * x), $MachinePrecision]), $MachinePrecision]), $MachinePrecision] + N[(wj * wj), $MachinePrecision]), $MachinePrecision]]
Derivation
  1. Split input into 2 regimes
  2. if wj < -1.7e-5

    1. Initial program 2.3%

      \[wj - \frac{wj \cdot e^{wj} - x}{e^{wj} + wj \cdot e^{wj}} \]
    2. Step-by-step derivation
      1. distribute-rgt1-in 100.0%

        \[\leadsto wj - \frac{wj \cdot e^{wj} - x}{\color{blue}{\left(wj + 1\right) \cdot e^{wj}}} \]
    3. Simplified 100.0%

      \[\leadsto \color{blue}{wj - \frac{wj \cdot e^{wj} - x}{\left(wj + 1\right) \cdot e^{wj}}} \]
    4. Taylor expanded in x around inf 99.2%

      \[\leadsto wj - \color{blue}{-1 \cdot \frac{x}{e^{wj} \cdot \left(1 + wj\right)}} \]
    5. Step-by-step derivation
      1. associate-*r/ 99.2%

        \[\leadsto wj - \color{blue}{\frac{-1 \cdot x}{e^{wj} \cdot \left(1 + wj\right)}} \]
      2. +-commutative 99.2%

        \[\leadsto wj - \frac{-1 \cdot x}{e^{wj} \cdot \color{blue}{\left(wj + 1\right)}} \]
      3. neg-mul-1 99.2%

        \[\leadsto wj - \frac{\color{blue}{-x}}{e^{wj} \cdot \left(wj + 1\right)} \]
      4. associate-/r* 99.2%

        \[\leadsto wj - \color{blue}{\frac{\frac{-x}{e^{wj}}}{wj + 1}} \]
    6. Simplified 99.2%

      \[\leadsto wj - \color{blue}{\frac{\frac{-x}{e^{wj}}}{wj + 1}} \]

    if -1.7e-5 < wj

    1. Initial program 81.0%

      \[wj - \frac{wj \cdot e^{wj} - x}{e^{wj} + wj \cdot e^{wj}} \]
    2. Step-by-step derivation
      1. distribute-rgt1-in 81.0%

        \[\leadsto wj - \frac{wj \cdot e^{wj} - x}{\color{blue}{\left(wj + 1\right) \cdot e^{wj}}} \]
    3. Simplified 81.0%

      \[\leadsto \color{blue}{wj - \frac{wj \cdot e^{wj} - x}{\left(wj + 1\right) \cdot e^{wj}}} \]
    4. Taylor expanded in wj around 0 80.5%

      \[\leadsto wj - \frac{wj \cdot e^{wj} - x}{\color{blue}{1 + 2 \cdot wj}} \]
    5. Step-by-step derivation
      1. *-commutative 80.5%

        \[\leadsto wj - \frac{wj \cdot e^{wj} - x}{1 + \color{blue}{wj \cdot 2}} \]
    6. Simplified 80.5%

      \[\leadsto wj - \frac{wj \cdot e^{wj} - x}{\color{blue}{1 + wj \cdot 2}} \]
    7. Taylor expanded in wj around 0 98.6%

      \[\leadsto \color{blue}{\left(1 - -4 \cdot x\right) \cdot {wj}^{2} + \left(-2 \cdot \left(wj \cdot x\right) + x\right)} \]
    8. Taylor expanded in x around 0 98.6%

      \[\leadsto \color{blue}{{wj}^{2}} + \left(-2 \cdot \left(wj \cdot x\right) + x\right) \]
    9. Step-by-step derivation
      1. unpow2 98.6%

        \[\leadsto \color{blue}{wj \cdot wj} + \left(-2 \cdot \left(wj \cdot x\right) + x\right) \]
    10. Simplified 98.6%

      \[\leadsto \color{blue}{wj \cdot wj} + \left(-2 \cdot \left(wj \cdot x\right) + x\right) \]
  3. Recombined 2 regimes into one program.
  4. Final simplification 98.8%

    \[\leadsto \begin{array}{l} \mathbf{if}\;wj \leq -1.7 \cdot 10^{-5}:\\ \;\;\;\;wj + \frac{\frac{x}{e^{wj}}}{wj + 1}\\ \mathbf{else}:\\ \;\;\;\;\left(x + -2 \cdot \left(wj \cdot x\right)\right) + wj \cdot wj\\ \end{array} \]
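Alternative 5, in turn, differs from Alternative 4 only in its far-negative branch, where the -wj term in the numerator is dropped (the "Taylor expanded in x around inf" step). Restated in our own words, this is harmless exactly when the retained term dominates:

\[wj + \frac{\frac{x}{e^{wj}} - wj}{wj + 1} \approx wj + \frac{\frac{x}{e^{wj}}}{wj + 1} \quad \text{when}\;\left|\frac{x}{e^{wj}}\right| \gg \left|wj\right| \]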

Alternative 6: 98.8% accurate, 2.9× speedup

\[\begin{array}{l} \mathbf{if}\;wj \leq -8.5 \cdot 10^{-6}:\\ \;\;\;\;\frac{x}{e^{wj} \cdot \left(wj + 1\right)}\\ \mathbf{else}:\\ \;\;\;\;\left(x + -2 \cdot \left(wj \cdot x\right)\right) + wj \cdot wj\\ \end{array} \]
(FPCore (wj x)
 :precision binary64
 (if (<= wj -8.5e-6)
   (/ x (* (exp wj) (+ wj 1.0)))
   (+ (+ x (* -2.0 (* wj x))) (* wj wj))))
double code(double wj, double x) {
	double tmp;
	if (wj <= -8.5e-6) {
		tmp = x / (exp(wj) * (wj + 1.0));
	} else {
		tmp = (x + (-2.0 * (wj * x))) + (wj * wj);
	}
	return tmp;
}
real(8) function code(wj, x)
    real(8), intent (in) :: wj
    real(8), intent (in) :: x
    real(8) :: tmp
    if (wj <= (-8.5d-6)) then
        tmp = x / (exp(wj) * (wj + 1.0d0))
    else
        tmp = (x + ((-2.0d0) * (wj * x))) + (wj * wj)
    end if
    code = tmp
end function
public static double code(double wj, double x) {
	double tmp;
	if (wj <= -8.5e-6) {
		tmp = x / (Math.exp(wj) * (wj + 1.0));
	} else {
		tmp = (x + (-2.0 * (wj * x))) + (wj * wj);
	}
	return tmp;
}
def code(wj, x):
	tmp = 0
	if wj <= -8.5e-6:
		tmp = x / (math.exp(wj) * (wj + 1.0))
	else:
		tmp = (x + (-2.0 * (wj * x))) + (wj * wj)
	return tmp
function code(wj, x)
	tmp = 0.0
	if (wj <= -8.5e-6)
		tmp = Float64(x / Float64(exp(wj) * Float64(wj + 1.0)));
	else
		tmp = Float64(Float64(x + Float64(-2.0 * Float64(wj * x))) + Float64(wj * wj));
	end
	return tmp
end
function tmp_2 = code(wj, x)
	tmp = 0.0;
	if (wj <= -8.5e-6)
		tmp = x / (exp(wj) * (wj + 1.0));
	else
		tmp = (x + (-2.0 * (wj * x))) + (wj * wj);
	end
	tmp_2 = tmp;
end
code[wj_, x_] := If[LessEqual[wj, -8.5e-6], N[(x / N[(N[Exp[wj], $MachinePrecision] * N[(wj + 1.0), $MachinePrecision]), $MachinePrecision]), $MachinePrecision], N[(N[(x + N[(-2.0 * N[(wj * x), $MachinePrecision]), $MachinePrecision]), $MachinePrecision] + N[(wj * wj), $MachinePrecision]), $MachinePrecision]]
Derivation
  1. Split input into 2 regimes
  2. if wj < -8.4999999999999999e-6

    1. Initial program 2.3%

      \[wj - \frac{wj \cdot e^{wj} - x}{e^{wj} + wj \cdot e^{wj}} \]
    2. Step-by-step derivation
      1. sub-neg 2.3%

        \[\leadsto \color{blue}{wj + \left(-\frac{wj \cdot e^{wj} - x}{e^{wj} + wj \cdot e^{wj}}\right)} \]
      2. div-sub 2.3%

        \[\leadsto wj + \left(-\color{blue}{\left(\frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}} - \frac{x}{e^{wj} + wj \cdot e^{wj}}\right)}\right) \]
      3. sub-neg 2.3%

        \[\leadsto wj + \left(-\color{blue}{\left(\frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}} + \left(-\frac{x}{e^{wj} + wj \cdot e^{wj}}\right)\right)}\right) \]
      4. +-commutative 2.3%

        \[\leadsto wj + \left(-\color{blue}{\left(\left(-\frac{x}{e^{wj} + wj \cdot e^{wj}}\right) + \frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}\right)}\right) \]
      5. distribute-neg-in 2.3%

        \[\leadsto wj + \color{blue}{\left(\left(-\left(-\frac{x}{e^{wj} + wj \cdot e^{wj}}\right)\right) + \left(-\frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}\right)\right)} \]
      6. remove-double-neg 2.3%

        \[\leadsto wj + \left(\color{blue}{\frac{x}{e^{wj} + wj \cdot e^{wj}}} + \left(-\frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}\right)\right) \]
      7. sub-neg 2.3%

        \[\leadsto wj + \color{blue}{\left(\frac{x}{e^{wj} + wj \cdot e^{wj}} - \frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}\right)} \]
      8. div-sub 2.3%

        \[\leadsto wj + \color{blue}{\frac{x - wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}} \]
      9. distribute-rgt1-in 100.0%

        \[\leadsto wj + \frac{x - wj \cdot e^{wj}}{\color{blue}{\left(wj + 1\right) \cdot e^{wj}}} \]
      10. associate-/l/ 100.0%

        \[\leadsto wj + \color{blue}{\frac{\frac{x - wj \cdot e^{wj}}{e^{wj}}}{wj + 1}} \]
    3. Simplified 100.0%

      \[\leadsto \color{blue}{wj + \frac{\frac{x}{e^{wj}} - wj}{wj + 1}} \]
    4. Taylor expanded in x around inf 98.9%

      \[\leadsto \color{blue}{\frac{x}{\left(1 + wj\right) \cdot e^{wj}}} \]

    if -8.4999999999999999e-6 < wj

    1. Initial program 81.0%

      \[wj - \frac{wj \cdot e^{wj} - x}{e^{wj} + wj \cdot e^{wj}} \]
    2. Step-by-step derivation
      1. distribute-rgt1-in 81.0%

        \[\leadsto wj - \frac{wj \cdot e^{wj} - x}{\color{blue}{\left(wj + 1\right) \cdot e^{wj}}} \]
    3. Simplified 81.0%

      \[\leadsto \color{blue}{wj - \frac{wj \cdot e^{wj} - x}{\left(wj + 1\right) \cdot e^{wj}}} \]
    4. Taylor expanded in wj around 0 80.5%

      \[\leadsto wj - \frac{wj \cdot e^{wj} - x}{\color{blue}{1 + 2 \cdot wj}} \]
    5. Step-by-step derivation
      1. *-commutative 80.5%

        \[\leadsto wj - \frac{wj \cdot e^{wj} - x}{1 + \color{blue}{wj \cdot 2}} \]
    6. Simplified 80.5%

      \[\leadsto wj - \frac{wj \cdot e^{wj} - x}{\color{blue}{1 + wj \cdot 2}} \]
    7. Taylor expanded in wj around 0 98.6%

      \[\leadsto \color{blue}{\left(1 - -4 \cdot x\right) \cdot {wj}^{2} + \left(-2 \cdot \left(wj \cdot x\right) + x\right)} \]
    8. Taylor expanded in x around 0 98.6%

      \[\leadsto \color{blue}{{wj}^{2}} + \left(-2 \cdot \left(wj \cdot x\right) + x\right) \]
    9. Step-by-step derivation
      1. unpow2 98.6%

        \[\leadsto \color{blue}{wj \cdot wj} + \left(-2 \cdot \left(wj \cdot x\right) + x\right) \]
    10. Simplified 98.6%

      \[\leadsto \color{blue}{wj \cdot wj} + \left(-2 \cdot \left(wj \cdot x\right) + x\right) \]
  3. Recombined 2 regimes into one program.
  4. Final simplification 98.7%

    \[\leadsto \begin{array}{l} \mathbf{if}\;wj \leq -8.5 \cdot 10^{-6}:\\ \;\;\;\;\frac{x}{e^{wj} \cdot \left(wj + 1\right)}\\ \mathbf{else}:\\ \;\;\;\;\left(x + -2 \cdot \left(wj \cdot x\right)\right) + wj \cdot wj\\ \end{array} \]

Alternative 7: 98.5% accurate, 2.9× speedup

\[\begin{array}{l} \mathbf{if}\;wj \leq -1:\\ \;\;\;\;\frac{x}{wj \cdot e^{wj}}\\ \mathbf{else}:\\ \;\;\;\;\left(x + -2 \cdot \left(wj \cdot x\right)\right) + wj \cdot wj\\ \end{array} \]
(FPCore (wj x)
 :precision binary64
 (if (<= wj -1.0) (/ x (* wj (exp wj))) (+ (+ x (* -2.0 (* wj x))) (* wj wj))))
double code(double wj, double x) {
	double tmp;
	if (wj <= -1.0) {
		tmp = x / (wj * exp(wj));
	} else {
		tmp = (x + (-2.0 * (wj * x))) + (wj * wj);
	}
	return tmp;
}
real(8) function code(wj, x)
    real(8), intent (in) :: wj
    real(8), intent (in) :: x
    real(8) :: tmp
    if (wj <= (-1.0d0)) then
        tmp = x / (wj * exp(wj))
    else
        tmp = (x + ((-2.0d0) * (wj * x))) + (wj * wj)
    end if
    code = tmp
end function
public static double code(double wj, double x) {
	double tmp;
	if (wj <= -1.0) {
		tmp = x / (wj * Math.exp(wj));
	} else {
		tmp = (x + (-2.0 * (wj * x))) + (wj * wj);
	}
	return tmp;
}
def code(wj, x):
	tmp = 0
	if wj <= -1.0:
		tmp = x / (wj * math.exp(wj))
	else:
		tmp = (x + (-2.0 * (wj * x))) + (wj * wj)
	return tmp
function code(wj, x)
	tmp = 0.0
	if (wj <= -1.0)
		tmp = Float64(x / Float64(wj * exp(wj)));
	else
		tmp = Float64(Float64(x + Float64(-2.0 * Float64(wj * x))) + Float64(wj * wj));
	end
	return tmp
end
function tmp_2 = code(wj, x)
	tmp = 0.0;
	if (wj <= -1.0)
		tmp = x / (wj * exp(wj));
	else
		tmp = (x + (-2.0 * (wj * x))) + (wj * wj);
	end
	tmp_2 = tmp;
end
code[wj_, x_] := If[LessEqual[wj, -1.0], N[(x / N[(wj * N[Exp[wj], $MachinePrecision]), $MachinePrecision]), $MachinePrecision], N[(N[(x + N[(-2.0 * N[(wj * x), $MachinePrecision]), $MachinePrecision]), $MachinePrecision] + N[(wj * wj), $MachinePrecision]), $MachinePrecision]]
Derivation
  1. Split input into 2 regimes
  2. if wj < -1

    1. Initial program 1.2%

      \[wj - \frac{wj \cdot e^{wj} - x}{e^{wj} + wj \cdot e^{wj}} \]
    2. Step-by-step derivation
      1. sub-neg 1.2%

        \[\leadsto \color{blue}{wj + \left(-\frac{wj \cdot e^{wj} - x}{e^{wj} + wj \cdot e^{wj}}\right)} \]
      2. div-sub 1.1%

        \[\leadsto wj + \left(-\color{blue}{\left(\frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}} - \frac{x}{e^{wj} + wj \cdot e^{wj}}\right)}\right) \]
      3. sub-neg 1.1%

        \[\leadsto wj + \left(-\color{blue}{\left(\frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}} + \left(-\frac{x}{e^{wj} + wj \cdot e^{wj}}\right)\right)}\right) \]
      4. +-commutative 1.1%

        \[\leadsto wj + \left(-\color{blue}{\left(\left(-\frac{x}{e^{wj} + wj \cdot e^{wj}}\right) + \frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}\right)}\right) \]
      5. distribute-neg-in 1.1%

        \[\leadsto wj + \color{blue}{\left(\left(-\left(-\frac{x}{e^{wj} + wj \cdot e^{wj}}\right)\right) + \left(-\frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}\right)\right)} \]
      6. remove-double-neg 1.1%

        \[\leadsto wj + \left(\color{blue}{\frac{x}{e^{wj} + wj \cdot e^{wj}}} + \left(-\frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}\right)\right) \]
      7. sub-neg 1.1%

        \[\leadsto wj + \color{blue}{\left(\frac{x}{e^{wj} + wj \cdot e^{wj}} - \frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}\right)} \]
      8. div-sub 1.2%

        \[\leadsto wj + \color{blue}{\frac{x - wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}} \]
      9. distribute-rgt1-in 100.0%

        \[\leadsto wj + \frac{x - wj \cdot e^{wj}}{\color{blue}{\left(wj + 1\right) \cdot e^{wj}}} \]
      10. associate-/l/ 100.0%

        \[\leadsto wj + \color{blue}{\frac{\frac{x - wj \cdot e^{wj}}{e^{wj}}}{wj + 1}} \]
    3. Simplified 100.0%

      \[\leadsto \color{blue}{wj + \frac{\frac{x}{e^{wj}} - wj}{wj + 1}} \]
    4. Taylor expanded in x around inf 98.9%

      \[\leadsto \color{blue}{\frac{x}{\left(1 + wj\right) \cdot e^{wj}}} \]
    5. Taylor expanded in wj around inf 98.9%

      \[\leadsto \frac{x}{\color{blue}{e^{wj} \cdot wj}} \]

    if -1 < wj

    1. Initial program 81.1%

      \[wj - \frac{wj \cdot e^{wj} - x}{e^{wj} + wj \cdot e^{wj}} \]
    2. Step-by-step derivation
      1. distribute-rgt1-in 81.1%

        \[\leadsto wj - \frac{wj \cdot e^{wj} - x}{\color{blue}{\left(wj + 1\right) \cdot e^{wj}}} \]
    3. Simplified 81.1%

      \[\leadsto \color{blue}{wj - \frac{wj \cdot e^{wj} - x}{\left(wj + 1\right) \cdot e^{wj}}} \]
    4. Taylor expanded in wj around 0 80.0%

      \[\leadsto wj - \frac{wj \cdot e^{wj} - x}{\color{blue}{1 + 2 \cdot wj}} \]
    5. Step-by-step derivation
      1. *-commutative 80.0%

        \[\leadsto wj - \frac{wj \cdot e^{wj} - x}{1 + \color{blue}{wj \cdot 2}} \]
    6. Simplified 80.0%

      \[\leadsto wj - \frac{wj \cdot e^{wj} - x}{\color{blue}{1 + wj \cdot 2}} \]
    7. Taylor expanded in wj around 0 98.1%

      \[\leadsto \color{blue}{\left(1 - -4 \cdot x\right) \cdot {wj}^{2} + \left(-2 \cdot \left(wj \cdot x\right) + x\right)} \]
    8. Taylor expanded in x around 0 98.1%

      \[\leadsto \color{blue}{{wj}^{2}} + \left(-2 \cdot \left(wj \cdot x\right) + x\right) \]
    9. Step-by-step derivation
      1. unpow2 98.1%

        \[\leadsto \color{blue}{wj \cdot wj} + \left(-2 \cdot \left(wj \cdot x\right) + x\right) \]
    10. Simplified 98.1%

      \[\leadsto \color{blue}{wj \cdot wj} + \left(-2 \cdot \left(wj \cdot x\right) + x\right) \]
  3. Recombined 2 regimes into one program.
  4. Final simplification 98.4%

    \[\leadsto \begin{array}{l} \mathbf{if}\;wj \leq -1:\\ \;\;\;\;\frac{x}{wj \cdot e^{wj}}\\ \mathbf{else}:\\ \;\;\;\;\left(x + -2 \cdot \left(wj \cdot x\right)\right) + wj \cdot wj\\ \end{array} \]
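The far-negative branch here replaces the denominator factor 1 + wj with wj (the "Taylor expanded in wj around inf" step). A brief observation on why this is reasonable (ours, not the report's):

\[\frac{x}{\left(1 + wj\right) \cdot e^{wj}} \approx \frac{x}{wj \cdot e^{wj}} \quad \text{since}\; 1 + wj \approx wj \;\text{for}\; \left|wj\right| \gg 1 \]

Besides agreeing to leading order for large |wj|, the new form has no pole at wj = -1, where the quotient used in Alternatives 5 and 6 divides by zero.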

Alternative 8: 75.5% accurate, 18.2× speedup

\[\begin{array}{l} t_0 := wj + \frac{x \cdot \left(1 - wj\right)}{wj}\\ \mathbf{if}\;wj \leq -1.9 \cdot 10^{+266}:\\ \;\;\;\;t_0\\ \mathbf{elif}\;wj \leq -4.8 \cdot 10^{+193}:\\ \;\;\;\;wj \cdot wj\\ \mathbf{elif}\;wj \leq -1:\\ \;\;\;\;t_0\\ \mathbf{else}:\\ \;\;\;\;\left(x + -2 \cdot \left(wj \cdot x\right)\right) + wj \cdot wj\\ \end{array} \]
(FPCore (wj x)
 :precision binary64
 (let* ((t_0 (+ wj (/ (* x (- 1.0 wj)) wj))))
   (if (<= wj -1.9e+266)
     t_0
     (if (<= wj -4.8e+193)
       (* wj wj)
       (if (<= wj -1.0) t_0 (+ (+ x (* -2.0 (* wj x))) (* wj wj)))))))
double code(double wj, double x) {
	double t_0 = wj + ((x * (1.0 - wj)) / wj);
	double tmp;
	if (wj <= -1.9e+266) {
		tmp = t_0;
	} else if (wj <= -4.8e+193) {
		tmp = wj * wj;
	} else if (wj <= -1.0) {
		tmp = t_0;
	} else {
		tmp = (x + (-2.0 * (wj * x))) + (wj * wj);
	}
	return tmp;
}
real(8) function code(wj, x)
    real(8), intent (in) :: wj
    real(8), intent (in) :: x
    real(8) :: t_0
    real(8) :: tmp
    t_0 = wj + ((x * (1.0d0 - wj)) / wj)
    if (wj <= (-1.9d+266)) then
        tmp = t_0
    else if (wj <= (-4.8d+193)) then
        tmp = wj * wj
    else if (wj <= (-1.0d0)) then
        tmp = t_0
    else
        tmp = (x + ((-2.0d0) * (wj * x))) + (wj * wj)
    end if
    code = tmp
end function
public static double code(double wj, double x) {
	double t_0 = wj + ((x * (1.0 - wj)) / wj);
	double tmp;
	if (wj <= -1.9e+266) {
		tmp = t_0;
	} else if (wj <= -4.8e+193) {
		tmp = wj * wj;
	} else if (wj <= -1.0) {
		tmp = t_0;
	} else {
		tmp = (x + (-2.0 * (wj * x))) + (wj * wj);
	}
	return tmp;
}
def code(wj, x):
	t_0 = wj + ((x * (1.0 - wj)) / wj)
	tmp = 0
	if wj <= -1.9e+266:
		tmp = t_0
	elif wj <= -4.8e+193:
		tmp = wj * wj
	elif wj <= -1.0:
		tmp = t_0
	else:
		tmp = (x + (-2.0 * (wj * x))) + (wj * wj)
	return tmp
function code(wj, x)
	t_0 = Float64(wj + Float64(Float64(x * Float64(1.0 - wj)) / wj))
	tmp = 0.0
	if (wj <= -1.9e+266)
		tmp = t_0;
	elseif (wj <= -4.8e+193)
		tmp = Float64(wj * wj);
	elseif (wj <= -1.0)
		tmp = t_0;
	else
		tmp = Float64(Float64(x + Float64(-2.0 * Float64(wj * x))) + Float64(wj * wj));
	end
	return tmp
end
function tmp_2 = code(wj, x)
	t_0 = wj + ((x * (1.0 - wj)) / wj);
	tmp = 0.0;
	if (wj <= -1.9e+266)
		tmp = t_0;
	elseif (wj <= -4.8e+193)
		tmp = wj * wj;
	elseif (wj <= -1.0)
		tmp = t_0;
	else
		tmp = (x + (-2.0 * (wj * x))) + (wj * wj);
	end
	tmp_2 = tmp;
end
code[wj_, x_] := Block[{t$95$0 = N[(wj + N[(N[(x * N[(1.0 - wj), $MachinePrecision]), $MachinePrecision] / wj), $MachinePrecision]), $MachinePrecision]}, If[LessEqual[wj, -1.9e+266], t$95$0, If[LessEqual[wj, -4.8e+193], N[(wj * wj), $MachinePrecision], If[LessEqual[wj, -1.0], t$95$0, N[(N[(x + N[(-2.0 * N[(wj * x), $MachinePrecision]), $MachinePrecision]), $MachinePrecision] + N[(wj * wj), $MachinePrecision]), $MachinePrecision]]]]]
Derivation
  1. Split input into 3 regimes
  2. if wj < -1.8999999999999999e266 or -4.8e193 < wj < -1

    1. Initial program 1.7%

      \[wj - \frac{wj \cdot e^{wj} - x}{e^{wj} + wj \cdot e^{wj}} \]
    2. Step-by-step derivation
      1. sub-neg 1.7%

        \[\leadsto \color{blue}{wj + \left(-\frac{wj \cdot e^{wj} - x}{e^{wj} + wj \cdot e^{wj}}\right)} \]
      2. div-sub 1.7%

        \[\leadsto wj + \left(-\color{blue}{\left(\frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}} - \frac{x}{e^{wj} + wj \cdot e^{wj}}\right)}\right) \]
      3. sub-neg 1.7%

        \[\leadsto wj + \left(-\color{blue}{\left(\frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}} + \left(-\frac{x}{e^{wj} + wj \cdot e^{wj}}\right)\right)}\right) \]
      4. +-commutative 1.7%

        \[\leadsto wj + \left(-\color{blue}{\left(\left(-\frac{x}{e^{wj} + wj \cdot e^{wj}}\right) + \frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}\right)}\right) \]
      5. distribute-neg-in 1.7%

        \[\leadsto wj + \color{blue}{\left(\left(-\left(-\frac{x}{e^{wj} + wj \cdot e^{wj}}\right)\right) + \left(-\frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}\right)\right)} \]
      6. remove-double-neg 1.7%

        \[\leadsto wj + \left(\color{blue}{\frac{x}{e^{wj} + wj \cdot e^{wj}}} + \left(-\frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}\right)\right) \]
      7. sub-neg 1.7%

        \[\leadsto wj + \color{blue}{\left(\frac{x}{e^{wj} + wj \cdot e^{wj}} - \frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}\right)} \]
      8. div-sub 1.7%

        \[\leadsto wj + \color{blue}{\frac{x - wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}} \]
      9. distribute-rgt1-in 100.0%

        \[\leadsto wj + \frac{x - wj \cdot e^{wj}}{\color{blue}{\left(wj + 1\right) \cdot e^{wj}}} \]
      10. associate-/l/ 100.0%

        \[\leadsto wj + \color{blue}{\frac{\frac{x - wj \cdot e^{wj}}{e^{wj}}}{wj + 1}} \]
    3. Simplified 100.0%

      \[\leadsto \color{blue}{wj + \frac{\frac{x}{e^{wj}} - wj}{wj + 1}} \]
    4. Taylor expanded in wj around 0 37.7%

      \[\leadsto wj + \frac{\color{blue}{\left(-1 \cdot \left(wj \cdot x\right) + x\right)} - wj}{wj + 1} \]
    5. Taylor expanded in x around inf 36.5%

      \[\leadsto wj + \color{blue}{\frac{\left(1 + -1 \cdot wj\right) \cdot x}{1 + wj}} \]
    6. Step-by-step derivation
      1. associate-/l* 5.0%

        \[\leadsto wj + \color{blue}{\frac{1 + -1 \cdot wj}{\frac{1 + wj}{x}}} \]
      2. mul-1-neg 5.0%

        \[\leadsto wj + \frac{1 + \color{blue}{\left(-wj\right)}}{\frac{1 + wj}{x}} \]
    7. Simplified 5.0%

      \[\leadsto wj + \color{blue}{\frac{1 + \left(-wj\right)}{\frac{1 + wj}{x}}} \]
    8. Taylor expanded in wj around inf 5.0%

      \[\leadsto wj + \frac{1 + \left(-wj\right)}{\color{blue}{\frac{wj}{x}}} \]
    9. Taylor expanded in x around 0 36.5%

      \[\leadsto wj + \color{blue}{\frac{\left(1 - wj\right) \cdot x}{wj}} \]

    if -1.8999999999999999e266 < wj < -4.8e193

    1. Initial program 0.0%

      \[wj - \frac{wj \cdot e^{wj} - x}{e^{wj} + wj \cdot e^{wj}} \]
    2. Step-by-step derivation
      1. sub-neg 0.0%

        \[\leadsto \color{blue}{wj + \left(-\frac{wj \cdot e^{wj} - x}{e^{wj} + wj \cdot e^{wj}}\right)} \]
      2. div-sub 0.0%

        \[\leadsto wj + \left(-\color{blue}{\left(\frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}} - \frac{x}{e^{wj} + wj \cdot e^{wj}}\right)}\right) \]
      3. sub-neg 0.0%

        \[\leadsto wj + \left(-\color{blue}{\left(\frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}} + \left(-\frac{x}{e^{wj} + wj \cdot e^{wj}}\right)\right)}\right) \]
      4. +-commutative 0.0%

        \[\leadsto wj + \left(-\color{blue}{\left(\left(-\frac{x}{e^{wj} + wj \cdot e^{wj}}\right) + \frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}\right)}\right) \]
      5. distribute-neg-in 0.0%

        \[\leadsto wj + \color{blue}{\left(\left(-\left(-\frac{x}{e^{wj} + wj \cdot e^{wj}}\right)\right) + \left(-\frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}\right)\right)} \]
      6. remove-double-neg 0.0%

        \[\leadsto wj + \left(\color{blue}{\frac{x}{e^{wj} + wj \cdot e^{wj}}} + \left(-\frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}\right)\right) \]
      7. sub-neg 0.0%

        \[\leadsto wj + \color{blue}{\left(\frac{x}{e^{wj} + wj \cdot e^{wj}} - \frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}\right)} \]
      8. div-sub 0.0%

        \[\leadsto wj + \color{blue}{\frac{x - wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}} \]
      9. distribute-rgt1-in 100.0%

        \[\leadsto wj + \frac{x - wj \cdot e^{wj}}{\color{blue}{\left(wj + 1\right) \cdot e^{wj}}} \]
      10. associate-/l/ 100.0%

        \[\leadsto wj + \color{blue}{\frac{\frac{x - wj \cdot e^{wj}}{e^{wj}}}{wj + 1}} \]
    3. Simplified 100.0%

      \[\leadsto \color{blue}{wj + \frac{\frac{x}{e^{wj}} - wj}{wj + 1}} \]
    4. Taylor expanded in x around 0 2.5%

      \[\leadsto \color{blue}{wj - \frac{wj}{1 + wj}} \]
    5. Step-by-step derivation
      1. +-commutative 2.5%

        \[\leadsto wj - \frac{wj}{\color{blue}{wj + 1}} \]
    6. Simplified 2.5%

      \[\leadsto \color{blue}{wj - \frac{wj}{wj + 1}} \]
    7. Taylor expanded in wj around 0 60.7%

      \[\leadsto \color{blue}{{wj}^{2}} \]
    8. Step-by-step derivation
      1. unpow2 60.7%

        \[\leadsto \color{blue}{wj \cdot wj} \]
    9. Simplified 60.7%

      \[\leadsto \color{blue}{wj \cdot wj} \]

    if -1 < wj

    1. Initial program 81.1%

      \[wj - \frac{wj \cdot e^{wj} - x}{e^{wj} + wj \cdot e^{wj}} \]
    2. Step-by-step derivation
      1. distribute-rgt1-in 81.1%

        \[\leadsto wj - \frac{wj \cdot e^{wj} - x}{\color{blue}{\left(wj + 1\right) \cdot e^{wj}}} \]
    3. Simplified 81.1%

      \[\leadsto \color{blue}{wj - \frac{wj \cdot e^{wj} - x}{\left(wj + 1\right) \cdot e^{wj}}} \]
    4. Taylor expanded in wj around 0 80.0%

      \[\leadsto wj - \frac{wj \cdot e^{wj} - x}{\color{blue}{1 + 2 \cdot wj}} \]
    5. Step-by-step derivation
      1. *-commutative 80.0%

        \[\leadsto wj - \frac{wj \cdot e^{wj} - x}{1 + \color{blue}{wj \cdot 2}} \]
    6. Simplified 80.0%

      \[\leadsto wj - \frac{wj \cdot e^{wj} - x}{\color{blue}{1 + wj \cdot 2}} \]
    7. Taylor expanded in wj around 0 98.1%

      \[\leadsto \color{blue}{\left(1 - -4 \cdot x\right) \cdot {wj}^{2} + \left(-2 \cdot \left(wj \cdot x\right) + x\right)} \]
    8. Taylor expanded in x around 0 98.1%

      \[\leadsto \color{blue}{{wj}^{2}} + \left(-2 \cdot \left(wj \cdot x\right) + x\right) \]
    9. Step-by-step derivation
      1. unpow2 98.1%

        \[\leadsto \color{blue}{wj \cdot wj} + \left(-2 \cdot \left(wj \cdot x\right) + x\right) \]
    10. Simplified 98.1%

      \[\leadsto \color{blue}{wj \cdot wj} + \left(-2 \cdot \left(wj \cdot x\right) + x\right) \]
  3. Recombined 3 regimes into one program.
  4. Final simplification 79.8%

    \[\leadsto \begin{array}{l} \mathbf{if}\;wj \leq -1.9 \cdot 10^{+266}:\\ \;\;\;\;wj + \frac{x \cdot \left(1 - wj\right)}{wj}\\ \mathbf{elif}\;wj \leq -4.8 \cdot 10^{+193}:\\ \;\;\;\;wj \cdot wj\\ \mathbf{elif}\;wj \leq -1:\\ \;\;\;\;wj + \frac{x \cdot \left(1 - wj\right)}{wj}\\ \mathbf{else}:\\ \;\;\;\;\left(x + -2 \cdot \left(wj \cdot x\right)\right) + wj \cdot wj\\ \end{array} \]
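
The `distribute-rgt1-in` and `associate-/l/` steps that recur throughout these derivations all rest on one piece of algebra: the Newton-step denominator factors as (1 + wj) · e^wj, so the division by e^wj can be pulled inside the numerator. A worked form of that identity, added here for verification:

\[e^{wj} + wj \cdot e^{wj} = \left(1 + wj\right) \cdot e^{wj} \quad\Longrightarrow\quad wj - \frac{wj \cdot e^{wj} - x}{e^{wj} + wj \cdot e^{wj}} = wj + \frac{\frac{x}{e^{wj}} - wj}{wj + 1}\]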

Alternative 9: 77.5% accurate, 18.3× speedup

\[\begin{array}{l} \\ \begin{array}{l} t_0 := x - wj \cdot x\\ \mathbf{if}\;x \leq -7.6 \cdot 10^{+153}:\\ \;\;\;\;\frac{t_0}{wj + 1}\\ \mathbf{elif}\;x \leq 7.6 \cdot 10^{-127}:\\ \;\;\;\;\left(x + -2 \cdot \left(wj \cdot x\right)\right) + wj \cdot wj\\ \mathbf{else}:\\ \;\;\;\;wj + \frac{t_0 - wj}{wj + 1}\\ \end{array} \end{array} \]
(FPCore (wj x)
 :precision binary64
 (let* ((t_0 (- x (* wj x))))
   (if (<= x -7.6e+153)
     (/ t_0 (+ wj 1.0))
     (if (<= x 7.6e-127)
       (+ (+ x (* -2.0 (* wj x))) (* wj wj))
       (+ wj (/ (- t_0 wj) (+ wj 1.0)))))))
double code(double wj, double x) {
	double t_0 = x - (wj * x);
	double tmp;
	if (x <= -7.6e+153) {
		tmp = t_0 / (wj + 1.0);
	} else if (x <= 7.6e-127) {
		tmp = (x + (-2.0 * (wj * x))) + (wj * wj);
	} else {
		tmp = wj + ((t_0 - wj) / (wj + 1.0));
	}
	return tmp;
}
real(8) function code(wj, x)
    real(8), intent (in) :: wj
    real(8), intent (in) :: x
    real(8) :: t_0
    real(8) :: tmp
    t_0 = x - (wj * x)
    if (x <= (-7.6d+153)) then
        tmp = t_0 / (wj + 1.0d0)
    else if (x <= 7.6d-127) then
        tmp = (x + ((-2.0d0) * (wj * x))) + (wj * wj)
    else
        tmp = wj + ((t_0 - wj) / (wj + 1.0d0))
    end if
    code = tmp
end function
public static double code(double wj, double x) {
	double t_0 = x - (wj * x);
	double tmp;
	if (x <= -7.6e+153) {
		tmp = t_0 / (wj + 1.0);
	} else if (x <= 7.6e-127) {
		tmp = (x + (-2.0 * (wj * x))) + (wj * wj);
	} else {
		tmp = wj + ((t_0 - wj) / (wj + 1.0));
	}
	return tmp;
}
def code(wj, x):
	t_0 = x - (wj * x)
	tmp = 0
	if x <= -7.6e+153:
		tmp = t_0 / (wj + 1.0)
	elif x <= 7.6e-127:
		tmp = (x + (-2.0 * (wj * x))) + (wj * wj)
	else:
		tmp = wj + ((t_0 - wj) / (wj + 1.0))
	return tmp
function code(wj, x)
	t_0 = Float64(x - Float64(wj * x))
	tmp = 0.0
	if (x <= -7.6e+153)
		tmp = Float64(t_0 / Float64(wj + 1.0));
	elseif (x <= 7.6e-127)
		tmp = Float64(Float64(x + Float64(-2.0 * Float64(wj * x))) + Float64(wj * wj));
	else
		tmp = Float64(wj + Float64(Float64(t_0 - wj) / Float64(wj + 1.0)));
	end
	return tmp
end
function tmp_2 = code(wj, x)
	t_0 = x - (wj * x);
	tmp = 0.0;
	if (x <= -7.6e+153)
		tmp = t_0 / (wj + 1.0);
	elseif (x <= 7.6e-127)
		tmp = (x + (-2.0 * (wj * x))) + (wj * wj);
	else
		tmp = wj + ((t_0 - wj) / (wj + 1.0));
	end
	tmp_2 = tmp;
end
code[wj_, x_] := Block[{t$95$0 = N[(x - N[(wj * x), $MachinePrecision]), $MachinePrecision]}, If[LessEqual[x, -7.6e+153], N[(t$95$0 / N[(wj + 1.0), $MachinePrecision]), $MachinePrecision], If[LessEqual[x, 7.6e-127], N[(N[(x + N[(-2.0 * N[(wj * x), $MachinePrecision]), $MachinePrecision]), $MachinePrecision] + N[(wj * wj), $MachinePrecision]), $MachinePrecision], N[(wj + N[(N[(t$95$0 - wj), $MachinePrecision] / N[(wj + 1.0), $MachinePrecision]), $MachinePrecision]), $MachinePrecision]]]]
\begin{array}{l}

\\
\begin{array}{l}
t_0 := x - wj \cdot x\\
\mathbf{if}\;x \leq -7.6 \cdot 10^{+153}:\\
\;\;\;\;\frac{t_0}{wj + 1}\\

\mathbf{elif}\;x \leq 7.6 \cdot 10^{-127}:\\
\;\;\;\;\left(x + -2 \cdot \left(wj \cdot x\right)\right) + wj \cdot wj\\

\mathbf{else}:\\
\;\;\;\;wj + \frac{t_0 - wj}{wj + 1}\\


\end{array}
\end{array}
Derivation
  1. Split input into 3 regimes
  2. if x < -7.59999999999999933e153

    1. Initial program 62.5%

      \[wj - \frac{wj \cdot e^{wj} - x}{e^{wj} + wj \cdot e^{wj}} \]
    2. Step-by-step derivation
      1. sub-neg 62.5%

        \[\leadsto \color{blue}{wj + \left(-\frac{wj \cdot e^{wj} - x}{e^{wj} + wj \cdot e^{wj}}\right)} \]
      2. div-sub 62.5%

        \[\leadsto wj + \left(-\color{blue}{\left(\frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}} - \frac{x}{e^{wj} + wj \cdot e^{wj}}\right)}\right) \]
      3. sub-neg 62.5%

        \[\leadsto wj + \left(-\color{blue}{\left(\frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}} + \left(-\frac{x}{e^{wj} + wj \cdot e^{wj}}\right)\right)}\right) \]
      4. +-commutative 62.5%

        \[\leadsto wj + \left(-\color{blue}{\left(\left(-\frac{x}{e^{wj} + wj \cdot e^{wj}}\right) + \frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}\right)}\right) \]
      5. distribute-neg-in 62.5%

        \[\leadsto wj + \color{blue}{\left(\left(-\left(-\frac{x}{e^{wj} + wj \cdot e^{wj}}\right)\right) + \left(-\frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}\right)\right)} \]
      6. remove-double-neg 62.5%

        \[\leadsto wj + \left(\color{blue}{\frac{x}{e^{wj} + wj \cdot e^{wj}}} + \left(-\frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}\right)\right) \]
      7. sub-neg 62.5%

        \[\leadsto wj + \color{blue}{\left(\frac{x}{e^{wj} + wj \cdot e^{wj}} - \frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}\right)} \]
      8. div-sub 62.5%

        \[\leadsto wj + \color{blue}{\frac{x - wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}} \]
      9. distribute-rgt1-in 96.8%

        \[\leadsto wj + \frac{x - wj \cdot e^{wj}}{\color{blue}{\left(wj + 1\right) \cdot e^{wj}}} \]
      10. associate-/l/ 96.9%

        \[\leadsto wj + \color{blue}{\frac{\frac{x - wj \cdot e^{wj}}{e^{wj}}}{wj + 1}} \]
    3. Simplified 100.0%

      \[\leadsto \color{blue}{wj + \frac{\frac{x}{e^{wj}} - wj}{wj + 1}} \]
    4. Taylor expanded in x around inf 96.9%

      \[\leadsto \color{blue}{\frac{x}{\left(1 + wj\right) \cdot e^{wj}}} \]
    5. Step-by-step derivation
      1. *-un-lft-identity 96.9%

        \[\leadsto \frac{\color{blue}{1 \cdot x}}{\left(1 + wj\right) \cdot e^{wj}} \]
      2. +-commutative 96.9%

        \[\leadsto \frac{1 \cdot x}{\color{blue}{\left(wj + 1\right)} \cdot e^{wj}} \]
      3. times-frac 96.9%

        \[\leadsto \color{blue}{\frac{1}{wj + 1} \cdot \frac{x}{e^{wj}}} \]
      4. +-commutative 96.9%

        \[\leadsto \frac{1}{\color{blue}{1 + wj}} \cdot \frac{x}{e^{wj}} \]
    6. Applied egg-rr 96.9%

      \[\leadsto \color{blue}{\frac{1}{1 + wj} \cdot \frac{x}{e^{wj}}} \]
    7. Step-by-step derivation
      1. associate-*l/ 97.0%

        \[\leadsto \color{blue}{\frac{1 \cdot \frac{x}{e^{wj}}}{1 + wj}} \]
      2. *-lft-identity 97.0%

        \[\leadsto \frac{\color{blue}{\frac{x}{e^{wj}}}}{1 + wj} \]
    8. Simplified 97.0%

      \[\leadsto \color{blue}{\frac{\frac{x}{e^{wj}}}{1 + wj}} \]
    9. Taylor expanded in wj around 0 90.9%

      \[\leadsto \frac{\color{blue}{-1 \cdot \left(wj \cdot x\right) + x}}{1 + wj} \]

    if -7.59999999999999933e153 < x < 7.60000000000000005e-127

    1. Initial program 46.9%

      \[wj - \frac{wj \cdot e^{wj} - x}{e^{wj} + wj \cdot e^{wj}} \]
    2. Step-by-step derivation
      1. distribute-rgt1-in 76.4%

        \[\leadsto wj - \frac{wj \cdot e^{wj} - x}{\color{blue}{\left(wj + 1\right) \cdot e^{wj}}} \]
    3. Simplified 76.4%

      \[\leadsto \color{blue}{wj - \frac{wj \cdot e^{wj} - x}{\left(wj + 1\right) \cdot e^{wj}}} \]
    4. Taylor expanded in wj around 0 46.8%

      \[\leadsto wj - \frac{wj \cdot e^{wj} - x}{\color{blue}{1 + 2 \cdot wj}} \]
    5. Step-by-step derivation
      1. *-commutative 46.8%

        \[\leadsto wj - \frac{wj \cdot e^{wj} - x}{1 + \color{blue}{wj \cdot 2}} \]
    6. Simplified 46.8%

      \[\leadsto wj - \frac{wj \cdot e^{wj} - x}{\color{blue}{1 + wj \cdot 2}} \]
    7. Taylor expanded in wj around 0 79.2%

      \[\leadsto \color{blue}{\left(1 - -4 \cdot x\right) \cdot {wj}^{2} + \left(-2 \cdot \left(wj \cdot x\right) + x\right)} \]
    8. Taylor expanded in x around 0 84.0%

      \[\leadsto \color{blue}{{wj}^{2}} + \left(-2 \cdot \left(wj \cdot x\right) + x\right) \]
    9. Step-by-step derivation
      1. unpow2 84.0%

        \[\leadsto \color{blue}{wj \cdot wj} + \left(-2 \cdot \left(wj \cdot x\right) + x\right) \]
    10. Simplified 84.0%

      \[\leadsto \color{blue}{wj \cdot wj} + \left(-2 \cdot \left(wj \cdot x\right) + x\right) \]

    if 7.60000000000000005e-127 < x

    1. Initial program 60.6%

      \[wj - \frac{wj \cdot e^{wj} - x}{e^{wj} + wj \cdot e^{wj}} \]
    2. Step-by-step derivation
      1. sub-neg 60.6%

        \[\leadsto \color{blue}{wj + \left(-\frac{wj \cdot e^{wj} - x}{e^{wj} + wj \cdot e^{wj}}\right)} \]
      2. div-sub 60.6%

        \[\leadsto wj + \left(-\color{blue}{\left(\frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}} - \frac{x}{e^{wj} + wj \cdot e^{wj}}\right)}\right) \]
      3. sub-neg 60.6%

        \[\leadsto wj + \left(-\color{blue}{\left(\frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}} + \left(-\frac{x}{e^{wj} + wj \cdot e^{wj}}\right)\right)}\right) \]
      4. +-commutative 60.6%

        \[\leadsto wj + \left(-\color{blue}{\left(\left(-\frac{x}{e^{wj} + wj \cdot e^{wj}}\right) + \frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}\right)}\right) \]
      5. distribute-neg-in 60.6%

        \[\leadsto wj + \color{blue}{\left(\left(-\left(-\frac{x}{e^{wj} + wj \cdot e^{wj}}\right)\right) + \left(-\frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}\right)\right)} \]
      6. remove-double-neg 60.6%

        \[\leadsto wj + \left(\color{blue}{\frac{x}{e^{wj} + wj \cdot e^{wj}}} + \left(-\frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}\right)\right) \]
      7. sub-neg 60.6%

        \[\leadsto wj + \color{blue}{\left(\frac{x}{e^{wj} + wj \cdot e^{wj}} - \frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}\right)} \]
      8. div-sub 60.6%

        \[\leadsto wj + \color{blue}{\frac{x - wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}} \]
      9. distribute-rgt1-in 99.6%

        \[\leadsto wj + \frac{x - wj \cdot e^{wj}}{\color{blue}{\left(wj + 1\right) \cdot e^{wj}}} \]
      10. associate-/l/ 99.5%

        \[\leadsto wj + \color{blue}{\frac{\frac{x - wj \cdot e^{wj}}{e^{wj}}}{wj + 1}} \]
    3. Simplified 99.5%

      \[\leadsto \color{blue}{wj + \frac{\frac{x}{e^{wj}} - wj}{wj + 1}} \]
    4. Taylor expanded in wj around 0 77.7%

      \[\leadsto wj + \frac{\color{blue}{\left(-1 \cdot \left(wj \cdot x\right) + x\right)} - wj}{wj + 1} \]
  3. Recombined 3 regimes into one program.
  4. Final simplification 82.5%

    \[\leadsto \begin{array}{l} \mathbf{if}\;x \leq -7.6 \cdot 10^{+153}:\\ \;\;\;\;\frac{x - wj \cdot x}{wj + 1}\\ \mathbf{elif}\;x \leq 7.6 \cdot 10^{-127}:\\ \;\;\;\;\left(x + -2 \cdot \left(wj \cdot x\right)\right) + wj \cdot wj\\ \mathbf{else}:\\ \;\;\;\;wj + \frac{\left(x - wj \cdot x\right) - wj}{wj + 1}\\ \end{array} \]
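
To see why the middle (polynomial) branch matters near wj ≈ 0, take a tiny x: in the original step, x is absorbed into wj · e^wj during the subtraction, and the final subtraction from wj then cancels to zero, whereas Alternative 9 keeps x as the leading term. A minimal sketch in plain Python (the helper names and sample inputs are the editor's, chosen for illustration; they are not part of the report):

import math

def step_original(wj, x):
	# wj - (wj*e^wj - x) / (e^wj + wj*e^wj), the initial program
	t_0 = wj * math.exp(wj)
	return wj - ((t_0 - x) / (math.exp(wj) + t_0))

def step_alt9(wj, x):
	# Alternative 9's FPCore, transcribed directly
	t_0 = x - (wj * x)
	if x <= -7.6e+153:
		return t_0 / (wj + 1.0)
	elif x <= 7.6e-127:
		return (x + (-2.0 * (wj * x))) + (wj * wj)
	else:
		return wj + ((t_0 - wj) / (wj + 1.0))

print(step_original(1e-160, 1e-200))  # 0.0: x is lost to cancellation
print(step_alt9(1e-160, 1e-200))      # ~1e-200, the correct magnitude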

Alternative 10: 67.6% accurate, 20.6× speedup

\[\begin{array}{l} \\ \begin{array}{l} t_0 := wj + \frac{x \cdot \left(1 - wj\right)}{wj}\\ \mathbf{if}\;wj \leq -2.9 \cdot 10^{+266}:\\ \;\;\;\;t_0\\ \mathbf{elif}\;wj \leq -5.1 \cdot 10^{+191}:\\ \;\;\;\;wj \cdot wj\\ \mathbf{elif}\;wj \leq -1:\\ \;\;\;\;t_0\\ \mathbf{else}:\\ \;\;\;\;x + -2 \cdot \left(wj \cdot x\right)\\ \end{array} \end{array} \]
(FPCore (wj x)
 :precision binary64
 (let* ((t_0 (+ wj (/ (* x (- 1.0 wj)) wj))))
   (if (<= wj -2.9e+266)
     t_0
     (if (<= wj -5.1e+191)
       (* wj wj)
       (if (<= wj -1.0) t_0 (+ x (* -2.0 (* wj x))))))))
double code(double wj, double x) {
	double t_0 = wj + ((x * (1.0 - wj)) / wj);
	double tmp;
	if (wj <= -2.9e+266) {
		tmp = t_0;
	} else if (wj <= -5.1e+191) {
		tmp = wj * wj;
	} else if (wj <= -1.0) {
		tmp = t_0;
	} else {
		tmp = x + (-2.0 * (wj * x));
	}
	return tmp;
}
real(8) function code(wj, x)
    real(8), intent (in) :: wj
    real(8), intent (in) :: x
    real(8) :: t_0
    real(8) :: tmp
    t_0 = wj + ((x * (1.0d0 - wj)) / wj)
    if (wj <= (-2.9d+266)) then
        tmp = t_0
    else if (wj <= (-5.1d+191)) then
        tmp = wj * wj
    else if (wj <= (-1.0d0)) then
        tmp = t_0
    else
        tmp = x + ((-2.0d0) * (wj * x))
    end if
    code = tmp
end function
public static double code(double wj, double x) {
	double t_0 = wj + ((x * (1.0 - wj)) / wj);
	double tmp;
	if (wj <= -2.9e+266) {
		tmp = t_0;
	} else if (wj <= -5.1e+191) {
		tmp = wj * wj;
	} else if (wj <= -1.0) {
		tmp = t_0;
	} else {
		tmp = x + (-2.0 * (wj * x));
	}
	return tmp;
}
def code(wj, x):
	t_0 = wj + ((x * (1.0 - wj)) / wj)
	tmp = 0
	if wj <= -2.9e+266:
		tmp = t_0
	elif wj <= -5.1e+191:
		tmp = wj * wj
	elif wj <= -1.0:
		tmp = t_0
	else:
		tmp = x + (-2.0 * (wj * x))
	return tmp
function code(wj, x)
	t_0 = Float64(wj + Float64(Float64(x * Float64(1.0 - wj)) / wj))
	tmp = 0.0
	if (wj <= -2.9e+266)
		tmp = t_0;
	elseif (wj <= -5.1e+191)
		tmp = Float64(wj * wj);
	elseif (wj <= -1.0)
		tmp = t_0;
	else
		tmp = Float64(x + Float64(-2.0 * Float64(wj * x)));
	end
	return tmp
end
function tmp_2 = code(wj, x)
	t_0 = wj + ((x * (1.0 - wj)) / wj);
	tmp = 0.0;
	if (wj <= -2.9e+266)
		tmp = t_0;
	elseif (wj <= -5.1e+191)
		tmp = wj * wj;
	elseif (wj <= -1.0)
		tmp = t_0;
	else
		tmp = x + (-2.0 * (wj * x));
	end
	tmp_2 = tmp;
end
code[wj_, x_] := Block[{t$95$0 = N[(wj + N[(N[(x * N[(1.0 - wj), $MachinePrecision]), $MachinePrecision] / wj), $MachinePrecision]), $MachinePrecision]}, If[LessEqual[wj, -2.9e+266], t$95$0, If[LessEqual[wj, -5.1e+191], N[(wj * wj), $MachinePrecision], If[LessEqual[wj, -1.0], t$95$0, N[(x + N[(-2.0 * N[(wj * x), $MachinePrecision]), $MachinePrecision]), $MachinePrecision]]]]]
\begin{array}{l}

\\
\begin{array}{l}
t_0 := wj + \frac{x \cdot \left(1 - wj\right)}{wj}\\
\mathbf{if}\;wj \leq -2.9 \cdot 10^{+266}:\\
\;\;\;\;t_0\\

\mathbf{elif}\;wj \leq -5.1 \cdot 10^{+191}:\\
\;\;\;\;wj \cdot wj\\

\mathbf{elif}\;wj \leq -1:\\
\;\;\;\;t_0\\

\mathbf{else}:\\
\;\;\;\;x + -2 \cdot \left(wj \cdot x\right)\\


\end{array}
\end{array}
Derivation
  1. Split input into 3 regimes
  2. if wj < -2.90000000000000017e266 or -5.09999999999999982e191 < wj < -1

    1. Initial program 1.7%

      \[wj - \frac{wj \cdot e^{wj} - x}{e^{wj} + wj \cdot e^{wj}} \]
    2. Step-by-step derivation
      1. sub-neg 1.7%

        \[\leadsto \color{blue}{wj + \left(-\frac{wj \cdot e^{wj} - x}{e^{wj} + wj \cdot e^{wj}}\right)} \]
      2. div-sub 1.7%

        \[\leadsto wj + \left(-\color{blue}{\left(\frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}} - \frac{x}{e^{wj} + wj \cdot e^{wj}}\right)}\right) \]
      3. sub-neg 1.7%

        \[\leadsto wj + \left(-\color{blue}{\left(\frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}} + \left(-\frac{x}{e^{wj} + wj \cdot e^{wj}}\right)\right)}\right) \]
      4. +-commutative 1.7%

        \[\leadsto wj + \left(-\color{blue}{\left(\left(-\frac{x}{e^{wj} + wj \cdot e^{wj}}\right) + \frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}\right)}\right) \]
      5. distribute-neg-in 1.7%

        \[\leadsto wj + \color{blue}{\left(\left(-\left(-\frac{x}{e^{wj} + wj \cdot e^{wj}}\right)\right) + \left(-\frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}\right)\right)} \]
      6. remove-double-neg 1.7%

        \[\leadsto wj + \left(\color{blue}{\frac{x}{e^{wj} + wj \cdot e^{wj}}} + \left(-\frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}\right)\right) \]
      7. sub-neg 1.7%

        \[\leadsto wj + \color{blue}{\left(\frac{x}{e^{wj} + wj \cdot e^{wj}} - \frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}\right)} \]
      8. div-sub 1.7%

        \[\leadsto wj + \color{blue}{\frac{x - wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}} \]
      9. distribute-rgt1-in 100.0%

        \[\leadsto wj + \frac{x - wj \cdot e^{wj}}{\color{blue}{\left(wj + 1\right) \cdot e^{wj}}} \]
      10. associate-/l/ 100.0%

        \[\leadsto wj + \color{blue}{\frac{\frac{x - wj \cdot e^{wj}}{e^{wj}}}{wj + 1}} \]
    3. Simplified 100.0%

      \[\leadsto \color{blue}{wj + \frac{\frac{x}{e^{wj}} - wj}{wj + 1}} \]
    4. Taylor expanded in wj around 0 37.7%

      \[\leadsto wj + \frac{\color{blue}{\left(-1 \cdot \left(wj \cdot x\right) + x\right)} - wj}{wj + 1} \]
    5. Taylor expanded in x around inf 36.5%

      \[\leadsto wj + \color{blue}{\frac{\left(1 + -1 \cdot wj\right) \cdot x}{1 + wj}} \]
    6. Step-by-step derivation
      1. associate-/l* 5.0%

        \[\leadsto wj + \color{blue}{\frac{1 + -1 \cdot wj}{\frac{1 + wj}{x}}} \]
      2. mul-1-neg 5.0%

        \[\leadsto wj + \frac{1 + \color{blue}{\left(-wj\right)}}{\frac{1 + wj}{x}} \]
    7. Simplified 5.0%

      \[\leadsto wj + \color{blue}{\frac{1 + \left(-wj\right)}{\frac{1 + wj}{x}}} \]
    8. Taylor expanded in wj around inf 5.0%

      \[\leadsto wj + \frac{1 + \left(-wj\right)}{\color{blue}{\frac{wj}{x}}} \]
    9. Taylor expanded in x around 0 36.5%

      \[\leadsto wj + \color{blue}{\frac{\left(1 - wj\right) \cdot x}{wj}} \]

    if -2.90000000000000017e266 < wj < -5.09999999999999982e191

    1. Initial program 0.0%

      \[wj - \frac{wj \cdot e^{wj} - x}{e^{wj} + wj \cdot e^{wj}} \]
    2. Step-by-step derivation
      1. sub-neg 0.0%

        \[\leadsto \color{blue}{wj + \left(-\frac{wj \cdot e^{wj} - x}{e^{wj} + wj \cdot e^{wj}}\right)} \]
      2. div-sub 0.0%

        \[\leadsto wj + \left(-\color{blue}{\left(\frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}} - \frac{x}{e^{wj} + wj \cdot e^{wj}}\right)}\right) \]
      3. sub-neg 0.0%

        \[\leadsto wj + \left(-\color{blue}{\left(\frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}} + \left(-\frac{x}{e^{wj} + wj \cdot e^{wj}}\right)\right)}\right) \]
      4. +-commutative 0.0%

        \[\leadsto wj + \left(-\color{blue}{\left(\left(-\frac{x}{e^{wj} + wj \cdot e^{wj}}\right) + \frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}\right)}\right) \]
      5. distribute-neg-in 0.0%

        \[\leadsto wj + \color{blue}{\left(\left(-\left(-\frac{x}{e^{wj} + wj \cdot e^{wj}}\right)\right) + \left(-\frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}\right)\right)} \]
      6. remove-double-neg 0.0%

        \[\leadsto wj + \left(\color{blue}{\frac{x}{e^{wj} + wj \cdot e^{wj}}} + \left(-\frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}\right)\right) \]
      7. sub-neg 0.0%

        \[\leadsto wj + \color{blue}{\left(\frac{x}{e^{wj} + wj \cdot e^{wj}} - \frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}\right)} \]
      8. div-sub 0.0%

        \[\leadsto wj + \color{blue}{\frac{x - wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}} \]
      9. distribute-rgt1-in 100.0%

        \[\leadsto wj + \frac{x - wj \cdot e^{wj}}{\color{blue}{\left(wj + 1\right) \cdot e^{wj}}} \]
      10. associate-/l/ 100.0%

        \[\leadsto wj + \color{blue}{\frac{\frac{x - wj \cdot e^{wj}}{e^{wj}}}{wj + 1}} \]
    3. Simplified 100.0%

      \[\leadsto \color{blue}{wj + \frac{\frac{x}{e^{wj}} - wj}{wj + 1}} \]
    4. Taylor expanded in x around 0 2.5%

      \[\leadsto \color{blue}{wj - \frac{wj}{1 + wj}} \]
    5. Step-by-step derivation
      1. +-commutative 2.5%

        \[\leadsto wj - \frac{wj}{\color{blue}{wj + 1}} \]
    6. Simplified 2.5%

      \[\leadsto \color{blue}{wj - \frac{wj}{wj + 1}} \]
    7. Taylor expanded in wj around 0 60.7%

      \[\leadsto \color{blue}{{wj}^{2}} \]
    8. Step-by-step derivation
      1. unpow2 60.7%

        \[\leadsto \color{blue}{wj \cdot wj} \]
    9. Simplified 60.7%

      \[\leadsto \color{blue}{wj \cdot wj} \]

    if -1 < wj

    1. Initial program 81.1%

      \[wj - \frac{wj \cdot e^{wj} - x}{e^{wj} + wj \cdot e^{wj}} \]
    2. Step-by-step derivation
      1. sub-neg 81.1%

        \[\leadsto \color{blue}{wj + \left(-\frac{wj \cdot e^{wj} - x}{e^{wj} + wj \cdot e^{wj}}\right)} \]
      2. div-sub 81.1%

        \[\leadsto wj + \left(-\color{blue}{\left(\frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}} - \frac{x}{e^{wj} + wj \cdot e^{wj}}\right)}\right) \]
      3. sub-neg 81.1%

        \[\leadsto wj + \left(-\color{blue}{\left(\frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}} + \left(-\frac{x}{e^{wj} + wj \cdot e^{wj}}\right)\right)}\right) \]
      4. +-commutative 81.1%

        \[\leadsto wj + \left(-\color{blue}{\left(\left(-\frac{x}{e^{wj} + wj \cdot e^{wj}}\right) + \frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}\right)}\right) \]
      5. distribute-neg-in 81.1%

        \[\leadsto wj + \color{blue}{\left(\left(-\left(-\frac{x}{e^{wj} + wj \cdot e^{wj}}\right)\right) + \left(-\frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}\right)\right)} \]
      6. remove-double-neg 81.1%

        \[\leadsto wj + \left(\color{blue}{\frac{x}{e^{wj} + wj \cdot e^{wj}}} + \left(-\frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}\right)\right) \]
      7. sub-neg 81.1%

        \[\leadsto wj + \color{blue}{\left(\frac{x}{e^{wj} + wj \cdot e^{wj}} - \frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}\right)} \]
      8. div-sub 81.1%

        \[\leadsto wj + \color{blue}{\frac{x - wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}} \]
      9. distribute-rgt1-in 81.1%

        \[\leadsto wj + \frac{x - wj \cdot e^{wj}}{\color{blue}{\left(wj + 1\right) \cdot e^{wj}}} \]
      10. associate-/l/ 81.1%

        \[\leadsto wj + \color{blue}{\frac{\frac{x - wj \cdot e^{wj}}{e^{wj}}}{wj + 1}} \]
    3. Simplified 81.7%

      \[\leadsto \color{blue}{wj + \frac{\frac{x}{e^{wj}} - wj}{wj + 1}} \]
    4. Taylor expanded in wj around 0 87.6%

      \[\leadsto \color{blue}{-2 \cdot \left(wj \cdot x\right) + x} \]
  3. Recombined 3 regimes into one program.
  4. Final simplification 72.9%

    \[\leadsto \begin{array}{l} \mathbf{if}\;wj \leq -2.9 \cdot 10^{+266}:\\ \;\;\;\;wj + \frac{x \cdot \left(1 - wj\right)}{wj}\\ \mathbf{elif}\;wj \leq -5.1 \cdot 10^{+191}:\\ \;\;\;\;wj \cdot wj\\ \mathbf{elif}\;wj \leq -1:\\ \;\;\;\;wj + \frac{x \cdot \left(1 - wj\right)}{wj}\\ \mathbf{else}:\\ \;\;\;\;x + -2 \cdot \left(wj \cdot x\right)\\ \end{array} \]
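
The wj · wj regime comes from the `Taylor expanded in wj around 0` step applied to wj - wj/(1 + wj); writing that as a single fraction and expanding the geometric series shows where the square comes from (a verification sketch, not from the report):

\[wj - \frac{wj}{1 + wj} = \frac{{wj}^{2}}{1 + wj} = {wj}^{2} \cdot \left(1 - wj + {wj}^{2} - \cdots\right)\]

Truncating after the leading term gives wj · wj; the modest accuracy reported for that step (60.7%) reflects that this truncation is a sampled-error trade-off rather than a uniformly good approximation.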

Alternative 11: 67.6% accurate, 20.6× speedup

\[\begin{array}{l} \\ \begin{array}{l} t_0 := wj + \frac{x \cdot \left(1 - wj\right)}{wj}\\ \mathbf{if}\;wj \leq -5.2 \cdot 10^{+265}:\\ \;\;\;\;t_0\\ \mathbf{elif}\;wj \leq -5.2 \cdot 10^{+191}:\\ \;\;\;\;wj \cdot wj\\ \mathbf{elif}\;wj \leq -1:\\ \;\;\;\;t_0\\ \mathbf{else}:\\ \;\;\;\;x \cdot \frac{1 - wj}{wj + 1}\\ \end{array} \end{array} \]
(FPCore (wj x)
 :precision binary64
 (let* ((t_0 (+ wj (/ (* x (- 1.0 wj)) wj))))
   (if (<= wj -5.2e+265)
     t_0
     (if (<= wj -5.2e+191)
       (* wj wj)
       (if (<= wj -1.0) t_0 (* x (/ (- 1.0 wj) (+ wj 1.0))))))))
double code(double wj, double x) {
	double t_0 = wj + ((x * (1.0 - wj)) / wj);
	double tmp;
	if (wj <= -5.2e+265) {
		tmp = t_0;
	} else if (wj <= -5.2e+191) {
		tmp = wj * wj;
	} else if (wj <= -1.0) {
		tmp = t_0;
	} else {
		tmp = x * ((1.0 - wj) / (wj + 1.0));
	}
	return tmp;
}
real(8) function code(wj, x)
    real(8), intent (in) :: wj
    real(8), intent (in) :: x
    real(8) :: t_0
    real(8) :: tmp
    t_0 = wj + ((x * (1.0d0 - wj)) / wj)
    if (wj <= (-5.2d+265)) then
        tmp = t_0
    else if (wj <= (-5.2d+191)) then
        tmp = wj * wj
    else if (wj <= (-1.0d0)) then
        tmp = t_0
    else
        tmp = x * ((1.0d0 - wj) / (wj + 1.0d0))
    end if
    code = tmp
end function
public static double code(double wj, double x) {
	double t_0 = wj + ((x * (1.0 - wj)) / wj);
	double tmp;
	if (wj <= -5.2e+265) {
		tmp = t_0;
	} else if (wj <= -5.2e+191) {
		tmp = wj * wj;
	} else if (wj <= -1.0) {
		tmp = t_0;
	} else {
		tmp = x * ((1.0 - wj) / (wj + 1.0));
	}
	return tmp;
}
def code(wj, x):
	t_0 = wj + ((x * (1.0 - wj)) / wj)
	tmp = 0
	if wj <= -5.2e+265:
		tmp = t_0
	elif wj <= -5.2e+191:
		tmp = wj * wj
	elif wj <= -1.0:
		tmp = t_0
	else:
		tmp = x * ((1.0 - wj) / (wj + 1.0))
	return tmp
function code(wj, x)
	t_0 = Float64(wj + Float64(Float64(x * Float64(1.0 - wj)) / wj))
	tmp = 0.0
	if (wj <= -5.2e+265)
		tmp = t_0;
	elseif (wj <= -5.2e+191)
		tmp = Float64(wj * wj);
	elseif (wj <= -1.0)
		tmp = t_0;
	else
		tmp = Float64(x * Float64(Float64(1.0 - wj) / Float64(wj + 1.0)));
	end
	return tmp
end
function tmp_2 = code(wj, x)
	t_0 = wj + ((x * (1.0 - wj)) / wj);
	tmp = 0.0;
	if (wj <= -5.2e+265)
		tmp = t_0;
	elseif (wj <= -5.2e+191)
		tmp = wj * wj;
	elseif (wj <= -1.0)
		tmp = t_0;
	else
		tmp = x * ((1.0 - wj) / (wj + 1.0));
	end
	tmp_2 = tmp;
end
code[wj_, x_] := Block[{t$95$0 = N[(wj + N[(N[(x * N[(1.0 - wj), $MachinePrecision]), $MachinePrecision] / wj), $MachinePrecision]), $MachinePrecision]}, If[LessEqual[wj, -5.2e+265], t$95$0, If[LessEqual[wj, -5.2e+191], N[(wj * wj), $MachinePrecision], If[LessEqual[wj, -1.0], t$95$0, N[(x * N[(N[(1.0 - wj), $MachinePrecision] / N[(wj + 1.0), $MachinePrecision]), $MachinePrecision]), $MachinePrecision]]]]]
\begin{array}{l}

\\
\begin{array}{l}
t_0 := wj + \frac{x \cdot \left(1 - wj\right)}{wj}\\
\mathbf{if}\;wj \leq -5.2 \cdot 10^{+265}:\\
\;\;\;\;t_0\\

\mathbf{elif}\;wj \leq -5.2 \cdot 10^{+191}:\\
\;\;\;\;wj \cdot wj\\

\mathbf{elif}\;wj \leq -1:\\
\;\;\;\;t_0\\

\mathbf{else}:\\
\;\;\;\;x \cdot \frac{1 - wj}{wj + 1}\\


\end{array}
\end{array}
Derivation
  1. Split input into 3 regimes
  2. if wj < -5.2000000000000003e265 or -5.2000000000000001e191 < wj < -1

    1. Initial program 1.7%

      \[wj - \frac{wj \cdot e^{wj} - x}{e^{wj} + wj \cdot e^{wj}} \]
    2. Step-by-step derivation
      1. sub-neg 1.7%

        \[\leadsto \color{blue}{wj + \left(-\frac{wj \cdot e^{wj} - x}{e^{wj} + wj \cdot e^{wj}}\right)} \]
      2. div-sub 1.7%

        \[\leadsto wj + \left(-\color{blue}{\left(\frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}} - \frac{x}{e^{wj} + wj \cdot e^{wj}}\right)}\right) \]
      3. sub-neg 1.7%

        \[\leadsto wj + \left(-\color{blue}{\left(\frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}} + \left(-\frac{x}{e^{wj} + wj \cdot e^{wj}}\right)\right)}\right) \]
      4. +-commutative 1.7%

        \[\leadsto wj + \left(-\color{blue}{\left(\left(-\frac{x}{e^{wj} + wj \cdot e^{wj}}\right) + \frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}\right)}\right) \]
      5. distribute-neg-in 1.7%

        \[\leadsto wj + \color{blue}{\left(\left(-\left(-\frac{x}{e^{wj} + wj \cdot e^{wj}}\right)\right) + \left(-\frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}\right)\right)} \]
      6. remove-double-neg 1.7%

        \[\leadsto wj + \left(\color{blue}{\frac{x}{e^{wj} + wj \cdot e^{wj}}} + \left(-\frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}\right)\right) \]
      7. sub-neg 1.7%

        \[\leadsto wj + \color{blue}{\left(\frac{x}{e^{wj} + wj \cdot e^{wj}} - \frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}\right)} \]
      8. div-sub 1.7%

        \[\leadsto wj + \color{blue}{\frac{x - wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}} \]
      9. distribute-rgt1-in 100.0%

        \[\leadsto wj + \frac{x - wj \cdot e^{wj}}{\color{blue}{\left(wj + 1\right) \cdot e^{wj}}} \]
      10. associate-/l/ 100.0%

        \[\leadsto wj + \color{blue}{\frac{\frac{x - wj \cdot e^{wj}}{e^{wj}}}{wj + 1}} \]
    3. Simplified 100.0%

      \[\leadsto \color{blue}{wj + \frac{\frac{x}{e^{wj}} - wj}{wj + 1}} \]
    4. Taylor expanded in wj around 0 37.7%

      \[\leadsto wj + \frac{\color{blue}{\left(-1 \cdot \left(wj \cdot x\right) + x\right)} - wj}{wj + 1} \]
    5. Taylor expanded in x around inf 36.5%

      \[\leadsto wj + \color{blue}{\frac{\left(1 + -1 \cdot wj\right) \cdot x}{1 + wj}} \]
    6. Step-by-step derivation
      1. associate-/l* 5.0%

        \[\leadsto wj + \color{blue}{\frac{1 + -1 \cdot wj}{\frac{1 + wj}{x}}} \]
      2. mul-1-neg 5.0%

        \[\leadsto wj + \frac{1 + \color{blue}{\left(-wj\right)}}{\frac{1 + wj}{x}} \]
    7. Simplified 5.0%

      \[\leadsto wj + \color{blue}{\frac{1 + \left(-wj\right)}{\frac{1 + wj}{x}}} \]
    8. Taylor expanded in wj around inf 5.0%

      \[\leadsto wj + \frac{1 + \left(-wj\right)}{\color{blue}{\frac{wj}{x}}} \]
    9. Taylor expanded in x around 0 36.5%

      \[\leadsto wj + \color{blue}{\frac{\left(1 - wj\right) \cdot x}{wj}} \]

    if -5.2000000000000003e265 < wj < -5.2000000000000001e191

    1. Initial program 0.0%

      \[wj - \frac{wj \cdot e^{wj} - x}{e^{wj} + wj \cdot e^{wj}} \]
    2. Step-by-step derivation
      1. sub-neg 0.0%

        \[\leadsto \color{blue}{wj + \left(-\frac{wj \cdot e^{wj} - x}{e^{wj} + wj \cdot e^{wj}}\right)} \]
      2. div-sub 0.0%

        \[\leadsto wj + \left(-\color{blue}{\left(\frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}} - \frac{x}{e^{wj} + wj \cdot e^{wj}}\right)}\right) \]
      3. sub-neg 0.0%

        \[\leadsto wj + \left(-\color{blue}{\left(\frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}} + \left(-\frac{x}{e^{wj} + wj \cdot e^{wj}}\right)\right)}\right) \]
      4. +-commutative 0.0%

        \[\leadsto wj + \left(-\color{blue}{\left(\left(-\frac{x}{e^{wj} + wj \cdot e^{wj}}\right) + \frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}\right)}\right) \]
      5. distribute-neg-in 0.0%

        \[\leadsto wj + \color{blue}{\left(\left(-\left(-\frac{x}{e^{wj} + wj \cdot e^{wj}}\right)\right) + \left(-\frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}\right)\right)} \]
      6. remove-double-neg 0.0%

        \[\leadsto wj + \left(\color{blue}{\frac{x}{e^{wj} + wj \cdot e^{wj}}} + \left(-\frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}\right)\right) \]
      7. sub-neg 0.0%

        \[\leadsto wj + \color{blue}{\left(\frac{x}{e^{wj} + wj \cdot e^{wj}} - \frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}\right)} \]
      8. div-sub 0.0%

        \[\leadsto wj + \color{blue}{\frac{x - wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}} \]
      9. distribute-rgt1-in 100.0%

        \[\leadsto wj + \frac{x - wj \cdot e^{wj}}{\color{blue}{\left(wj + 1\right) \cdot e^{wj}}} \]
      10. associate-/l/ 100.0%

        \[\leadsto wj + \color{blue}{\frac{\frac{x - wj \cdot e^{wj}}{e^{wj}}}{wj + 1}} \]
    3. Simplified 100.0%

      \[\leadsto \color{blue}{wj + \frac{\frac{x}{e^{wj}} - wj}{wj + 1}} \]
    4. Taylor expanded in x around 0 2.5%

      \[\leadsto \color{blue}{wj - \frac{wj}{1 + wj}} \]
    5. Step-by-step derivation
      1. +-commutative 2.5%

        \[\leadsto wj - \frac{wj}{\color{blue}{wj + 1}} \]
    6. Simplified 2.5%

      \[\leadsto \color{blue}{wj - \frac{wj}{wj + 1}} \]
    7. Taylor expanded in wj around 0 60.7%

      \[\leadsto \color{blue}{{wj}^{2}} \]
    8. Step-by-step derivation
      1. unpow2 60.7%

        \[\leadsto \color{blue}{wj \cdot wj} \]
    9. Simplified 60.7%

      \[\leadsto \color{blue}{wj \cdot wj} \]

    if -1 < wj

    1. Initial program 81.1%

      \[wj - \frac{wj \cdot e^{wj} - x}{e^{wj} + wj \cdot e^{wj}} \]
    2. Step-by-step derivation
      1. sub-neg 81.1%

        \[\leadsto \color{blue}{wj + \left(-\frac{wj \cdot e^{wj} - x}{e^{wj} + wj \cdot e^{wj}}\right)} \]
      2. div-sub 81.1%

        \[\leadsto wj + \left(-\color{blue}{\left(\frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}} - \frac{x}{e^{wj} + wj \cdot e^{wj}}\right)}\right) \]
      3. sub-neg 81.1%

        \[\leadsto wj + \left(-\color{blue}{\left(\frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}} + \left(-\frac{x}{e^{wj} + wj \cdot e^{wj}}\right)\right)}\right) \]
      4. +-commutative 81.1%

        \[\leadsto wj + \left(-\color{blue}{\left(\left(-\frac{x}{e^{wj} + wj \cdot e^{wj}}\right) + \frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}\right)}\right) \]
      5. distribute-neg-in 81.1%

        \[\leadsto wj + \color{blue}{\left(\left(-\left(-\frac{x}{e^{wj} + wj \cdot e^{wj}}\right)\right) + \left(-\frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}\right)\right)} \]
      6. remove-double-neg 81.1%

        \[\leadsto wj + \left(\color{blue}{\frac{x}{e^{wj} + wj \cdot e^{wj}}} + \left(-\frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}\right)\right) \]
      7. sub-neg 81.1%

        \[\leadsto wj + \color{blue}{\left(\frac{x}{e^{wj} + wj \cdot e^{wj}} - \frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}\right)} \]
      8. div-sub 81.1%

        \[\leadsto wj + \color{blue}{\frac{x - wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}} \]
      9. distribute-rgt1-in 81.1%

        \[\leadsto wj + \frac{x - wj \cdot e^{wj}}{\color{blue}{\left(wj + 1\right) \cdot e^{wj}}} \]
      10. associate-/l/ 81.1%

        \[\leadsto wj + \color{blue}{\frac{\frac{x - wj \cdot e^{wj}}{e^{wj}}}{wj + 1}} \]
    3. Simplified 81.7%

      \[\leadsto \color{blue}{wj + \frac{\frac{x}{e^{wj}} - wj}{wj + 1}} \]
    4. Taylor expanded in wj around 0 80.1%

      \[\leadsto wj + \frac{\color{blue}{\left(-1 \cdot \left(wj \cdot x\right) + x\right)} - wj}{wj + 1} \]
    5. Taylor expanded in x around -inf 87.6%

      \[\leadsto \color{blue}{-1 \cdot \left(\left(\frac{wj}{1 + wj} - \frac{1}{1 + wj}\right) \cdot x\right)} \]
    6. Step-by-step derivation
      1. mul-1-neg 87.6%

        \[\leadsto \color{blue}{-\left(\frac{wj}{1 + wj} - \frac{1}{1 + wj}\right) \cdot x} \]
      2. *-commutative 87.6%

        \[\leadsto -\color{blue}{x \cdot \left(\frac{wj}{1 + wj} - \frac{1}{1 + wj}\right)} \]
      3. distribute-lft-neg-in 87.6%

        \[\leadsto \color{blue}{\left(-x\right) \cdot \left(\frac{wj}{1 + wj} - \frac{1}{1 + wj}\right)} \]
      4. sub-neg 87.6%

        \[\leadsto \left(-x\right) \cdot \color{blue}{\left(\frac{wj}{1 + wj} + \left(-\frac{1}{1 + wj}\right)\right)} \]
      5. *-rgt-identity 87.6%

        \[\leadsto \left(-x\right) \cdot \left(\frac{\color{blue}{wj \cdot 1}}{1 + wj} + \left(-\frac{1}{1 + wj}\right)\right) \]
      6. associate-*r/ 87.6%

        \[\leadsto \left(-x\right) \cdot \left(\color{blue}{wj \cdot \frac{1}{1 + wj}} + \left(-\frac{1}{1 + wj}\right)\right) \]
      7. neg-mul-1 87.6%

        \[\leadsto \left(-x\right) \cdot \left(wj \cdot \frac{1}{1 + wj} + \color{blue}{-1 \cdot \frac{1}{1 + wj}}\right) \]
      8. distribute-rgt-out 87.6%

        \[\leadsto \left(-x\right) \cdot \color{blue}{\left(\frac{1}{1 + wj} \cdot \left(wj + -1\right)\right)} \]
    7. Simplified 87.6%

      \[\leadsto \color{blue}{\left(-x\right) \cdot \left(\frac{1}{1 + wj} \cdot \left(wj + -1\right)\right)} \]
    8. Step-by-step derivation
      1. distribute-lft-in 87.6%

        \[\leadsto \left(-x\right) \cdot \color{blue}{\left(\frac{1}{1 + wj} \cdot wj + \frac{1}{1 + wj} \cdot -1\right)} \]
      2. +-commutative 87.6%

        \[\leadsto \left(-x\right) \cdot \left(\frac{1}{\color{blue}{wj + 1}} \cdot wj + \frac{1}{1 + wj} \cdot -1\right) \]
      3. +-commutative 87.6%

        \[\leadsto \left(-x\right) \cdot \left(\frac{1}{wj + 1} \cdot wj + \frac{1}{\color{blue}{wj + 1}} \cdot -1\right) \]
    9. Applied egg-rr 87.6%

      \[\leadsto \left(-x\right) \cdot \color{blue}{\left(\frac{1}{wj + 1} \cdot wj + \frac{1}{wj + 1} \cdot -1\right)} \]
    10. Step-by-step derivation
      1. distribute-lft-out 87.6%

        \[\leadsto \left(-x\right) \cdot \color{blue}{\left(\frac{1}{wj + 1} \cdot \left(wj + -1\right)\right)} \]
      2. associate-*l/ 87.6%

        \[\leadsto \left(-x\right) \cdot \color{blue}{\frac{1 \cdot \left(wj + -1\right)}{wj + 1}} \]
      3. *-lft-identity 87.6%

        \[\leadsto \left(-x\right) \cdot \frac{\color{blue}{wj + -1}}{wj + 1} \]
    11. Simplified 87.6%

      \[\leadsto \left(-x\right) \cdot \color{blue}{\frac{wj + -1}{wj + 1}} \]
  3. Recombined 3 regimes into one program.
  4. Final simplification 72.9%

    \[\leadsto \begin{array}{l} \mathbf{if}\;wj \leq -5.2 \cdot 10^{+265}:\\ \;\;\;\;wj + \frac{x \cdot \left(1 - wj\right)}{wj}\\ \mathbf{elif}\;wj \leq -5.2 \cdot 10^{+191}:\\ \;\;\;\;wj \cdot wj\\ \mathbf{elif}\;wj \leq -1:\\ \;\;\;\;wj + \frac{x \cdot \left(1 - wj\right)}{wj}\\ \mathbf{else}:\\ \;\;\;\;x \cdot \frac{1 - wj}{wj + 1}\\ \end{array} \]
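
The else branch of the final program is just step 11 with the sign folded into the numerator; one line of algebra confirms it (verification, not from the report):

\[\left(-x\right) \cdot \frac{wj + -1}{wj + 1} = x \cdot \frac{1 - wj}{wj + 1}\]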

Alternative 12: 78.4% accurate, 20.7× speedup

\[\begin{array}{l} \\ \begin{array}{l} \mathbf{if}\;x \leq -7.6 \cdot 10^{+153} \lor \neg \left(x \leq 5.2 \cdot 10^{-21}\right):\\ \;\;\;\;\frac{x - wj \cdot x}{wj + 1}\\ \mathbf{else}:\\ \;\;\;\;\left(x + -2 \cdot \left(wj \cdot x\right)\right) + wj \cdot wj\\ \end{array} \end{array} \]
(FPCore (wj x)
 :precision binary64
 (if (or (<= x -7.6e+153) (not (<= x 5.2e-21)))
   (/ (- x (* wj x)) (+ wj 1.0))
   (+ (+ x (* -2.0 (* wj x))) (* wj wj))))
double code(double wj, double x) {
	double tmp;
	if ((x <= -7.6e+153) || !(x <= 5.2e-21)) {
		tmp = (x - (wj * x)) / (wj + 1.0);
	} else {
		tmp = (x + (-2.0 * (wj * x))) + (wj * wj);
	}
	return tmp;
}
real(8) function code(wj, x)
    real(8), intent (in) :: wj
    real(8), intent (in) :: x
    real(8) :: tmp
    if ((x <= (-7.6d+153)) .or. (.not. (x <= 5.2d-21))) then
        tmp = (x - (wj * x)) / (wj + 1.0d0)
    else
        tmp = (x + ((-2.0d0) * (wj * x))) + (wj * wj)
    end if
    code = tmp
end function
public static double code(double wj, double x) {
	double tmp;
	if ((x <= -7.6e+153) || !(x <= 5.2e-21)) {
		tmp = (x - (wj * x)) / (wj + 1.0);
	} else {
		tmp = (x + (-2.0 * (wj * x))) + (wj * wj);
	}
	return tmp;
}
def code(wj, x):
	tmp = 0
	if (x <= -7.6e+153) or not (x <= 5.2e-21):
		tmp = (x - (wj * x)) / (wj + 1.0)
	else:
		tmp = (x + (-2.0 * (wj * x))) + (wj * wj)
	return tmp
function code(wj, x)
	tmp = 0.0
	if ((x <= -7.6e+153) || !(x <= 5.2e-21))
		tmp = Float64(Float64(x - Float64(wj * x)) / Float64(wj + 1.0));
	else
		tmp = Float64(Float64(x + Float64(-2.0 * Float64(wj * x))) + Float64(wj * wj));
	end
	return tmp
end
function tmp_2 = code(wj, x)
	tmp = 0.0;
	if ((x <= -7.6e+153) || ~((x <= 5.2e-21)))
		tmp = (x - (wj * x)) / (wj + 1.0);
	else
		tmp = (x + (-2.0 * (wj * x))) + (wj * wj);
	end
	tmp_2 = tmp;
end
code[wj_, x_] := If[Or[LessEqual[x, -7.6e+153], N[Not[LessEqual[x, 5.2e-21]], $MachinePrecision]], N[(N[(x - N[(wj * x), $MachinePrecision]), $MachinePrecision] / N[(wj + 1.0), $MachinePrecision]), $MachinePrecision], N[(N[(x + N[(-2.0 * N[(wj * x), $MachinePrecision]), $MachinePrecision]), $MachinePrecision] + N[(wj * wj), $MachinePrecision]), $MachinePrecision]]
\begin{array}{l}

\\
\begin{array}{l}
\mathbf{if}\;x \leq -7.6 \cdot 10^{+153} \lor \neg \left(x \leq 5.2 \cdot 10^{-21}\right):\\
\;\;\;\;\frac{x - wj \cdot x}{wj + 1}\\

\mathbf{else}:\\
\;\;\;\;\left(x + -2 \cdot \left(wj \cdot x\right)\right) + wj \cdot wj\\


\end{array}
\end{array}
Derivation
  1. Split input into 2 regimes
  2. if x < -7.59999999999999933e153 or 5.20000000000000035e-21 < x

    1. Initial program 61.8%

      \[wj - \frac{wj \cdot e^{wj} - x}{e^{wj} + wj \cdot e^{wj}} \]
    2. Step-by-step derivation
      1. sub-neg 61.8%

        \[\leadsto \color{blue}{wj + \left(-\frac{wj \cdot e^{wj} - x}{e^{wj} + wj \cdot e^{wj}}\right)} \]
      2. div-sub 61.8%

        \[\leadsto wj + \left(-\color{blue}{\left(\frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}} - \frac{x}{e^{wj} + wj \cdot e^{wj}}\right)}\right) \]
      3. sub-neg 61.8%

        \[\leadsto wj + \left(-\color{blue}{\left(\frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}} + \left(-\frac{x}{e^{wj} + wj \cdot e^{wj}}\right)\right)}\right) \]
      4. +-commutative 61.8%

        \[\leadsto wj + \left(-\color{blue}{\left(\left(-\frac{x}{e^{wj} + wj \cdot e^{wj}}\right) + \frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}\right)}\right) \]
      5. distribute-neg-in 61.8%

        \[\leadsto wj + \color{blue}{\left(\left(-\left(-\frac{x}{e^{wj} + wj \cdot e^{wj}}\right)\right) + \left(-\frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}\right)\right)} \]
      6. remove-double-neg 61.8%

        \[\leadsto wj + \left(\color{blue}{\frac{x}{e^{wj} + wj \cdot e^{wj}}} + \left(-\frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}\right)\right) \]
      7. sub-neg 61.8%

        \[\leadsto wj + \color{blue}{\left(\frac{x}{e^{wj} + wj \cdot e^{wj}} - \frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}\right)} \]
      8. div-sub 61.8%

        \[\leadsto wj + \color{blue}{\frac{x - wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}} \]
      9. distribute-rgt1-in 99.1%

        \[\leadsto wj + \frac{x - wj \cdot e^{wj}}{\color{blue}{\left(wj + 1\right) \cdot e^{wj}}} \]
      10. associate-/l/ 99.1%

        \[\leadsto wj + \color{blue}{\frac{\frac{x - wj \cdot e^{wj}}{e^{wj}}}{wj + 1}} \]
    3. Simplified 100.0%

      \[\leadsto \color{blue}{wj + \frac{\frac{x}{e^{wj}} - wj}{wj + 1}} \]
    4. Taylor expanded in x around inf 99.1%

      \[\leadsto \color{blue}{\frac{x}{\left(1 + wj\right) \cdot e^{wj}}} \]
    5. Step-by-step derivation
      1. *-un-lft-identity 99.1%

        \[\leadsto \frac{\color{blue}{1 \cdot x}}{\left(1 + wj\right) \cdot e^{wj}} \]
      2. +-commutative 99.1%

        \[\leadsto \frac{1 \cdot x}{\color{blue}{\left(wj + 1\right)} \cdot e^{wj}} \]
      3. times-frac 99.1%

        \[\leadsto \color{blue}{\frac{1}{wj + 1} \cdot \frac{x}{e^{wj}}} \]
      4. +-commutative 99.1%

        \[\leadsto \frac{1}{\color{blue}{1 + wj}} \cdot \frac{x}{e^{wj}} \]
    6. Applied egg-rr 99.1%

      \[\leadsto \color{blue}{\frac{1}{1 + wj} \cdot \frac{x}{e^{wj}}} \]
    7. Step-by-step derivation
      1. associate-*l/ 99.1%

        \[\leadsto \color{blue}{\frac{1 \cdot \frac{x}{e^{wj}}}{1 + wj}} \]
      2. *-lft-identity 99.1%

        \[\leadsto \frac{\color{blue}{\frac{x}{e^{wj}}}}{1 + wj} \]
    8. Simplified 99.1%

      \[\leadsto \color{blue}{\frac{\frac{x}{e^{wj}}}{1 + wj}} \]
    9. Taylor expanded in wj around 0 84.5%

      \[\leadsto \frac{\color{blue}{-1 \cdot \left(wj \cdot x\right) + x}}{1 + wj} \]

    if -7.59999999999999933e153 < x < 5.20000000000000035e-21

    1. Initial program 48.0%

      \[wj - \frac{wj \cdot e^{wj} - x}{e^{wj} + wj \cdot e^{wj}} \]
    2. Step-by-step derivation
      1. distribute-rgt1-in 78.9%

        \[\leadsto wj - \frac{wj \cdot e^{wj} - x}{\color{blue}{\left(wj + 1\right) \cdot e^{wj}}} \]
    3. Simplified 78.9%

      \[\leadsto \color{blue}{wj - \frac{wj \cdot e^{wj} - x}{\left(wj + 1\right) \cdot e^{wj}}} \]
    4. Taylor expanded in wj around 0 48.1%

      \[\leadsto wj - \frac{wj \cdot e^{wj} - x}{\color{blue}{1 + 2 \cdot wj}} \]
    5. Step-by-step derivation
      1. *-commutative 48.1%

        \[\leadsto wj - \frac{wj \cdot e^{wj} - x}{1 + \color{blue}{wj \cdot 2}} \]
    6. Simplified 48.1%

      \[\leadsto wj - \frac{wj \cdot e^{wj} - x}{\color{blue}{1 + wj \cdot 2}} \]
    7. Taylor expanded in wj around 0 76.6%

      \[\leadsto \color{blue}{\left(1 - -4 \cdot x\right) \cdot {wj}^{2} + \left(-2 \cdot \left(wj \cdot x\right) + x\right)} \]
    8. Taylor expanded in x around 0 80.8%

      \[\leadsto \color{blue}{{wj}^{2}} + \left(-2 \cdot \left(wj \cdot x\right) + x\right) \]
    9. Step-by-step derivation
      1. unpow2 80.8%

        \[\leadsto \color{blue}{wj \cdot wj} + \left(-2 \cdot \left(wj \cdot x\right) + x\right) \]
    10. Simplified 80.8%

      \[\leadsto \color{blue}{wj \cdot wj} + \left(-2 \cdot \left(wj \cdot x\right) + x\right) \]
  3. Recombined 2 regimes into one program.
  4. Final simplification 82.4%

    \[\leadsto \begin{array}{l} \mathbf{if}\;x \leq -7.6 \cdot 10^{+153} \lor \neg \left(x \leq 5.2 \cdot 10^{-21}\right):\\ \;\;\;\;\frac{x - wj \cdot x}{wj + 1}\\ \mathbf{else}:\\ \;\;\;\;\left(x + -2 \cdot \left(wj \cdot x\right)\right) + wj \cdot wj\\ \end{array} \]
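
One detail of Alternative 12's guard is worth noting: `(or (<= x -7.6e+153) (not (<= x 5.2e-21)))` is not equivalent to a plain x > 5.2e-21 test under IEEE 754 semantics, because every comparison against NaN is false, so a NaN value of x takes the division branch. A small check in plain Python (the helper name is the editor's, for illustration only):

def branch(x):
	# Mirrors the guard in Alternative 12's FPCore.
	if (x <= -7.6e+153) or not (x <= 5.2e-21):
		return "division branch"
	return "polynomial branch"

print(branch(0.0))           # polynomial branch
print(branch(1.0))           # division branch
print(branch(float("nan")))  # division branch: NaN <= 5.2e-21 is False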

Alternative 13: 77.2% accurate, 20.7× speedup

\[\begin{array}{l} \\ \begin{array}{l} \mathbf{if}\;wj \leq -2.25 \cdot 10^{+266}:\\ \;\;\;\;wj + \frac{x \cdot \left(1 - wj\right)}{wj}\\ \mathbf{elif}\;wj \leq -215:\\ \;\;\;\;wj - \left(wj + \left(wj \cdot wj\right) \cdot \left(wj + -1\right)\right)\\ \mathbf{else}:\\ \;\;\;\;\left(x + -2 \cdot \left(wj \cdot x\right)\right) + wj \cdot wj\\ \end{array} \end{array} \]
(FPCore (wj x)
 :precision binary64
 (if (<= wj -2.25e+266)
   (+ wj (/ (* x (- 1.0 wj)) wj))
   (if (<= wj -215.0)
     (- wj (+ wj (* (* wj wj) (+ wj -1.0))))
     (+ (+ x (* -2.0 (* wj x))) (* wj wj)))))
double code(double wj, double x) {
	double tmp;
	if (wj <= -2.25e+266) {
		tmp = wj + ((x * (1.0 - wj)) / wj);
	} else if (wj <= -215.0) {
		tmp = wj - (wj + ((wj * wj) * (wj + -1.0)));
	} else {
		tmp = (x + (-2.0 * (wj * x))) + (wj * wj);
	}
	return tmp;
}
real(8) function code(wj, x)
    real(8), intent (in) :: wj
    real(8), intent (in) :: x
    real(8) :: tmp
    if (wj <= (-2.25d+266)) then
        tmp = wj + ((x * (1.0d0 - wj)) / wj)
    else if (wj <= (-215.0d0)) then
        tmp = wj - (wj + ((wj * wj) * (wj + (-1.0d0))))
    else
        tmp = (x + ((-2.0d0) * (wj * x))) + (wj * wj)
    end if
    code = tmp
end function
public static double code(double wj, double x) {
	double tmp;
	if (wj <= -2.25e+266) {
		tmp = wj + ((x * (1.0 - wj)) / wj);
	} else if (wj <= -215.0) {
		tmp = wj - (wj + ((wj * wj) * (wj + -1.0)));
	} else {
		tmp = (x + (-2.0 * (wj * x))) + (wj * wj);
	}
	return tmp;
}
def code(wj, x):
	tmp = 0
	if wj <= -2.25e+266:
		tmp = wj + ((x * (1.0 - wj)) / wj)
	elif wj <= -215.0:
		tmp = wj - (wj + ((wj * wj) * (wj + -1.0)))
	else:
		tmp = (x + (-2.0 * (wj * x))) + (wj * wj)
	return tmp
function code(wj, x)
	tmp = 0.0
	if (wj <= -2.25e+266)
		tmp = Float64(wj + Float64(Float64(x * Float64(1.0 - wj)) / wj));
	elseif (wj <= -215.0)
		tmp = Float64(wj - Float64(wj + Float64(Float64(wj * wj) * Float64(wj + -1.0))));
	else
		tmp = Float64(Float64(x + Float64(-2.0 * Float64(wj * x))) + Float64(wj * wj));
	end
	return tmp
end
function tmp_2 = code(wj, x)
	tmp = 0.0;
	if (wj <= -2.25e+266)
		tmp = wj + ((x * (1.0 - wj)) / wj);
	elseif (wj <= -215.0)
		tmp = wj - (wj + ((wj * wj) * (wj + -1.0)));
	else
		tmp = (x + (-2.0 * (wj * x))) + (wj * wj);
	end
	tmp_2 = tmp;
end
code[wj_, x_] := If[LessEqual[wj, -2.25e+266], N[(wj + N[(N[(x * N[(1.0 - wj), $MachinePrecision]), $MachinePrecision] / wj), $MachinePrecision]), $MachinePrecision], If[LessEqual[wj, -215.0], N[(wj - N[(wj + N[(N[(wj * wj), $MachinePrecision] * N[(wj + -1.0), $MachinePrecision]), $MachinePrecision]), $MachinePrecision]), $MachinePrecision], N[(N[(x + N[(-2.0 * N[(wj * x), $MachinePrecision]), $MachinePrecision]), $MachinePrecision] + N[(wj * wj), $MachinePrecision]), $MachinePrecision]]]
\begin{array}{l}

\\
\begin{array}{l}
\mathbf{if}\;wj \leq -2.25 \cdot 10^{+266}:\\
\;\;\;\;wj + \frac{x \cdot \left(1 - wj\right)}{wj}\\

\mathbf{elif}\;wj \leq -215:\\
\;\;\;\;wj - \left(wj + \left(wj \cdot wj\right) \cdot \left(wj + -1\right)\right)\\

\mathbf{else}:\\
\;\;\;\;\left(x + -2 \cdot \left(wj \cdot x\right)\right) + wj \cdot wj\\


\end{array}
\end{array}
Derivation
  1. Split input into 3 regimes
  2. if wj < -2.25e266

    1. Initial program 0.0%

      \[wj - \frac{wj \cdot e^{wj} - x}{e^{wj} + wj \cdot e^{wj}} \]
    2. Step-by-step derivation
      1. sub-neg 0.0%

        \[\leadsto \color{blue}{wj + \left(-\frac{wj \cdot e^{wj} - x}{e^{wj} + wj \cdot e^{wj}}\right)} \]
      2. div-sub 0.0%

        \[\leadsto wj + \left(-\color{blue}{\left(\frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}} - \frac{x}{e^{wj} + wj \cdot e^{wj}}\right)}\right) \]
      3. sub-neg 0.0%

        \[\leadsto wj + \left(-\color{blue}{\left(\frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}} + \left(-\frac{x}{e^{wj} + wj \cdot e^{wj}}\right)\right)}\right) \]
      4. +-commutative 0.0%

        \[\leadsto wj + \left(-\color{blue}{\left(\left(-\frac{x}{e^{wj} + wj \cdot e^{wj}}\right) + \frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}\right)}\right) \]
      5. distribute-neg-in 0.0%

        \[\leadsto wj + \color{blue}{\left(\left(-\left(-\frac{x}{e^{wj} + wj \cdot e^{wj}}\right)\right) + \left(-\frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}\right)\right)} \]
      6. remove-double-neg 0.0%

        \[\leadsto wj + \left(\color{blue}{\frac{x}{e^{wj} + wj \cdot e^{wj}}} + \left(-\frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}\right)\right) \]
      7. sub-neg 0.0%

        \[\leadsto wj + \color{blue}{\left(\frac{x}{e^{wj} + wj \cdot e^{wj}} - \frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}\right)} \]
      8. div-sub 0.0%

        \[\leadsto wj + \color{blue}{\frac{x - wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}} \]
      9. distribute-rgt1-in 100.0%

        \[\leadsto wj + \frac{x - wj \cdot e^{wj}}{\color{blue}{\left(wj + 1\right) \cdot e^{wj}}} \]
      10. associate-/l/ 100.0%

        \[\leadsto wj + \color{blue}{\frac{\frac{x - wj \cdot e^{wj}}{e^{wj}}}{wj + 1}} \]
    3. Simplified 100.0%

      \[\leadsto \color{blue}{wj + \frac{\frac{x}{e^{wj}} - wj}{wj + 1}} \]
    4. Taylor expanded in wj around 0 73.4%

      \[\leadsto wj + \frac{\color{blue}{\left(-1 \cdot \left(wj \cdot x\right) + x\right)} - wj}{wj + 1} \]
    5. Taylor expanded in x around inf 73.4%

      \[\leadsto wj + \color{blue}{\frac{\left(1 + -1 \cdot wj\right) \cdot x}{1 + wj}} \]
    6. Step-by-step derivation
      1. associate-/l* 5.7%

        \[\leadsto wj + \color{blue}{\frac{1 + -1 \cdot wj}{\frac{1 + wj}{x}}} \]
      2. mul-1-neg 5.7%

        \[\leadsto wj + \frac{1 + \color{blue}{\left(-wj\right)}}{\frac{1 + wj}{x}} \]
    7. Simplified 5.7%

      \[\leadsto wj + \color{blue}{\frac{1 + \left(-wj\right)}{\frac{1 + wj}{x}}} \]
    8. Taylor expanded in wj around inf 5.7%

      \[\leadsto wj + \frac{1 + \left(-wj\right)}{\color{blue}{\frac{wj}{x}}} \]
    9. Taylor expanded in x around 0 73.4%

      \[\leadsto wj + \color{blue}{\frac{\left(1 - wj\right) \cdot x}{wj}} \]

    if -2.25e266 < wj < -215

    1. Initial program 0.0%

      \[wj - \frac{wj \cdot e^{wj} - x}{e^{wj} + wj \cdot e^{wj}} \]
    2. Step-by-step derivation
      1. sub-neg 0.0%

        \[\leadsto \color{blue}{wj + \left(-\frac{wj \cdot e^{wj} - x}{e^{wj} + wj \cdot e^{wj}}\right)} \]
      2. div-sub 0.0%

        \[\leadsto wj + \left(-\color{blue}{\left(\frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}} - \frac{x}{e^{wj} + wj \cdot e^{wj}}\right)}\right) \]
      3. sub-neg 0.0%

        \[\leadsto wj + \left(-\color{blue}{\left(\frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}} + \left(-\frac{x}{e^{wj} + wj \cdot e^{wj}}\right)\right)}\right) \]
      4. +-commutative 0.0%

        \[\leadsto wj + \left(-\color{blue}{\left(\left(-\frac{x}{e^{wj} + wj \cdot e^{wj}}\right) + \frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}\right)}\right) \]
      5. distribute-neg-in 0.0%

        \[\leadsto wj + \color{blue}{\left(\left(-\left(-\frac{x}{e^{wj} + wj \cdot e^{wj}}\right)\right) + \left(-\frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}\right)\right)} \]
      6. remove-double-neg 0.0%

        \[\leadsto wj + \left(\color{blue}{\frac{x}{e^{wj} + wj \cdot e^{wj}}} + \left(-\frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}\right)\right) \]
      7. sub-neg 0.0%

        \[\leadsto wj + \color{blue}{\left(\frac{x}{e^{wj} + wj \cdot e^{wj}} - \frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}\right)} \]
      8. div-sub 0.0%

        \[\leadsto wj + \color{blue}{\frac{x - wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}} \]
      9. distribute-rgt1-in 100.0%

        \[\leadsto wj + \frac{x - wj \cdot e^{wj}}{\color{blue}{\left(wj + 1\right) \cdot e^{wj}}} \]
      10. associate-/l/ 100.0%

        \[\leadsto wj + \color{blue}{\frac{\frac{x - wj \cdot e^{wj}}{e^{wj}}}{wj + 1}} \]
    3. Simplified 100.0%

      \[\leadsto \color{blue}{wj + \frac{\frac{x}{e^{wj}} - wj}{wj + 1}} \]
    4. Taylor expanded in x around 0 2.6%

      \[\leadsto \color{blue}{wj - \frac{wj}{1 + wj}} \]
    5. Step-by-step derivation
      1. +-commutative 2.6%

        \[\leadsto wj - \frac{wj}{\color{blue}{wj + 1}} \]
    6. Simplified 2.6%

      \[\leadsto \color{blue}{wj - \frac{wj}{wj + 1}} \]
    7. Taylor expanded in wj around 0 36.7%

      \[\leadsto wj - \color{blue}{\left(-1 \cdot {wj}^{2} + \left({wj}^{3} + wj\right)\right)} \]
    8. Step-by-step derivation
      1. associate-+r+ 36.7%

        \[\leadsto wj - \color{blue}{\left(\left(-1 \cdot {wj}^{2} + {wj}^{3}\right) + wj\right)} \]
      2. +-commutative 36.7%

        \[\leadsto wj - \color{blue}{\left(wj + \left(-1 \cdot {wj}^{2} + {wj}^{3}\right)\right)} \]
      3. cube-mult 36.7%

        \[\leadsto wj - \left(wj + \left(-1 \cdot {wj}^{2} + \color{blue}{wj \cdot \left(wj \cdot wj\right)}\right)\right) \]
      4. unpow2 36.7%

        \[\leadsto wj - \left(wj + \left(-1 \cdot {wj}^{2} + wj \cdot \color{blue}{{wj}^{2}}\right)\right) \]
      5. distribute-rgt-out 36.7%

        \[\leadsto wj - \left(wj + \color{blue}{{wj}^{2} \cdot \left(-1 + wj\right)}\right) \]
      6. unpow2 36.7%

        \[\leadsto wj - \left(wj + \color{blue}{\left(wj \cdot wj\right)} \cdot \left(-1 + wj\right)\right) \]
      7. +-commutative 36.7%

        \[\leadsto wj - \left(wj + \left(wj \cdot wj\right) \cdot \color{blue}{\left(wj + -1\right)}\right) \]
    9. Simplified 36.7%

      \[\leadsto wj - \color{blue}{\left(wj + \left(wj \cdot wj\right) \cdot \left(wj + -1\right)\right)} \]

    if -215 < wj

    1. Initial program 81.3%

      \[wj - \frac{wj \cdot e^{wj} - x}{e^{wj} + wj \cdot e^{wj}} \]
    2. Step-by-step derivation
      1. distribute-rgt1-in 81.3%

        \[\leadsto wj - \frac{wj \cdot e^{wj} - x}{\color{blue}{\left(wj + 1\right) \cdot e^{wj}}} \]
    3. Simplified 81.3%

      \[\leadsto \color{blue}{wj - \frac{wj \cdot e^{wj} - x}{\left(wj + 1\right) \cdot e^{wj}}} \]
    4. Taylor expanded in wj around 0 79.7%

      \[\leadsto wj - \frac{wj \cdot e^{wj} - x}{\color{blue}{1 + 2 \cdot wj}} \]
    5. Step-by-step derivation
      1. *-commutative 79.7%

        \[\leadsto wj - \frac{wj \cdot e^{wj} - x}{1 + \color{blue}{wj \cdot 2}} \]
    6. Simplified 79.7%

      \[\leadsto wj - \frac{wj \cdot e^{wj} - x}{\color{blue}{1 + wj \cdot 2}} \]
    7. Taylor expanded in wj around 0 97.6%

      \[\leadsto \color{blue}{\left(1 - -4 \cdot x\right) \cdot {wj}^{2} + \left(-2 \cdot \left(wj \cdot x\right) + x\right)} \]
    8. Taylor expanded in x around 0 97.5%

      \[\leadsto \color{blue}{{wj}^{2}} + \left(-2 \cdot \left(wj \cdot x\right) + x\right) \]
    9. Step-by-step derivation
      1. unpow2 97.5%

        \[\leadsto \color{blue}{wj \cdot wj} + \left(-2 \cdot \left(wj \cdot x\right) + x\right) \]
    10. Simplified 97.5%

      \[\leadsto \color{blue}{wj \cdot wj} + \left(-2 \cdot \left(wj \cdot x\right) + x\right) \]
  3. Recombined 3 regimes into one program.
  4. Final simplification 78.7%

    \[\leadsto \begin{array}{l} \mathbf{if}\;wj \leq -2.25 \cdot 10^{+266}:\\ \;\;\;\;wj + \frac{x \cdot \left(1 - wj\right)}{wj}\\ \mathbf{elif}\;wj \leq -215:\\ \;\;\;\;wj - \left(wj + \left(wj \cdot wj\right) \cdot \left(wj + -1\right)\right)\\ \mathbf{else}:\\ \;\;\;\;\left(x + -2 \cdot \left(wj \cdot x\right)\right) + wj \cdot wj\\ \end{array} \]
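
Alternative 13's middle branch, wj - (wj + (wj · wj) · (wj + -1)), is the third-order series of wj - wj/(1 + wj) left in the exact shape the rewrite steps produced; expanding both sides confirms the match (a verification sketch, not from the report):

\[wj - \left(wj + \left(wj \cdot wj\right) \cdot \left(wj + -1\right)\right) = {wj}^{2} - {wj}^{3}, \qquad \frac{{wj}^{2}}{1 + wj} = {wj}^{2} - {wj}^{3} + {wj}^{4} - \cdots\]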

Alternative 14: 78.4% accurate, 20.7× speedup

\[\begin{array}{l} \\ \begin{array}{l} \mathbf{if}\;x \leq -7.6 \cdot 10^{+153}:\\ \;\;\;\;\frac{x - wj \cdot x}{wj + 1}\\ \mathbf{elif}\;x \leq 1.85 \cdot 10^{-18}:\\ \;\;\;\;\left(x + -2 \cdot \left(wj \cdot x\right)\right) + wj \cdot wj\\ \mathbf{else}:\\ \;\;\;\;wj + \frac{x \cdot \left(1 - wj\right)}{wj + 1}\\ \end{array} \end{array} \]
(FPCore (wj x)
 :precision binary64
 (if (<= x -7.6e+153)
   (/ (- x (* wj x)) (+ wj 1.0))
   (if (<= x 1.85e-18)
     (+ (+ x (* -2.0 (* wj x))) (* wj wj))
     (+ wj (/ (* x (- 1.0 wj)) (+ wj 1.0))))))
double code(double wj, double x) {
	double tmp;
	if (x <= -7.6e+153) {
		tmp = (x - (wj * x)) / (wj + 1.0);
	} else if (x <= 1.85e-18) {
		tmp = (x + (-2.0 * (wj * x))) + (wj * wj);
	} else {
		tmp = wj + ((x * (1.0 - wj)) / (wj + 1.0));
	}
	return tmp;
}
real(8) function code(wj, x)
    real(8), intent (in) :: wj
    real(8), intent (in) :: x
    real(8) :: tmp
    if (x <= (-7.6d+153)) then
        tmp = (x - (wj * x)) / (wj + 1.0d0)
    else if (x <= 1.85d-18) then
        tmp = (x + ((-2.0d0) * (wj * x))) + (wj * wj)
    else
        tmp = wj + ((x * (1.0d0 - wj)) / (wj + 1.0d0))
    end if
    code = tmp
end function
public static double code(double wj, double x) {
	double tmp;
	if (x <= -7.6e+153) {
		tmp = (x - (wj * x)) / (wj + 1.0);
	} else if (x <= 1.85e-18) {
		tmp = (x + (-2.0 * (wj * x))) + (wj * wj);
	} else {
		tmp = wj + ((x * (1.0 - wj)) / (wj + 1.0));
	}
	return tmp;
}
def code(wj, x):
	tmp = 0
	if x <= -7.6e+153:
		tmp = (x - (wj * x)) / (wj + 1.0)
	elif x <= 1.85e-18:
		tmp = (x + (-2.0 * (wj * x))) + (wj * wj)
	else:
		tmp = wj + ((x * (1.0 - wj)) / (wj + 1.0))
	return tmp
function code(wj, x)
	tmp = 0.0
	if (x <= -7.6e+153)
		tmp = Float64(Float64(x - Float64(wj * x)) / Float64(wj + 1.0));
	elseif (x <= 1.85e-18)
		tmp = Float64(Float64(x + Float64(-2.0 * Float64(wj * x))) + Float64(wj * wj));
	else
		tmp = Float64(wj + Float64(Float64(x * Float64(1.0 - wj)) / Float64(wj + 1.0)));
	end
	return tmp
end
function tmp_2 = code(wj, x)
	tmp = 0.0;
	if (x <= -7.6e+153)
		tmp = (x - (wj * x)) / (wj + 1.0);
	elseif (x <= 1.85e-18)
		tmp = (x + (-2.0 * (wj * x))) + (wj * wj);
	else
		tmp = wj + ((x * (1.0 - wj)) / (wj + 1.0));
	end
	tmp_2 = tmp;
end
code[wj_, x_] := If[LessEqual[x, -7.6e+153], N[(N[(x - N[(wj * x), $MachinePrecision]), $MachinePrecision] / N[(wj + 1.0), $MachinePrecision]), $MachinePrecision], If[LessEqual[x, 1.85e-18], N[(N[(x + N[(-2.0 * N[(wj * x), $MachinePrecision]), $MachinePrecision]), $MachinePrecision] + N[(wj * wj), $MachinePrecision]), $MachinePrecision], N[(wj + N[(N[(x * N[(1.0 - wj), $MachinePrecision]), $MachinePrecision] / N[(wj + 1.0), $MachinePrecision]), $MachinePrecision]), $MachinePrecision]]]
\begin{array}{l}

\\
\begin{array}{l}
\mathbf{if}\;x \leq -7.6 \cdot 10^{+153}:\\
\;\;\;\;\frac{x - wj \cdot x}{wj + 1}\\

\mathbf{elif}\;x \leq 1.85 \cdot 10^{-18}:\\
\;\;\;\;\left(x + -2 \cdot \left(wj \cdot x\right)\right) + wj \cdot wj\\

\mathbf{else}:\\
\;\;\;\;wj + \frac{x \cdot \left(1 - wj\right)}{wj + 1}\\


\end{array}
\end{array}
Derivation
  1. Split input into 3 regimes
  2. if x < -7.59999999999999933e153

    1. Initial program 62.5%

      \[wj - \frac{wj \cdot e^{wj} - x}{e^{wj} + wj \cdot e^{wj}} \]
    2. Step-by-step derivation
      1. sub-neg 62.5%

        \[\leadsto \color{blue}{wj + \left(-\frac{wj \cdot e^{wj} - x}{e^{wj} + wj \cdot e^{wj}}\right)} \]
      2. div-sub 62.5%

        \[\leadsto wj + \left(-\color{blue}{\left(\frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}} - \frac{x}{e^{wj} + wj \cdot e^{wj}}\right)}\right) \]
      3. sub-neg 62.5%

        \[\leadsto wj + \left(-\color{blue}{\left(\frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}} + \left(-\frac{x}{e^{wj} + wj \cdot e^{wj}}\right)\right)}\right) \]
      4. +-commutative 62.5%

        \[\leadsto wj + \left(-\color{blue}{\left(\left(-\frac{x}{e^{wj} + wj \cdot e^{wj}}\right) + \frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}\right)}\right) \]
      5. distribute-neg-in 62.5%

        \[\leadsto wj + \color{blue}{\left(\left(-\left(-\frac{x}{e^{wj} + wj \cdot e^{wj}}\right)\right) + \left(-\frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}\right)\right)} \]
      6. remove-double-neg 62.5%

        \[\leadsto wj + \left(\color{blue}{\frac{x}{e^{wj} + wj \cdot e^{wj}}} + \left(-\frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}\right)\right) \]
      7. sub-neg 62.5%

        \[\leadsto wj + \color{blue}{\left(\frac{x}{e^{wj} + wj \cdot e^{wj}} - \frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}\right)} \]
      8. div-sub 62.5%

        \[\leadsto wj + \color{blue}{\frac{x - wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}} \]
      9. distribute-rgt1-in 96.8%

        \[\leadsto wj + \frac{x - wj \cdot e^{wj}}{\color{blue}{\left(wj + 1\right) \cdot e^{wj}}} \]
      10. associate-/l/ 96.9%

        \[\leadsto wj + \color{blue}{\frac{\frac{x - wj \cdot e^{wj}}{e^{wj}}}{wj + 1}} \]
    3. Simplified 100.0%

      \[\leadsto \color{blue}{wj + \frac{\frac{x}{e^{wj}} - wj}{wj + 1}} \]
    4. Taylor expanded in x around inf 96.9%

      \[\leadsto \color{blue}{\frac{x}{\left(1 + wj\right) \cdot e^{wj}}} \]
    5. Step-by-step derivation
      1. *-un-lft-identity 96.9%

        \[\leadsto \frac{\color{blue}{1 \cdot x}}{\left(1 + wj\right) \cdot e^{wj}} \]
      2. +-commutative 96.9%

        \[\leadsto \frac{1 \cdot x}{\color{blue}{\left(wj + 1\right)} \cdot e^{wj}} \]
      3. times-frac 96.9%

        \[\leadsto \color{blue}{\frac{1}{wj + 1} \cdot \frac{x}{e^{wj}}} \]
      4. +-commutative 96.9%

        \[\leadsto \frac{1}{\color{blue}{1 + wj}} \cdot \frac{x}{e^{wj}} \]
    6. Applied egg-rr 96.9%

      \[\leadsto \color{blue}{\frac{1}{1 + wj} \cdot \frac{x}{e^{wj}}} \]
    7. Step-by-step derivation
      1. associate-*l/ 97.0%

        \[\leadsto \color{blue}{\frac{1 \cdot \frac{x}{e^{wj}}}{1 + wj}} \]
      2. *-lft-identity 97.0%

        \[\leadsto \frac{\color{blue}{\frac{x}{e^{wj}}}}{1 + wj} \]
    8. Simplified 97.0%

      \[\leadsto \color{blue}{\frac{\frac{x}{e^{wj}}}{1 + wj}} \]
    9. Taylor expanded in wj around 0 90.9%

      \[\leadsto \frac{\color{blue}{-1 \cdot \left(wj \cdot x\right) + x}}{1 + wj} \]

    if -7.59999999999999933e153 < x < 1.8500000000000002e-18

    1. Initial program 48.0%

      \[wj - \frac{wj \cdot e^{wj} - x}{e^{wj} + wj \cdot e^{wj}} \]
    2. Step-by-step derivation
      1. distribute-rgt1-in 78.9%

        \[\leadsto wj - \frac{wj \cdot e^{wj} - x}{\color{blue}{\left(wj + 1\right) \cdot e^{wj}}} \]
    3. Simplified 78.9%

      \[\leadsto \color{blue}{wj - \frac{wj \cdot e^{wj} - x}{\left(wj + 1\right) \cdot e^{wj}}} \]
    4. Taylor expanded in wj around 0 48.1%

      \[\leadsto wj - \frac{wj \cdot e^{wj} - x}{\color{blue}{1 + 2 \cdot wj}} \]
    5. Step-by-step derivation
      1. *-commutative 48.1%

        \[\leadsto wj - \frac{wj \cdot e^{wj} - x}{1 + \color{blue}{wj \cdot 2}} \]
    6. Simplified 48.1%

      \[\leadsto wj - \frac{wj \cdot e^{wj} - x}{\color{blue}{1 + wj \cdot 2}} \]
    7. Taylor expanded in wj around 0 76.6%

      \[\leadsto \color{blue}{\left(1 - -4 \cdot x\right) \cdot {wj}^{2} + \left(-2 \cdot \left(wj \cdot x\right) + x\right)} \]
    8. Taylor expanded in x around 0 80.8%

      \[\leadsto \color{blue}{{wj}^{2}} + \left(-2 \cdot \left(wj \cdot x\right) + x\right) \]
    9. Step-by-step derivation
      1. unpow2 80.8%

        \[\leadsto \color{blue}{wj \cdot wj} + \left(-2 \cdot \left(wj \cdot x\right) + x\right) \]
    10. Simplified 80.8%

      \[\leadsto \color{blue}{wj \cdot wj} + \left(-2 \cdot \left(wj \cdot x\right) + x\right) \]

    if 1.8500000000000002e-18 < x

    1. Initial program 61.5%

      \[wj - \frac{wj \cdot e^{wj} - x}{e^{wj} + wj \cdot e^{wj}} \]
    2. Step-by-step derivation
      1. sub-neg 61.5%

        \[\leadsto \color{blue}{wj + \left(-\frac{wj \cdot e^{wj} - x}{e^{wj} + wj \cdot e^{wj}}\right)} \]
      2. div-sub 61.5%

        \[\leadsto wj + \left(-\color{blue}{\left(\frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}} - \frac{x}{e^{wj} + wj \cdot e^{wj}}\right)}\right) \]
      3. sub-neg 61.5%

        \[\leadsto wj + \left(-\color{blue}{\left(\frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}} + \left(-\frac{x}{e^{wj} + wj \cdot e^{wj}}\right)\right)}\right) \]
      4. +-commutative 61.5%

        \[\leadsto wj + \left(-\color{blue}{\left(\left(-\frac{x}{e^{wj} + wj \cdot e^{wj}}\right) + \frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}\right)}\right) \]
      5. distribute-neg-in 61.5%

        \[\leadsto wj + \color{blue}{\left(\left(-\left(-\frac{x}{e^{wj} + wj \cdot e^{wj}}\right)\right) + \left(-\frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}\right)\right)} \]
      6. remove-double-neg 61.5%

        \[\leadsto wj + \left(\color{blue}{\frac{x}{e^{wj} + wj \cdot e^{wj}}} + \left(-\frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}\right)\right) \]
      7. sub-neg 61.5%

        \[\leadsto wj + \color{blue}{\left(\frac{x}{e^{wj} + wj \cdot e^{wj}} - \frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}\right)} \]
      8. div-sub 61.5%

        \[\leadsto wj + \color{blue}{\frac{x - wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}} \]
      9. distribute-rgt1-in 100.0%

        \[\leadsto wj + \frac{x - wj \cdot e^{wj}}{\color{blue}{\left(wj + 1\right) \cdot e^{wj}}} \]
      10. associate-/l/ 100.0%

        \[\leadsto wj + \color{blue}{\frac{\frac{x - wj \cdot e^{wj}}{e^{wj}}}{wj + 1}} \]
    3. Simplified 100.0%

      \[\leadsto \color{blue}{wj + \frac{\frac{x}{e^{wj}} - wj}{wj + 1}} \]
    4. Taylor expanded in wj around 0 82.0%

      \[\leadsto wj + \frac{\color{blue}{\left(-1 \cdot \left(wj \cdot x\right) + x\right)} - wj}{wj + 1} \]
    5. Taylor expanded in x around -inf 82.0%

      \[\leadsto wj + \color{blue}{-1 \cdot \frac{\left(wj - 1\right) \cdot x}{1 + wj}} \]
    6. Step-by-step derivation
      1. associate-*r/ 82.0%

        \[\leadsto wj + \color{blue}{\frac{-1 \cdot \left(\left(wj - 1\right) \cdot x\right)}{1 + wj}} \]
      2. *-commutative 82.0%

        \[\leadsto wj + \frac{-1 \cdot \color{blue}{\left(x \cdot \left(wj - 1\right)\right)}}{1 + wj} \]
      3. associate-*r* 82.0%

        \[\leadsto wj + \frac{\color{blue}{\left(-1 \cdot x\right) \cdot \left(wj - 1\right)}}{1 + wj} \]
      4. mul-1-neg 82.0%

        \[\leadsto wj + \frac{\color{blue}{\left(-x\right)} \cdot \left(wj - 1\right)}{1 + wj} \]
      5. sub-neg 82.0%

        \[\leadsto wj + \frac{\left(-x\right) \cdot \color{blue}{\left(wj + \left(-1\right)\right)}}{1 + wj} \]
      6. metadata-eval 82.0%

        \[\leadsto wj + \frac{\left(-x\right) \cdot \left(wj + \color{blue}{-1}\right)}{1 + wj} \]
    7. Simplified 82.0%

      \[\leadsto wj + \color{blue}{\frac{\left(-x\right) \cdot \left(wj + -1\right)}{1 + wj}} \]
  3. Recombined 3 regimes into one program.
  4. Final simplification 82.4% (a numerical spot-check follows)

    \[\leadsto \begin{array}{l} \mathbf{if}\;x \leq -7.6 \cdot 10^{+153}:\\ \;\;\;\;\frac{x - wj \cdot x}{wj + 1}\\ \mathbf{elif}\;x \leq 1.85 \cdot 10^{-18}:\\ \;\;\;\;\left(x + -2 \cdot \left(wj \cdot x\right)\right) + wj \cdot wj\\ \mathbf{else}:\\ \;\;\;\;wj + \frac{x \cdot \left(1 - wj\right)}{wj + 1}\\ \end{array} \]
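
A quick way to see where this alternative helps and where it hurts is to compare both programs against a high-precision reference. The sketch below is not part of the report: it assumes the mpmath package is available, and the sample points are chosen only for illustration.

import math
import mpmath

def original(wj, x):
    # The initial program: wj - (wj*e^wj - x) / (e^wj + wj*e^wj)
    t0 = wj * math.exp(wj)
    return wj - (t0 - x) / (math.exp(wj) + t0)

def alternative14(wj, x):
    # The three-regime rewrite from this section
    if x <= -7.6e+153:
        return (x - wj * x) / (wj + 1.0)
    elif x <= 1.85e-18:
        return (x + -2.0 * (wj * x)) + wj * wj
    else:
        return wj + (x * (1.0 - wj)) / (wj + 1.0)

def reference(wj, x):
    # The same formula evaluated at 200 bits, then rounded to binary64
    with mpmath.workprec(200):
        w, y = mpmath.mpf(wj), mpmath.mpf(x)
        t0 = w * mpmath.exp(w)
        return float(w - (t0 - y) / (mpmath.exp(w) + t0))

for wj, x in [(1e-9, 1e-20), (0.5, 3.0)]:
    print(wj, x, original(wj, x), alternative14(wj, x), reference(wj, x))

At (1e-9, 1e-20) the original loses a substantial share of its significant bits to the cancellation in wj - (t0 - x)/(e^wj + t0), while the x ≤ 1.85e-18 branch tracks the reference noticeably better; at (0.5, 3.0) the truncated-series branch is the less accurate of the two, consistent with this alternative trading accuracy (78.4%) for speed (20.7×).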

Alternative 15: 66.8% accurate, 34.5× speedup?

\[\begin{array}{l} \\ \begin{array}{l} \mathbf{if}\;wj \leq -215:\\ \;\;\;\;wj \cdot wj\\ \mathbf{else}:\\ \;\;\;\;x + -2 \cdot \left(wj \cdot x\right)\\ \end{array} \end{array} \]
(FPCore (wj x)
 :precision binary64
 (if (<= wj -215.0) (* wj wj) (+ x (* -2.0 (* wj x)))))
double code(double wj, double x) {
	double tmp;
	if (wj <= -215.0) {
		tmp = wj * wj;
	} else {
		tmp = x + (-2.0 * (wj * x));
	}
	return tmp;
}
real(8) function code(wj, x)
    real(8), intent (in) :: wj
    real(8), intent (in) :: x
    real(8) :: tmp
    if (wj <= (-215.0d0)) then
        tmp = wj * wj
    else
        tmp = x + ((-2.0d0) * (wj * x))
    end if
    code = tmp
end function
public static double code(double wj, double x) {
	double tmp;
	if (wj <= -215.0) {
		tmp = wj * wj;
	} else {
		tmp = x + (-2.0 * (wj * x));
	}
	return tmp;
}
def code(wj, x):
	tmp = 0
	if wj <= -215.0:
		tmp = wj * wj
	else:
		tmp = x + (-2.0 * (wj * x))
	return tmp
function code(wj, x)
	tmp = 0.0
	if (wj <= -215.0)
		tmp = Float64(wj * wj);
	else
		tmp = Float64(x + Float64(-2.0 * Float64(wj * x)));
	end
	return tmp
end
function tmp_2 = code(wj, x)
	tmp = 0.0;
	if (wj <= -215.0)
		tmp = wj * wj;
	else
		tmp = x + (-2.0 * (wj * x));
	end
	tmp_2 = tmp;
end
code[wj_, x_] := If[LessEqual[wj, -215.0], N[(wj * wj), $MachinePrecision], N[(x + N[(-2.0 * N[(wj * x), $MachinePrecision]), $MachinePrecision]), $MachinePrecision]]
\begin{array}{l}

\\
\begin{array}{l}
\mathbf{if}\;wj \leq -215:\\
\;\;\;\;wj \cdot wj\\

\mathbf{else}:\\
\;\;\;\;x + -2 \cdot \left(wj \cdot x\right)\\


\end{array}
\end{array}
Derivation
  1. Split input into 2 regimes
  2. if wj < -215

    1. Initial program 0.0%

      \[wj - \frac{wj \cdot e^{wj} - x}{e^{wj} + wj \cdot e^{wj}} \]
    2. Step-by-step derivation
      1. sub-neg 0.0%

        \[\leadsto \color{blue}{wj + \left(-\frac{wj \cdot e^{wj} - x}{e^{wj} + wj \cdot e^{wj}}\right)} \]
      2. div-sub 0.0%

        \[\leadsto wj + \left(-\color{blue}{\left(\frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}} - \frac{x}{e^{wj} + wj \cdot e^{wj}}\right)}\right) \]
      3. sub-neg 0.0%

        \[\leadsto wj + \left(-\color{blue}{\left(\frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}} + \left(-\frac{x}{e^{wj} + wj \cdot e^{wj}}\right)\right)}\right) \]
      4. +-commutative 0.0%

        \[\leadsto wj + \left(-\color{blue}{\left(\left(-\frac{x}{e^{wj} + wj \cdot e^{wj}}\right) + \frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}\right)}\right) \]
      5. distribute-neg-in 0.0%

        \[\leadsto wj + \color{blue}{\left(\left(-\left(-\frac{x}{e^{wj} + wj \cdot e^{wj}}\right)\right) + \left(-\frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}\right)\right)} \]
      6. remove-double-neg 0.0%

        \[\leadsto wj + \left(\color{blue}{\frac{x}{e^{wj} + wj \cdot e^{wj}}} + \left(-\frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}\right)\right) \]
      7. sub-neg 0.0%

        \[\leadsto wj + \color{blue}{\left(\frac{x}{e^{wj} + wj \cdot e^{wj}} - \frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}\right)} \]
      8. div-sub 0.0%

        \[\leadsto wj + \color{blue}{\frac{x - wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}} \]
      9. distribute-rgt1-in 100.0%

        \[\leadsto wj + \frac{x - wj \cdot e^{wj}}{\color{blue}{\left(wj + 1\right) \cdot e^{wj}}} \]
      10. associate-/l/ 100.0%

        \[\leadsto wj + \color{blue}{\frac{\frac{x - wj \cdot e^{wj}}{e^{wj}}}{wj + 1}} \]
    3. Simplified 100.0%

      \[\leadsto \color{blue}{wj + \frac{\frac{x}{e^{wj}} - wj}{wj + 1}} \]
    4. Taylor expanded in x around 0 3.0%

      \[\leadsto \color{blue}{wj - \frac{wj}{1 + wj}} \]
    5. Step-by-step derivation
      1. +-commutative 3.0%

        \[\leadsto wj - \frac{wj}{\color{blue}{wj + 1}} \]
    6. Simplified 3.0%

      \[\leadsto \color{blue}{wj - \frac{wj}{wj + 1}} \]
    7. Taylor expanded in wj around 0 31.2%

      \[\leadsto \color{blue}{{wj}^{2}} \]
    8. Step-by-step derivation
      1. unpow2 31.2%

        \[\leadsto \color{blue}{wj \cdot wj} \]
    9. Simplified 31.2%

      \[\leadsto \color{blue}{wj \cdot wj} \]

    if -215 < wj

    1. Initial program 81.3%

      \[wj - \frac{wj \cdot e^{wj} - x}{e^{wj} + wj \cdot e^{wj}} \]
    2. Step-by-step derivation
      1. sub-neg 81.3%

        \[\leadsto \color{blue}{wj + \left(-\frac{wj \cdot e^{wj} - x}{e^{wj} + wj \cdot e^{wj}}\right)} \]
      2. div-sub 81.3%

        \[\leadsto wj + \left(-\color{blue}{\left(\frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}} - \frac{x}{e^{wj} + wj \cdot e^{wj}}\right)}\right) \]
      3. sub-neg 81.3%

        \[\leadsto wj + \left(-\color{blue}{\left(\frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}} + \left(-\frac{x}{e^{wj} + wj \cdot e^{wj}}\right)\right)}\right) \]
      4. +-commutative 81.3%

        \[\leadsto wj + \left(-\color{blue}{\left(\left(-\frac{x}{e^{wj} + wj \cdot e^{wj}}\right) + \frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}\right)}\right) \]
      5. distribute-neg-in 81.3%

        \[\leadsto wj + \color{blue}{\left(\left(-\left(-\frac{x}{e^{wj} + wj \cdot e^{wj}}\right)\right) + \left(-\frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}\right)\right)} \]
      6. remove-double-neg 81.3%

        \[\leadsto wj + \left(\color{blue}{\frac{x}{e^{wj} + wj \cdot e^{wj}}} + \left(-\frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}\right)\right) \]
      7. sub-neg 81.3%

        \[\leadsto wj + \color{blue}{\left(\frac{x}{e^{wj} + wj \cdot e^{wj}} - \frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}\right)} \]
      8. div-sub 81.3%

        \[\leadsto wj + \color{blue}{\frac{x - wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}} \]
      9. distribute-rgt1-in 81.3%

        \[\leadsto wj + \frac{x - wj \cdot e^{wj}}{\color{blue}{\left(wj + 1\right) \cdot e^{wj}}} \]
      10. associate-/l/ 81.2%

        \[\leadsto wj + \color{blue}{\frac{\frac{x - wj \cdot e^{wj}}{e^{wj}}}{wj + 1}} \]
    3. Simplified 81.8%

      \[\leadsto \color{blue}{wj + \frac{\frac{x}{e^{wj}} - wj}{wj + 1}} \]
    4. Taylor expanded in wj around 0 87.1% (worked out after this derivation)

      \[\leadsto \color{blue}{-2 \cdot \left(wj \cdot x\right) + x} \]
  3. Recombined 2 regimes into one program.
  4. Final simplification 68.3%

    \[\leadsto \begin{array}{l} \mathbf{if}\;wj \leq -215:\\ \;\;\;\;wj \cdot wj\\ \mathbf{else}:\\ \;\;\;\;x + -2 \cdot \left(wj \cdot x\right)\\ \end{array} \]
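
The else branch comes from step 4 of the -215 < wj regime; written out (my own check, not Herbie's output), with x/e^wj = x·(1 - wj) + O(wj^2) and 1/(wj + 1) = 1 - wj + O(wj^2):

\[wj + \frac{\frac{x}{e^{wj}} - wj}{wj + 1} = wj + \left(x \cdot \left(1 - wj\right) - wj\right) \cdot \left(1 - wj\right) + O\left({wj}^{2}\right) = -2 \cdot \left(wj \cdot x\right) + x + O\left({wj}^{2}\right). \]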

Alternative 16: 66.9% accurate, 34.5× speedup?

\[\begin{array}{l} \\ \begin{array}{l} \mathbf{if}\;wj \leq -3 \cdot 10^{+98}:\\ \;\;\;\;wj \cdot wj\\ \mathbf{else}:\\ \;\;\;\;\frac{x}{1 + wj \cdot 2}\\ \end{array} \end{array} \]
(FPCore (wj x)
 :precision binary64
 (if (<= wj -3e+98) (* wj wj) (/ x (+ 1.0 (* wj 2.0)))))
double code(double wj, double x) {
	double tmp;
	if (wj <= -3e+98) {
		tmp = wj * wj;
	} else {
		tmp = x / (1.0 + (wj * 2.0));
	}
	return tmp;
}
real(8) function code(wj, x)
    real(8), intent (in) :: wj
    real(8), intent (in) :: x
    real(8) :: tmp
    if (wj <= (-3d+98)) then
        tmp = wj * wj
    else
        tmp = x / (1.0d0 + (wj * 2.0d0))
    end if
    code = tmp
end function
public static double code(double wj, double x) {
	double tmp;
	if (wj <= -3e+98) {
		tmp = wj * wj;
	} else {
		tmp = x / (1.0 + (wj * 2.0));
	}
	return tmp;
}
def code(wj, x):
	tmp = 0
	if wj <= -3e+98:
		tmp = wj * wj
	else:
		tmp = x / (1.0 + (wj * 2.0))
	return tmp
function code(wj, x)
	tmp = 0.0
	if (wj <= -3e+98)
		tmp = Float64(wj * wj);
	else
		tmp = Float64(x / Float64(1.0 + Float64(wj * 2.0)));
	end
	return tmp
end
function tmp_2 = code(wj, x)
	tmp = 0.0;
	if (wj <= -3e+98)
		tmp = wj * wj;
	else
		tmp = x / (1.0 + (wj * 2.0));
	end
	tmp_2 = tmp;
end
code[wj_, x_] := If[LessEqual[wj, -3e+98], N[(wj * wj), $MachinePrecision], N[(x / N[(1.0 + N[(wj * 2.0), $MachinePrecision]), $MachinePrecision]), $MachinePrecision]]
\begin{array}{l}

\\
\begin{array}{l}
\mathbf{if}\;wj \leq -3 \cdot 10^{+98}:\\
\;\;\;\;wj \cdot wj\\

\mathbf{else}:\\
\;\;\;\;\frac{x}{1 + wj \cdot 2}\\


\end{array}
\end{array}
Derivation
  1. Split input into 2 regimes
  2. if wj < -3.0000000000000001e98

    1. Initial program 0.0%

      \[wj - \frac{wj \cdot e^{wj} - x}{e^{wj} + wj \cdot e^{wj}} \]
    2. Step-by-step derivation
      1. sub-neg 0.0%

        \[\leadsto \color{blue}{wj + \left(-\frac{wj \cdot e^{wj} - x}{e^{wj} + wj \cdot e^{wj}}\right)} \]
      2. div-sub 0.0%

        \[\leadsto wj + \left(-\color{blue}{\left(\frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}} - \frac{x}{e^{wj} + wj \cdot e^{wj}}\right)}\right) \]
      3. sub-neg 0.0%

        \[\leadsto wj + \left(-\color{blue}{\left(\frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}} + \left(-\frac{x}{e^{wj} + wj \cdot e^{wj}}\right)\right)}\right) \]
      4. +-commutative 0.0%

        \[\leadsto wj + \left(-\color{blue}{\left(\left(-\frac{x}{e^{wj} + wj \cdot e^{wj}}\right) + \frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}\right)}\right) \]
      5. distribute-neg-in 0.0%

        \[\leadsto wj + \color{blue}{\left(\left(-\left(-\frac{x}{e^{wj} + wj \cdot e^{wj}}\right)\right) + \left(-\frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}\right)\right)} \]
      6. remove-double-neg 0.0%

        \[\leadsto wj + \left(\color{blue}{\frac{x}{e^{wj} + wj \cdot e^{wj}}} + \left(-\frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}\right)\right) \]
      7. sub-neg 0.0%

        \[\leadsto wj + \color{blue}{\left(\frac{x}{e^{wj} + wj \cdot e^{wj}} - \frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}\right)} \]
      8. div-sub 0.0%

        \[\leadsto wj + \color{blue}{\frac{x - wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}} \]
      9. distribute-rgt1-in 100.0%

        \[\leadsto wj + \frac{x - wj \cdot e^{wj}}{\color{blue}{\left(wj + 1\right) \cdot e^{wj}}} \]
      10. associate-/l/ 100.0%

        \[\leadsto wj + \color{blue}{\frac{\frac{x - wj \cdot e^{wj}}{e^{wj}}}{wj + 1}} \]
    3. Simplified 100.0%

      \[\leadsto \color{blue}{wj + \frac{\frac{x}{e^{wj}} - wj}{wj + 1}} \]
    4. Taylor expanded in x around 0 3.1%

      \[\leadsto \color{blue}{wj - \frac{wj}{1 + wj}} \]
    5. Step-by-step derivation
      1. +-commutative 3.1%

        \[\leadsto wj - \frac{wj}{\color{blue}{wj + 1}} \]
    6. Simplified 3.1%

      \[\leadsto \color{blue}{wj - \frac{wj}{wj + 1}} \]
    7. Taylor expanded in wj around 0 42.0%

      \[\leadsto \color{blue}{{wj}^{2}} \]
    8. Step-by-step derivation
      1. unpow2 42.0%

        \[\leadsto \color{blue}{wj \cdot wj} \]
    9. Simplified 42.0%

      \[\leadsto \color{blue}{wj \cdot wj} \]

    if -3.0000000000000001e98 < wj

    1. Initial program 71.6%

      \[wj - \frac{wj \cdot e^{wj} - x}{e^{wj} + wj \cdot e^{wj}} \]
    2. Step-by-step derivation
      1. sub-neg 71.6%

        \[\leadsto \color{blue}{wj + \left(-\frac{wj \cdot e^{wj} - x}{e^{wj} + wj \cdot e^{wj}}\right)} \]
      2. div-sub 71.6%

        \[\leadsto wj + \left(-\color{blue}{\left(\frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}} - \frac{x}{e^{wj} + wj \cdot e^{wj}}\right)}\right) \]
      3. sub-neg 71.6%

        \[\leadsto wj + \left(-\color{blue}{\left(\frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}} + \left(-\frac{x}{e^{wj} + wj \cdot e^{wj}}\right)\right)}\right) \]
      4. +-commutative 71.6%

        \[\leadsto wj + \left(-\color{blue}{\left(\left(-\frac{x}{e^{wj} + wj \cdot e^{wj}}\right) + \frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}\right)}\right) \]
      5. distribute-neg-in 71.6%

        \[\leadsto wj + \color{blue}{\left(\left(-\left(-\frac{x}{e^{wj} + wj \cdot e^{wj}}\right)\right) + \left(-\frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}\right)\right)} \]
      6. remove-double-neg 71.6%

        \[\leadsto wj + \left(\color{blue}{\frac{x}{e^{wj} + wj \cdot e^{wj}}} + \left(-\frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}\right)\right) \]
      7. sub-neg 71.6%

        \[\leadsto wj + \color{blue}{\left(\frac{x}{e^{wj} + wj \cdot e^{wj}} - \frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}\right)} \]
      8. div-sub 71.6%

        \[\leadsto wj + \color{blue}{\frac{x - wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}} \]
      9. distribute-rgt1-in 83.5%

        \[\leadsto wj + \frac{x - wj \cdot e^{wj}}{\color{blue}{\left(wj + 1\right) \cdot e^{wj}}} \]
      10. associate-/l/ 83.4%

        \[\leadsto wj + \color{blue}{\frac{\frac{x - wj \cdot e^{wj}}{e^{wj}}}{wj + 1}} \]
    3. Simplified 83.9%

      \[\leadsto \color{blue}{wj + \frac{\frac{x}{e^{wj}} - wj}{wj + 1}} \]
    4. Taylor expanded in x around inf 89.6%

      \[\leadsto \color{blue}{\frac{x}{\left(1 + wj\right) \cdot e^{wj}}} \]
    5. Taylor expanded in wj around 0 77.1% (worked out after this derivation)

      \[\leadsto \frac{x}{\color{blue}{1 + 2 \cdot wj}} \]
  3. Recombined 2 regimes into one program.
  4. Final simplification 68.5%

    \[\leadsto \begin{array}{l} \mathbf{if}\;wj \leq -3 \cdot 10^{+98}:\\ \;\;\;\;wj \cdot wj\\ \mathbf{else}:\\ \;\;\;\;\frac{x}{1 + wj \cdot 2}\\ \end{array} \]
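
The else branch's denominator is step 5's first-order expansion; explicitly (my own check, not Herbie's output):

\[\left(1 + wj\right) \cdot e^{wj} = \left(1 + wj\right) \cdot \left(1 + wj + O\left({wj}^{2}\right)\right) = 1 + 2 \cdot wj + O\left({wj}^{2}\right), \]

so x / ((1 + wj)·e^wj) becomes x / (1 + wj·2). Dropping the exp call entirely is presumably where most of the 34.5× speedup comes from.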

Alternative 17: 66.4% accurate, 61.8× speedup?

\[\begin{array}{l} \\ \begin{array}{l} \mathbf{if}\;wj \leq -215:\\ \;\;\;\;wj \cdot wj\\ \mathbf{else}:\\ \;\;\;\;x\\ \end{array} \end{array} \]
(FPCore (wj x) :precision binary64 (if (<= wj -215.0) (* wj wj) x))
double code(double wj, double x) {
	double tmp;
	if (wj <= -215.0) {
		tmp = wj * wj;
	} else {
		tmp = x;
	}
	return tmp;
}
real(8) function code(wj, x)
    real(8), intent (in) :: wj
    real(8), intent (in) :: x
    real(8) :: tmp
    if (wj <= (-215.0d0)) then
        tmp = wj * wj
    else
        tmp = x
    end if
    code = tmp
end function
public static double code(double wj, double x) {
	double tmp;
	if (wj <= -215.0) {
		tmp = wj * wj;
	} else {
		tmp = x;
	}
	return tmp;
}
def code(wj, x):
	tmp = 0
	if wj <= -215.0:
		tmp = wj * wj
	else:
		tmp = x
	return tmp
function code(wj, x)
	tmp = 0.0
	if (wj <= -215.0)
		tmp = Float64(wj * wj);
	else
		tmp = x;
	end
	return tmp
end
function tmp_2 = code(wj, x)
	tmp = 0.0;
	if (wj <= -215.0)
		tmp = wj * wj;
	else
		tmp = x;
	end
	tmp_2 = tmp;
end
code[wj_, x_] := If[LessEqual[wj, -215.0], N[(wj * wj), $MachinePrecision], x]
\begin{array}{l}

\\
\begin{array}{l}
\mathbf{if}\;wj \leq -215:\\
\;\;\;\;wj \cdot wj\\

\mathbf{else}:\\
\;\;\;\;x\\


\end{array}
\end{array}
Derivation
  1. Split input into 2 regimes
  2. if wj < -215

    1. Initial program 0.0%

      \[wj - \frac{wj \cdot e^{wj} - x}{e^{wj} + wj \cdot e^{wj}} \]
    2. Step-by-step derivation
      1. sub-neg 0.0%

        \[\leadsto \color{blue}{wj + \left(-\frac{wj \cdot e^{wj} - x}{e^{wj} + wj \cdot e^{wj}}\right)} \]
      2. div-sub 0.0%

        \[\leadsto wj + \left(-\color{blue}{\left(\frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}} - \frac{x}{e^{wj} + wj \cdot e^{wj}}\right)}\right) \]
      3. sub-neg 0.0%

        \[\leadsto wj + \left(-\color{blue}{\left(\frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}} + \left(-\frac{x}{e^{wj} + wj \cdot e^{wj}}\right)\right)}\right) \]
      4. +-commutative 0.0%

        \[\leadsto wj + \left(-\color{blue}{\left(\left(-\frac{x}{e^{wj} + wj \cdot e^{wj}}\right) + \frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}\right)}\right) \]
      5. distribute-neg-in 0.0%

        \[\leadsto wj + \color{blue}{\left(\left(-\left(-\frac{x}{e^{wj} + wj \cdot e^{wj}}\right)\right) + \left(-\frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}\right)\right)} \]
      6. remove-double-neg 0.0%

        \[\leadsto wj + \left(\color{blue}{\frac{x}{e^{wj} + wj \cdot e^{wj}}} + \left(-\frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}\right)\right) \]
      7. sub-neg 0.0%

        \[\leadsto wj + \color{blue}{\left(\frac{x}{e^{wj} + wj \cdot e^{wj}} - \frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}\right)} \]
      8. div-sub 0.0%

        \[\leadsto wj + \color{blue}{\frac{x - wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}} \]
      9. distribute-rgt1-in 100.0%

        \[\leadsto wj + \frac{x - wj \cdot e^{wj}}{\color{blue}{\left(wj + 1\right) \cdot e^{wj}}} \]
      10. associate-/l/ 100.0%

        \[\leadsto wj + \color{blue}{\frac{\frac{x - wj \cdot e^{wj}}{e^{wj}}}{wj + 1}} \]
    3. Simplified 100.0%

      \[\leadsto \color{blue}{wj + \frac{\frac{x}{e^{wj}} - wj}{wj + 1}} \]
    4. Taylor expanded in x around 0 3.0%

      \[\leadsto \color{blue}{wj - \frac{wj}{1 + wj}} \]
    5. Step-by-step derivation
      1. +-commutative 3.0%

        \[\leadsto wj - \frac{wj}{\color{blue}{wj + 1}} \]
    6. Simplified 3.0%

      \[\leadsto \color{blue}{wj - \frac{wj}{wj + 1}} \]
    7. Taylor expanded in wj around 0 31.2%

      \[\leadsto \color{blue}{{wj}^{2}} \]
    8. Step-by-step derivation
      1. unpow2 31.2%

        \[\leadsto \color{blue}{wj \cdot wj} \]
    9. Simplified 31.2%

      \[\leadsto \color{blue}{wj \cdot wj} \]

    if -215 < wj

    1. Initial program 81.3%

      \[wj - \frac{wj \cdot e^{wj} - x}{e^{wj} + wj \cdot e^{wj}} \]
    2. Step-by-step derivation
      1. sub-neg 81.3%

        \[\leadsto \color{blue}{wj + \left(-\frac{wj \cdot e^{wj} - x}{e^{wj} + wj \cdot e^{wj}}\right)} \]
      2. div-sub 81.3%

        \[\leadsto wj + \left(-\color{blue}{\left(\frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}} - \frac{x}{e^{wj} + wj \cdot e^{wj}}\right)}\right) \]
      3. sub-neg 81.3%

        \[\leadsto wj + \left(-\color{blue}{\left(\frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}} + \left(-\frac{x}{e^{wj} + wj \cdot e^{wj}}\right)\right)}\right) \]
      4. +-commutative 81.3%

        \[\leadsto wj + \left(-\color{blue}{\left(\left(-\frac{x}{e^{wj} + wj \cdot e^{wj}}\right) + \frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}\right)}\right) \]
      5. distribute-neg-in 81.3%

        \[\leadsto wj + \color{blue}{\left(\left(-\left(-\frac{x}{e^{wj} + wj \cdot e^{wj}}\right)\right) + \left(-\frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}\right)\right)} \]
      6. remove-double-neg 81.3%

        \[\leadsto wj + \left(\color{blue}{\frac{x}{e^{wj} + wj \cdot e^{wj}}} + \left(-\frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}\right)\right) \]
      7. sub-neg 81.3%

        \[\leadsto wj + \color{blue}{\left(\frac{x}{e^{wj} + wj \cdot e^{wj}} - \frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}\right)} \]
      8. div-sub 81.3%

        \[\leadsto wj + \color{blue}{\frac{x - wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}} \]
      9. distribute-rgt1-in 81.3%

        \[\leadsto wj + \frac{x - wj \cdot e^{wj}}{\color{blue}{\left(wj + 1\right) \cdot e^{wj}}} \]
      10. associate-/l/ 81.2%

        \[\leadsto wj + \color{blue}{\frac{\frac{x - wj \cdot e^{wj}}{e^{wj}}}{wj + 1}} \]
    3. Simplified 81.8%

      \[\leadsto \color{blue}{wj + \frac{\frac{x}{e^{wj}} - wj}{wj + 1}} \]
    4. Taylor expanded in wj around 0 86.7% (checked after this derivation)

      \[\leadsto \color{blue}{x} \]
  3. Recombined 2 regimes into one program.
  4. Final simplification 68.1%

    \[\leadsto \begin{array}{l} \mathbf{if}\;wj \leq -215:\\ \;\;\;\;wj \cdot wj\\ \mathbf{else}:\\ \;\;\;\;x\\ \end{array} \]
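
Step 4's zeroth-order expansion is easy to confirm (my own check, not Herbie's output): evaluating the simplified step-3 form at wj = 0 gives

\[\left(wj + \frac{\frac{x}{e^{wj}} - wj}{wj + 1}\right)\Bigg|_{wj = 0} = 0 + \frac{x - 0}{0 + 1} = x, \]

so near wj = 0 the Newton step is approximately x itself, which is all this regime keeps.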

Alternative 18: 3.6% accurate, 313.0× speedup?

\[\begin{array}{l} \\ wj \end{array} \]
(FPCore (wj x) :precision binary64 wj)
double code(double wj, double x) {
	return wj;
}
real(8) function code(wj, x)
    real(8), intent (in) :: wj
    real(8), intent (in) :: x
    code = wj
end function
public static double code(double wj, double x) {
	return wj;
}
def code(wj, x):
	return wj
function code(wj, x)
	return wj
end
function tmp = code(wj, x)
	tmp = wj;
end
code[wj_, x_] := wj
\begin{array}{l}

\\
wj
\end{array}
Derivation
  1. Initial program 54.0%

    \[wj - \frac{wj \cdot e^{wj} - x}{e^{wj} + wj \cdot e^{wj}} \]
  2. Step-by-step derivation
    1. sub-neg 54.0%

      \[\leadsto \color{blue}{wj + \left(-\frac{wj \cdot e^{wj} - x}{e^{wj} + wj \cdot e^{wj}}\right)} \]
    2. div-sub 54.0%

      \[\leadsto wj + \left(-\color{blue}{\left(\frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}} - \frac{x}{e^{wj} + wj \cdot e^{wj}}\right)}\right) \]
    3. sub-neg 54.0%

      \[\leadsto wj + \left(-\color{blue}{\left(\frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}} + \left(-\frac{x}{e^{wj} + wj \cdot e^{wj}}\right)\right)}\right) \]
    4. +-commutative 54.0%

      \[\leadsto wj + \left(-\color{blue}{\left(\left(-\frac{x}{e^{wj} + wj \cdot e^{wj}}\right) + \frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}\right)}\right) \]
    5. distribute-neg-in 54.0%

      \[\leadsto wj + \color{blue}{\left(\left(-\left(-\frac{x}{e^{wj} + wj \cdot e^{wj}}\right)\right) + \left(-\frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}\right)\right)} \]
    6. remove-double-neg 54.0%

      \[\leadsto wj + \left(\color{blue}{\frac{x}{e^{wj} + wj \cdot e^{wj}}} + \left(-\frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}\right)\right) \]
    7. sub-neg 54.0%

      \[\leadsto wj + \color{blue}{\left(\frac{x}{e^{wj} + wj \cdot e^{wj}} - \frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}\right)} \]
    8. div-sub 54.0%

      \[\leadsto wj + \color{blue}{\frac{x - wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}} \]
    9. distribute-rgt1-in 87.5%

      \[\leadsto wj + \frac{x - wj \cdot e^{wj}}{\color{blue}{\left(wj + 1\right) \cdot e^{wj}}} \]
    10. associate-/l/ 87.5%

      \[\leadsto wj + \color{blue}{\frac{\frac{x - wj \cdot e^{wj}}{e^{wj}}}{wj + 1}} \]
  3. Simplified 87.9%

    \[\leadsto \color{blue}{wj + \frac{\frac{x}{e^{wj}} - wj}{wj + 1}} \]
  4. Taylor expanded in wj around inf 3.7% (checked after this derivation)

    \[\leadsto \color{blue}{wj} \]
  5. Final simplification 3.7%

    \[\leadsto wj \]
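
The step-4 limit can be read off directly (my own check, not Herbie's output): for large wj the x/e^wj term vanishes and wj/(wj + 1) tends to 1, so

\[wj + \frac{\frac{x}{e^{wj}} - wj}{wj + 1} = wj - \frac{wj}{wj + 1} + \frac{x}{\left(wj + 1\right) \cdot e^{wj}} = wj - 1 + O\left(\frac{1}{wj}\right), \]

whose leading term is wj. Keeping only that term, applied over the whole input range, is presumably why this alternative scores just 3.6% despite its 313.0× speedup.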

Alternative 19: 57.2% accurate, 313.0× speedup?

\[\begin{array}{l} \\ x \end{array} \]
(FPCore (wj x) :precision binary64 x)
double code(double wj, double x) {
	return x;
}
real(8) function code(wj, x)
    real(8), intent (in) :: wj
    real(8), intent (in) :: x
    code = x
end function
public static double code(double wj, double x) {
	return x;
}
def code(wj, x):
	return x
function code(wj, x)
	return x
end
function tmp = code(wj, x)
	tmp = x;
end
code[wj_, x_] := x
\begin{array}{l}

\\
x
\end{array}
Derivation
  1. Initial program 54.0%

    \[wj - \frac{wj \cdot e^{wj} - x}{e^{wj} + wj \cdot e^{wj}} \]
  2. Step-by-step derivation
    1. sub-neg 54.0%

      \[\leadsto \color{blue}{wj + \left(-\frac{wj \cdot e^{wj} - x}{e^{wj} + wj \cdot e^{wj}}\right)} \]
    2. div-sub 54.0%

      \[\leadsto wj + \left(-\color{blue}{\left(\frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}} - \frac{x}{e^{wj} + wj \cdot e^{wj}}\right)}\right) \]
    3. sub-neg 54.0%

      \[\leadsto wj + \left(-\color{blue}{\left(\frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}} + \left(-\frac{x}{e^{wj} + wj \cdot e^{wj}}\right)\right)}\right) \]
    4. +-commutative 54.0%

      \[\leadsto wj + \left(-\color{blue}{\left(\left(-\frac{x}{e^{wj} + wj \cdot e^{wj}}\right) + \frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}\right)}\right) \]
    5. distribute-neg-in 54.0%

      \[\leadsto wj + \color{blue}{\left(\left(-\left(-\frac{x}{e^{wj} + wj \cdot e^{wj}}\right)\right) + \left(-\frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}\right)\right)} \]
    6. remove-double-neg 54.0%

      \[\leadsto wj + \left(\color{blue}{\frac{x}{e^{wj} + wj \cdot e^{wj}}} + \left(-\frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}\right)\right) \]
    7. sub-neg 54.0%

      \[\leadsto wj + \color{blue}{\left(\frac{x}{e^{wj} + wj \cdot e^{wj}} - \frac{wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}\right)} \]
    8. div-sub 54.0%

      \[\leadsto wj + \color{blue}{\frac{x - wj \cdot e^{wj}}{e^{wj} + wj \cdot e^{wj}}} \]
    9. distribute-rgt1-in 87.5%

      \[\leadsto wj + \frac{x - wj \cdot e^{wj}}{\color{blue}{\left(wj + 1\right) \cdot e^{wj}}} \]
    10. associate-/l/ 87.5%

      \[\leadsto wj + \color{blue}{\frac{\frac{x - wj \cdot e^{wj}}{e^{wj}}}{wj + 1}} \]
  3. Simplified 87.9%

    \[\leadsto \color{blue}{wj + \frac{\frac{x}{e^{wj}} - wj}{wj + 1}} \]
  4. Taylor expanded in wj around 0 57.8%

    \[\leadsto \color{blue}{x} \]
  5. Final simplification 57.8%

    \[\leadsto x \]

Developer target: 52.6% accurate, 1.5× speedup?

\[\begin{array}{l} \\ wj - \left(\frac{wj}{wj + 1} - \frac{x}{e^{wj} + wj \cdot e^{wj}}\right) \end{array} \]
(FPCore (wj x)
 :precision binary64
 (- wj (- (/ wj (+ wj 1.0)) (/ x (+ (exp wj) (* wj (exp wj)))))))
double code(double wj, double x) {
	return wj - ((wj / (wj + 1.0)) - (x / (exp(wj) + (wj * exp(wj)))));
}
real(8) function code(wj, x)
    real(8), intent (in) :: wj
    real(8), intent (in) :: x
    code = wj - ((wj / (wj + 1.0d0)) - (x / (exp(wj) + (wj * exp(wj)))))
end function
public static double code(double wj, double x) {
	return wj - ((wj / (wj + 1.0)) - (x / (Math.exp(wj) + (wj * Math.exp(wj)))));
}
def code(wj, x):
	return wj - ((wj / (wj + 1.0)) - (x / (math.exp(wj) + (wj * math.exp(wj)))))
function code(wj, x)
	return Float64(wj - Float64(Float64(wj / Float64(wj + 1.0)) - Float64(x / Float64(exp(wj) + Float64(wj * exp(wj))))))
end
function tmp = code(wj, x)
	tmp = wj - ((wj / (wj + 1.0)) - (x / (exp(wj) + (wj * exp(wj)))));
end
code[wj_, x_] := N[(wj - N[(N[(wj / N[(wj + 1.0), $MachinePrecision]), $MachinePrecision] - N[(x / N[(N[Exp[wj], $MachinePrecision] + N[(wj * N[Exp[wj], $MachinePrecision]), $MachinePrecision]), $MachinePrecision]), $MachinePrecision]), $MachinePrecision]), $MachinePrecision]
\begin{array}{l}

\\
wj - \left(\frac{wj}{wj + 1} - \frac{x}{e^{wj} + wj \cdot e^{wj}}\right)
\end{array}
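
The developer target is the same Newton step in exact arithmetic: it simply cancels e^wj in the first quotient, since

\[\frac{wj \cdot e^{wj} - x}{e^{wj} + wj \cdot e^{wj}} = \frac{wj \cdot e^{wj}}{\left(1 + wj\right) \cdot e^{wj}} - \frac{x}{e^{wj} + wj \cdot e^{wj}} = \frac{wj}{wj + 1} - \frac{x}{e^{wj} + wj \cdot e^{wj}}. \]

That rewrite changes no real-number values, which is consistent with its accuracy staying at 52.6%, essentially the same as the initial program's 52.5%.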

Reproduce

herbie shell --seed 2023278 
(FPCore (wj x)
  :name "Jmat.Real.lambertw, newton loop step"
  :precision binary64

  :herbie-target
  (- wj (- (/ wj (+ wj 1.0)) (/ x (+ (exp wj) (* wj (exp wj))))))

  (- wj (/ (- (* wj (exp wj)) x) (+ (exp wj) (* wj (exp wj))))))
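
herbie shell reads FPCore expressions from standard input, so one way to rerun this job (assuming the core above is saved to lambertw.fpcore, a filename chosen here purely for illustration) is:

herbie shell --seed 2023278 < lambertw.fpcore

The --seed argument pins the input sampling, so the reported accuracies should be reproducible up to differences between Herbie versions.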