System.Random.MWC.Distributions:truncatedExp from mwc-random-0.13.3.2

Percentage Accurate: 62.0% → 98.4%
Time: 13.8s
Alternatives: 10
Speedup: 211.0×

Specification

?
\[\begin{array}{l} \\ x - \frac{\log \left(\left(1 - y\right) + y \cdot e^{z}\right)}{t} \end{array} \]
(FPCore (x y z t)
 :precision binary64
 (- x (/ (log (+ (- 1.0 y) (* y (exp z)))) t)))
double code(double x, double y, double z, double t) {
	return x - (log(((1.0 - y) + (y * exp(z)))) / t);
}
! Naive evaluation of x - log((1 - y) + y*exp(z)) / t in double precision.
! The log argument can cancel near 1; the report rates this form ~62% accurate.
real(8) function code(x, y, z, t)
    real(8), intent (in) :: x
    real(8), intent (in) :: y
    real(8), intent (in) :: z
    real(8), intent (in) :: t
    code = x - (log(((1.0d0 - y) + (y * exp(z)))) / t)
end function
// Naive evaluation of x - log((1 - y) + y*exp(z)) / t; the log argument
// can cancel near 1 (the report rates this form ~62% accurate).
public static double code(double x, double y, double z, double t) {
	return x - (Math.log(((1.0 - y) + (y * Math.exp(z)))) / t);
}
def code(x, y, z, t):
	# Naive evaluation of x - log((1 - y) + y*e^z) / t; the log argument
	# can cancel badly near 1 (report rates this form ~62% accurate).
	arg = (1.0 - y) + y * math.exp(z)
	return x - math.log(arg) / t
# Naive form: x - log((1 - y) + y*exp(z)) / t; the log argument can
# cancel near 1 (report rates this form ~62% accurate).
function code(x, y, z, t)
	return Float64(x - Float64(log(Float64(Float64(1.0 - y) + Float64(y * exp(z)))) / t))
end
% Naive form: x - log((1 - y) + y*exp(z)) / t; the log argument can
% cancel near 1 (report rates this form ~62% accurate).
function tmp = code(x, y, z, t)
	tmp = x - (log(((1.0 - y) + (y * exp(z)))) / t);
end
code[x_, y_, z_, t_] := N[(x - N[(N[Log[N[(N[(1.0 - y), $MachinePrecision] + N[(y * N[Exp[z], $MachinePrecision]), $MachinePrecision]), $MachinePrecision]], $MachinePrecision] / t), $MachinePrecision]), $MachinePrecision]
\begin{array}{l}

\\
x - \frac{\log \left(\left(1 - y\right) + y \cdot e^{z}\right)}{t}
\end{array}

Sampling outcomes in binary64 precision:

Local Percentage Accuracy vs ?

The average percentage accuracy by input value. The horizontal axis shows the value of an input variable; the variable is chosen in the title. The vertical axis is accuracy; higher is better. Red represents the original program, while blue represents Herbie's suggestion. These can be toggled with buttons below the plot. The line is an average, while dots represent individual samples.

Accuracy vs Speed?

Herbie found 10 alternatives:

Alternative | Accuracy | Speedup
The accuracy (vertical axis) and speed (horizontal axis) of each alternative. Up and to the right is better. The red square shows the initial program, and each blue circle shows an alternative. The line shows the best available speed-accuracy tradeoffs.

Initial Program: 62.0% accurate, 1.0× speedup?

\[\begin{array}{l} \\ x - \frac{\log \left(\left(1 - y\right) + y \cdot e^{z}\right)}{t} \end{array} \]
(FPCore (x y z t)
 :precision binary64
 (- x (/ (log (+ (- 1.0 y) (* y (exp z)))) t)))
double code(double x, double y, double z, double t) {
	return x - (log(((1.0 - y) + (y * exp(z)))) / t);
}
! Initial program: x - log((1 - y) + y*exp(z)) / t in double precision.
! Subject to cancellation in the log argument near 1.
real(8) function code(x, y, z, t)
    real(8), intent (in) :: x
    real(8), intent (in) :: y
    real(8), intent (in) :: z
    real(8), intent (in) :: t
    code = x - (log(((1.0d0 - y) + (y * exp(z)))) / t)
end function
// Initial program: x - log((1 - y) + y*exp(z)) / t.
// Subject to cancellation in the log argument near 1.
public static double code(double x, double y, double z, double t) {
	return x - (Math.log(((1.0 - y) + (y * Math.exp(z)))) / t);
}
def code(x, y, z, t):
	# Initial program: x - log((1 - y) + y*e^z) / t.
	# Subject to cancellation in the log argument near 1.
	blend = (1.0 - y) + y * math.exp(z)
	return x - math.log(blend) / t
# Initial program: x - log((1 - y) + y*exp(z)) / t.
# Subject to cancellation in the log argument near 1.
function code(x, y, z, t)
	return Float64(x - Float64(log(Float64(Float64(1.0 - y) + Float64(y * exp(z)))) / t))
end
% Initial program: x - log((1 - y) + y*exp(z)) / t.
% Subject to cancellation in the log argument near 1.
function tmp = code(x, y, z, t)
	tmp = x - (log(((1.0 - y) + (y * exp(z)))) / t);
end
code[x_, y_, z_, t_] := N[(x - N[(N[Log[N[(N[(1.0 - y), $MachinePrecision] + N[(y * N[Exp[z], $MachinePrecision]), $MachinePrecision]), $MachinePrecision]], $MachinePrecision] / t), $MachinePrecision]), $MachinePrecision]
\begin{array}{l}

\\
x - \frac{\log \left(\left(1 - y\right) + y \cdot e^{z}\right)}{t}
\end{array}

Alternative 1: 98.4% accurate, 1.0× speedup?

\[\begin{array}{l} \\ x - \frac{\mathsf{log1p}\left(y \cdot \mathsf{expm1}\left(z\right)\right)}{t} \end{array} \]
(FPCore (x y z t) :precision binary64 (- x (/ (log1p (* y (expm1 z))) t)))
double code(double x, double y, double z, double t) {
	return x - (log1p((y * expm1(z))) / t);
}
// Accurate rewrite: x - log1p(y*expm1(z)) / t.
// log1p/expm1 avoid the cancellation of the naive log form.
public static double code(double x, double y, double z, double t) {
	return x - (Math.log1p((y * Math.expm1(z))) / t);
}
def code(x, y, z, t):
	# Accurate rewrite: x - log1p(y*expm1(z)) / t.
	# log1p/expm1 avoid the cancellation of the naive log form.
	prod = y * math.expm1(z)
	return x - math.log1p(prod) / t
# Accurate rewrite: x - log1p(y*expm1(z)) / t.
function code(x, y, z, t)
	return Float64(x - Float64(log1p(Float64(y * expm1(z))) / t))
end
code[x_, y_, z_, t_] := N[(x - N[(N[Log[1 + N[(y * N[(Exp[z] - 1), $MachinePrecision]), $MachinePrecision]], $MachinePrecision] / t), $MachinePrecision]), $MachinePrecision]
\begin{array}{l}

\\
x - \frac{\mathsf{log1p}\left(y \cdot \mathsf{expm1}\left(z\right)\right)}{t}
\end{array}
Derivation
  1. Initial program 62.7%

    \[x - \frac{\log \left(\left(1 - y\right) + y \cdot e^{z}\right)}{t} \]
  2. Step-by-step derivation
    1. associate-+l-79.3%

      \[\leadsto x - \frac{\log \color{blue}{\left(1 - \left(y - y \cdot e^{z}\right)\right)}}{t} \]
    2. sub-neg79.3%

      \[\leadsto x - \frac{\log \color{blue}{\left(1 + \left(-\left(y - y \cdot e^{z}\right)\right)\right)}}{t} \]
    3. log1p-def83.3%

      \[\leadsto x - \frac{\color{blue}{\mathsf{log1p}\left(-\left(y - y \cdot e^{z}\right)\right)}}{t} \]
    4. neg-sub083.3%

      \[\leadsto x - \frac{\mathsf{log1p}\left(\color{blue}{0 - \left(y - y \cdot e^{z}\right)}\right)}{t} \]
    5. associate-+l-83.3%

      \[\leadsto x - \frac{\mathsf{log1p}\left(\color{blue}{\left(0 - y\right) + y \cdot e^{z}}\right)}{t} \]
    6. neg-sub083.3%

      \[\leadsto x - \frac{\mathsf{log1p}\left(\color{blue}{\left(-y\right)} + y \cdot e^{z}\right)}{t} \]
    7. neg-mul-183.3%

      \[\leadsto x - \frac{\mathsf{log1p}\left(\color{blue}{-1 \cdot y} + y \cdot e^{z}\right)}{t} \]
    8. *-commutative83.3%

      \[\leadsto x - \frac{\mathsf{log1p}\left(-1 \cdot y + \color{blue}{e^{z} \cdot y}\right)}{t} \]
    9. distribute-rgt-out83.3%

      \[\leadsto x - \frac{\mathsf{log1p}\left(\color{blue}{y \cdot \left(-1 + e^{z}\right)}\right)}{t} \]
    10. +-commutative83.3%

      \[\leadsto x - \frac{\mathsf{log1p}\left(y \cdot \color{blue}{\left(e^{z} + -1\right)}\right)}{t} \]
    11. metadata-eval83.3%

      \[\leadsto x - \frac{\mathsf{log1p}\left(y \cdot \left(e^{z} + \color{blue}{\left(-1\right)}\right)\right)}{t} \]
    12. sub-neg83.3%

      \[\leadsto x - \frac{\mathsf{log1p}\left(y \cdot \color{blue}{\left(e^{z} - 1\right)}\right)}{t} \]
    13. expm1-def99.0%

      \[\leadsto x - \frac{\mathsf{log1p}\left(y \cdot \color{blue}{\mathsf{expm1}\left(z\right)}\right)}{t} \]
  3. Simplified99.0%

    \[\leadsto \color{blue}{x - \frac{\mathsf{log1p}\left(y \cdot \mathsf{expm1}\left(z\right)\right)}{t}} \]
  4. Final simplification99.0%

    \[\leadsto x - \frac{\mathsf{log1p}\left(y \cdot \mathsf{expm1}\left(z\right)\right)}{t} \]

Alternative 2: 90.8% accurate, 1.9× speedup?

\[\begin{array}{l} \\ \begin{array}{l} \mathbf{if}\;z \leq -1.85 \cdot 10^{+14}:\\ \;\;\;\;x - \frac{y \cdot \mathsf{expm1}\left(z\right)}{t}\\ \mathbf{else}:\\ \;\;\;\;x + \mathsf{log1p}\left(y \cdot z\right) \cdot \frac{-1}{t}\\ \end{array} \end{array} \]
(FPCore (x y z t)
 :precision binary64
 (if (<= z -1.85e+14)
   (- x (/ (* y (expm1 z)) t))
   (+ x (* (log1p (* y z)) (/ -1.0 t)))))
double code(double x, double y, double z, double t) {
	double tmp;
	if (z <= -1.85e+14) {
		tmp = x - ((y * expm1(z)) / t);
	} else {
		tmp = x + (log1p((y * z)) * (-1.0 / t));
	}
	return tmp;
}
// Two-regime rewrite: Taylor-in-y branch for z <= -1.85e14,
// log1p-based branch elsewhere.
public static double code(double x, double y, double z, double t) {
	double tmp;
	if (z <= -1.85e+14) {
		tmp = x - ((y * Math.expm1(z)) / t);
	} else {
		tmp = x + (Math.log1p((y * z)) * (-1.0 / t));
	}
	return tmp;
}
def code(x, y, z, t):
	# Two-regime rewrite: Taylor-in-y branch for very negative z,
	# log1p-based branch elsewhere.
	if z <= -1.85e+14:
		return x - (y * math.expm1(z)) / t
	return x + math.log1p(y * z) * (-1.0 / t)
# Two-regime rewrite: Taylor-in-y branch for z <= -1.85e14,
# log1p-based branch elsewhere.
function code(x, y, z, t)
	tmp = 0.0
	if (z <= -1.85e+14)
		tmp = Float64(x - Float64(Float64(y * expm1(z)) / t));
	else
		tmp = Float64(x + Float64(log1p(Float64(y * z)) * Float64(-1.0 / t)));
	end
	return tmp
end
code[x_, y_, z_, t_] := If[LessEqual[z, -1.85e+14], N[(x - N[(N[(y * N[(Exp[z] - 1), $MachinePrecision]), $MachinePrecision] / t), $MachinePrecision]), $MachinePrecision], N[(x + N[(N[Log[1 + N[(y * z), $MachinePrecision]], $MachinePrecision] * N[(-1.0 / t), $MachinePrecision]), $MachinePrecision]), $MachinePrecision]]
\begin{array}{l}

\\
\begin{array}{l}
\mathbf{if}\;z \leq -1.85 \cdot 10^{+14}:\\
\;\;\;\;x - \frac{y \cdot \mathsf{expm1}\left(z\right)}{t}\\

\mathbf{else}:\\
\;\;\;\;x + \mathsf{log1p}\left(y \cdot z\right) \cdot \frac{-1}{t}\\


\end{array}
\end{array}
Derivation
  1. Split input into 2 regimes
  2. if z < -1.85e14

    1. Initial program 87.9%

      \[x - \frac{\log \left(\left(1 - y\right) + y \cdot e^{z}\right)}{t} \]
    2. Step-by-step derivation
      1. associate-+l-87.9%

        \[\leadsto x - \frac{\log \color{blue}{\left(1 - \left(y - y \cdot e^{z}\right)\right)}}{t} \]
      2. sub-neg87.9%

        \[\leadsto x - \frac{\log \color{blue}{\left(1 + \left(-\left(y - y \cdot e^{z}\right)\right)\right)}}{t} \]
      3. log1p-def99.9%

        \[\leadsto x - \frac{\color{blue}{\mathsf{log1p}\left(-\left(y - y \cdot e^{z}\right)\right)}}{t} \]
      4. neg-sub099.9%

        \[\leadsto x - \frac{\mathsf{log1p}\left(\color{blue}{0 - \left(y - y \cdot e^{z}\right)}\right)}{t} \]
      5. associate-+l-99.9%

        \[\leadsto x - \frac{\mathsf{log1p}\left(\color{blue}{\left(0 - y\right) + y \cdot e^{z}}\right)}{t} \]
      6. neg-sub099.9%

        \[\leadsto x - \frac{\mathsf{log1p}\left(\color{blue}{\left(-y\right)} + y \cdot e^{z}\right)}{t} \]
      7. neg-mul-199.9%

        \[\leadsto x - \frac{\mathsf{log1p}\left(\color{blue}{-1 \cdot y} + y \cdot e^{z}\right)}{t} \]
      8. *-commutative99.9%

        \[\leadsto x - \frac{\mathsf{log1p}\left(-1 \cdot y + \color{blue}{e^{z} \cdot y}\right)}{t} \]
      9. distribute-rgt-out99.9%

        \[\leadsto x - \frac{\mathsf{log1p}\left(\color{blue}{y \cdot \left(-1 + e^{z}\right)}\right)}{t} \]
      10. +-commutative99.9%

        \[\leadsto x - \frac{\mathsf{log1p}\left(y \cdot \color{blue}{\left(e^{z} + -1\right)}\right)}{t} \]
      11. metadata-eval99.9%

        \[\leadsto x - \frac{\mathsf{log1p}\left(y \cdot \left(e^{z} + \color{blue}{\left(-1\right)}\right)\right)}{t} \]
      12. sub-neg99.9%

        \[\leadsto x - \frac{\mathsf{log1p}\left(y \cdot \color{blue}{\left(e^{z} - 1\right)}\right)}{t} \]
      13. expm1-def99.9%

        \[\leadsto x - \frac{\mathsf{log1p}\left(y \cdot \color{blue}{\mathsf{expm1}\left(z\right)}\right)}{t} \]
    3. Simplified99.9%

      \[\leadsto \color{blue}{x - \frac{\mathsf{log1p}\left(y \cdot \mathsf{expm1}\left(z\right)\right)}{t}} \]
    4. Taylor expanded in y around 0 77.3%

      \[\leadsto x - \frac{\color{blue}{\left(e^{z} - 1\right) \cdot y}}{t} \]
    5. Step-by-step derivation
      1. expm1-def77.3%

        \[\leadsto x - \frac{\color{blue}{\mathsf{expm1}\left(z\right)} \cdot y}{t} \]
      2. *-commutative77.3%

        \[\leadsto x - \frac{\color{blue}{y \cdot \mathsf{expm1}\left(z\right)}}{t} \]
    6. Simplified77.3%

      \[\leadsto x - \frac{\color{blue}{y \cdot \mathsf{expm1}\left(z\right)}}{t} \]

    if -1.85e14 < z

    1. Initial program 53.6%

      \[x - \frac{\log \left(\left(1 - y\right) + y \cdot e^{z}\right)}{t} \]
    2. Step-by-step derivation
      1. associate-+l-76.2%

        \[\leadsto x - \frac{\log \color{blue}{\left(1 - \left(y - y \cdot e^{z}\right)\right)}}{t} \]
      2. sub-neg76.2%

        \[\leadsto x - \frac{\log \color{blue}{\left(1 + \left(-\left(y - y \cdot e^{z}\right)\right)\right)}}{t} \]
      3. log1p-def77.3%

        \[\leadsto x - \frac{\color{blue}{\mathsf{log1p}\left(-\left(y - y \cdot e^{z}\right)\right)}}{t} \]
      4. neg-sub077.3%

        \[\leadsto x - \frac{\mathsf{log1p}\left(\color{blue}{0 - \left(y - y \cdot e^{z}\right)}\right)}{t} \]
      5. associate-+l-77.3%

        \[\leadsto x - \frac{\mathsf{log1p}\left(\color{blue}{\left(0 - y\right) + y \cdot e^{z}}\right)}{t} \]
      6. neg-sub077.3%

        \[\leadsto x - \frac{\mathsf{log1p}\left(\color{blue}{\left(-y\right)} + y \cdot e^{z}\right)}{t} \]
      7. neg-mul-177.3%

        \[\leadsto x - \frac{\mathsf{log1p}\left(\color{blue}{-1 \cdot y} + y \cdot e^{z}\right)}{t} \]
      8. *-commutative77.3%

        \[\leadsto x - \frac{\mathsf{log1p}\left(-1 \cdot y + \color{blue}{e^{z} \cdot y}\right)}{t} \]
      9. distribute-rgt-out77.3%

        \[\leadsto x - \frac{\mathsf{log1p}\left(\color{blue}{y \cdot \left(-1 + e^{z}\right)}\right)}{t} \]
      10. +-commutative77.3%

        \[\leadsto x - \frac{\mathsf{log1p}\left(y \cdot \color{blue}{\left(e^{z} + -1\right)}\right)}{t} \]
      11. metadata-eval77.3%

        \[\leadsto x - \frac{\mathsf{log1p}\left(y \cdot \left(e^{z} + \color{blue}{\left(-1\right)}\right)\right)}{t} \]
      12. sub-neg77.3%

        \[\leadsto x - \frac{\mathsf{log1p}\left(y \cdot \color{blue}{\left(e^{z} - 1\right)}\right)}{t} \]
      13. expm1-def98.6%

        \[\leadsto x - \frac{\mathsf{log1p}\left(y \cdot \color{blue}{\mathsf{expm1}\left(z\right)}\right)}{t} \]
    3. Simplified98.6%

      \[\leadsto \color{blue}{x - \frac{\mathsf{log1p}\left(y \cdot \mathsf{expm1}\left(z\right)\right)}{t}} \]
    4. Step-by-step derivation
      1. clear-num98.6%

        \[\leadsto x - \color{blue}{\frac{1}{\frac{t}{\mathsf{log1p}\left(y \cdot \mathsf{expm1}\left(z\right)\right)}}} \]
      2. associate-/r/98.6%

        \[\leadsto x - \color{blue}{\frac{1}{t} \cdot \mathsf{log1p}\left(y \cdot \mathsf{expm1}\left(z\right)\right)} \]
    5. Applied egg-rr98.6%

      \[\leadsto x - \color{blue}{\frac{1}{t} \cdot \mathsf{log1p}\left(y \cdot \mathsf{expm1}\left(z\right)\right)} \]
    6. Taylor expanded in z around 0 96.9%

      \[\leadsto x - \frac{1}{t} \cdot \mathsf{log1p}\left(y \cdot \color{blue}{z}\right) \]
  3. Recombined 2 regimes into one program.
  4. Final simplification91.7%

    \[\leadsto \begin{array}{l} \mathbf{if}\;z \leq -1.85 \cdot 10^{+14}:\\ \;\;\;\;x - \frac{y \cdot \mathsf{expm1}\left(z\right)}{t}\\ \mathbf{else}:\\ \;\;\;\;x + \mathsf{log1p}\left(y \cdot z\right) \cdot \frac{-1}{t}\\ \end{array} \]

Alternative 3: 85.6% accurate, 1.9× speedup?

\[\begin{array}{l} \\ \begin{array}{l} \mathbf{if}\;z \leq -2 \cdot 10^{-218}:\\ \;\;\;\;x - \frac{\mathsf{expm1}\left(z\right)}{\frac{t}{y}}\\ \mathbf{else}:\\ \;\;\;\;x - \left(\frac{\left(y \cdot 0.5\right) \cdot \left(z \cdot z\right)}{t} + \frac{y}{\frac{t}{z}}\right)\\ \end{array} \end{array} \]
(FPCore (x y z t)
 :precision binary64
 (if (<= z -2e-218)
   (- x (/ (expm1 z) (/ t y)))
   (- x (+ (/ (* (* y 0.5) (* z z)) t) (/ y (/ t z))))))
double code(double x, double y, double z, double t) {
	double tmp;
	if (z <= -2e-218) {
		tmp = x - (expm1(z) / (t / y));
	} else {
		tmp = x - ((((y * 0.5) * (z * z)) / t) + (y / (t / z)));
	}
	return tmp;
}
// Two-regime rewrite: expm1-based branch for z <= -2e-218, and a
// second-order Taylor expansion in z elsewhere.
public static double code(double x, double y, double z, double t) {
	double tmp;
	if (z <= -2e-218) {
		tmp = x - (Math.expm1(z) / (t / y));
	} else {
		tmp = x - ((((y * 0.5) * (z * z)) / t) + (y / (t / z)));
	}
	return tmp;
}
def code(x, y, z, t):
	# Two-regime rewrite: expm1-based branch for z <= -2e-218, and a
	# second-order Taylor expansion in z elsewhere.
	if z <= -2e-218:
		return x - math.expm1(z) / (t / y)
	quadratic = ((y * 0.5) * (z * z)) / t
	linear = y / (t / z)
	return x - (quadratic + linear)
# Two-regime rewrite: expm1-based branch for z <= -2e-218, second-order
# Taylor expansion in z elsewhere.
function code(x, y, z, t)
	tmp = 0.0
	if (z <= -2e-218)
		tmp = Float64(x - Float64(expm1(z) / Float64(t / y)));
	else
		tmp = Float64(x - Float64(Float64(Float64(Float64(y * 0.5) * Float64(z * z)) / t) + Float64(y / Float64(t / z))));
	end
	return tmp
end
code[x_, y_, z_, t_] := If[LessEqual[z, -2e-218], N[(x - N[(N[(Exp[z] - 1), $MachinePrecision] / N[(t / y), $MachinePrecision]), $MachinePrecision]), $MachinePrecision], N[(x - N[(N[(N[(N[(y * 0.5), $MachinePrecision] * N[(z * z), $MachinePrecision]), $MachinePrecision] / t), $MachinePrecision] + N[(y / N[(t / z), $MachinePrecision]), $MachinePrecision]), $MachinePrecision]), $MachinePrecision]]
\begin{array}{l}

\\
\begin{array}{l}
\mathbf{if}\;z \leq -2 \cdot 10^{-218}:\\
\;\;\;\;x - \frac{\mathsf{expm1}\left(z\right)}{\frac{t}{y}}\\

\mathbf{else}:\\
\;\;\;\;x - \left(\frac{\left(y \cdot 0.5\right) \cdot \left(z \cdot z\right)}{t} + \frac{y}{\frac{t}{z}}\right)\\


\end{array}
\end{array}
Derivation
  1. Split input into 2 regimes
  2. if z < -2.0000000000000001e-218

    1. Initial program 74.8%

      \[x - \frac{\log \left(\left(1 - y\right) + y \cdot e^{z}\right)}{t} \]
    2. Step-by-step derivation
      1. associate-+l-83.2%

        \[\leadsto x - \frac{\log \color{blue}{\left(1 - \left(y - y \cdot e^{z}\right)\right)}}{t} \]
      2. sub-neg83.2%

        \[\leadsto x - \frac{\log \color{blue}{\left(1 + \left(-\left(y - y \cdot e^{z}\right)\right)\right)}}{t} \]
      3. log1p-def90.2%

        \[\leadsto x - \frac{\color{blue}{\mathsf{log1p}\left(-\left(y - y \cdot e^{z}\right)\right)}}{t} \]
      4. neg-sub090.2%

        \[\leadsto x - \frac{\mathsf{log1p}\left(\color{blue}{0 - \left(y - y \cdot e^{z}\right)}\right)}{t} \]
      5. associate-+l-90.2%

        \[\leadsto x - \frac{\mathsf{log1p}\left(\color{blue}{\left(0 - y\right) + y \cdot e^{z}}\right)}{t} \]
      6. neg-sub090.2%

        \[\leadsto x - \frac{\mathsf{log1p}\left(\color{blue}{\left(-y\right)} + y \cdot e^{z}\right)}{t} \]
      7. neg-mul-190.2%

        \[\leadsto x - \frac{\mathsf{log1p}\left(\color{blue}{-1 \cdot y} + y \cdot e^{z}\right)}{t} \]
      8. *-commutative90.2%

        \[\leadsto x - \frac{\mathsf{log1p}\left(-1 \cdot y + \color{blue}{e^{z} \cdot y}\right)}{t} \]
      9. distribute-rgt-out90.2%

        \[\leadsto x - \frac{\mathsf{log1p}\left(\color{blue}{y \cdot \left(-1 + e^{z}\right)}\right)}{t} \]
      10. +-commutative90.2%

        \[\leadsto x - \frac{\mathsf{log1p}\left(y \cdot \color{blue}{\left(e^{z} + -1\right)}\right)}{t} \]
      11. metadata-eval90.2%

        \[\leadsto x - \frac{\mathsf{log1p}\left(y \cdot \left(e^{z} + \color{blue}{\left(-1\right)}\right)\right)}{t} \]
      12. sub-neg90.2%

        \[\leadsto x - \frac{\mathsf{log1p}\left(y \cdot \color{blue}{\left(e^{z} - 1\right)}\right)}{t} \]
      13. expm1-def99.9%

        \[\leadsto x - \frac{\mathsf{log1p}\left(y \cdot \color{blue}{\mathsf{expm1}\left(z\right)}\right)}{t} \]
    3. Simplified99.9%

      \[\leadsto \color{blue}{x - \frac{\mathsf{log1p}\left(y \cdot \mathsf{expm1}\left(z\right)\right)}{t}} \]
    4. Taylor expanded in y around 0 76.6%

      \[\leadsto x - \color{blue}{\frac{\left(e^{z} - 1\right) \cdot y}{t}} \]
    5. Step-by-step derivation
      1. expm1-def83.6%

        \[\leadsto x - \frac{\color{blue}{\mathsf{expm1}\left(z\right)} \cdot y}{t} \]
      2. associate-/l*83.6%

        \[\leadsto x - \color{blue}{\frac{\mathsf{expm1}\left(z\right)}{\frac{t}{y}}} \]
    6. Simplified83.6%

      \[\leadsto x - \color{blue}{\frac{\mathsf{expm1}\left(z\right)}{\frac{t}{y}}} \]

    if -2.0000000000000001e-218 < z

    1. Initial program 49.2%

      \[x - \frac{\log \left(\left(1 - y\right) + y \cdot e^{z}\right)}{t} \]
    2. Step-by-step derivation
      1. associate-+l-74.9%

        \[\leadsto x - \frac{\log \color{blue}{\left(1 - \left(y - y \cdot e^{z}\right)\right)}}{t} \]
      2. sub-neg74.9%

        \[\leadsto x - \frac{\log \color{blue}{\left(1 + \left(-\left(y - y \cdot e^{z}\right)\right)\right)}}{t} \]
      3. log1p-def75.7%

        \[\leadsto x - \frac{\color{blue}{\mathsf{log1p}\left(-\left(y - y \cdot e^{z}\right)\right)}}{t} \]
      4. neg-sub075.7%

        \[\leadsto x - \frac{\mathsf{log1p}\left(\color{blue}{0 - \left(y - y \cdot e^{z}\right)}\right)}{t} \]
      5. associate-+l-75.7%

        \[\leadsto x - \frac{\mathsf{log1p}\left(\color{blue}{\left(0 - y\right) + y \cdot e^{z}}\right)}{t} \]
      6. neg-sub075.7%

        \[\leadsto x - \frac{\mathsf{log1p}\left(\color{blue}{\left(-y\right)} + y \cdot e^{z}\right)}{t} \]
      7. neg-mul-175.7%

        \[\leadsto x - \frac{\mathsf{log1p}\left(\color{blue}{-1 \cdot y} + y \cdot e^{z}\right)}{t} \]
      8. *-commutative75.7%

        \[\leadsto x - \frac{\mathsf{log1p}\left(-1 \cdot y + \color{blue}{e^{z} \cdot y}\right)}{t} \]
      9. distribute-rgt-out75.7%

        \[\leadsto x - \frac{\mathsf{log1p}\left(\color{blue}{y \cdot \left(-1 + e^{z}\right)}\right)}{t} \]
      10. +-commutative75.7%

        \[\leadsto x - \frac{\mathsf{log1p}\left(y \cdot \color{blue}{\left(e^{z} + -1\right)}\right)}{t} \]
      11. metadata-eval75.7%

        \[\leadsto x - \frac{\mathsf{log1p}\left(y \cdot \left(e^{z} + \color{blue}{\left(-1\right)}\right)\right)}{t} \]
      12. sub-neg75.7%

        \[\leadsto x - \frac{\mathsf{log1p}\left(y \cdot \color{blue}{\left(e^{z} - 1\right)}\right)}{t} \]
      13. expm1-def98.0%

        \[\leadsto x - \frac{\mathsf{log1p}\left(y \cdot \color{blue}{\mathsf{expm1}\left(z\right)}\right)}{t} \]
    3. Simplified98.0%

      \[\leadsto \color{blue}{x - \frac{\mathsf{log1p}\left(y \cdot \mathsf{expm1}\left(z\right)\right)}{t}} \]
    4. Taylor expanded in y around 0 75.7%

      \[\leadsto x - \color{blue}{\frac{\left(e^{z} - 1\right) \cdot y}{t}} \]
    5. Step-by-step derivation
      1. expm1-def90.5%

        \[\leadsto x - \frac{\color{blue}{\mathsf{expm1}\left(z\right)} \cdot y}{t} \]
      2. associate-/l*85.6%

        \[\leadsto x - \color{blue}{\frac{\mathsf{expm1}\left(z\right)}{\frac{t}{y}}} \]
    6. Simplified85.6%

      \[\leadsto x - \color{blue}{\frac{\mathsf{expm1}\left(z\right)}{\frac{t}{y}}} \]
    7. Taylor expanded in z around 0 90.0%

      \[\leadsto x - \color{blue}{\left(\frac{y \cdot z}{t} + 0.5 \cdot \frac{y \cdot {z}^{2}}{t}\right)} \]
    8. Step-by-step derivation
      1. +-commutative90.0%

        \[\leadsto x - \color{blue}{\left(0.5 \cdot \frac{y \cdot {z}^{2}}{t} + \frac{y \cdot z}{t}\right)} \]
      2. associate-*r/90.0%

        \[\leadsto x - \left(\color{blue}{\frac{0.5 \cdot \left(y \cdot {z}^{2}\right)}{t}} + \frac{y \cdot z}{t}\right) \]
      3. associate-*r*90.0%

        \[\leadsto x - \left(\frac{\color{blue}{\left(0.5 \cdot y\right) \cdot {z}^{2}}}{t} + \frac{y \cdot z}{t}\right) \]
      4. *-commutative90.0%

        \[\leadsto x - \left(\frac{\color{blue}{\left(y \cdot 0.5\right)} \cdot {z}^{2}}{t} + \frac{y \cdot z}{t}\right) \]
      5. unpow290.0%

        \[\leadsto x - \left(\frac{\left(y \cdot 0.5\right) \cdot \color{blue}{\left(z \cdot z\right)}}{t} + \frac{y \cdot z}{t}\right) \]
      6. associate-/l*90.1%

        \[\leadsto x - \left(\frac{\left(y \cdot 0.5\right) \cdot \left(z \cdot z\right)}{t} + \color{blue}{\frac{y}{\frac{t}{z}}}\right) \]
    9. Simplified90.1%

      \[\leadsto x - \color{blue}{\left(\frac{\left(y \cdot 0.5\right) \cdot \left(z \cdot z\right)}{t} + \frac{y}{\frac{t}{z}}\right)} \]
  3. Recombined 2 regimes into one program.
  4. Final simplification86.7%

    \[\leadsto \begin{array}{l} \mathbf{if}\;z \leq -2 \cdot 10^{-218}:\\ \;\;\;\;x - \frac{\mathsf{expm1}\left(z\right)}{\frac{t}{y}}\\ \mathbf{else}:\\ \;\;\;\;x - \left(\frac{\left(y \cdot 0.5\right) \cdot \left(z \cdot z\right)}{t} + \frac{y}{\frac{t}{z}}\right)\\ \end{array} \]

Alternative 4: 90.8% accurate, 1.9× speedup?

\[\begin{array}{l} \\ \begin{array}{l} \mathbf{if}\;z \leq -1.1 \cdot 10^{+14}:\\ \;\;\;\;x - \frac{\mathsf{expm1}\left(z\right)}{\frac{t}{y}}\\ \mathbf{else}:\\ \;\;\;\;x - \frac{\mathsf{log1p}\left(y \cdot z\right)}{t}\\ \end{array} \end{array} \]
(FPCore (x y z t)
 :precision binary64
 (if (<= z -1.1e+14) (- x (/ (expm1 z) (/ t y))) (- x (/ (log1p (* y z)) t))))
double code(double x, double y, double z, double t) {
	double tmp;
	if (z <= -1.1e+14) {
		tmp = x - (expm1(z) / (t / y));
	} else {
		tmp = x - (log1p((y * z)) / t);
	}
	return tmp;
}
// Two-regime rewrite: expm1-based branch for z <= -1.1e14,
// log1p-based branch elsewhere.
public static double code(double x, double y, double z, double t) {
	double tmp;
	if (z <= -1.1e+14) {
		tmp = x - (Math.expm1(z) / (t / y));
	} else {
		tmp = x - (Math.log1p((y * z)) / t);
	}
	return tmp;
}
def code(x, y, z, t):
	# Two-regime rewrite: expm1-based branch for z <= -1.1e14,
	# log1p-based branch elsewhere.
	if z <= -1.1e+14:
		return x - math.expm1(z) / (t / y)
	return x - math.log1p(y * z) / t
# Two-regime rewrite: expm1-based branch for z <= -1.1e14,
# log1p-based branch elsewhere.
function code(x, y, z, t)
	tmp = 0.0
	if (z <= -1.1e+14)
		tmp = Float64(x - Float64(expm1(z) / Float64(t / y)));
	else
		tmp = Float64(x - Float64(log1p(Float64(y * z)) / t));
	end
	return tmp
end
code[x_, y_, z_, t_] := If[LessEqual[z, -1.1e+14], N[(x - N[(N[(Exp[z] - 1), $MachinePrecision] / N[(t / y), $MachinePrecision]), $MachinePrecision]), $MachinePrecision], N[(x - N[(N[Log[1 + N[(y * z), $MachinePrecision]], $MachinePrecision] / t), $MachinePrecision]), $MachinePrecision]]
\begin{array}{l}

\\
\begin{array}{l}
\mathbf{if}\;z \leq -1.1 \cdot 10^{+14}:\\
\;\;\;\;x - \frac{\mathsf{expm1}\left(z\right)}{\frac{t}{y}}\\

\mathbf{else}:\\
\;\;\;\;x - \frac{\mathsf{log1p}\left(y \cdot z\right)}{t}\\


\end{array}
\end{array}
Derivation
  1. Split input into 2 regimes
  2. if z < -1.1e14

    1. Initial program 87.9%

      \[x - \frac{\log \left(\left(1 - y\right) + y \cdot e^{z}\right)}{t} \]
    2. Step-by-step derivation
      1. associate-+l-87.9%

        \[\leadsto x - \frac{\log \color{blue}{\left(1 - \left(y - y \cdot e^{z}\right)\right)}}{t} \]
      2. sub-neg87.9%

        \[\leadsto x - \frac{\log \color{blue}{\left(1 + \left(-\left(y - y \cdot e^{z}\right)\right)\right)}}{t} \]
      3. log1p-def99.9%

        \[\leadsto x - \frac{\color{blue}{\mathsf{log1p}\left(-\left(y - y \cdot e^{z}\right)\right)}}{t} \]
      4. neg-sub099.9%

        \[\leadsto x - \frac{\mathsf{log1p}\left(\color{blue}{0 - \left(y - y \cdot e^{z}\right)}\right)}{t} \]
      5. associate-+l-99.9%

        \[\leadsto x - \frac{\mathsf{log1p}\left(\color{blue}{\left(0 - y\right) + y \cdot e^{z}}\right)}{t} \]
      6. neg-sub099.9%

        \[\leadsto x - \frac{\mathsf{log1p}\left(\color{blue}{\left(-y\right)} + y \cdot e^{z}\right)}{t} \]
      7. neg-mul-199.9%

        \[\leadsto x - \frac{\mathsf{log1p}\left(\color{blue}{-1 \cdot y} + y \cdot e^{z}\right)}{t} \]
      8. *-commutative99.9%

        \[\leadsto x - \frac{\mathsf{log1p}\left(-1 \cdot y + \color{blue}{e^{z} \cdot y}\right)}{t} \]
      9. distribute-rgt-out99.9%

        \[\leadsto x - \frac{\mathsf{log1p}\left(\color{blue}{y \cdot \left(-1 + e^{z}\right)}\right)}{t} \]
      10. +-commutative99.9%

        \[\leadsto x - \frac{\mathsf{log1p}\left(y \cdot \color{blue}{\left(e^{z} + -1\right)}\right)}{t} \]
      11. metadata-eval99.9%

        \[\leadsto x - \frac{\mathsf{log1p}\left(y \cdot \left(e^{z} + \color{blue}{\left(-1\right)}\right)\right)}{t} \]
      12. sub-neg99.9%

        \[\leadsto x - \frac{\mathsf{log1p}\left(y \cdot \color{blue}{\left(e^{z} - 1\right)}\right)}{t} \]
      13. expm1-def99.9%

        \[\leadsto x - \frac{\mathsf{log1p}\left(y \cdot \color{blue}{\mathsf{expm1}\left(z\right)}\right)}{t} \]
    3. Simplified99.9%

      \[\leadsto \color{blue}{x - \frac{\mathsf{log1p}\left(y \cdot \mathsf{expm1}\left(z\right)\right)}{t}} \]
    4. Taylor expanded in y around 0 77.3%

      \[\leadsto x - \color{blue}{\frac{\left(e^{z} - 1\right) \cdot y}{t}} \]
    5. Step-by-step derivation
      1. expm1-def77.3%

        \[\leadsto x - \frac{\color{blue}{\mathsf{expm1}\left(z\right)} \cdot y}{t} \]
      2. associate-/l*77.3%

        \[\leadsto x - \color{blue}{\frac{\mathsf{expm1}\left(z\right)}{\frac{t}{y}}} \]
    6. Simplified77.3%

      \[\leadsto x - \color{blue}{\frac{\mathsf{expm1}\left(z\right)}{\frac{t}{y}}} \]

    if -1.1e14 < z

    1. Initial program 53.6%

      \[x - \frac{\log \left(\left(1 - y\right) + y \cdot e^{z}\right)}{t} \]
    2. Step-by-step derivation
      1. associate-+l-76.2%

        \[\leadsto x - \frac{\log \color{blue}{\left(1 - \left(y - y \cdot e^{z}\right)\right)}}{t} \]
      2. sub-neg76.2%

        \[\leadsto x - \frac{\log \color{blue}{\left(1 + \left(-\left(y - y \cdot e^{z}\right)\right)\right)}}{t} \]
      3. log1p-def77.3%

        \[\leadsto x - \frac{\color{blue}{\mathsf{log1p}\left(-\left(y - y \cdot e^{z}\right)\right)}}{t} \]
      4. neg-sub077.3%

        \[\leadsto x - \frac{\mathsf{log1p}\left(\color{blue}{0 - \left(y - y \cdot e^{z}\right)}\right)}{t} \]
      5. associate-+l-77.3%

        \[\leadsto x - \frac{\mathsf{log1p}\left(\color{blue}{\left(0 - y\right) + y \cdot e^{z}}\right)}{t} \]
      6. neg-sub077.3%

        \[\leadsto x - \frac{\mathsf{log1p}\left(\color{blue}{\left(-y\right)} + y \cdot e^{z}\right)}{t} \]
      7. neg-mul-177.3%

        \[\leadsto x - \frac{\mathsf{log1p}\left(\color{blue}{-1 \cdot y} + y \cdot e^{z}\right)}{t} \]
      8. *-commutative77.3%

        \[\leadsto x - \frac{\mathsf{log1p}\left(-1 \cdot y + \color{blue}{e^{z} \cdot y}\right)}{t} \]
      9. distribute-rgt-out77.3%

        \[\leadsto x - \frac{\mathsf{log1p}\left(\color{blue}{y \cdot \left(-1 + e^{z}\right)}\right)}{t} \]
      10. +-commutative77.3%

        \[\leadsto x - \frac{\mathsf{log1p}\left(y \cdot \color{blue}{\left(e^{z} + -1\right)}\right)}{t} \]
      11. metadata-eval77.3%

        \[\leadsto x - \frac{\mathsf{log1p}\left(y \cdot \left(e^{z} + \color{blue}{\left(-1\right)}\right)\right)}{t} \]
      12. sub-neg77.3%

        \[\leadsto x - \frac{\mathsf{log1p}\left(y \cdot \color{blue}{\left(e^{z} - 1\right)}\right)}{t} \]
      13. expm1-def98.6%

        \[\leadsto x - \frac{\mathsf{log1p}\left(y \cdot \color{blue}{\mathsf{expm1}\left(z\right)}\right)}{t} \]
    3. Simplified98.6%

      \[\leadsto \color{blue}{x - \frac{\mathsf{log1p}\left(y \cdot \mathsf{expm1}\left(z\right)\right)}{t}} \]
    4. Taylor expanded in z around 0 96.9%

      \[\leadsto x - \frac{\mathsf{log1p}\left(y \cdot \color{blue}{z}\right)}{t} \]
  3. Recombined 2 regimes into one program.
  4. Final simplification91.7%

    \[\leadsto \begin{array}{l} \mathbf{if}\;z \leq -1.1 \cdot 10^{+14}:\\ \;\;\;\;x - \frac{\mathsf{expm1}\left(z\right)}{\frac{t}{y}}\\ \mathbf{else}:\\ \;\;\;\;x - \frac{\mathsf{log1p}\left(y \cdot z\right)}{t}\\ \end{array} \]

Alternative 5: 90.8% accurate, 1.9× speedup?

\[\begin{array}{l} \\ \begin{array}{l} \mathbf{if}\;z \leq -4 \cdot 10^{+14}:\\ \;\;\;\;x - \frac{y \cdot \mathsf{expm1}\left(z\right)}{t}\\ \mathbf{else}:\\ \;\;\;\;x - \frac{\mathsf{log1p}\left(y \cdot z\right)}{t}\\ \end{array} \end{array} \]
(FPCore (x y z t)
 :precision binary64
 (if (<= z -4e+14) (- x (/ (* y (expm1 z)) t)) (- x (/ (log1p (* y z)) t))))
double code(double x, double y, double z, double t) {
	double tmp;
	if (z <= -4e+14) {
		tmp = x - ((y * expm1(z)) / t);
	} else {
		tmp = x - (log1p((y * z)) / t);
	}
	return tmp;
}
// Two-regime rewrite: y*expm1(z) branch for z <= -4e14,
// log1p-based branch elsewhere.
public static double code(double x, double y, double z, double t) {
	double tmp;
	if (z <= -4e+14) {
		tmp = x - ((y * Math.expm1(z)) / t);
	} else {
		tmp = x - (Math.log1p((y * z)) / t);
	}
	return tmp;
}
def code(x, y, z, t):
	# Two-regime rewrite: y*expm1(z) branch for z <= -4e14,
	# log1p-based branch elsewhere.
	if z <= -4e+14:
		return x - (y * math.expm1(z)) / t
	return x - math.log1p(y * z) / t
# Herbie alternative 5 of x - log((1 - y) + y*exp(z))/t, split at z = -4e14.
function code(x, y, z, t)
	tmp = 0.0
	if (z <= -4e+14)
		# Taylor expansion in y around 0: log1p(y*expm1(z)) ~ y*expm1(z).
		tmp = Float64(x - Float64(Float64(y * expm1(z)) / t));
	else
		# Taylor expansion in z around 0: expm1(z) ~ z.
		tmp = Float64(x - Float64(log1p(Float64(y * z)) / t));
	end
	return tmp
end
(* Herbie alternative 5; expm1/log1p are spelled as Exp[z]-1 and Log[1+...] here. *)
code[x_, y_, z_, t_] := If[LessEqual[z, -4e+14], N[(x - N[(N[(y * N[(Exp[z] - 1), $MachinePrecision]), $MachinePrecision] / t), $MachinePrecision]), $MachinePrecision], N[(x - N[(N[Log[1 + N[(y * z), $MachinePrecision]], $MachinePrecision] / t), $MachinePrecision]), $MachinePrecision]]
\begin{array}{l}

\\
\begin{array}{l}
\mathbf{if}\;z \leq -4 \cdot 10^{+14}:\\
\;\;\;\;x - \frac{y \cdot \mathsf{expm1}\left(z\right)}{t}\\

\mathbf{else}:\\
\;\;\;\;x - \frac{\mathsf{log1p}\left(y \cdot z\right)}{t}\\


\end{array}
\end{array}
Derivation
  1. Split input into 2 regimes
  2. if z < -4e14

    1. Initial program 87.9%

      \[x - \frac{\log \left(\left(1 - y\right) + y \cdot e^{z}\right)}{t} \]
    2. Step-by-step derivation
      1. associate-+l-87.9%

        \[\leadsto x - \frac{\log \color{blue}{\left(1 - \left(y - y \cdot e^{z}\right)\right)}}{t} \]
      2. sub-neg87.9%

        \[\leadsto x - \frac{\log \color{blue}{\left(1 + \left(-\left(y - y \cdot e^{z}\right)\right)\right)}}{t} \]
      3. log1p-def99.9%

        \[\leadsto x - \frac{\color{blue}{\mathsf{log1p}\left(-\left(y - y \cdot e^{z}\right)\right)}}{t} \]
      4. neg-sub099.9%

        \[\leadsto x - \frac{\mathsf{log1p}\left(\color{blue}{0 - \left(y - y \cdot e^{z}\right)}\right)}{t} \]
      5. associate-+l-99.9%

        \[\leadsto x - \frac{\mathsf{log1p}\left(\color{blue}{\left(0 - y\right) + y \cdot e^{z}}\right)}{t} \]
      6. neg-sub099.9%

        \[\leadsto x - \frac{\mathsf{log1p}\left(\color{blue}{\left(-y\right)} + y \cdot e^{z}\right)}{t} \]
      7. neg-mul-199.9%

        \[\leadsto x - \frac{\mathsf{log1p}\left(\color{blue}{-1 \cdot y} + y \cdot e^{z}\right)}{t} \]
      8. *-commutative99.9%

        \[\leadsto x - \frac{\mathsf{log1p}\left(-1 \cdot y + \color{blue}{e^{z} \cdot y}\right)}{t} \]
      9. distribute-rgt-out99.9%

        \[\leadsto x - \frac{\mathsf{log1p}\left(\color{blue}{y \cdot \left(-1 + e^{z}\right)}\right)}{t} \]
      10. +-commutative99.9%

        \[\leadsto x - \frac{\mathsf{log1p}\left(y \cdot \color{blue}{\left(e^{z} + -1\right)}\right)}{t} \]
      11. metadata-eval99.9%

        \[\leadsto x - \frac{\mathsf{log1p}\left(y \cdot \left(e^{z} + \color{blue}{\left(-1\right)}\right)\right)}{t} \]
      12. sub-neg99.9%

        \[\leadsto x - \frac{\mathsf{log1p}\left(y \cdot \color{blue}{\left(e^{z} - 1\right)}\right)}{t} \]
      13. expm1-def99.9%

        \[\leadsto x - \frac{\mathsf{log1p}\left(y \cdot \color{blue}{\mathsf{expm1}\left(z\right)}\right)}{t} \]
    3. Simplified99.9%

      \[\leadsto \color{blue}{x - \frac{\mathsf{log1p}\left(y \cdot \mathsf{expm1}\left(z\right)\right)}{t}} \]
    4. Taylor expanded in y around 0 77.3%

      \[\leadsto x - \frac{\color{blue}{\left(e^{z} - 1\right) \cdot y}}{t} \]
    5. Step-by-step derivation
      1. expm1-def77.3%

        \[\leadsto x - \frac{\color{blue}{\mathsf{expm1}\left(z\right)} \cdot y}{t} \]
      2. *-commutative77.3%

        \[\leadsto x - \frac{\color{blue}{y \cdot \mathsf{expm1}\left(z\right)}}{t} \]
    6. Simplified77.3%

      \[\leadsto x - \frac{\color{blue}{y \cdot \mathsf{expm1}\left(z\right)}}{t} \]

    if -4e14 < z

    1. Initial program 53.6%

      \[x - \frac{\log \left(\left(1 - y\right) + y \cdot e^{z}\right)}{t} \]
    2. Step-by-step derivation
      1. associate-+l-76.2%

        \[\leadsto x - \frac{\log \color{blue}{\left(1 - \left(y - y \cdot e^{z}\right)\right)}}{t} \]
      2. sub-neg76.2%

        \[\leadsto x - \frac{\log \color{blue}{\left(1 + \left(-\left(y - y \cdot e^{z}\right)\right)\right)}}{t} \]
      3. log1p-def77.3%

        \[\leadsto x - \frac{\color{blue}{\mathsf{log1p}\left(-\left(y - y \cdot e^{z}\right)\right)}}{t} \]
      4. neg-sub077.3%

        \[\leadsto x - \frac{\mathsf{log1p}\left(\color{blue}{0 - \left(y - y \cdot e^{z}\right)}\right)}{t} \]
      5. associate-+l-77.3%

        \[\leadsto x - \frac{\mathsf{log1p}\left(\color{blue}{\left(0 - y\right) + y \cdot e^{z}}\right)}{t} \]
      6. neg-sub077.3%

        \[\leadsto x - \frac{\mathsf{log1p}\left(\color{blue}{\left(-y\right)} + y \cdot e^{z}\right)}{t} \]
      7. neg-mul-177.3%

        \[\leadsto x - \frac{\mathsf{log1p}\left(\color{blue}{-1 \cdot y} + y \cdot e^{z}\right)}{t} \]
      8. *-commutative77.3%

        \[\leadsto x - \frac{\mathsf{log1p}\left(-1 \cdot y + \color{blue}{e^{z} \cdot y}\right)}{t} \]
      9. distribute-rgt-out77.3%

        \[\leadsto x - \frac{\mathsf{log1p}\left(\color{blue}{y \cdot \left(-1 + e^{z}\right)}\right)}{t} \]
      10. +-commutative77.3%

        \[\leadsto x - \frac{\mathsf{log1p}\left(y \cdot \color{blue}{\left(e^{z} + -1\right)}\right)}{t} \]
      11. metadata-eval77.3%

        \[\leadsto x - \frac{\mathsf{log1p}\left(y \cdot \left(e^{z} + \color{blue}{\left(-1\right)}\right)\right)}{t} \]
      12. sub-neg77.3%

        \[\leadsto x - \frac{\mathsf{log1p}\left(y \cdot \color{blue}{\left(e^{z} - 1\right)}\right)}{t} \]
      13. expm1-def98.6%

        \[\leadsto x - \frac{\mathsf{log1p}\left(y \cdot \color{blue}{\mathsf{expm1}\left(z\right)}\right)}{t} \]
    3. Simplified98.6%

      \[\leadsto \color{blue}{x - \frac{\mathsf{log1p}\left(y \cdot \mathsf{expm1}\left(z\right)\right)}{t}} \]
    4. Taylor expanded in z around 0 96.9%

      \[\leadsto x - \frac{\mathsf{log1p}\left(y \cdot \color{blue}{z}\right)}{t} \]
  3. Recombined 2 regimes into one program.
  4. Final simplification91.7%

    \[\leadsto \begin{array}{l} \mathbf{if}\;z \leq -4 \cdot 10^{+14}:\\ \;\;\;\;x - \frac{y \cdot \mathsf{expm1}\left(z\right)}{t}\\ \mathbf{else}:\\ \;\;\;\;x - \frac{\mathsf{log1p}\left(y \cdot z\right)}{t}\\ \end{array} \]

Alternative 6: 82.4% accurate, 11.1× speedup?

\[\begin{array}{l} \\ \begin{array}{l} \mathbf{if}\;z \leq -0.072:\\ \;\;\;\;x\\ \mathbf{else}:\\ \;\;\;\;x - \left(\frac{\left(y \cdot 0.5\right) \cdot \left(z \cdot z\right)}{t} + \frac{y}{\frac{t}{z}}\right)\\ \end{array} \end{array} \]
;; Herbie alternative 6: for z <= -0.072 the log correction is dropped entirely;
;; otherwise a second-order Taylor form y*z/t + 0.5*y*z^2/t is used.
(FPCore (x y z t)
 :precision binary64
 (if (<= z -0.072) x (- x (+ (/ (* (* y 0.5) (* z z)) t) (/ y (/ t z))))))
/* Herbie alternative 6 of x - log((1 - y) + y*exp(z))/t.
 * Regime split at z = -0.072. */
double code(double x, double y, double z, double t) {
	double tmp;
	if (z <= -0.072) {
		/* Correction term dropped (report: Taylor expanded in x around inf). */
		tmp = x;
	} else {
		/* Second-order Taylor in z: 0.5*y*z^2/t + y*z/t. */
		tmp = x - ((((y * 0.5) * (z * z)) / t) + (y / (t / z)));
	}
	return tmp;
}
! Herbie alternative 6 of x - log((1 - y) + y*exp(z))/t, split at z = -0.072.
real(8) function code(x, y, z, t)
    real(8), intent (in) :: x
    real(8), intent (in) :: y
    real(8), intent (in) :: z
    real(8), intent (in) :: t
    real(8) :: tmp
    if (z <= (-0.072d0)) then
        ! Correction term dropped (Taylor expanded in x around inf).
        tmp = x
    else
        ! Second-order Taylor in z: 0.5*y*z**2/t + y*z/t.
        tmp = x - ((((y * 0.5d0) * (z * z)) / t) + (y / (t / z)))
    end if
    code = tmp
end function
/**
 * Herbie alternative 6 of x - log((1 - y) + y*exp(z))/t, split at z = -0.072.
 */
public static double code(double x, double y, double z, double t) {
	double tmp;
	if (z <= -0.072) {
		// Correction term dropped (Taylor expanded in x around inf).
		tmp = x;
	} else {
		// Second-order Taylor in z: 0.5*y*z^2/t + y*z/t.
		tmp = x - ((((y * 0.5) * (z * z)) / t) + (y / (t / z)));
	}
	return tmp;
}
def code(x, y, z, t):
	# Herbie alternative 6 of x - log((1 - y) + y*exp(z))/t, split at z = -0.072.
	if z <= -0.072:
		# Correction term dropped (Taylor expanded in x around inf).
		return x
	# Second-order Taylor in z: 0.5*y*z^2/t + y*z/t.
	quadratic = ((y * 0.5) * (z * z)) / t
	linear = y / (t / z)
	return x - (quadratic + linear)
# Herbie alternative 6 of x - log((1 - y) + y*exp(z))/t, split at z = -0.072.
function code(x, y, z, t)
	tmp = 0.0
	if (z <= -0.072)
		# Correction term dropped (Taylor expanded in x around inf).
		tmp = x;
	else
		# Second-order Taylor in z: 0.5*y*z^2/t + y*z/t.
		tmp = Float64(x - Float64(Float64(Float64(Float64(y * 0.5) * Float64(z * z)) / t) + Float64(y / Float64(t / z))));
	end
	return tmp
end
% Herbie alternative 6 of x - log((1 - y) + y*exp(z))/t, split at z = -0.072.
function tmp_2 = code(x, y, z, t)
	tmp = 0.0;
	if (z <= -0.072)
		% Correction term dropped (Taylor expanded in x around inf).
		tmp = x;
	else
		% Second-order Taylor in z: 0.5*y*z^2/t + y*z/t.
		tmp = x - ((((y * 0.5) * (z * z)) / t) + (y / (t / z)));
	end
	tmp_2 = tmp;
end
(* Herbie alternative 6: drops the correction for z <= -0.072, else second-order Taylor in z. *)
code[x_, y_, z_, t_] := If[LessEqual[z, -0.072], x, N[(x - N[(N[(N[(N[(y * 0.5), $MachinePrecision] * N[(z * z), $MachinePrecision]), $MachinePrecision] / t), $MachinePrecision] + N[(y / N[(t / z), $MachinePrecision]), $MachinePrecision]), $MachinePrecision]), $MachinePrecision]]
\begin{array}{l}

\\
\begin{array}{l}
\mathbf{if}\;z \leq -0.072:\\
\;\;\;\;x\\

\mathbf{else}:\\
\;\;\;\;x - \left(\frac{\left(y \cdot 0.5\right) \cdot \left(z \cdot z\right)}{t} + \frac{y}{\frac{t}{z}}\right)\\


\end{array}
\end{array}
Derivation
  1. Split input into 2 regimes
  2. if z < -0.0719999999999999946

    1. Initial program 88.5%

      \[x - \frac{\log \left(\left(1 - y\right) + y \cdot e^{z}\right)}{t} \]
    2. Step-by-step derivation
      1. associate-+l-88.5%

        \[\leadsto x - \frac{\log \color{blue}{\left(1 - \left(y - y \cdot e^{z}\right)\right)}}{t} \]
      2. sub-neg88.5%

        \[\leadsto x - \frac{\log \color{blue}{\left(1 + \left(-\left(y - y \cdot e^{z}\right)\right)\right)}}{t} \]
      3. log1p-def99.9%

        \[\leadsto x - \frac{\color{blue}{\mathsf{log1p}\left(-\left(y - y \cdot e^{z}\right)\right)}}{t} \]
      4. neg-sub099.9%

        \[\leadsto x - \frac{\mathsf{log1p}\left(\color{blue}{0 - \left(y - y \cdot e^{z}\right)}\right)}{t} \]
      5. associate-+l-99.9%

        \[\leadsto x - \frac{\mathsf{log1p}\left(\color{blue}{\left(0 - y\right) + y \cdot e^{z}}\right)}{t} \]
      6. neg-sub099.9%

        \[\leadsto x - \frac{\mathsf{log1p}\left(\color{blue}{\left(-y\right)} + y \cdot e^{z}\right)}{t} \]
      7. neg-mul-199.9%

        \[\leadsto x - \frac{\mathsf{log1p}\left(\color{blue}{-1 \cdot y} + y \cdot e^{z}\right)}{t} \]
      8. *-commutative99.9%

        \[\leadsto x - \frac{\mathsf{log1p}\left(-1 \cdot y + \color{blue}{e^{z} \cdot y}\right)}{t} \]
      9. distribute-rgt-out99.9%

        \[\leadsto x - \frac{\mathsf{log1p}\left(\color{blue}{y \cdot \left(-1 + e^{z}\right)}\right)}{t} \]
      10. +-commutative99.9%

        \[\leadsto x - \frac{\mathsf{log1p}\left(y \cdot \color{blue}{\left(e^{z} + -1\right)}\right)}{t} \]
      11. metadata-eval99.9%

        \[\leadsto x - \frac{\mathsf{log1p}\left(y \cdot \left(e^{z} + \color{blue}{\left(-1\right)}\right)\right)}{t} \]
      12. sub-neg99.9%

        \[\leadsto x - \frac{\mathsf{log1p}\left(y \cdot \color{blue}{\left(e^{z} - 1\right)}\right)}{t} \]
      13. expm1-def99.9%

        \[\leadsto x - \frac{\mathsf{log1p}\left(y \cdot \color{blue}{\mathsf{expm1}\left(z\right)}\right)}{t} \]
    3. Simplified99.9%

      \[\leadsto \color{blue}{x - \frac{\mathsf{log1p}\left(y \cdot \mathsf{expm1}\left(z\right)\right)}{t}} \]
    4. Step-by-step derivation
      1. clear-num99.9%

        \[\leadsto x - \color{blue}{\frac{1}{\frac{t}{\mathsf{log1p}\left(y \cdot \mathsf{expm1}\left(z\right)\right)}}} \]
      2. associate-/r/99.9%

        \[\leadsto x - \color{blue}{\frac{1}{t} \cdot \mathsf{log1p}\left(y \cdot \mathsf{expm1}\left(z\right)\right)} \]
    5. Applied egg-rr99.9%

      \[\leadsto x - \color{blue}{\frac{1}{t} \cdot \mathsf{log1p}\left(y \cdot \mathsf{expm1}\left(z\right)\right)} \]
    6. Taylor expanded in x around inf 69.3%

      \[\leadsto \color{blue}{x} \]

    if -0.0719999999999999946 < z

    1. Initial program 52.6%

      \[x - \frac{\log \left(\left(1 - y\right) + y \cdot e^{z}\right)}{t} \]
    2. Step-by-step derivation
      1. associate-+l-75.7%

        \[\leadsto x - \frac{\log \color{blue}{\left(1 - \left(y - y \cdot e^{z}\right)\right)}}{t} \]
      2. sub-neg75.7%

        \[\leadsto x - \frac{\log \color{blue}{\left(1 + \left(-\left(y - y \cdot e^{z}\right)\right)\right)}}{t} \]
      3. log1p-def76.8%

        \[\leadsto x - \frac{\color{blue}{\mathsf{log1p}\left(-\left(y - y \cdot e^{z}\right)\right)}}{t} \]
      4. neg-sub076.8%

        \[\leadsto x - \frac{\mathsf{log1p}\left(\color{blue}{0 - \left(y - y \cdot e^{z}\right)}\right)}{t} \]
      5. associate-+l-76.8%

        \[\leadsto x - \frac{\mathsf{log1p}\left(\color{blue}{\left(0 - y\right) + y \cdot e^{z}}\right)}{t} \]
      6. neg-sub076.8%

        \[\leadsto x - \frac{\mathsf{log1p}\left(\color{blue}{\left(-y\right)} + y \cdot e^{z}\right)}{t} \]
      7. neg-mul-176.8%

        \[\leadsto x - \frac{\mathsf{log1p}\left(\color{blue}{-1 \cdot y} + y \cdot e^{z}\right)}{t} \]
      8. *-commutative76.8%

        \[\leadsto x - \frac{\mathsf{log1p}\left(-1 \cdot y + \color{blue}{e^{z} \cdot y}\right)}{t} \]
      9. distribute-rgt-out76.8%

        \[\leadsto x - \frac{\mathsf{log1p}\left(\color{blue}{y \cdot \left(-1 + e^{z}\right)}\right)}{t} \]
      10. +-commutative76.8%

        \[\leadsto x - \frac{\mathsf{log1p}\left(y \cdot \color{blue}{\left(e^{z} + -1\right)}\right)}{t} \]
      11. metadata-eval76.8%

        \[\leadsto x - \frac{\mathsf{log1p}\left(y \cdot \left(e^{z} + \color{blue}{\left(-1\right)}\right)\right)}{t} \]
      12. sub-neg76.8%

        \[\leadsto x - \frac{\mathsf{log1p}\left(y \cdot \color{blue}{\left(e^{z} - 1\right)}\right)}{t} \]
      13. expm1-def98.6%

        \[\leadsto x - \frac{\mathsf{log1p}\left(y \cdot \color{blue}{\mathsf{expm1}\left(z\right)}\right)}{t} \]
    3. Simplified98.6%

      \[\leadsto \color{blue}{x - \frac{\mathsf{log1p}\left(y \cdot \mathsf{expm1}\left(z\right)\right)}{t}} \]
    4. Taylor expanded in y around 0 76.2%

      \[\leadsto x - \color{blue}{\frac{\left(e^{z} - 1\right) \cdot y}{t}} \]
    5. Step-by-step derivation
      1. expm1-def91.1%

        \[\leadsto x - \frac{\color{blue}{\mathsf{expm1}\left(z\right)} \cdot y}{t} \]
      2. associate-/l*87.9%

        \[\leadsto x - \color{blue}{\frac{\mathsf{expm1}\left(z\right)}{\frac{t}{y}}} \]
    6. Simplified87.9%

      \[\leadsto x - \color{blue}{\frac{\mathsf{expm1}\left(z\right)}{\frac{t}{y}}} \]
    7. Taylor expanded in z around 0 90.7%

      \[\leadsto x - \color{blue}{\left(\frac{y \cdot z}{t} + 0.5 \cdot \frac{y \cdot {z}^{2}}{t}\right)} \]
    8. Step-by-step derivation
      1. +-commutative90.7%

        \[\leadsto x - \color{blue}{\left(0.5 \cdot \frac{y \cdot {z}^{2}}{t} + \frac{y \cdot z}{t}\right)} \]
      2. associate-*r/90.7%

        \[\leadsto x - \left(\color{blue}{\frac{0.5 \cdot \left(y \cdot {z}^{2}\right)}{t}} + \frac{y \cdot z}{t}\right) \]
      3. associate-*r*90.7%

        \[\leadsto x - \left(\frac{\color{blue}{\left(0.5 \cdot y\right) \cdot {z}^{2}}}{t} + \frac{y \cdot z}{t}\right) \]
      4. *-commutative90.7%

        \[\leadsto x - \left(\frac{\color{blue}{\left(y \cdot 0.5\right)} \cdot {z}^{2}}{t} + \frac{y \cdot z}{t}\right) \]
      5. unpow290.7%

        \[\leadsto x - \left(\frac{\left(y \cdot 0.5\right) \cdot \color{blue}{\left(z \cdot z\right)}}{t} + \frac{y \cdot z}{t}\right) \]
      6. associate-/l*90.3%

        \[\leadsto x - \left(\frac{\left(y \cdot 0.5\right) \cdot \left(z \cdot z\right)}{t} + \color{blue}{\frac{y}{\frac{t}{z}}}\right) \]
    9. Simplified90.3%

      \[\leadsto x - \color{blue}{\left(\frac{\left(y \cdot 0.5\right) \cdot \left(z \cdot z\right)}{t} + \frac{y}{\frac{t}{z}}\right)} \]
  3. Recombined 2 regimes into one program.
  4. Final simplification84.4%

    \[\leadsto \begin{array}{l} \mathbf{if}\;z \leq -0.072:\\ \;\;\;\;x\\ \mathbf{else}:\\ \;\;\;\;x - \left(\frac{\left(y \cdot 0.5\right) \cdot \left(z \cdot z\right)}{t} + \frac{y}{\frac{t}{z}}\right)\\ \end{array} \]

Alternative 7: 82.4% accurate, 12.4× speedup?

\[\begin{array}{l} \\ \begin{array}{l} \mathbf{if}\;z \leq -5 \cdot 10^{-5}:\\ \;\;\;\;x\\ \mathbf{else}:\\ \;\;\;\;x - y \cdot \left(\frac{z}{t} + 0.5 \cdot \frac{z \cdot z}{t}\right)\\ \end{array} \end{array} \]
;; Herbie alternative 7: drop correction for z <= -5e-5; else y*(z/t + 0.5*z^2/t).
(FPCore (x y z t)
 :precision binary64
 (if (<= z -5e-5) x (- x (* y (+ (/ z t) (* 0.5 (/ (* z z) t)))))))
/* Herbie alternative 7 of x - log((1 - y) + y*exp(z))/t.
 * Regime split at z = -5e-5. */
double code(double x, double y, double z, double t) {
	double tmp;
	if (z <= -5e-5) {
		/* Correction term dropped (Taylor expanded in x around inf). */
		tmp = x;
	} else {
		/* Second-order Taylor in z, factored as y*(z/t + 0.5*z^2/t). */
		tmp = x - (y * ((z / t) + (0.5 * ((z * z) / t))));
	}
	return tmp;
}
! Herbie alternative 7 of x - log((1 - y) + y*exp(z))/t, split at z = -5e-5.
real(8) function code(x, y, z, t)
    real(8), intent (in) :: x
    real(8), intent (in) :: y
    real(8), intent (in) :: z
    real(8), intent (in) :: t
    real(8) :: tmp
    if (z <= (-5d-5)) then
        ! Correction term dropped (Taylor expanded in x around inf).
        tmp = x
    else
        ! Second-order Taylor in z, factored as y*(z/t + 0.5*z**2/t).
        tmp = x - (y * ((z / t) + (0.5d0 * ((z * z) / t))))
    end if
    code = tmp
end function
/**
 * Herbie alternative 7 of x - log((1 - y) + y*exp(z))/t, split at z = -5e-5.
 */
public static double code(double x, double y, double z, double t) {
	double tmp;
	if (z <= -5e-5) {
		// Correction term dropped (Taylor expanded in x around inf).
		tmp = x;
	} else {
		// Second-order Taylor in z, factored as y*(z/t + 0.5*z^2/t).
		tmp = x - (y * ((z / t) + (0.5 * ((z * z) / t))));
	}
	return tmp;
}
def code(x, y, z, t):
	# Herbie alternative 7 of x - log((1 - y) + y*exp(z))/t, split at z = -5e-5.
	if z <= -5e-5:
		# Correction term dropped (Taylor expanded in x around inf).
		return x
	# Second-order Taylor in z, factored as y*(z/t + 0.5*z^2/t).
	linear = z / t
	quadratic = 0.5 * ((z * z) / t)
	return x - y * (linear + quadratic)
# Herbie alternative 7 of x - log((1 - y) + y*exp(z))/t, split at z = -5e-5.
function code(x, y, z, t)
	tmp = 0.0
	if (z <= -5e-5)
		# Correction term dropped (Taylor expanded in x around inf).
		tmp = x;
	else
		# Second-order Taylor in z, factored as y*(z/t + 0.5*z^2/t).
		tmp = Float64(x - Float64(y * Float64(Float64(z / t) + Float64(0.5 * Float64(Float64(z * z) / t)))));
	end
	return tmp
end
% Herbie alternative 7 of x - log((1 - y) + y*exp(z))/t, split at z = -5e-5.
function tmp_2 = code(x, y, z, t)
	tmp = 0.0;
	if (z <= -5e-5)
		% Correction term dropped (Taylor expanded in x around inf).
		tmp = x;
	else
		% Second-order Taylor in z, factored as y*(z/t + 0.5*z^2/t).
		tmp = x - (y * ((z / t) + (0.5 * ((z * z) / t))));
	end
	tmp_2 = tmp;
end
(* Herbie alternative 7: drops the correction for z <= -5e-5, else y*(z/t + 0.5*z^2/t). *)
code[x_, y_, z_, t_] := If[LessEqual[z, -5e-5], x, N[(x - N[(y * N[(N[(z / t), $MachinePrecision] + N[(0.5 * N[(N[(z * z), $MachinePrecision] / t), $MachinePrecision]), $MachinePrecision]), $MachinePrecision]), $MachinePrecision]), $MachinePrecision]]
\begin{array}{l}

\\
\begin{array}{l}
\mathbf{if}\;z \leq -5 \cdot 10^{-5}:\\
\;\;\;\;x\\

\mathbf{else}:\\
\;\;\;\;x - y \cdot \left(\frac{z}{t} + 0.5 \cdot \frac{z \cdot z}{t}\right)\\


\end{array}
\end{array}
Derivation
  1. Split input into 2 regimes
  2. if z < -5.00000000000000024e-5

    1. Initial program 88.5%

      \[x - \frac{\log \left(\left(1 - y\right) + y \cdot e^{z}\right)}{t} \]
    2. Step-by-step derivation
      1. associate-+l-88.5%

        \[\leadsto x - \frac{\log \color{blue}{\left(1 - \left(y - y \cdot e^{z}\right)\right)}}{t} \]
      2. sub-neg88.5%

        \[\leadsto x - \frac{\log \color{blue}{\left(1 + \left(-\left(y - y \cdot e^{z}\right)\right)\right)}}{t} \]
      3. log1p-def99.9%

        \[\leadsto x - \frac{\color{blue}{\mathsf{log1p}\left(-\left(y - y \cdot e^{z}\right)\right)}}{t} \]
      4. neg-sub099.9%

        \[\leadsto x - \frac{\mathsf{log1p}\left(\color{blue}{0 - \left(y - y \cdot e^{z}\right)}\right)}{t} \]
      5. associate-+l-99.9%

        \[\leadsto x - \frac{\mathsf{log1p}\left(\color{blue}{\left(0 - y\right) + y \cdot e^{z}}\right)}{t} \]
      6. neg-sub099.9%

        \[\leadsto x - \frac{\mathsf{log1p}\left(\color{blue}{\left(-y\right)} + y \cdot e^{z}\right)}{t} \]
      7. neg-mul-199.9%

        \[\leadsto x - \frac{\mathsf{log1p}\left(\color{blue}{-1 \cdot y} + y \cdot e^{z}\right)}{t} \]
      8. *-commutative99.9%

        \[\leadsto x - \frac{\mathsf{log1p}\left(-1 \cdot y + \color{blue}{e^{z} \cdot y}\right)}{t} \]
      9. distribute-rgt-out99.9%

        \[\leadsto x - \frac{\mathsf{log1p}\left(\color{blue}{y \cdot \left(-1 + e^{z}\right)}\right)}{t} \]
      10. +-commutative99.9%

        \[\leadsto x - \frac{\mathsf{log1p}\left(y \cdot \color{blue}{\left(e^{z} + -1\right)}\right)}{t} \]
      11. metadata-eval99.9%

        \[\leadsto x - \frac{\mathsf{log1p}\left(y \cdot \left(e^{z} + \color{blue}{\left(-1\right)}\right)\right)}{t} \]
      12. sub-neg99.9%

        \[\leadsto x - \frac{\mathsf{log1p}\left(y \cdot \color{blue}{\left(e^{z} - 1\right)}\right)}{t} \]
      13. expm1-def99.9%

        \[\leadsto x - \frac{\mathsf{log1p}\left(y \cdot \color{blue}{\mathsf{expm1}\left(z\right)}\right)}{t} \]
    3. Simplified99.9%

      \[\leadsto \color{blue}{x - \frac{\mathsf{log1p}\left(y \cdot \mathsf{expm1}\left(z\right)\right)}{t}} \]
    4. Step-by-step derivation
      1. clear-num99.9%

        \[\leadsto x - \color{blue}{\frac{1}{\frac{t}{\mathsf{log1p}\left(y \cdot \mathsf{expm1}\left(z\right)\right)}}} \]
      2. associate-/r/99.9%

        \[\leadsto x - \color{blue}{\frac{1}{t} \cdot \mathsf{log1p}\left(y \cdot \mathsf{expm1}\left(z\right)\right)} \]
    5. Applied egg-rr99.9%

      \[\leadsto x - \color{blue}{\frac{1}{t} \cdot \mathsf{log1p}\left(y \cdot \mathsf{expm1}\left(z\right)\right)} \]
    6. Taylor expanded in x around inf 69.3%

      \[\leadsto \color{blue}{x} \]

    if -5.00000000000000024e-5 < z

    1. Initial program 52.6%

      \[x - \frac{\log \left(\left(1 - y\right) + y \cdot e^{z}\right)}{t} \]
    2. Step-by-step derivation
      1. associate-+l-75.7%

        \[\leadsto x - \frac{\log \color{blue}{\left(1 - \left(y - y \cdot e^{z}\right)\right)}}{t} \]
      2. sub-neg75.7%

        \[\leadsto x - \frac{\log \color{blue}{\left(1 + \left(-\left(y - y \cdot e^{z}\right)\right)\right)}}{t} \]
      3. log1p-def76.8%

        \[\leadsto x - \frac{\color{blue}{\mathsf{log1p}\left(-\left(y - y \cdot e^{z}\right)\right)}}{t} \]
      4. neg-sub076.8%

        \[\leadsto x - \frac{\mathsf{log1p}\left(\color{blue}{0 - \left(y - y \cdot e^{z}\right)}\right)}{t} \]
      5. associate-+l-76.8%

        \[\leadsto x - \frac{\mathsf{log1p}\left(\color{blue}{\left(0 - y\right) + y \cdot e^{z}}\right)}{t} \]
      6. neg-sub076.8%

        \[\leadsto x - \frac{\mathsf{log1p}\left(\color{blue}{\left(-y\right)} + y \cdot e^{z}\right)}{t} \]
      7. neg-mul-176.8%

        \[\leadsto x - \frac{\mathsf{log1p}\left(\color{blue}{-1 \cdot y} + y \cdot e^{z}\right)}{t} \]
      8. *-commutative76.8%

        \[\leadsto x - \frac{\mathsf{log1p}\left(-1 \cdot y + \color{blue}{e^{z} \cdot y}\right)}{t} \]
      9. distribute-rgt-out76.8%

        \[\leadsto x - \frac{\mathsf{log1p}\left(\color{blue}{y \cdot \left(-1 + e^{z}\right)}\right)}{t} \]
      10. +-commutative76.8%

        \[\leadsto x - \frac{\mathsf{log1p}\left(y \cdot \color{blue}{\left(e^{z} + -1\right)}\right)}{t} \]
      11. metadata-eval76.8%

        \[\leadsto x - \frac{\mathsf{log1p}\left(y \cdot \left(e^{z} + \color{blue}{\left(-1\right)}\right)\right)}{t} \]
      12. sub-neg76.8%

        \[\leadsto x - \frac{\mathsf{log1p}\left(y \cdot \color{blue}{\left(e^{z} - 1\right)}\right)}{t} \]
      13. expm1-def98.6%

        \[\leadsto x - \frac{\mathsf{log1p}\left(y \cdot \color{blue}{\mathsf{expm1}\left(z\right)}\right)}{t} \]
    3. Simplified98.6%

      \[\leadsto \color{blue}{x - \frac{\mathsf{log1p}\left(y \cdot \mathsf{expm1}\left(z\right)\right)}{t}} \]
    4. Taylor expanded in z around 0 98.3%

      \[\leadsto x - \frac{\mathsf{log1p}\left(y \cdot \color{blue}{\left(0.5 \cdot {z}^{2} + z\right)}\right)}{t} \]
    5. Step-by-step derivation
      1. fma-def98.3%

        \[\leadsto x - \frac{\mathsf{log1p}\left(y \cdot \color{blue}{\mathsf{fma}\left(0.5, {z}^{2}, z\right)}\right)}{t} \]
      2. unpow298.3%

        \[\leadsto x - \frac{\mathsf{log1p}\left(y \cdot \mathsf{fma}\left(0.5, \color{blue}{z \cdot z}, z\right)\right)}{t} \]
    6. Simplified98.3%

      \[\leadsto x - \frac{\mathsf{log1p}\left(y \cdot \color{blue}{\mathsf{fma}\left(0.5, z \cdot z, z\right)}\right)}{t} \]
    7. Taylor expanded in y around 0 90.1%

      \[\leadsto \color{blue}{-1 \cdot \left(y \cdot \left(\frac{z}{t} + 0.5 \cdot \frac{{z}^{2}}{t}\right)\right) + x} \]
    8. Step-by-step derivation
      1. +-commutative90.1%

        \[\leadsto \color{blue}{x + -1 \cdot \left(y \cdot \left(\frac{z}{t} + 0.5 \cdot \frac{{z}^{2}}{t}\right)\right)} \]
      2. mul-1-neg90.1%

        \[\leadsto x + \color{blue}{\left(-y \cdot \left(\frac{z}{t} + 0.5 \cdot \frac{{z}^{2}}{t}\right)\right)} \]
      3. unsub-neg90.1%

        \[\leadsto \color{blue}{x - y \cdot \left(\frac{z}{t} + 0.5 \cdot \frac{{z}^{2}}{t}\right)} \]
      4. unpow290.1%

        \[\leadsto x - y \cdot \left(\frac{z}{t} + 0.5 \cdot \frac{\color{blue}{z \cdot z}}{t}\right) \]
    9. Simplified90.1%

      \[\leadsto \color{blue}{x - y \cdot \left(\frac{z}{t} + 0.5 \cdot \frac{z \cdot z}{t}\right)} \]
  3. Recombined 2 regimes into one program.
  4. Final simplification84.3%

    \[\leadsto \begin{array}{l} \mathbf{if}\;z \leq -5 \cdot 10^{-5}:\\ \;\;\;\;x\\ \mathbf{else}:\\ \;\;\;\;x - y \cdot \left(\frac{z}{t} + 0.5 \cdot \frac{z \cdot z}{t}\right)\\ \end{array} \]

Alternative 8: 79.2% accurate, 23.3× speedup?

\[\begin{array}{l} \\ \begin{array}{l} \mathbf{if}\;z \leq -0.0004:\\ \;\;\;\;x\\ \mathbf{else}:\\ \;\;\;\;x - z \cdot \frac{y}{t}\\ \end{array} \end{array} \]
;; Herbie alternative 8: drop correction for z <= -0.0004; else first-order Taylor z*(y/t).
(FPCore (x y z t)
 :precision binary64
 (if (<= z -0.0004) x (- x (* z (/ y t)))))
/* Herbie alternative 8 of x - log((1 - y) + y*exp(z))/t.
 * Regime split at z = -0.0004. */
double code(double x, double y, double z, double t) {
	double tmp;
	if (z <= -0.0004) {
		/* Correction term dropped (Taylor expanded in x around inf). */
		tmp = x;
	} else {
		/* First-order Taylor in z: y*z/t, rewritten as z*(y/t). */
		tmp = x - (z * (y / t));
	}
	return tmp;
}
! Herbie alternative 8 of x - log((1 - y) + y*exp(z))/t, split at z = -0.0004.
real(8) function code(x, y, z, t)
    real(8), intent (in) :: x
    real(8), intent (in) :: y
    real(8), intent (in) :: z
    real(8), intent (in) :: t
    real(8) :: tmp
    if (z <= (-0.0004d0)) then
        ! Correction term dropped (Taylor expanded in x around inf).
        tmp = x
    else
        ! First-order Taylor in z: y*z/t, rewritten as z*(y/t).
        tmp = x - (z * (y / t))
    end if
    code = tmp
end function
/**
 * Herbie alternative 8 of x - log((1 - y) + y*exp(z))/t, split at z = -0.0004.
 */
public static double code(double x, double y, double z, double t) {
	double tmp;
	if (z <= -0.0004) {
		// Correction term dropped (Taylor expanded in x around inf).
		tmp = x;
	} else {
		// First-order Taylor in z: y*z/t, rewritten as z*(y/t).
		tmp = x - (z * (y / t));
	}
	return tmp;
}
def code(x, y, z, t):
	# Herbie alternative 8 of x - log((1 - y) + y*exp(z))/t, split at z = -0.0004.
	if z <= -0.0004:
		# Correction term dropped (Taylor expanded in x around inf).
		return x
	# First-order Taylor in z: y*z/t, rewritten as z*(y/t).
	return x - z * (y / t)
# Herbie alternative 8 of x - log((1 - y) + y*exp(z))/t, split at z = -0.0004.
function code(x, y, z, t)
	tmp = 0.0
	if (z <= -0.0004)
		# Correction term dropped (Taylor expanded in x around inf).
		tmp = x;
	else
		# First-order Taylor in z: y*z/t, rewritten as z*(y/t).
		tmp = Float64(x - Float64(z * Float64(y / t)));
	end
	return tmp
end
% Herbie alternative 8 of x - log((1 - y) + y*exp(z))/t, split at z = -0.0004.
function tmp_2 = code(x, y, z, t)
	tmp = 0.0;
	if (z <= -0.0004)
		% Correction term dropped (Taylor expanded in x around inf).
		tmp = x;
	else
		% First-order Taylor in z: y*z/t, rewritten as z*(y/t).
		tmp = x - (z * (y / t));
	end
	tmp_2 = tmp;
end
(* Herbie alternative 8: drops the correction for z <= -0.0004, else first-order Taylor z*(y/t). *)
code[x_, y_, z_, t_] := If[LessEqual[z, -0.0004], x, N[(x - N[(z * N[(y / t), $MachinePrecision]), $MachinePrecision]), $MachinePrecision]]
\begin{array}{l}

\\
\begin{array}{l}
\mathbf{if}\;z \leq -0.0004:\\
\;\;\;\;x\\

\mathbf{else}:\\
\;\;\;\;x - z \cdot \frac{y}{t}\\


\end{array}
\end{array}
Derivation
  1. Split input into 2 regimes
  2. if z < -4.00000000000000019e-4

    1. Initial program 88.5%

      \[x - \frac{\log \left(\left(1 - y\right) + y \cdot e^{z}\right)}{t} \]
    2. Step-by-step derivation
      1. associate-+l-88.5%

        \[\leadsto x - \frac{\log \color{blue}{\left(1 - \left(y - y \cdot e^{z}\right)\right)}}{t} \]
      2. sub-neg88.5%

        \[\leadsto x - \frac{\log \color{blue}{\left(1 + \left(-\left(y - y \cdot e^{z}\right)\right)\right)}}{t} \]
      3. log1p-def99.9%

        \[\leadsto x - \frac{\color{blue}{\mathsf{log1p}\left(-\left(y - y \cdot e^{z}\right)\right)}}{t} \]
      4. neg-sub099.9%

        \[\leadsto x - \frac{\mathsf{log1p}\left(\color{blue}{0 - \left(y - y \cdot e^{z}\right)}\right)}{t} \]
      5. associate-+l-99.9%

        \[\leadsto x - \frac{\mathsf{log1p}\left(\color{blue}{\left(0 - y\right) + y \cdot e^{z}}\right)}{t} \]
      6. neg-sub099.9%

        \[\leadsto x - \frac{\mathsf{log1p}\left(\color{blue}{\left(-y\right)} + y \cdot e^{z}\right)}{t} \]
      7. neg-mul-199.9%

        \[\leadsto x - \frac{\mathsf{log1p}\left(\color{blue}{-1 \cdot y} + y \cdot e^{z}\right)}{t} \]
      8. *-commutative99.9%

        \[\leadsto x - \frac{\mathsf{log1p}\left(-1 \cdot y + \color{blue}{e^{z} \cdot y}\right)}{t} \]
      9. distribute-rgt-out99.9%

        \[\leadsto x - \frac{\mathsf{log1p}\left(\color{blue}{y \cdot \left(-1 + e^{z}\right)}\right)}{t} \]
      10. +-commutative99.9%

        \[\leadsto x - \frac{\mathsf{log1p}\left(y \cdot \color{blue}{\left(e^{z} + -1\right)}\right)}{t} \]
      11. metadata-eval99.9%

        \[\leadsto x - \frac{\mathsf{log1p}\left(y \cdot \left(e^{z} + \color{blue}{\left(-1\right)}\right)\right)}{t} \]
      12. sub-neg99.9%

        \[\leadsto x - \frac{\mathsf{log1p}\left(y \cdot \color{blue}{\left(e^{z} - 1\right)}\right)}{t} \]
      13. expm1-def99.9%

        \[\leadsto x - \frac{\mathsf{log1p}\left(y \cdot \color{blue}{\mathsf{expm1}\left(z\right)}\right)}{t} \]
    3. Simplified99.9%

      \[\leadsto \color{blue}{x - \frac{\mathsf{log1p}\left(y \cdot \mathsf{expm1}\left(z\right)\right)}{t}} \]
    4. Step-by-step derivation
      1. clear-num99.9%

        \[\leadsto x - \color{blue}{\frac{1}{\frac{t}{\mathsf{log1p}\left(y \cdot \mathsf{expm1}\left(z\right)\right)}}} \]
      2. associate-/r/99.9%

        \[\leadsto x - \color{blue}{\frac{1}{t} \cdot \mathsf{log1p}\left(y \cdot \mathsf{expm1}\left(z\right)\right)} \]
    5. Applied egg-rr99.9%

      \[\leadsto x - \color{blue}{\frac{1}{t} \cdot \mathsf{log1p}\left(y \cdot \mathsf{expm1}\left(z\right)\right)} \]
    6. Taylor expanded in x around inf 69.3%

      \[\leadsto \color{blue}{x} \]

    if -4.00000000000000019e-4 < z

    1. Initial program 52.6%

      \[x - \frac{\log \left(\left(1 - y\right) + y \cdot e^{z}\right)}{t} \]
    2. Step-by-step derivation
      1. associate-+l-75.7%

        \[\leadsto x - \frac{\log \color{blue}{\left(1 - \left(y - y \cdot e^{z}\right)\right)}}{t} \]
      2. sub-neg75.7%

        \[\leadsto x - \frac{\log \color{blue}{\left(1 + \left(-\left(y - y \cdot e^{z}\right)\right)\right)}}{t} \]
      3. log1p-def76.8%

        \[\leadsto x - \frac{\color{blue}{\mathsf{log1p}\left(-\left(y - y \cdot e^{z}\right)\right)}}{t} \]
      4. neg-sub076.8%

        \[\leadsto x - \frac{\mathsf{log1p}\left(\color{blue}{0 - \left(y - y \cdot e^{z}\right)}\right)}{t} \]
      5. associate-+l-76.8%

        \[\leadsto x - \frac{\mathsf{log1p}\left(\color{blue}{\left(0 - y\right) + y \cdot e^{z}}\right)}{t} \]
      6. neg-sub076.8%

        \[\leadsto x - \frac{\mathsf{log1p}\left(\color{blue}{\left(-y\right)} + y \cdot e^{z}\right)}{t} \]
      7. neg-mul-176.8%

        \[\leadsto x - \frac{\mathsf{log1p}\left(\color{blue}{-1 \cdot y} + y \cdot e^{z}\right)}{t} \]
      8. *-commutative76.8%

        \[\leadsto x - \frac{\mathsf{log1p}\left(-1 \cdot y + \color{blue}{e^{z} \cdot y}\right)}{t} \]
      9. distribute-rgt-out76.8%

        \[\leadsto x - \frac{\mathsf{log1p}\left(\color{blue}{y \cdot \left(-1 + e^{z}\right)}\right)}{t} \]
      10. +-commutative76.8%

        \[\leadsto x - \frac{\mathsf{log1p}\left(y \cdot \color{blue}{\left(e^{z} + -1\right)}\right)}{t} \]
      11. metadata-eval76.8%

        \[\leadsto x - \frac{\mathsf{log1p}\left(y \cdot \left(e^{z} + \color{blue}{\left(-1\right)}\right)\right)}{t} \]
      12. sub-neg76.8%

        \[\leadsto x - \frac{\mathsf{log1p}\left(y \cdot \color{blue}{\left(e^{z} - 1\right)}\right)}{t} \]
      13. expm1-def98.6%

        \[\leadsto x - \frac{\mathsf{log1p}\left(y \cdot \color{blue}{\mathsf{expm1}\left(z\right)}\right)}{t} \]
    3. Simplified98.6%

      \[\leadsto \color{blue}{x - \frac{\mathsf{log1p}\left(y \cdot \mathsf{expm1}\left(z\right)\right)}{t}} \]
    4. Taylor expanded in z around 0 89.8%

      \[\leadsto x - \color{blue}{\frac{y \cdot z}{t}} \]
    5. Step-by-step derivation
      1. associate-/l*89.9%

        \[\leadsto x - \color{blue}{\frac{y}{\frac{t}{z}}} \]
      2. associate-/r/85.7%

        \[\leadsto x - \color{blue}{\frac{y}{t} \cdot z} \]
    6. Simplified85.7%

      \[\leadsto x - \color{blue}{\frac{y}{t} \cdot z} \]
  3. Recombined 2 regimes into one program.
  4. Final simplification81.1%

    \[\leadsto \begin{array}{l} \mathbf{if}\;z \leq -0.0004:\\ \;\;\;\;x\\ \mathbf{else}:\\ \;\;\;\;x - z \cdot \frac{y}{t}\\ \end{array} \]

Alternative 9: 82.3% accurate, 23.3× speedup?

\[\begin{array}{l} \\ \begin{array}{l} \mathbf{if}\;z \leq -0.056:\\ \;\;\;\;x\\ \mathbf{else}:\\ \;\;\;\;x - \frac{y}{\frac{t}{z}}\\ \end{array} \end{array} \]
;; Herbie alternative 9: drop correction for z <= -0.056; else first-order Taylor y/(t/z).
(FPCore (x y z t) :precision binary64 (if (<= z -0.056) x (- x (/ y (/ t z)))))
/* Herbie alternative 9 of x - log((1 - y) + y*exp(z))/t.
 * Regime split at z = -0.056. */
double code(double x, double y, double z, double t) {
	double tmp;
	if (z <= -0.056) {
		/* Correction term dropped (Taylor expanded in x around inf). */
		tmp = x;
	} else {
		/* First-order Taylor in z: y*z/t, reassociated as y/(t/z). */
		tmp = x - (y / (t / z));
	}
	return tmp;
}
! Herbie alternative 9 of x - log((1 - y) + y*exp(z))/t, split at z = -0.056.
real(8) function code(x, y, z, t)
    real(8), intent (in) :: x
    real(8), intent (in) :: y
    real(8), intent (in) :: z
    real(8), intent (in) :: t
    real(8) :: tmp
    if (z <= (-0.056d0)) then
        ! Correction term dropped (Taylor expanded in x around inf).
        tmp = x
    else
        ! First-order Taylor in z: y*z/t, reassociated as y/(t/z).
        tmp = x - (y / (t / z))
    end if
    code = tmp
end function
/**
 * Herbie alternative 9 of x - log((1 - y) + y*exp(z))/t, split at z = -0.056.
 */
public static double code(double x, double y, double z, double t) {
	double tmp;
	if (z <= -0.056) {
		// Correction term dropped (Taylor expanded in x around inf).
		tmp = x;
	} else {
		// First-order Taylor in z: y*z/t, reassociated as y/(t/z).
		tmp = x - (y / (t / z));
	}
	return tmp;
}
def code(x, y, z, t):
	# Herbie alternative 9 of x - log((1 - y) + y*exp(z))/t, split at z = -0.056.
	if z <= -0.056:
		# Correction term dropped (Taylor expanded in x around inf).
		return x
	# First-order Taylor in z: y*z/t, reassociated as y/(t/z).
	return x - y / (t / z)
# Herbie alternative 9 of x - log((1 - y) + y*exp(z))/t, split at z = -0.056.
function code(x, y, z, t)
	tmp = 0.0
	if (z <= -0.056)
		# Correction term dropped (Taylor expanded in x around inf).
		tmp = x;
	else
		# First-order Taylor in z: y*z/t, reassociated as y/(t/z).
		tmp = Float64(x - Float64(y / Float64(t / z)));
	end
	return tmp
end
% Herbie alternative 9 for x - log((1 - y) + y*exp(z)) / t.
% Below the regime split (z <= -0.056) the correction term is dropped
% and x is returned unchanged; otherwise x - y/(t/z) is used.
function tmp_2 = code(x, y, z, t)
	tmp = 0.0;
	if (z <= -0.056)
		tmp = x;
	else
		tmp = x - (y / (t / z));
	end
	tmp_2 = tmp;
end
(* Herbie alternative 9: returns x when z <= -0.056, otherwise the
   linearized x - y/(t/z), each product/quotient at $MachinePrecision. *)
code[x_, y_, z_, t_] := If[LessEqual[z, -0.056], x, N[(x - N[(y / N[(t / z), $MachinePrecision]), $MachinePrecision]), $MachinePrecision]]
\begin{array}{l}

\\
\begin{array}{l}
\mathbf{if}\;z \leq -0.056:\\
\;\;\;\;x\\

\mathbf{else}:\\
\;\;\;\;x - \frac{y}{\frac{t}{z}}\\


\end{array}
\end{array}
Derivation
  1. Split input into 2 regimes
  2. if z < -0.0560000000000000012

    1. Initial program 88.5%

      \[x - \frac{\log \left(\left(1 - y\right) + y \cdot e^{z}\right)}{t} \]
    2. Step-by-step derivation
      1. associate-+l-88.5%

        \[\leadsto x - \frac{\log \color{blue}{\left(1 - \left(y - y \cdot e^{z}\right)\right)}}{t} \]
      2. sub-neg88.5%

        \[\leadsto x - \frac{\log \color{blue}{\left(1 + \left(-\left(y - y \cdot e^{z}\right)\right)\right)}}{t} \]
      3. log1p-def99.9%

        \[\leadsto x - \frac{\color{blue}{\mathsf{log1p}\left(-\left(y - y \cdot e^{z}\right)\right)}}{t} \]
      4. neg-sub099.9%

        \[\leadsto x - \frac{\mathsf{log1p}\left(\color{blue}{0 - \left(y - y \cdot e^{z}\right)}\right)}{t} \]
      5. associate-+l-99.9%

        \[\leadsto x - \frac{\mathsf{log1p}\left(\color{blue}{\left(0 - y\right) + y \cdot e^{z}}\right)}{t} \]
      6. neg-sub099.9%

        \[\leadsto x - \frac{\mathsf{log1p}\left(\color{blue}{\left(-y\right)} + y \cdot e^{z}\right)}{t} \]
      7. neg-mul-199.9%

        \[\leadsto x - \frac{\mathsf{log1p}\left(\color{blue}{-1 \cdot y} + y \cdot e^{z}\right)}{t} \]
      8. *-commutative99.9%

        \[\leadsto x - \frac{\mathsf{log1p}\left(-1 \cdot y + \color{blue}{e^{z} \cdot y}\right)}{t} \]
      9. distribute-rgt-out99.9%

        \[\leadsto x - \frac{\mathsf{log1p}\left(\color{blue}{y \cdot \left(-1 + e^{z}\right)}\right)}{t} \]
      10. +-commutative99.9%

        \[\leadsto x - \frac{\mathsf{log1p}\left(y \cdot \color{blue}{\left(e^{z} + -1\right)}\right)}{t} \]
      11. metadata-eval99.9%

        \[\leadsto x - \frac{\mathsf{log1p}\left(y \cdot \left(e^{z} + \color{blue}{\left(-1\right)}\right)\right)}{t} \]
      12. sub-neg99.9%

        \[\leadsto x - \frac{\mathsf{log1p}\left(y \cdot \color{blue}{\left(e^{z} - 1\right)}\right)}{t} \]
      13. expm1-def99.9%

        \[\leadsto x - \frac{\mathsf{log1p}\left(y \cdot \color{blue}{\mathsf{expm1}\left(z\right)}\right)}{t} \]
    3. Simplified99.9%

      \[\leadsto \color{blue}{x - \frac{\mathsf{log1p}\left(y \cdot \mathsf{expm1}\left(z\right)\right)}{t}} \]
    4. Step-by-step derivation
      1. clear-num99.9%

        \[\leadsto x - \color{blue}{\frac{1}{\frac{t}{\mathsf{log1p}\left(y \cdot \mathsf{expm1}\left(z\right)\right)}}} \]
      2. associate-/r/99.9%

        \[\leadsto x - \color{blue}{\frac{1}{t} \cdot \mathsf{log1p}\left(y \cdot \mathsf{expm1}\left(z\right)\right)} \]
    5. Applied egg-rr99.9%

      \[\leadsto x - \color{blue}{\frac{1}{t} \cdot \mathsf{log1p}\left(y \cdot \mathsf{expm1}\left(z\right)\right)} \]
    6. Taylor expanded in x around inf 69.3%

      \[\leadsto \color{blue}{x} \]

    if -0.0560000000000000012 < z

    1. Initial program 52.6%

      \[x - \frac{\log \left(\left(1 - y\right) + y \cdot e^{z}\right)}{t} \]
    2. Step-by-step derivation
      1. associate-+l-75.7%

        \[\leadsto x - \frac{\log \color{blue}{\left(1 - \left(y - y \cdot e^{z}\right)\right)}}{t} \]
      2. sub-neg75.7%

        \[\leadsto x - \frac{\log \color{blue}{\left(1 + \left(-\left(y - y \cdot e^{z}\right)\right)\right)}}{t} \]
      3. log1p-def76.8%

        \[\leadsto x - \frac{\color{blue}{\mathsf{log1p}\left(-\left(y - y \cdot e^{z}\right)\right)}}{t} \]
      4. neg-sub076.8%

        \[\leadsto x - \frac{\mathsf{log1p}\left(\color{blue}{0 - \left(y - y \cdot e^{z}\right)}\right)}{t} \]
      5. associate-+l-76.8%

        \[\leadsto x - \frac{\mathsf{log1p}\left(\color{blue}{\left(0 - y\right) + y \cdot e^{z}}\right)}{t} \]
      6. neg-sub076.8%

        \[\leadsto x - \frac{\mathsf{log1p}\left(\color{blue}{\left(-y\right)} + y \cdot e^{z}\right)}{t} \]
      7. neg-mul-176.8%

        \[\leadsto x - \frac{\mathsf{log1p}\left(\color{blue}{-1 \cdot y} + y \cdot e^{z}\right)}{t} \]
      8. *-commutative76.8%

        \[\leadsto x - \frac{\mathsf{log1p}\left(-1 \cdot y + \color{blue}{e^{z} \cdot y}\right)}{t} \]
      9. distribute-rgt-out76.8%

        \[\leadsto x - \frac{\mathsf{log1p}\left(\color{blue}{y \cdot \left(-1 + e^{z}\right)}\right)}{t} \]
      10. +-commutative76.8%

        \[\leadsto x - \frac{\mathsf{log1p}\left(y \cdot \color{blue}{\left(e^{z} + -1\right)}\right)}{t} \]
      11. metadata-eval76.8%

        \[\leadsto x - \frac{\mathsf{log1p}\left(y \cdot \left(e^{z} + \color{blue}{\left(-1\right)}\right)\right)}{t} \]
      12. sub-neg76.8%

        \[\leadsto x - \frac{\mathsf{log1p}\left(y \cdot \color{blue}{\left(e^{z} - 1\right)}\right)}{t} \]
      13. expm1-def98.6%

        \[\leadsto x - \frac{\mathsf{log1p}\left(y \cdot \color{blue}{\mathsf{expm1}\left(z\right)}\right)}{t} \]
    3. Simplified98.6%

      \[\leadsto \color{blue}{x - \frac{\mathsf{log1p}\left(y \cdot \mathsf{expm1}\left(z\right)\right)}{t}} \]
    4. Taylor expanded in z around 0 89.8%

      \[\leadsto x - \color{blue}{\frac{y \cdot z}{t}} \]
    5. Step-by-step derivation
      1. associate-/l*89.9%

        \[\leadsto x - \color{blue}{\frac{y}{\frac{t}{z}}} \]
    6. Simplified89.9%

      \[\leadsto x - \color{blue}{\frac{y}{\frac{t}{z}}} \]
  3. Recombined 2 regimes into one program.
  4. Final simplification84.1%

    \[\leadsto \begin{array}{l} \mathbf{if}\;z \leq -0.056:\\ \;\;\;\;x\\ \mathbf{else}:\\ \;\;\;\;x - \frac{y}{\frac{t}{z}}\\ \end{array} \]

Alternative 10: 72.0% accurate, 211.0× speedup?

\[\begin{array}{l} \\ x \end{array} \]
(FPCore (x y z t) :precision binary64 x)
/* Herbie alternative 10: the entire log-correction term is dropped
 * (Taylor expansion in x around infinity), leaving just x. */
double code(double x, double y, double z, double t) {
	return x;
}
! Herbie alternative 10: the entire log-correction term is dropped
! (Taylor expansion in x around infinity), leaving just x.
real(8) function code(x, y, z, t)
    real(8), intent (in) :: x
    real(8), intent (in) :: y
    real(8), intent (in) :: z
    real(8), intent (in) :: t
    code = x
end function
/**
 * Herbie alternative 10: the entire log-correction term is dropped
 * (Taylor expansion in x around infinity), leaving just x.
 */
public static double code(double x, double y, double z, double t) {
	return x;
}
def code(x, y, z, t):
	# Herbie alternative 10: the entire log-correction term is dropped
	# (Taylor expansion in x around infinity), leaving just x.
	return x
# Herbie alternative 10: the entire log-correction term is dropped
# (Taylor expansion in x around infinity), leaving just x.
function code(x, y, z, t)
	return x
end
% Herbie alternative 10: the entire log-correction term is dropped
% (Taylor expansion in x around infinity), leaving just x.
function tmp = code(x, y, z, t)
	tmp = x;
end
(* Herbie alternative 10: the correction term is dropped; identity in x. *)
code[x_, y_, z_, t_] := x
\begin{array}{l}

\\
x
\end{array}
Derivation
  1. Initial program 62.7%

    \[x - \frac{\log \left(\left(1 - y\right) + y \cdot e^{z}\right)}{t} \]
  2. Step-by-step derivation
    1. associate-+l-79.3%

      \[\leadsto x - \frac{\log \color{blue}{\left(1 - \left(y - y \cdot e^{z}\right)\right)}}{t} \]
    2. sub-neg79.3%

      \[\leadsto x - \frac{\log \color{blue}{\left(1 + \left(-\left(y - y \cdot e^{z}\right)\right)\right)}}{t} \]
    3. log1p-def83.3%

      \[\leadsto x - \frac{\color{blue}{\mathsf{log1p}\left(-\left(y - y \cdot e^{z}\right)\right)}}{t} \]
    4. neg-sub083.3%

      \[\leadsto x - \frac{\mathsf{log1p}\left(\color{blue}{0 - \left(y - y \cdot e^{z}\right)}\right)}{t} \]
    5. associate-+l-83.3%

      \[\leadsto x - \frac{\mathsf{log1p}\left(\color{blue}{\left(0 - y\right) + y \cdot e^{z}}\right)}{t} \]
    6. neg-sub083.3%

      \[\leadsto x - \frac{\mathsf{log1p}\left(\color{blue}{\left(-y\right)} + y \cdot e^{z}\right)}{t} \]
    7. neg-mul-183.3%

      \[\leadsto x - \frac{\mathsf{log1p}\left(\color{blue}{-1 \cdot y} + y \cdot e^{z}\right)}{t} \]
    8. *-commutative83.3%

      \[\leadsto x - \frac{\mathsf{log1p}\left(-1 \cdot y + \color{blue}{e^{z} \cdot y}\right)}{t} \]
    9. distribute-rgt-out83.3%

      \[\leadsto x - \frac{\mathsf{log1p}\left(\color{blue}{y \cdot \left(-1 + e^{z}\right)}\right)}{t} \]
    10. +-commutative83.3%

      \[\leadsto x - \frac{\mathsf{log1p}\left(y \cdot \color{blue}{\left(e^{z} + -1\right)}\right)}{t} \]
    11. metadata-eval83.3%

      \[\leadsto x - \frac{\mathsf{log1p}\left(y \cdot \left(e^{z} + \color{blue}{\left(-1\right)}\right)\right)}{t} \]
    12. sub-neg83.3%

      \[\leadsto x - \frac{\mathsf{log1p}\left(y \cdot \color{blue}{\left(e^{z} - 1\right)}\right)}{t} \]
    13. expm1-def99.0%

      \[\leadsto x - \frac{\mathsf{log1p}\left(y \cdot \color{blue}{\mathsf{expm1}\left(z\right)}\right)}{t} \]
  3. Simplified99.0%

    \[\leadsto \color{blue}{x - \frac{\mathsf{log1p}\left(y \cdot \mathsf{expm1}\left(z\right)\right)}{t}} \]
  4. Step-by-step derivation
    1. clear-num98.9%

      \[\leadsto x - \color{blue}{\frac{1}{\frac{t}{\mathsf{log1p}\left(y \cdot \mathsf{expm1}\left(z\right)\right)}}} \]
    2. associate-/r/99.0%

      \[\leadsto x - \color{blue}{\frac{1}{t} \cdot \mathsf{log1p}\left(y \cdot \mathsf{expm1}\left(z\right)\right)} \]
  5. Applied egg-rr99.0%

    \[\leadsto x - \color{blue}{\frac{1}{t} \cdot \mathsf{log1p}\left(y \cdot \mathsf{expm1}\left(z\right)\right)} \]
  6. Taylor expanded in x around inf 73.0%

    \[\leadsto \color{blue}{x} \]
  7. Final simplification73.0%

    \[\leadsto x \]

Developer target: 75.1% accurate, 1.9× speedup?

\[\begin{array}{l} \\ \begin{array}{l} t_1 := \frac{-0.5}{y \cdot t}\\ \mathbf{if}\;z < -2.8874623088207947 \cdot 10^{+119}:\\ \;\;\;\;\left(x - \frac{t_1}{z \cdot z}\right) - t_1 \cdot \frac{\frac{2}{z}}{z \cdot z}\\ \mathbf{else}:\\ \;\;\;\;x - \frac{\log \left(1 + z \cdot y\right)}{t}\\ \end{array} \end{array} \]
(FPCore (x y z t)
 :precision binary64
 (let* ((t_1 (/ (- 0.5) (* y t))))
   (if (< z -2.8874623088207947e+119)
     (- (- x (/ t_1 (* z z))) (* t_1 (/ (/ 2.0 z) (* z z))))
     (- x (/ (log (+ 1.0 (* z y))) t)))))
double code(double x, double y, double z, double t) {
	double t_1 = -0.5 / (y * t);
	double tmp;
	if (z < -2.8874623088207947e+119) {
		tmp = (x - (t_1 / (z * z))) - (t_1 * ((2.0 / z) / (z * z)));
	} else {
		tmp = x - (log((1.0 + (z * y))) / t);
	}
	return tmp;
}
! Developer target from mwc-random's truncatedExp:
! for very large negative z (below -2.887d119) a series in 1/z with
! coefficient t_1 = -0.5/(y*t) is used; otherwise log((1-y)+y*exp(z))
! is approximated by log(1 + z*y).
real(8) function code(x, y, z, t)
    real(8), intent (in) :: x
    real(8), intent (in) :: y
    real(8), intent (in) :: z
    real(8), intent (in) :: t
    real(8) :: t_1
    real(8) :: tmp
    t_1 = -0.5d0 / (y * t)
    if (z < (-2.8874623088207947d+119)) then
        tmp = (x - (t_1 / (z * z))) - (t_1 * ((2.0d0 / z) / (z * z)))
    else
        tmp = x - (log((1.0d0 + (z * y))) / t)
    end if
    code = tmp
end function
/**
 * Developer target from mwc-random's truncatedExp: for very large
 * negative z (below -2.887e119) a series in 1/z with coefficient
 * {@code t_1 = -0.5/(y*t)} is used; otherwise
 * {@code log((1-y)+y*exp(z))} is approximated by {@code log(1 + z*y)}.
 */
public static double code(double x, double y, double z, double t) {
	double t_1 = -0.5 / (y * t);
	if (z < -2.8874623088207947e+119) {
		double zz = z * z;
		return (x - t_1 / zz) - t_1 * ((2.0 / z) / zz);
	}
	return x - Math.log(1.0 + z * y) / t;
}
def code(x, y, z, t):
	"""Developer target from mwc-random's truncatedExp.

	For very large negative z (below -2.887e119) a series in 1/z with
	coefficient t_1 = -0.5/(y*t) is used; otherwise
	log((1 - y) + y*exp(z)) is approximated by log(1 + z*y).
	"""
	t_1 = -0.5 / (y * t)
	if z < -2.8874623088207947e+119:
		zz = z * z
		return (x - t_1 / zz) - t_1 * ((2.0 / z) / zz)
	return x - math.log(1.0 + z * y) / t
# Developer target from mwc-random's truncatedExp:
# for very large negative z (below -2.887e119) a series in 1/z with
# coefficient t_1 = -0.5/(y*t) is used; otherwise log((1-y)+y*exp(z))
# is approximated by log(1 + z*y). Float64() wraps keep each
# intermediate explicitly in binary64.
function code(x, y, z, t)
	t_1 = Float64(Float64(-0.5) / Float64(y * t))
	tmp = 0.0
	if (z < -2.8874623088207947e+119)
		tmp = Float64(Float64(x - Float64(t_1 / Float64(z * z))) - Float64(t_1 * Float64(Float64(2.0 / z) / Float64(z * z))));
	else
		tmp = Float64(x - Float64(log(Float64(1.0 + Float64(z * y))) / t));
	end
	return tmp
end
% Developer target from mwc-random's truncatedExp:
% for very large negative z (below -2.887e119) a series in 1/z with
% coefficient t_1 = -0.5/(y*t) is used; otherwise log((1-y)+y*exp(z))
% is approximated by log(1 + z*y).
function tmp_2 = code(x, y, z, t)
	t_1 = -0.5 / (y * t);
	tmp = 0.0;
	if (z < -2.8874623088207947e+119)
		tmp = (x - (t_1 / (z * z))) - (t_1 * ((2.0 / z) / (z * z)));
	else
		tmp = x - (log((1.0 + (z * y))) / t);
	end
	tmp_2 = tmp;
end
(* Developer target: below z = -2.887e119 a series in 1/z with
   coefficient t_1 = -0.5/(y*t) is used; otherwise log(1 + z*y)
   approximates log((1-y) + y*exp(z)). *)
code[x_, y_, z_, t_] := Block[{t$95$1 = N[((-0.5) / N[(y * t), $MachinePrecision]), $MachinePrecision]}, If[Less[z, -2.8874623088207947e+119], N[(N[(x - N[(t$95$1 / N[(z * z), $MachinePrecision]), $MachinePrecision]), $MachinePrecision] - N[(t$95$1 * N[(N[(2.0 / z), $MachinePrecision] / N[(z * z), $MachinePrecision]), $MachinePrecision]), $MachinePrecision]), $MachinePrecision], N[(x - N[(N[Log[N[(1.0 + N[(z * y), $MachinePrecision]), $MachinePrecision]], $MachinePrecision] / t), $MachinePrecision]), $MachinePrecision]]]
\begin{array}{l}

\\
\begin{array}{l}
t_1 := \frac{-0.5}{y \cdot t}\\
\mathbf{if}\;z < -2.8874623088207947 \cdot 10^{+119}:\\
\;\;\;\;\left(x - \frac{t_1}{z \cdot z}\right) - t_1 \cdot \frac{\frac{2}{z}}{z \cdot z}\\

\mathbf{else}:\\
\;\;\;\;x - \frac{\log \left(1 + z \cdot y\right)}{t}\\


\end{array}
\end{array}

Reproduce

?
herbie shell --seed 2023228 
(FPCore (x y z t)
  :name "System.Random.MWC.Distributions:truncatedExp from mwc-random-0.13.3.2"
  :precision binary64

  :herbie-target
  (if (< z -2.8874623088207947e+119) (- (- x (/ (/ (- 0.5) (* y t)) (* z z))) (* (/ (- 0.5) (* y t)) (/ (/ 2.0 z) (* z z)))) (- x (/ (log (+ 1.0 (* z y))) t)))

  (- x (/ (log (+ (- 1.0 y) (* y (exp z)))) t)))