Statistics.Distribution.Beta:$cdensity from math-functions-0.1.5.2

Percentage Accurate: 89.4% → 99.8%
Time: 20.0s
Alternatives: 17
Speedup: 1.9×

Specification

\[\begin{array}{l} \\ \left(\left(x - 1\right) \cdot \log y + \left(z - 1\right) \cdot \log \left(1 - y\right)\right) - t \end{array} \]
(FPCore (x y z t)
 :precision binary64
 (- (+ (* (- x 1.0) (log y)) (* (- z 1.0) (log (- 1.0 y)))) t))
double code(double x, double y, double z, double t) {
	return (((x - 1.0) * log(y)) + ((z - 1.0) * log((1.0 - y)))) - t;
}
real(8) function code(x, y, z, t)
    real(8), intent (in) :: x
    real(8), intent (in) :: y
    real(8), intent (in) :: z
    real(8), intent (in) :: t
    code = (((x - 1.0d0) * log(y)) + ((z - 1.0d0) * log((1.0d0 - y)))) - t
end function
public static double code(double x, double y, double z, double t) {
	return (((x - 1.0) * Math.log(y)) + ((z - 1.0) * Math.log((1.0 - y)))) - t;
}
def code(x, y, z, t):
	return (((x - 1.0) * math.log(y)) + ((z - 1.0) * math.log((1.0 - y)))) - t
function code(x, y, z, t)
	return Float64(Float64(Float64(Float64(x - 1.0) * log(y)) + Float64(Float64(z - 1.0) * log(Float64(1.0 - y)))) - t)
end
function tmp = code(x, y, z, t)
	tmp = (((x - 1.0) * log(y)) + ((z - 1.0) * log((1.0 - y)))) - t;
end
code[x_, y_, z_, t_] := N[(N[(N[(N[(x - 1.0), $MachinePrecision] * N[Log[y], $MachinePrecision]), $MachinePrecision] + N[(N[(z - 1.0), $MachinePrecision] * N[Log[N[(1.0 - y), $MachinePrecision]], $MachinePrecision]), $MachinePrecision]), $MachinePrecision] - t), $MachinePrecision]
\begin{array}{l}

\\
\left(\left(x - 1\right) \cdot \log y + \left(z - 1\right) \cdot \log \left(1 - y\right)\right) - t
\end{array}
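
For context, the expression being optimized is the logarithm of the Beta density kernel: with shape parameters x and z and argument y, the Beta density is proportional to y^(x-1) · (1-y)^(z-1), and its logarithm is the sum of the two terms above. The subtracted t presumably holds the log-normalizer log B(x, z); that reading is suggested by the function name but is not stated in the report itself:

\[\log \left(\frac{y^{x - 1} \cdot \left(1 - y\right)^{z - 1}}{B\left(x, z\right)}\right) = \left(x - 1\right) \cdot \log y + \left(z - 1\right) \cdot \log \left(1 - y\right) - \log B\left(x, z\right) \]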

Sampling outcomes in binary64 precision:

Local Percentage Accuracy vs input value

The average percentage accuracy by input value. The horizontal axis shows the value of an input variable (the variable is chosen in the title); the vertical axis is accuracy, where higher is better. Red represents the original program, while blue represents Herbie's suggestion; these can be toggled with the buttons below the plot. The line shows the average, while the dots represent individual samples.

Accuracy vs Speed

Herbie found 17 alternatives:

The accuracy (vertical axis) and speed (horizontal axis) of each alternative. Up and to the right is better. The red square shows the initial program, and each blue circle shows an alternative. The line shows the best available speed-accuracy tradeoffs.

Initial Program: 89.4% accurate, 1.0× speedup

\[\begin{array}{l} \\ \left(\left(x - 1\right) \cdot \log y + \left(z - 1\right) \cdot \log \left(1 - y\right)\right) - t \end{array} \]
(FPCore (x y z t)
 :precision binary64
 (- (+ (* (- x 1.0) (log y)) (* (- z 1.0) (log (- 1.0 y)))) t))
double code(double x, double y, double z, double t) {
	return (((x - 1.0) * log(y)) + ((z - 1.0) * log((1.0 - y)))) - t;
}
real(8) function code(x, y, z, t)
    real(8), intent (in) :: x
    real(8), intent (in) :: y
    real(8), intent (in) :: z
    real(8), intent (in) :: t
    code = (((x - 1.0d0) * log(y)) + ((z - 1.0d0) * log((1.0d0 - y)))) - t
end function
public static double code(double x, double y, double z, double t) {
	return (((x - 1.0) * Math.log(y)) + ((z - 1.0) * Math.log((1.0 - y)))) - t;
}
def code(x, y, z, t):
	return (((x - 1.0) * math.log(y)) + ((z - 1.0) * math.log((1.0 - y)))) - t
function code(x, y, z, t)
	return Float64(Float64(Float64(Float64(x - 1.0) * log(y)) + Float64(Float64(z - 1.0) * log(Float64(1.0 - y)))) - t)
end
function tmp = code(x, y, z, t)
	tmp = (((x - 1.0) * log(y)) + ((z - 1.0) * log((1.0 - y)))) - t;
end
code[x_, y_, z_, t_] := N[(N[(N[(N[(x - 1.0), $MachinePrecision] * N[Log[y], $MachinePrecision]), $MachinePrecision] + N[(N[(z - 1.0), $MachinePrecision] * N[Log[N[(1.0 - y), $MachinePrecision]], $MachinePrecision]), $MachinePrecision]), $MachinePrecision] - t), $MachinePrecision]
\begin{array}{l}

\\
\left(\left(x - 1\right) \cdot \log y + \left(z - 1\right) \cdot \log \left(1 - y\right)\right) - t
\end{array}

Alternative 1: 99.8% accurate, 0.5× speedup

\[\begin{array}{l} \\ \mathsf{fma}\left(z + -1, \mathsf{log1p}\left(-y\right), \mathsf{fma}\left(-1 + x, \log y, -t\right)\right) \end{array} \]
(FPCore (x y z t)
 :precision binary64
 (fma (+ z -1.0) (log1p (- y)) (fma (+ -1.0 x) (log y) (- t))))
double code(double x, double y, double z, double t) {
	return fma((z + -1.0), log1p(-y), fma((-1.0 + x), log(y), -t));
}
function code(x, y, z, t)
	return fma(Float64(z + -1.0), log1p(Float64(-y)), fma(Float64(-1.0 + x), log(y), Float64(-t)))
end
code[x_, y_, z_, t_] := N[(N[(z + -1.0), $MachinePrecision] * N[Log[1 + (-y)], $MachinePrecision] + N[(N[(-1.0 + x), $MachinePrecision] * N[Log[y], $MachinePrecision] + (-t)), $MachinePrecision]), $MachinePrecision]
\begin{array}{l}

\\
\mathsf{fma}\left(z + -1, \mathsf{log1p}\left(-y\right), \mathsf{fma}\left(-1 + x, \log y, -t\right)\right)
\end{array}
Derivation
  1. Initial program 88.2%

    \[\left(\left(x - 1\right) \cdot \log y + \left(z - 1\right) \cdot \log \left(1 - y\right)\right) - t \]
  2. Step-by-step derivation
    1. sub-neg 88.2%

      \[\leadsto \color{blue}{\left(\left(x - 1\right) \cdot \log y + \left(z - 1\right) \cdot \log \left(1 - y\right)\right) + \left(-t\right)} \]
    2. +-commutative 88.2%

      \[\leadsto \color{blue}{\left(\left(z - 1\right) \cdot \log \left(1 - y\right) + \left(x - 1\right) \cdot \log y\right)} + \left(-t\right) \]
    3. associate-+l+ 88.2%

      \[\leadsto \color{blue}{\left(z - 1\right) \cdot \log \left(1 - y\right) + \left(\left(x - 1\right) \cdot \log y + \left(-t\right)\right)} \]
    4. fma-define 88.2%

      \[\leadsto \color{blue}{\mathsf{fma}\left(z - 1, \log \left(1 - y\right), \left(x - 1\right) \cdot \log y + \left(-t\right)\right)} \]
    5. sub-neg 88.2%

      \[\leadsto \mathsf{fma}\left(\color{blue}{z + \left(-1\right)}, \log \left(1 - y\right), \left(x - 1\right) \cdot \log y + \left(-t\right)\right) \]
    6. metadata-eval 88.2%

      \[\leadsto \mathsf{fma}\left(z + \color{blue}{-1}, \log \left(1 - y\right), \left(x - 1\right) \cdot \log y + \left(-t\right)\right) \]
    7. sub-neg 88.2%

      \[\leadsto \mathsf{fma}\left(z + -1, \log \color{blue}{\left(1 + \left(-y\right)\right)}, \left(x - 1\right) \cdot \log y + \left(-t\right)\right) \]
    8. log1p-define 99.8% (see the sketch after this derivation)

      \[\leadsto \mathsf{fma}\left(z + -1, \color{blue}{\mathsf{log1p}\left(-y\right)}, \left(x - 1\right) \cdot \log y + \left(-t\right)\right) \]
    9. fma-define 99.8%

      \[\leadsto \mathsf{fma}\left(z + -1, \mathsf{log1p}\left(-y\right), \color{blue}{\mathsf{fma}\left(x - 1, \log y, -t\right)}\right) \]
    10. sub-neg 99.8%

      \[\leadsto \mathsf{fma}\left(z + -1, \mathsf{log1p}\left(-y\right), \mathsf{fma}\left(\color{blue}{x + \left(-1\right)}, \log y, -t\right)\right) \]
    11. metadata-eval 99.8%

      \[\leadsto \mathsf{fma}\left(z + -1, \mathsf{log1p}\left(-y\right), \mathsf{fma}\left(x + \color{blue}{-1}, \log y, -t\right)\right) \]
  3. Simplified 99.8%

    \[\leadsto \color{blue}{\mathsf{fma}\left(z + -1, \mathsf{log1p}\left(-y\right), \mathsf{fma}\left(x + -1, \log y, -t\right)\right)} \]
  4. Add Preprocessing
  5. Final simplification 99.8%

    \[\leadsto \mathsf{fma}\left(z + -1, \mathsf{log1p}\left(-y\right), \mathsf{fma}\left(-1 + x, \log y, -t\right)\right) \]
  6. Add Preprocessing
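
The accuracy gain in this derivation comes almost entirely from the log1p-define step: when y is close to 0, computing 1 - y first rounds away the low-order bits of y, so log(1 - y) loses most of its significance, whereas log1p(-y) evaluates the same quantity directly from y. A minimal standalone C sketch (not part of the Herbie output; the test value of y is made up for illustration):

#include <math.h>
#include <stdio.h>

int main(void) {
	double y = 1e-17;                          /* hypothetical tiny input */
	printf("log(1 - y) = %g\n", log(1.0 - y)); /* 1.0 - y rounds to 1.0, so this prints 0 */
	printf("log1p(-y)  = %g\n", log1p(-y));    /* prints roughly -1e-17 */
	return 0;
}

The two fma calls matter less for accuracy; they mainly remove one rounding per multiply-add.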

Alternative 2: 99.8% accurate, 0.7× speedup

\[\begin{array}{l} \\ \mathsf{fma}\left(z + -1, \mathsf{log1p}\left(-y\right), \log y \cdot \left(-1 + x\right)\right) - t \end{array} \]
(FPCore (x y z t)
 :precision binary64
 (- (fma (+ z -1.0) (log1p (- y)) (* (log y) (+ -1.0 x))) t))
double code(double x, double y, double z, double t) {
	return fma((z + -1.0), log1p(-y), (log(y) * (-1.0 + x))) - t;
}
function code(x, y, z, t)
	return Float64(fma(Float64(z + -1.0), log1p(Float64(-y)), Float64(log(y) * Float64(-1.0 + x))) - t)
end
code[x_, y_, z_, t_] := N[(N[(N[(z + -1.0), $MachinePrecision] * N[Log[1 + (-y)], $MachinePrecision] + N[(N[Log[y], $MachinePrecision] * N[(-1.0 + x), $MachinePrecision]), $MachinePrecision]), $MachinePrecision] - t), $MachinePrecision]
\begin{array}{l}

\\
\mathsf{fma}\left(z + -1, \mathsf{log1p}\left(-y\right), \log y \cdot \left(-1 + x\right)\right) - t
\end{array}
Derivation
  1. Initial program 88.2%

    \[\left(\left(x - 1\right) \cdot \log y + \left(z - 1\right) \cdot \log \left(1 - y\right)\right) - t \]
  2. Step-by-step derivation
    1. +-commutative 88.2%

      \[\leadsto \color{blue}{\left(\left(z - 1\right) \cdot \log \left(1 - y\right) + \left(x - 1\right) \cdot \log y\right)} - t \]
    2. fma-define 88.2%

      \[\leadsto \color{blue}{\mathsf{fma}\left(z - 1, \log \left(1 - y\right), \left(x - 1\right) \cdot \log y\right)} - t \]
    3. sub-neg 88.2%

      \[\leadsto \mathsf{fma}\left(\color{blue}{z + \left(-1\right)}, \log \left(1 - y\right), \left(x - 1\right) \cdot \log y\right) - t \]
    4. metadata-eval 88.2%

      \[\leadsto \mathsf{fma}\left(z + \color{blue}{-1}, \log \left(1 - y\right), \left(x - 1\right) \cdot \log y\right) - t \]
    5. sub-neg 88.2%

      \[\leadsto \mathsf{fma}\left(z + -1, \log \color{blue}{\left(1 + \left(-y\right)\right)}, \left(x - 1\right) \cdot \log y\right) - t \]
    6. log1p-define 99.8%

      \[\leadsto \mathsf{fma}\left(z + -1, \color{blue}{\mathsf{log1p}\left(-y\right)}, \left(x - 1\right) \cdot \log y\right) - t \]
    7. sub-neg 99.8%

      \[\leadsto \mathsf{fma}\left(z + -1, \mathsf{log1p}\left(-y\right), \color{blue}{\left(x + \left(-1\right)\right)} \cdot \log y\right) - t \]
    8. metadata-eval 99.8%

      \[\leadsto \mathsf{fma}\left(z + -1, \mathsf{log1p}\left(-y\right), \left(x + \color{blue}{-1}\right) \cdot \log y\right) - t \]
  3. Simplified 99.8%

    \[\leadsto \color{blue}{\mathsf{fma}\left(z + -1, \mathsf{log1p}\left(-y\right), \left(x + -1\right) \cdot \log y\right) - t} \]
  4. Add Preprocessing
  5. Final simplification 99.8%

    \[\leadsto \mathsf{fma}\left(z + -1, \mathsf{log1p}\left(-y\right), \log y \cdot \left(-1 + x\right)\right) - t \]
  6. Add Preprocessing

Alternative 3: 99.7% accurate, 1.6× speedup

\[\begin{array}{l} \\ \left(y \cdot \left(y \cdot \left(\left(z + -1\right) \cdot -0.5 + y \cdot \left(\left(z + -1\right) \cdot -0.3333333333333333 - -0.25 \cdot \left(y \cdot \left(1 - z\right)\right)\right)\right) + \left(1 - z\right)\right) + \log y \cdot \left(-1 + x\right)\right) - t \end{array} \]
(FPCore (x y z t)
 :precision binary64
 (-
  (+
   (*
    y
    (+
     (*
      y
      (+
       (* (+ z -1.0) -0.5)
       (* y (- (* (+ z -1.0) -0.3333333333333333) (* -0.25 (* y (- 1.0 z)))))))
     (- 1.0 z)))
   (* (log y) (+ -1.0 x)))
  t))
double code(double x, double y, double z, double t) {
	return ((y * ((y * (((z + -1.0) * -0.5) + (y * (((z + -1.0) * -0.3333333333333333) - (-0.25 * (y * (1.0 - z))))))) + (1.0 - z))) + (log(y) * (-1.0 + x))) - t;
}
real(8) function code(x, y, z, t)
    real(8), intent (in) :: x
    real(8), intent (in) :: y
    real(8), intent (in) :: z
    real(8), intent (in) :: t
    code = ((y * ((y * (((z + (-1.0d0)) * (-0.5d0)) + (y * (((z + (-1.0d0)) * (-0.3333333333333333d0)) - ((-0.25d0) * (y * (1.0d0 - z))))))) + (1.0d0 - z))) + (log(y) * ((-1.0d0) + x))) - t
end function
public static double code(double x, double y, double z, double t) {
	return ((y * ((y * (((z + -1.0) * -0.5) + (y * (((z + -1.0) * -0.3333333333333333) - (-0.25 * (y * (1.0 - z))))))) + (1.0 - z))) + (Math.log(y) * (-1.0 + x))) - t;
}
def code(x, y, z, t):
	return ((y * ((y * (((z + -1.0) * -0.5) + (y * (((z + -1.0) * -0.3333333333333333) - (-0.25 * (y * (1.0 - z))))))) + (1.0 - z))) + (math.log(y) * (-1.0 + x))) - t
function code(x, y, z, t)
	return Float64(Float64(Float64(y * Float64(Float64(y * Float64(Float64(Float64(z + -1.0) * -0.5) + Float64(y * Float64(Float64(Float64(z + -1.0) * -0.3333333333333333) - Float64(-0.25 * Float64(y * Float64(1.0 - z))))))) + Float64(1.0 - z))) + Float64(log(y) * Float64(-1.0 + x))) - t)
end
function tmp = code(x, y, z, t)
	tmp = ((y * ((y * (((z + -1.0) * -0.5) + (y * (((z + -1.0) * -0.3333333333333333) - (-0.25 * (y * (1.0 - z))))))) + (1.0 - z))) + (log(y) * (-1.0 + x))) - t;
end
code[x_, y_, z_, t_] := N[(N[(N[(y * N[(N[(y * N[(N[(N[(z + -1.0), $MachinePrecision] * -0.5), $MachinePrecision] + N[(y * N[(N[(N[(z + -1.0), $MachinePrecision] * -0.3333333333333333), $MachinePrecision] - N[(-0.25 * N[(y * N[(1.0 - z), $MachinePrecision]), $MachinePrecision]), $MachinePrecision]), $MachinePrecision]), $MachinePrecision]), $MachinePrecision]), $MachinePrecision] + N[(1.0 - z), $MachinePrecision]), $MachinePrecision]), $MachinePrecision] + N[(N[Log[y], $MachinePrecision] * N[(-1.0 + x), $MachinePrecision]), $MachinePrecision]), $MachinePrecision] - t), $MachinePrecision]
\begin{array}{l}

\\
\left(y \cdot \left(y \cdot \left(\left(z + -1\right) \cdot -0.5 + y \cdot \left(\left(z + -1\right) \cdot -0.3333333333333333 - -0.25 \cdot \left(y \cdot \left(1 - z\right)\right)\right)\right) + \left(1 - z\right)\right) + \log y \cdot \left(-1 + x\right)\right) - t
\end{array}
Derivation
  1. Initial program 88.2%

    \[\left(\left(x - 1\right) \cdot \log y + \left(z - 1\right) \cdot \log \left(1 - y\right)\right) - t \]
  2. Add Preprocessing
  3. Taylor expanded in y around 0 99.6% (the series is written out after this derivation)

    \[\leadsto \left(\left(x - 1\right) \cdot \log y + \color{blue}{y \cdot \left(-1 \cdot \left(z - 1\right) + y \cdot \left(-0.5 \cdot \left(z - 1\right) + y \cdot \left(-0.3333333333333333 \cdot \left(z - 1\right) + -0.25 \cdot \left(y \cdot \left(z - 1\right)\right)\right)\right)\right)}\right) - t \]
  4. Final simplification 99.6%

    \[\leadsto \left(y \cdot \left(y \cdot \left(\left(z + -1\right) \cdot -0.5 + y \cdot \left(\left(z + -1\right) \cdot -0.3333333333333333 - -0.25 \cdot \left(y \cdot \left(1 - z\right)\right)\right)\right) + \left(1 - z\right)\right) + \log y \cdot \left(-1 + x\right)\right) - t \]
  5. Add Preprocessing
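
The polynomial that this alternative, Alternative 4, and (at lower degree) Alternatives 8 and 9 substitute for log(1 - y) is the truncated Maclaurin series, which is where the coefficients -1, -0.5, -0.3333333333333333, and -0.25 come from:

\[\log \left(1 - y\right) = -y - \frac{y^2}{2} - \frac{y^3}{3} - \frac{y^4}{4} - \cdots \]

so

\[\left(z - 1\right) \cdot \log \left(1 - y\right) \approx \left(z - 1\right) \cdot \left(-y - \frac{y^2}{2} - \frac{y^3}{3} - \frac{y^4}{4}\right). \]

This trades the log1p call for a handful of multiplies and adds, which is why these variants are faster, at the cost of accuracy once y moves away from 0.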

Alternative 4: 99.7% accurate, 1.7× speedup

\[\begin{array}{l} \\ \left(\left(z + -1\right) \cdot \left(y \cdot \left(-1 - y \cdot \left(0.5 + y \cdot \left(0.3333333333333333 - y \cdot -0.25\right)\right)\right)\right) + \log y \cdot \left(-1 + x\right)\right) - t \end{array} \]
(FPCore (x y z t)
 :precision binary64
 (-
  (+
   (*
    (+ z -1.0)
    (* y (- -1.0 (* y (+ 0.5 (* y (- 0.3333333333333333 (* y -0.25))))))))
   (* (log y) (+ -1.0 x)))
  t))
double code(double x, double y, double z, double t) {
	return (((z + -1.0) * (y * (-1.0 - (y * (0.5 + (y * (0.3333333333333333 - (y * -0.25)))))))) + (log(y) * (-1.0 + x))) - t;
}
real(8) function code(x, y, z, t)
    real(8), intent (in) :: x
    real(8), intent (in) :: y
    real(8), intent (in) :: z
    real(8), intent (in) :: t
    code = (((z + (-1.0d0)) * (y * ((-1.0d0) - (y * (0.5d0 + (y * (0.3333333333333333d0 - (y * (-0.25d0))))))))) + (log(y) * ((-1.0d0) + x))) - t
end function
public static double code(double x, double y, double z, double t) {
	return (((z + -1.0) * (y * (-1.0 - (y * (0.5 + (y * (0.3333333333333333 - (y * -0.25)))))))) + (Math.log(y) * (-1.0 + x))) - t;
}
def code(x, y, z, t):
	return (((z + -1.0) * (y * (-1.0 - (y * (0.5 + (y * (0.3333333333333333 - (y * -0.25)))))))) + (math.log(y) * (-1.0 + x))) - t
function code(x, y, z, t)
	return Float64(Float64(Float64(Float64(z + -1.0) * Float64(y * Float64(-1.0 - Float64(y * Float64(0.5 + Float64(y * Float64(0.3333333333333333 - Float64(y * -0.25)))))))) + Float64(log(y) * Float64(-1.0 + x))) - t)
end
function tmp = code(x, y, z, t)
	tmp = (((z + -1.0) * (y * (-1.0 - (y * (0.5 + (y * (0.3333333333333333 - (y * -0.25)))))))) + (log(y) * (-1.0 + x))) - t;
end
code[x_, y_, z_, t_] := N[(N[(N[(N[(z + -1.0), $MachinePrecision] * N[(y * N[(-1.0 - N[(y * N[(0.5 + N[(y * N[(0.3333333333333333 - N[(y * -0.25), $MachinePrecision]), $MachinePrecision]), $MachinePrecision]), $MachinePrecision]), $MachinePrecision]), $MachinePrecision]), $MachinePrecision]), $MachinePrecision] + N[(N[Log[y], $MachinePrecision] * N[(-1.0 + x), $MachinePrecision]), $MachinePrecision]), $MachinePrecision] - t), $MachinePrecision]
\begin{array}{l}

\\
\left(\left(z + -1\right) \cdot \left(y \cdot \left(-1 - y \cdot \left(0.5 + y \cdot \left(0.3333333333333333 - y \cdot -0.25\right)\right)\right)\right) + \log y \cdot \left(-1 + x\right)\right) - t
\end{array}
Derivation
  1. Initial program 88.2%

    \[\left(\left(x - 1\right) \cdot \log y + \left(z - 1\right) \cdot \log \left(1 - y\right)\right) - t \]
  2. Add Preprocessing
  3. Taylor expanded in y around 0 99.6% (checked against log1p in the sketch after this derivation)

    \[\leadsto \left(\left(x - 1\right) \cdot \log y + \left(z - 1\right) \cdot \color{blue}{\left(y \cdot \left(y \cdot \left(y \cdot \left(-0.25 \cdot y - 0.3333333333333333\right) - 0.5\right) - 1\right)\right)}\right) - t \]
  4. Final simplification 99.6%

    \[\leadsto \left(\left(z + -1\right) \cdot \left(y \cdot \left(-1 - y \cdot \left(0.5 + y \cdot \left(0.3333333333333333 - y \cdot -0.25\right)\right)\right)\right) + \log y \cdot \left(-1 + x\right)\right) - t \]
  5. Add Preprocessing
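
Alternative 4 evaluates the same degree-4 truncation in Horner form, with the (z - 1) factor pulled out front. A small standalone C sketch (illustration only; the test value of y is arbitrary) checking the Horner polynomial against log1p(-y):

#include <math.h>
#include <stdio.h>

int main(void) {
	double y = 1e-3;  /* arbitrary small test value */
	/* Horner form of -y - y^2/2 - y^3/3 - y^4/4, as in Alternative 4 */
	double poly = y * (-1.0 - y * (0.5 + y * (0.3333333333333333 - y * -0.25)));
	printf("polynomial = %.17g\n", poly);
	printf("log1p(-y)  = %.17g\n", log1p(-y)); /* agrees up to the dropped y^5/5 term, about 2e-16 here */
	return 0;
}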

Alternative 5: 97.8% accurate, 1.7× speedup

\[\begin{array}{l} \\ \begin{array}{l} t_1 := y \cdot \left(1 - z\right)\\ \mathbf{if}\;-1 + x \leq -500000000000 \lor \neg \left(-1 + x \leq -1\right):\\ \;\;\;\;\left(x \cdot \log y + t\_1\right) - t\\ \mathbf{else}:\\ \;\;\;\;\left(t\_1 - \log y\right) - t\\ \end{array} \end{array} \]
(FPCore (x y z t)
 :precision binary64
 (let* ((t_1 (* y (- 1.0 z))))
   (if (or (<= (+ -1.0 x) -500000000000.0) (not (<= (+ -1.0 x) -1.0)))
     (- (+ (* x (log y)) t_1) t)
     (- (- t_1 (log y)) t))))
double code(double x, double y, double z, double t) {
	double t_1 = y * (1.0 - z);
	double tmp;
	if (((-1.0 + x) <= -500000000000.0) || !((-1.0 + x) <= -1.0)) {
		tmp = ((x * log(y)) + t_1) - t;
	} else {
		tmp = (t_1 - log(y)) - t;
	}
	return tmp;
}
real(8) function code(x, y, z, t)
    real(8), intent (in) :: x
    real(8), intent (in) :: y
    real(8), intent (in) :: z
    real(8), intent (in) :: t
    real(8) :: t_1
    real(8) :: tmp
    t_1 = y * (1.0d0 - z)
    if ((((-1.0d0) + x) <= (-500000000000.0d0)) .or. (.not. (((-1.0d0) + x) <= (-1.0d0)))) then
        tmp = ((x * log(y)) + t_1) - t
    else
        tmp = (t_1 - log(y)) - t
    end if
    code = tmp
end function
public static double code(double x, double y, double z, double t) {
	double t_1 = y * (1.0 - z);
	double tmp;
	if (((-1.0 + x) <= -500000000000.0) || !((-1.0 + x) <= -1.0)) {
		tmp = ((x * Math.log(y)) + t_1) - t;
	} else {
		tmp = (t_1 - Math.log(y)) - t;
	}
	return tmp;
}
def code(x, y, z, t):
	t_1 = y * (1.0 - z)
	tmp = 0
	if ((-1.0 + x) <= -500000000000.0) or not ((-1.0 + x) <= -1.0):
		tmp = ((x * math.log(y)) + t_1) - t
	else:
		tmp = (t_1 - math.log(y)) - t
	return tmp
function code(x, y, z, t)
	t_1 = Float64(y * Float64(1.0 - z))
	tmp = 0.0
	if ((Float64(-1.0 + x) <= -500000000000.0) || !(Float64(-1.0 + x) <= -1.0))
		tmp = Float64(Float64(Float64(x * log(y)) + t_1) - t);
	else
		tmp = Float64(Float64(t_1 - log(y)) - t);
	end
	return tmp
end
function tmp_2 = code(x, y, z, t)
	t_1 = y * (1.0 - z);
	tmp = 0.0;
	if (((-1.0 + x) <= -500000000000.0) || ~(((-1.0 + x) <= -1.0)))
		tmp = ((x * log(y)) + t_1) - t;
	else
		tmp = (t_1 - log(y)) - t;
	end
	tmp_2 = tmp;
end
code[x_, y_, z_, t_] := Block[{t$95$1 = N[(y * N[(1.0 - z), $MachinePrecision]), $MachinePrecision]}, If[Or[LessEqual[N[(-1.0 + x), $MachinePrecision], -500000000000.0], N[Not[LessEqual[N[(-1.0 + x), $MachinePrecision], -1.0]], $MachinePrecision]], N[(N[(N[(x * N[Log[y], $MachinePrecision]), $MachinePrecision] + t$95$1), $MachinePrecision] - t), $MachinePrecision], N[(N[(t$95$1 - N[Log[y], $MachinePrecision]), $MachinePrecision] - t), $MachinePrecision]]]
\begin{array}{l}

\\
\begin{array}{l}
t_1 := y \cdot \left(1 - z\right)\\
\mathbf{if}\;-1 + x \leq -500000000000 \lor \neg \left(-1 + x \leq -1\right):\\
\;\;\;\;\left(x \cdot \log y + t\_1\right) - t\\

\mathbf{else}:\\
\;\;\;\;\left(t\_1 - \log y\right) - t\\


\end{array}
\end{array}
Derivation
  1. Split input into 2 regimes
  2. if (x - 1) < -5e11 or -1 < (x - 1)

    1. Initial program 94.9%

      \[\left(\left(x - 1\right) \cdot \log y + \left(z - 1\right) \cdot \log \left(1 - y\right)\right) - t \]
    2. Step-by-step derivation
      1. +-commutative94.9%

        \[\leadsto \color{blue}{\left(\left(z - 1\right) \cdot \log \left(1 - y\right) + \left(x - 1\right) \cdot \log y\right)} - t \]
      2. fma-define94.9%

        \[\leadsto \color{blue}{\mathsf{fma}\left(z - 1, \log \left(1 - y\right), \left(x - 1\right) \cdot \log y\right)} - t \]
      3. sub-neg94.9%

        \[\leadsto \mathsf{fma}\left(\color{blue}{z + \left(-1\right)}, \log \left(1 - y\right), \left(x - 1\right) \cdot \log y\right) - t \]
      4. metadata-eval94.9%

        \[\leadsto \mathsf{fma}\left(z + \color{blue}{-1}, \log \left(1 - y\right), \left(x - 1\right) \cdot \log y\right) - t \]
      5. sub-neg94.9%

        \[\leadsto \mathsf{fma}\left(z + -1, \log \color{blue}{\left(1 + \left(-y\right)\right)}, \left(x - 1\right) \cdot \log y\right) - t \]
      6. log1p-define99.7%

        \[\leadsto \mathsf{fma}\left(z + -1, \color{blue}{\mathsf{log1p}\left(-y\right)}, \left(x - 1\right) \cdot \log y\right) - t \]
      7. sub-neg99.7%

        \[\leadsto \mathsf{fma}\left(z + -1, \mathsf{log1p}\left(-y\right), \color{blue}{\left(x + \left(-1\right)\right)} \cdot \log y\right) - t \]
      8. metadata-eval99.7%

        \[\leadsto \mathsf{fma}\left(z + -1, \mathsf{log1p}\left(-y\right), \left(x + \color{blue}{-1}\right) \cdot \log y\right) - t \]
    3. Simplified99.7%

      \[\leadsto \color{blue}{\mathsf{fma}\left(z + -1, \mathsf{log1p}\left(-y\right), \left(x + -1\right) \cdot \log y\right) - t} \]
    4. Add Preprocessing
    5. Taylor expanded in y around 0 98.9%

      \[\leadsto \color{blue}{\left(-1 \cdot \left(y \cdot \left(z - 1\right)\right) + \log y \cdot \left(x - 1\right)\right)} - t \]
    6. Step-by-step derivation
      1. +-commutative98.9%

        \[\leadsto \color{blue}{\left(\log y \cdot \left(x - 1\right) + -1 \cdot \left(y \cdot \left(z - 1\right)\right)\right)} - t \]
      2. sub-neg98.9%

        \[\leadsto \left(\log y \cdot \color{blue}{\left(x + \left(-1\right)\right)} + -1 \cdot \left(y \cdot \left(z - 1\right)\right)\right) - t \]
      3. metadata-eval98.9%

        \[\leadsto \left(\log y \cdot \left(x + \color{blue}{-1}\right) + -1 \cdot \left(y \cdot \left(z - 1\right)\right)\right) - t \]
      4. mul-1-neg98.9%

        \[\leadsto \left(\log y \cdot \left(x + -1\right) + \color{blue}{\left(-y \cdot \left(z - 1\right)\right)}\right) - t \]
      5. unsub-neg98.9%

        \[\leadsto \color{blue}{\left(\log y \cdot \left(x + -1\right) - y \cdot \left(z - 1\right)\right)} - t \]
      6. +-commutative98.9%

        \[\leadsto \left(\log y \cdot \color{blue}{\left(-1 + x\right)} - y \cdot \left(z - 1\right)\right) - t \]
      7. sub-neg98.9%

        \[\leadsto \left(\log y \cdot \left(-1 + x\right) - y \cdot \color{blue}{\left(z + \left(-1\right)\right)}\right) - t \]
      8. metadata-eval98.9%

        \[\leadsto \left(\log y \cdot \left(-1 + x\right) - y \cdot \left(z + \color{blue}{-1}\right)\right) - t \]
      9. +-commutative98.9%

        \[\leadsto \left(\log y \cdot \left(-1 + x\right) - y \cdot \color{blue}{\left(-1 + z\right)}\right) - t \]
    7. Simplified98.9%

      \[\leadsto \color{blue}{\left(\log y \cdot \left(-1 + x\right) - y \cdot \left(-1 + z\right)\right)} - t \]
    8. Taylor expanded in x around inf 98.2% (error bound worked out after this derivation)

      \[\leadsto \left(\color{blue}{x \cdot \log y} - y \cdot \left(-1 + z\right)\right) - t \]
    9. Step-by-step derivation
      1. *-commutative98.2%

        \[\leadsto \left(\color{blue}{\log y \cdot x} - y \cdot \left(-1 + z\right)\right) - t \]
    10. Simplified98.2%

      \[\leadsto \left(\color{blue}{\log y \cdot x} - y \cdot \left(-1 + z\right)\right) - t \]

    if -5e11 < (x - 1) < -1

    1. Initial program 80.1%

      \[\left(\left(x - 1\right) \cdot \log y + \left(z - 1\right) \cdot \log \left(1 - y\right)\right) - t \]
    2. Step-by-step derivation
      1. +-commutative80.1%

        \[\leadsto \color{blue}{\left(\left(z - 1\right) \cdot \log \left(1 - y\right) + \left(x - 1\right) \cdot \log y\right)} - t \]
      2. fma-define80.1%

        \[\leadsto \color{blue}{\mathsf{fma}\left(z - 1, \log \left(1 - y\right), \left(x - 1\right) \cdot \log y\right)} - t \]
      3. sub-neg80.1%

        \[\leadsto \mathsf{fma}\left(\color{blue}{z + \left(-1\right)}, \log \left(1 - y\right), \left(x - 1\right) \cdot \log y\right) - t \]
      4. metadata-eval80.1%

        \[\leadsto \mathsf{fma}\left(z + \color{blue}{-1}, \log \left(1 - y\right), \left(x - 1\right) \cdot \log y\right) - t \]
      5. sub-neg80.1%

        \[\leadsto \mathsf{fma}\left(z + -1, \log \color{blue}{\left(1 + \left(-y\right)\right)}, \left(x - 1\right) \cdot \log y\right) - t \]
      6. log1p-define100.0%

        \[\leadsto \mathsf{fma}\left(z + -1, \color{blue}{\mathsf{log1p}\left(-y\right)}, \left(x - 1\right) \cdot \log y\right) - t \]
      7. sub-neg100.0%

        \[\leadsto \mathsf{fma}\left(z + -1, \mathsf{log1p}\left(-y\right), \color{blue}{\left(x + \left(-1\right)\right)} \cdot \log y\right) - t \]
      8. metadata-eval100.0%

        \[\leadsto \mathsf{fma}\left(z + -1, \mathsf{log1p}\left(-y\right), \left(x + \color{blue}{-1}\right) \cdot \log y\right) - t \]
    3. Simplified100.0%

      \[\leadsto \color{blue}{\mathsf{fma}\left(z + -1, \mathsf{log1p}\left(-y\right), \left(x + -1\right) \cdot \log y\right) - t} \]
    4. Add Preprocessing
    5. Taylor expanded in y around 0 99.1%

      \[\leadsto \color{blue}{\left(-1 \cdot \left(y \cdot \left(z - 1\right)\right) + \log y \cdot \left(x - 1\right)\right)} - t \]
    6. Step-by-step derivation
      1. +-commutative99.1%

        \[\leadsto \color{blue}{\left(\log y \cdot \left(x - 1\right) + -1 \cdot \left(y \cdot \left(z - 1\right)\right)\right)} - t \]
      2. sub-neg99.1%

        \[\leadsto \left(\log y \cdot \color{blue}{\left(x + \left(-1\right)\right)} + -1 \cdot \left(y \cdot \left(z - 1\right)\right)\right) - t \]
      3. metadata-eval99.1%

        \[\leadsto \left(\log y \cdot \left(x + \color{blue}{-1}\right) + -1 \cdot \left(y \cdot \left(z - 1\right)\right)\right) - t \]
      4. mul-1-neg99.1%

        \[\leadsto \left(\log y \cdot \left(x + -1\right) + \color{blue}{\left(-y \cdot \left(z - 1\right)\right)}\right) - t \]
      5. unsub-neg99.1%

        \[\leadsto \color{blue}{\left(\log y \cdot \left(x + -1\right) - y \cdot \left(z - 1\right)\right)} - t \]
      6. +-commutative99.1%

        \[\leadsto \left(\log y \cdot \color{blue}{\left(-1 + x\right)} - y \cdot \left(z - 1\right)\right) - t \]
      7. sub-neg99.1%

        \[\leadsto \left(\log y \cdot \left(-1 + x\right) - y \cdot \color{blue}{\left(z + \left(-1\right)\right)}\right) - t \]
      8. metadata-eval99.1%

        \[\leadsto \left(\log y \cdot \left(-1 + x\right) - y \cdot \left(z + \color{blue}{-1}\right)\right) - t \]
      9. +-commutative99.1%

        \[\leadsto \left(\log y \cdot \left(-1 + x\right) - y \cdot \color{blue}{\left(-1 + z\right)}\right) - t \]
    7. Simplified99.1%

      \[\leadsto \color{blue}{\left(\log y \cdot \left(-1 + x\right) - y \cdot \left(-1 + z\right)\right)} - t \]
    8. Taylor expanded in x around 0 99.1%

      \[\leadsto \color{blue}{\left(-1 \cdot \log y - y \cdot \left(z - 1\right)\right)} - t \]
    9. Step-by-step derivation
      1. mul-1-neg99.1%

        \[\leadsto \left(\color{blue}{\left(-\log y\right)} - y \cdot \left(z - 1\right)\right) - t \]
      2. sub-neg99.1%

        \[\leadsto \left(\left(-\log y\right) - y \cdot \color{blue}{\left(z + \left(-1\right)\right)}\right) - t \]
      3. metadata-eval99.1%

        \[\leadsto \left(\left(-\log y\right) - y \cdot \left(z + \color{blue}{-1}\right)\right) - t \]
    10. Simplified99.1%

      \[\leadsto \color{blue}{\left(\left(-\log y\right) - y \cdot \left(z + -1\right)\right)} - t \]
  3. Recombined 2 regimes into one program.
  4. Final simplification 98.6%

    \[\leadsto \begin{array}{l} \mathbf{if}\;-1 + x \leq -500000000000 \lor \neg \left(-1 + x \leq -1\right):\\ \;\;\;\;\left(x \cdot \log y + y \cdot \left(1 - z\right)\right) - t\\ \mathbf{else}:\\ \;\;\;\;\left(y \cdot \left(1 - z\right) - \log y\right) - t\\ \end{array} \]
  5. Add Preprocessing
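
For the large-magnitude part of the first regime, the substitution of x · log y for (x - 1) · log y can be bounded directly: dropping the -1 changes that term by exactly log y, a relative change of

\[\frac{\left|x \cdot \log y - \left(x - 1\right) \cdot \log y\right|}{\left|\left(x - 1\right) \cdot \log y\right|} = \frac{1}{\left|x - 1\right|} \le \frac{1}{5 \times 10^{11}} = 2 \times 10^{-12} \]

whenever x - 1 ≤ -5×10¹¹. The other part of that regime (x - 1 > -1) and the branch threshold itself are justified only empirically, by how the sampled points behaved during regime inference, which is reflected in this alternative's lower overall accuracy (97.8%).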

Alternative 6: 95.6% accurate, 1.7× speedup

\[\begin{array}{l} \\ \begin{array}{l} \mathbf{if}\;-1 + x \leq -500000000000 \lor \neg \left(-1 + x \leq 50\right):\\ \;\;\;\;\log y \cdot \left(-1 + x\right) - t\\ \mathbf{else}:\\ \;\;\;\;\left(y \cdot \left(1 - z\right) - \log y\right) - t\\ \end{array} \end{array} \]
(FPCore (x y z t)
 :precision binary64
 (if (or (<= (+ -1.0 x) -500000000000.0) (not (<= (+ -1.0 x) 50.0)))
   (- (* (log y) (+ -1.0 x)) t)
   (- (- (* y (- 1.0 z)) (log y)) t)))
double code(double x, double y, double z, double t) {
	double tmp;
	if (((-1.0 + x) <= -500000000000.0) || !((-1.0 + x) <= 50.0)) {
		tmp = (log(y) * (-1.0 + x)) - t;
	} else {
		tmp = ((y * (1.0 - z)) - log(y)) - t;
	}
	return tmp;
}
real(8) function code(x, y, z, t)
    real(8), intent (in) :: x
    real(8), intent (in) :: y
    real(8), intent (in) :: z
    real(8), intent (in) :: t
    real(8) :: tmp
    if ((((-1.0d0) + x) <= (-500000000000.0d0)) .or. (.not. (((-1.0d0) + x) <= 50.0d0))) then
        tmp = (log(y) * ((-1.0d0) + x)) - t
    else
        tmp = ((y * (1.0d0 - z)) - log(y)) - t
    end if
    code = tmp
end function
public static double code(double x, double y, double z, double t) {
	double tmp;
	if (((-1.0 + x) <= -500000000000.0) || !((-1.0 + x) <= 50.0)) {
		tmp = (Math.log(y) * (-1.0 + x)) - t;
	} else {
		tmp = ((y * (1.0 - z)) - Math.log(y)) - t;
	}
	return tmp;
}
def code(x, y, z, t):
	tmp = 0
	if ((-1.0 + x) <= -500000000000.0) or not ((-1.0 + x) <= 50.0):
		tmp = (math.log(y) * (-1.0 + x)) - t
	else:
		tmp = ((y * (1.0 - z)) - math.log(y)) - t
	return tmp
function code(x, y, z, t)
	tmp = 0.0
	if ((Float64(-1.0 + x) <= -500000000000.0) || !(Float64(-1.0 + x) <= 50.0))
		tmp = Float64(Float64(log(y) * Float64(-1.0 + x)) - t);
	else
		tmp = Float64(Float64(Float64(y * Float64(1.0 - z)) - log(y)) - t);
	end
	return tmp
end
function tmp_2 = code(x, y, z, t)
	tmp = 0.0;
	if (((-1.0 + x) <= -500000000000.0) || ~(((-1.0 + x) <= 50.0)))
		tmp = (log(y) * (-1.0 + x)) - t;
	else
		tmp = ((y * (1.0 - z)) - log(y)) - t;
	end
	tmp_2 = tmp;
end
code[x_, y_, z_, t_] := If[Or[LessEqual[N[(-1.0 + x), $MachinePrecision], -500000000000.0], N[Not[LessEqual[N[(-1.0 + x), $MachinePrecision], 50.0]], $MachinePrecision]], N[(N[(N[Log[y], $MachinePrecision] * N[(-1.0 + x), $MachinePrecision]), $MachinePrecision] - t), $MachinePrecision], N[(N[(N[(y * N[(1.0 - z), $MachinePrecision]), $MachinePrecision] - N[Log[y], $MachinePrecision]), $MachinePrecision] - t), $MachinePrecision]]
\begin{array}{l}

\\
\begin{array}{l}
\mathbf{if}\;-1 + x \leq -500000000000 \lor \neg \left(-1 + x \leq 50\right):\\
\;\;\;\;\log y \cdot \left(-1 + x\right) - t\\

\mathbf{else}:\\
\;\;\;\;\left(y \cdot \left(1 - z\right) - \log y\right) - t\\


\end{array}
\end{array}
Derivation
  1. Split input into 2 regimes
  2. if (x - 1) < -5e11 or 50 < (x - 1)

    1. Initial program 94.9%

      \[\left(\left(x - 1\right) \cdot \log y + \left(z - 1\right) \cdot \log \left(1 - y\right)\right) - t \]
    2. Step-by-step derivation
      1. +-commutative94.9%

        \[\leadsto \color{blue}{\left(\left(z - 1\right) \cdot \log \left(1 - y\right) + \left(x - 1\right) \cdot \log y\right)} - t \]
      2. fma-define94.9%

        \[\leadsto \color{blue}{\mathsf{fma}\left(z - 1, \log \left(1 - y\right), \left(x - 1\right) \cdot \log y\right)} - t \]
      3. sub-neg94.9%

        \[\leadsto \mathsf{fma}\left(\color{blue}{z + \left(-1\right)}, \log \left(1 - y\right), \left(x - 1\right) \cdot \log y\right) - t \]
      4. metadata-eval94.9%

        \[\leadsto \mathsf{fma}\left(z + \color{blue}{-1}, \log \left(1 - y\right), \left(x - 1\right) \cdot \log y\right) - t \]
      5. sub-neg94.9%

        \[\leadsto \mathsf{fma}\left(z + -1, \log \color{blue}{\left(1 + \left(-y\right)\right)}, \left(x - 1\right) \cdot \log y\right) - t \]
      6. log1p-define99.7%

        \[\leadsto \mathsf{fma}\left(z + -1, \color{blue}{\mathsf{log1p}\left(-y\right)}, \left(x - 1\right) \cdot \log y\right) - t \]
      7. sub-neg99.7%

        \[\leadsto \mathsf{fma}\left(z + -1, \mathsf{log1p}\left(-y\right), \color{blue}{\left(x + \left(-1\right)\right)} \cdot \log y\right) - t \]
      8. metadata-eval99.7%

        \[\leadsto \mathsf{fma}\left(z + -1, \mathsf{log1p}\left(-y\right), \left(x + \color{blue}{-1}\right) \cdot \log y\right) - t \]
    3. Simplified99.7%

      \[\leadsto \color{blue}{\mathsf{fma}\left(z + -1, \mathsf{log1p}\left(-y\right), \left(x + -1\right) \cdot \log y\right) - t} \]
    4. Add Preprocessing
    5. Taylor expanded in y around 0 99.4%

      \[\leadsto \color{blue}{\left(-1 \cdot \left(y \cdot \left(z - 1\right)\right) + \log y \cdot \left(x - 1\right)\right)} - t \]
    6. Step-by-step derivation
      1. +-commutative99.4%

        \[\leadsto \color{blue}{\left(\log y \cdot \left(x - 1\right) + -1 \cdot \left(y \cdot \left(z - 1\right)\right)\right)} - t \]
      2. sub-neg99.4%

        \[\leadsto \left(\log y \cdot \color{blue}{\left(x + \left(-1\right)\right)} + -1 \cdot \left(y \cdot \left(z - 1\right)\right)\right) - t \]
      3. metadata-eval99.4%

        \[\leadsto \left(\log y \cdot \left(x + \color{blue}{-1}\right) + -1 \cdot \left(y \cdot \left(z - 1\right)\right)\right) - t \]
      4. mul-1-neg99.4%

        \[\leadsto \left(\log y \cdot \left(x + -1\right) + \color{blue}{\left(-y \cdot \left(z - 1\right)\right)}\right) - t \]
      5. unsub-neg99.4%

        \[\leadsto \color{blue}{\left(\log y \cdot \left(x + -1\right) - y \cdot \left(z - 1\right)\right)} - t \]
      6. +-commutative99.4%

        \[\leadsto \left(\log y \cdot \color{blue}{\left(-1 + x\right)} - y \cdot \left(z - 1\right)\right) - t \]
      7. sub-neg99.4%

        \[\leadsto \left(\log y \cdot \left(-1 + x\right) - y \cdot \color{blue}{\left(z + \left(-1\right)\right)}\right) - t \]
      8. metadata-eval99.4%

        \[\leadsto \left(\log y \cdot \left(-1 + x\right) - y \cdot \left(z + \color{blue}{-1}\right)\right) - t \]
      9. +-commutative99.4%

        \[\leadsto \left(\log y \cdot \left(-1 + x\right) - y \cdot \color{blue}{\left(-1 + z\right)}\right) - t \]
    7. Simplified99.4%

      \[\leadsto \color{blue}{\left(\log y \cdot \left(-1 + x\right) - y \cdot \left(-1 + z\right)\right)} - t \]
    8. Taylor expanded in z around 0 94.3%

      \[\leadsto \color{blue}{\left(\log y \cdot \left(x - 1\right) - -1 \cdot y\right)} - t \]
    9. Step-by-step derivation
      1. sub-neg94.3%

        \[\leadsto \left(\log y \cdot \color{blue}{\left(x + \left(-1\right)\right)} - -1 \cdot y\right) - t \]
      2. metadata-eval94.3%

        \[\leadsto \left(\log y \cdot \left(x + \color{blue}{-1}\right) - -1 \cdot y\right) - t \]
      3. +-commutative94.3%

        \[\leadsto \left(\log y \cdot \color{blue}{\left(-1 + x\right)} - -1 \cdot y\right) - t \]
      4. neg-mul-194.3%

        \[\leadsto \left(\log y \cdot \left(-1 + x\right) - \color{blue}{\left(-y\right)}\right) - t \]
    10. Simplified94.3%

      \[\leadsto \color{blue}{\left(\log y \cdot \left(-1 + x\right) - \left(-y\right)\right)} - t \]
    11. Taylor expanded in y around 0 94.3%

      \[\leadsto \color{blue}{\log y \cdot \left(x - 1\right)} - t \]

    if -5e11 < (x - 1) < 50

    1. Initial program 80.3%

      \[\left(\left(x - 1\right) \cdot \log y + \left(z - 1\right) \cdot \log \left(1 - y\right)\right) - t \]
    2. Step-by-step derivation
      1. +-commutative80.3%

        \[\leadsto \color{blue}{\left(\left(z - 1\right) \cdot \log \left(1 - y\right) + \left(x - 1\right) \cdot \log y\right)} - t \]
      2. fma-define80.3%

        \[\leadsto \color{blue}{\mathsf{fma}\left(z - 1, \log \left(1 - y\right), \left(x - 1\right) \cdot \log y\right)} - t \]
      3. sub-neg80.3%

        \[\leadsto \mathsf{fma}\left(\color{blue}{z + \left(-1\right)}, \log \left(1 - y\right), \left(x - 1\right) \cdot \log y\right) - t \]
      4. metadata-eval80.3%

        \[\leadsto \mathsf{fma}\left(z + \color{blue}{-1}, \log \left(1 - y\right), \left(x - 1\right) \cdot \log y\right) - t \]
      5. sub-neg80.3%

        \[\leadsto \mathsf{fma}\left(z + -1, \log \color{blue}{\left(1 + \left(-y\right)\right)}, \left(x - 1\right) \cdot \log y\right) - t \]
      6. log1p-define100.0%

        \[\leadsto \mathsf{fma}\left(z + -1, \color{blue}{\mathsf{log1p}\left(-y\right)}, \left(x - 1\right) \cdot \log y\right) - t \]
      7. sub-neg100.0%

        \[\leadsto \mathsf{fma}\left(z + -1, \mathsf{log1p}\left(-y\right), \color{blue}{\left(x + \left(-1\right)\right)} \cdot \log y\right) - t \]
      8. metadata-eval100.0%

        \[\leadsto \mathsf{fma}\left(z + -1, \mathsf{log1p}\left(-y\right), \left(x + \color{blue}{-1}\right) \cdot \log y\right) - t \]
    3. Simplified100.0%

      \[\leadsto \color{blue}{\mathsf{fma}\left(z + -1, \mathsf{log1p}\left(-y\right), \left(x + -1\right) \cdot \log y\right) - t} \]
    4. Add Preprocessing
    5. Taylor expanded in y around 0 98.4%

      \[\leadsto \color{blue}{\left(-1 \cdot \left(y \cdot \left(z - 1\right)\right) + \log y \cdot \left(x - 1\right)\right)} - t \]
    6. Step-by-step derivation
      1. +-commutative98.4%

        \[\leadsto \color{blue}{\left(\log y \cdot \left(x - 1\right) + -1 \cdot \left(y \cdot \left(z - 1\right)\right)\right)} - t \]
      2. sub-neg98.4%

        \[\leadsto \left(\log y \cdot \color{blue}{\left(x + \left(-1\right)\right)} + -1 \cdot \left(y \cdot \left(z - 1\right)\right)\right) - t \]
      3. metadata-eval98.4%

        \[\leadsto \left(\log y \cdot \left(x + \color{blue}{-1}\right) + -1 \cdot \left(y \cdot \left(z - 1\right)\right)\right) - t \]
      4. mul-1-neg98.4%

        \[\leadsto \left(\log y \cdot \left(x + -1\right) + \color{blue}{\left(-y \cdot \left(z - 1\right)\right)}\right) - t \]
      5. unsub-neg98.4%

        \[\leadsto \color{blue}{\left(\log y \cdot \left(x + -1\right) - y \cdot \left(z - 1\right)\right)} - t \]
      6. +-commutative98.4%

        \[\leadsto \left(\log y \cdot \color{blue}{\left(-1 + x\right)} - y \cdot \left(z - 1\right)\right) - t \]
      7. sub-neg98.4%

        \[\leadsto \left(\log y \cdot \left(-1 + x\right) - y \cdot \color{blue}{\left(z + \left(-1\right)\right)}\right) - t \]
      8. metadata-eval98.4%

        \[\leadsto \left(\log y \cdot \left(-1 + x\right) - y \cdot \left(z + \color{blue}{-1}\right)\right) - t \]
      9. +-commutative98.4%

        \[\leadsto \left(\log y \cdot \left(-1 + x\right) - y \cdot \color{blue}{\left(-1 + z\right)}\right) - t \]
    7. Simplified98.4%

      \[\leadsto \color{blue}{\left(\log y \cdot \left(-1 + x\right) - y \cdot \left(-1 + z\right)\right)} - t \]
    8. Taylor expanded in x around 0 98.4%

      \[\leadsto \color{blue}{\left(-1 \cdot \log y - y \cdot \left(z - 1\right)\right)} - t \]
    9. Step-by-step derivation
      1. mul-1-neg98.4%

        \[\leadsto \left(\color{blue}{\left(-\log y\right)} - y \cdot \left(z - 1\right)\right) - t \]
      2. sub-neg98.4%

        \[\leadsto \left(\left(-\log y\right) - y \cdot \color{blue}{\left(z + \left(-1\right)\right)}\right) - t \]
      3. metadata-eval98.4%

        \[\leadsto \left(\left(-\log y\right) - y \cdot \left(z + \color{blue}{-1}\right)\right) - t \]
    10. Simplified98.4%

      \[\leadsto \color{blue}{\left(\left(-\log y\right) - y \cdot \left(z + -1\right)\right)} - t \]
  3. Recombined 2 regimes into one program.
  4. Final simplification 96.2%

    \[\leadsto \begin{array}{l} \mathbf{if}\;-1 + x \leq -500000000000 \lor \neg \left(-1 + x \leq 50\right):\\ \;\;\;\;\log y \cdot \left(-1 + x\right) - t\\ \mathbf{else}:\\ \;\;\;\;\left(y \cdot \left(1 - z\right) - \log y\right) - t\\ \end{array} \]
  5. Add Preprocessing

Alternative 7: 95.6% accurate, 1.7× speedup

\[\begin{array}{l} \\ \begin{array}{l} t_1 := \log y \cdot \left(-1 + x\right)\\ \mathbf{if}\;-1 + x \leq -500000000000:\\ \;\;\;\;t\_1 - t\\ \mathbf{elif}\;-1 + x \leq 50:\\ \;\;\;\;\left(y \cdot \left(1 - z\right) - \log y\right) - t\\ \mathbf{else}:\\ \;\;\;\;\left(y + t\_1\right) - t\\ \end{array} \end{array} \]
(FPCore (x y z t)
 :precision binary64
 (let* ((t_1 (* (log y) (+ -1.0 x))))
   (if (<= (+ -1.0 x) -500000000000.0)
     (- t_1 t)
     (if (<= (+ -1.0 x) 50.0)
       (- (- (* y (- 1.0 z)) (log y)) t)
       (- (+ y t_1) t)))))
double code(double x, double y, double z, double t) {
	double t_1 = log(y) * (-1.0 + x);
	double tmp;
	if ((-1.0 + x) <= -500000000000.0) {
		tmp = t_1 - t;
	} else if ((-1.0 + x) <= 50.0) {
		tmp = ((y * (1.0 - z)) - log(y)) - t;
	} else {
		tmp = (y + t_1) - t;
	}
	return tmp;
}
real(8) function code(x, y, z, t)
    real(8), intent (in) :: x
    real(8), intent (in) :: y
    real(8), intent (in) :: z
    real(8), intent (in) :: t
    real(8) :: t_1
    real(8) :: tmp
    t_1 = log(y) * ((-1.0d0) + x)
    if (((-1.0d0) + x) <= (-500000000000.0d0)) then
        tmp = t_1 - t
    else if (((-1.0d0) + x) <= 50.0d0) then
        tmp = ((y * (1.0d0 - z)) - log(y)) - t
    else
        tmp = (y + t_1) - t
    end if
    code = tmp
end function
public static double code(double x, double y, double z, double t) {
	double t_1 = Math.log(y) * (-1.0 + x);
	double tmp;
	if ((-1.0 + x) <= -500000000000.0) {
		tmp = t_1 - t;
	} else if ((-1.0 + x) <= 50.0) {
		tmp = ((y * (1.0 - z)) - Math.log(y)) - t;
	} else {
		tmp = (y + t_1) - t;
	}
	return tmp;
}
def code(x, y, z, t):
	t_1 = math.log(y) * (-1.0 + x)
	tmp = 0
	if (-1.0 + x) <= -500000000000.0:
		tmp = t_1 - t
	elif (-1.0 + x) <= 50.0:
		tmp = ((y * (1.0 - z)) - math.log(y)) - t
	else:
		tmp = (y + t_1) - t
	return tmp
function code(x, y, z, t)
	t_1 = Float64(log(y) * Float64(-1.0 + x))
	tmp = 0.0
	if (Float64(-1.0 + x) <= -500000000000.0)
		tmp = Float64(t_1 - t);
	elseif (Float64(-1.0 + x) <= 50.0)
		tmp = Float64(Float64(Float64(y * Float64(1.0 - z)) - log(y)) - t);
	else
		tmp = Float64(Float64(y + t_1) - t);
	end
	return tmp
end
function tmp_2 = code(x, y, z, t)
	t_1 = log(y) * (-1.0 + x);
	tmp = 0.0;
	if ((-1.0 + x) <= -500000000000.0)
		tmp = t_1 - t;
	elseif ((-1.0 + x) <= 50.0)
		tmp = ((y * (1.0 - z)) - log(y)) - t;
	else
		tmp = (y + t_1) - t;
	end
	tmp_2 = tmp;
end
code[x_, y_, z_, t_] := Block[{t$95$1 = N[(N[Log[y], $MachinePrecision] * N[(-1.0 + x), $MachinePrecision]), $MachinePrecision]}, If[LessEqual[N[(-1.0 + x), $MachinePrecision], -500000000000.0], N[(t$95$1 - t), $MachinePrecision], If[LessEqual[N[(-1.0 + x), $MachinePrecision], 50.0], N[(N[(N[(y * N[(1.0 - z), $MachinePrecision]), $MachinePrecision] - N[Log[y], $MachinePrecision]), $MachinePrecision] - t), $MachinePrecision], N[(N[(y + t$95$1), $MachinePrecision] - t), $MachinePrecision]]]]
\begin{array}{l}

\\
\begin{array}{l}
t_1 := \log y \cdot \left(-1 + x\right)\\
\mathbf{if}\;-1 + x \leq -500000000000:\\
\;\;\;\;t\_1 - t\\

\mathbf{elif}\;-1 + x \leq 50:\\
\;\;\;\;\left(y \cdot \left(1 - z\right) - \log y\right) - t\\

\mathbf{else}:\\
\;\;\;\;\left(y + t\_1\right) - t\\


\end{array}
\end{array}
Derivation
  1. Split input into 3 regimes
  2. if (x - 1) < -5e11

    1. Initial program 98.3%

      \[\left(\left(x - 1\right) \cdot \log y + \left(z - 1\right) \cdot \log \left(1 - y\right)\right) - t \]
    2. Step-by-step derivation
      1. +-commutative98.3%

        \[\leadsto \color{blue}{\left(\left(z - 1\right) \cdot \log \left(1 - y\right) + \left(x - 1\right) \cdot \log y\right)} - t \]
      2. fma-define98.3%

        \[\leadsto \color{blue}{\mathsf{fma}\left(z - 1, \log \left(1 - y\right), \left(x - 1\right) \cdot \log y\right)} - t \]
      3. sub-neg98.3%

        \[\leadsto \mathsf{fma}\left(\color{blue}{z + \left(-1\right)}, \log \left(1 - y\right), \left(x - 1\right) \cdot \log y\right) - t \]
      4. metadata-eval98.3%

        \[\leadsto \mathsf{fma}\left(z + \color{blue}{-1}, \log \left(1 - y\right), \left(x - 1\right) \cdot \log y\right) - t \]
      5. sub-neg98.3%

        \[\leadsto \mathsf{fma}\left(z + -1, \log \color{blue}{\left(1 + \left(-y\right)\right)}, \left(x - 1\right) \cdot \log y\right) - t \]
      6. log1p-define99.6%

        \[\leadsto \mathsf{fma}\left(z + -1, \color{blue}{\mathsf{log1p}\left(-y\right)}, \left(x - 1\right) \cdot \log y\right) - t \]
      7. sub-neg99.6%

        \[\leadsto \mathsf{fma}\left(z + -1, \mathsf{log1p}\left(-y\right), \color{blue}{\left(x + \left(-1\right)\right)} \cdot \log y\right) - t \]
      8. metadata-eval99.6%

        \[\leadsto \mathsf{fma}\left(z + -1, \mathsf{log1p}\left(-y\right), \left(x + \color{blue}{-1}\right) \cdot \log y\right) - t \]
    3. Simplified99.6%

      \[\leadsto \color{blue}{\mathsf{fma}\left(z + -1, \mathsf{log1p}\left(-y\right), \left(x + -1\right) \cdot \log y\right) - t} \]
    4. Add Preprocessing
    5. Taylor expanded in y around 0 99.6%

      \[\leadsto \color{blue}{\left(-1 \cdot \left(y \cdot \left(z - 1\right)\right) + \log y \cdot \left(x - 1\right)\right)} - t \]
    6. Step-by-step derivation
      1. +-commutative99.6%

        \[\leadsto \color{blue}{\left(\log y \cdot \left(x - 1\right) + -1 \cdot \left(y \cdot \left(z - 1\right)\right)\right)} - t \]
      2. sub-neg99.6%

        \[\leadsto \left(\log y \cdot \color{blue}{\left(x + \left(-1\right)\right)} + -1 \cdot \left(y \cdot \left(z - 1\right)\right)\right) - t \]
      3. metadata-eval99.6%

        \[\leadsto \left(\log y \cdot \left(x + \color{blue}{-1}\right) + -1 \cdot \left(y \cdot \left(z - 1\right)\right)\right) - t \]
      4. mul-1-neg99.6%

        \[\leadsto \left(\log y \cdot \left(x + -1\right) + \color{blue}{\left(-y \cdot \left(z - 1\right)\right)}\right) - t \]
      5. unsub-neg99.6%

        \[\leadsto \color{blue}{\left(\log y \cdot \left(x + -1\right) - y \cdot \left(z - 1\right)\right)} - t \]
      6. +-commutative99.6%

        \[\leadsto \left(\log y \cdot \color{blue}{\left(-1 + x\right)} - y \cdot \left(z - 1\right)\right) - t \]
      7. sub-neg99.6%

        \[\leadsto \left(\log y \cdot \left(-1 + x\right) - y \cdot \color{blue}{\left(z + \left(-1\right)\right)}\right) - t \]
      8. metadata-eval99.6%

        \[\leadsto \left(\log y \cdot \left(-1 + x\right) - y \cdot \left(z + \color{blue}{-1}\right)\right) - t \]
      9. +-commutative99.6%

        \[\leadsto \left(\log y \cdot \left(-1 + x\right) - y \cdot \color{blue}{\left(-1 + z\right)}\right) - t \]
    7. Simplified99.6%

      \[\leadsto \color{blue}{\left(\log y \cdot \left(-1 + x\right) - y \cdot \left(-1 + z\right)\right)} - t \]
    8. Taylor expanded in z around 0 98.3%

      \[\leadsto \color{blue}{\left(\log y \cdot \left(x - 1\right) - -1 \cdot y\right)} - t \]
    9. Step-by-step derivation
      1. sub-neg98.3%

        \[\leadsto \left(\log y \cdot \color{blue}{\left(x + \left(-1\right)\right)} - -1 \cdot y\right) - t \]
      2. metadata-eval98.3%

        \[\leadsto \left(\log y \cdot \left(x + \color{blue}{-1}\right) - -1 \cdot y\right) - t \]
      3. +-commutative98.3%

        \[\leadsto \left(\log y \cdot \color{blue}{\left(-1 + x\right)} - -1 \cdot y\right) - t \]
      4. neg-mul-198.3%

        \[\leadsto \left(\log y \cdot \left(-1 + x\right) - \color{blue}{\left(-y\right)}\right) - t \]
    10. Simplified98.3%

      \[\leadsto \color{blue}{\left(\log y \cdot \left(-1 + x\right) - \left(-y\right)\right)} - t \]
    11. Taylor expanded in y around 0 98.3%

      \[\leadsto \color{blue}{\log y \cdot \left(x - 1\right)} - t \]

    if -5e11 < (x - 1) < 50

    1. Initial program 80.3%

      \[\left(\left(x - 1\right) \cdot \log y + \left(z - 1\right) \cdot \log \left(1 - y\right)\right) - t \]
    2. Step-by-step derivation
      1. +-commutative80.3%

        \[\leadsto \color{blue}{\left(\left(z - 1\right) \cdot \log \left(1 - y\right) + \left(x - 1\right) \cdot \log y\right)} - t \]
      2. fma-define80.3%

        \[\leadsto \color{blue}{\mathsf{fma}\left(z - 1, \log \left(1 - y\right), \left(x - 1\right) \cdot \log y\right)} - t \]
      3. sub-neg80.3%

        \[\leadsto \mathsf{fma}\left(\color{blue}{z + \left(-1\right)}, \log \left(1 - y\right), \left(x - 1\right) \cdot \log y\right) - t \]
      4. metadata-eval80.3%

        \[\leadsto \mathsf{fma}\left(z + \color{blue}{-1}, \log \left(1 - y\right), \left(x - 1\right) \cdot \log y\right) - t \]
      5. sub-neg80.3%

        \[\leadsto \mathsf{fma}\left(z + -1, \log \color{blue}{\left(1 + \left(-y\right)\right)}, \left(x - 1\right) \cdot \log y\right) - t \]
      6. log1p-define100.0%

        \[\leadsto \mathsf{fma}\left(z + -1, \color{blue}{\mathsf{log1p}\left(-y\right)}, \left(x - 1\right) \cdot \log y\right) - t \]
      7. sub-neg100.0%

        \[\leadsto \mathsf{fma}\left(z + -1, \mathsf{log1p}\left(-y\right), \color{blue}{\left(x + \left(-1\right)\right)} \cdot \log y\right) - t \]
      8. metadata-eval100.0%

        \[\leadsto \mathsf{fma}\left(z + -1, \mathsf{log1p}\left(-y\right), \left(x + \color{blue}{-1}\right) \cdot \log y\right) - t \]
    3. Simplified100.0%

      \[\leadsto \color{blue}{\mathsf{fma}\left(z + -1, \mathsf{log1p}\left(-y\right), \left(x + -1\right) \cdot \log y\right) - t} \]
    4. Add Preprocessing
    5. Taylor expanded in y around 0 98.4%

      \[\leadsto \color{blue}{\left(-1 \cdot \left(y \cdot \left(z - 1\right)\right) + \log y \cdot \left(x - 1\right)\right)} - t \]
    6. Step-by-step derivation
      1. +-commutative98.4%

        \[\leadsto \color{blue}{\left(\log y \cdot \left(x - 1\right) + -1 \cdot \left(y \cdot \left(z - 1\right)\right)\right)} - t \]
      2. sub-neg98.4%

        \[\leadsto \left(\log y \cdot \color{blue}{\left(x + \left(-1\right)\right)} + -1 \cdot \left(y \cdot \left(z - 1\right)\right)\right) - t \]
      3. metadata-eval98.4%

        \[\leadsto \left(\log y \cdot \left(x + \color{blue}{-1}\right) + -1 \cdot \left(y \cdot \left(z - 1\right)\right)\right) - t \]
      4. mul-1-neg98.4%

        \[\leadsto \left(\log y \cdot \left(x + -1\right) + \color{blue}{\left(-y \cdot \left(z - 1\right)\right)}\right) - t \]
      5. unsub-neg98.4%

        \[\leadsto \color{blue}{\left(\log y \cdot \left(x + -1\right) - y \cdot \left(z - 1\right)\right)} - t \]
      6. +-commutative98.4%

        \[\leadsto \left(\log y \cdot \color{blue}{\left(-1 + x\right)} - y \cdot \left(z - 1\right)\right) - t \]
      7. sub-neg98.4%

        \[\leadsto \left(\log y \cdot \left(-1 + x\right) - y \cdot \color{blue}{\left(z + \left(-1\right)\right)}\right) - t \]
      8. metadata-eval98.4%

        \[\leadsto \left(\log y \cdot \left(-1 + x\right) - y \cdot \left(z + \color{blue}{-1}\right)\right) - t \]
      9. +-commutative98.4%

        \[\leadsto \left(\log y \cdot \left(-1 + x\right) - y \cdot \color{blue}{\left(-1 + z\right)}\right) - t \]
    7. Simplified98.4%

      \[\leadsto \color{blue}{\left(\log y \cdot \left(-1 + x\right) - y \cdot \left(-1 + z\right)\right)} - t \]
    8. Taylor expanded in x around 0 98.4%

      \[\leadsto \color{blue}{\left(-1 \cdot \log y - y \cdot \left(z - 1\right)\right)} - t \]
    9. Step-by-step derivation
      1. mul-1-neg98.4%

        \[\leadsto \left(\color{blue}{\left(-\log y\right)} - y \cdot \left(z - 1\right)\right) - t \]
      2. sub-neg98.4%

        \[\leadsto \left(\left(-\log y\right) - y \cdot \color{blue}{\left(z + \left(-1\right)\right)}\right) - t \]
      3. metadata-eval98.4%

        \[\leadsto \left(\left(-\log y\right) - y \cdot \left(z + \color{blue}{-1}\right)\right) - t \]
    10. Simplified98.4%

      \[\leadsto \color{blue}{\left(\left(-\log y\right) - y \cdot \left(z + -1\right)\right)} - t \]

    if 50 < (x - 1)

    1. Initial program 91.1%

      \[\left(\left(x - 1\right) \cdot \log y + \left(z - 1\right) \cdot \log \left(1 - y\right)\right) - t \]
    2. Step-by-step derivation
      1. +-commutative91.1%

        \[\leadsto \color{blue}{\left(\left(z - 1\right) \cdot \log \left(1 - y\right) + \left(x - 1\right) \cdot \log y\right)} - t \]
      2. fma-define91.1%

        \[\leadsto \color{blue}{\mathsf{fma}\left(z - 1, \log \left(1 - y\right), \left(x - 1\right) \cdot \log y\right)} - t \]
      3. sub-neg91.1%

        \[\leadsto \mathsf{fma}\left(\color{blue}{z + \left(-1\right)}, \log \left(1 - y\right), \left(x - 1\right) \cdot \log y\right) - t \]
      4. metadata-eval91.1%

        \[\leadsto \mathsf{fma}\left(z + \color{blue}{-1}, \log \left(1 - y\right), \left(x - 1\right) \cdot \log y\right) - t \]
      5. sub-neg91.1%

        \[\leadsto \mathsf{fma}\left(z + -1, \log \color{blue}{\left(1 + \left(-y\right)\right)}, \left(x - 1\right) \cdot \log y\right) - t \]
      6. log1p-define99.8%

        \[\leadsto \mathsf{fma}\left(z + -1, \color{blue}{\mathsf{log1p}\left(-y\right)}, \left(x - 1\right) \cdot \log y\right) - t \]
      7. sub-neg99.8%

        \[\leadsto \mathsf{fma}\left(z + -1, \mathsf{log1p}\left(-y\right), \color{blue}{\left(x + \left(-1\right)\right)} \cdot \log y\right) - t \]
      8. metadata-eval99.8%

        \[\leadsto \mathsf{fma}\left(z + -1, \mathsf{log1p}\left(-y\right), \left(x + \color{blue}{-1}\right) \cdot \log y\right) - t \]
    3. Simplified99.8%

      \[\leadsto \color{blue}{\mathsf{fma}\left(z + -1, \mathsf{log1p}\left(-y\right), \left(x + -1\right) \cdot \log y\right) - t} \]
    4. Add Preprocessing
    5. Taylor expanded in y around 0 99.2%

      \[\leadsto \color{blue}{\left(-1 \cdot \left(y \cdot \left(z - 1\right)\right) + \log y \cdot \left(x - 1\right)\right)} - t \]
    6. Step-by-step derivation
      1. +-commutative99.2%

        \[\leadsto \color{blue}{\left(\log y \cdot \left(x - 1\right) + -1 \cdot \left(y \cdot \left(z - 1\right)\right)\right)} - t \]
      2. sub-neg99.2%

        \[\leadsto \left(\log y \cdot \color{blue}{\left(x + \left(-1\right)\right)} + -1 \cdot \left(y \cdot \left(z - 1\right)\right)\right) - t \]
      3. metadata-eval99.2%

        \[\leadsto \left(\log y \cdot \left(x + \color{blue}{-1}\right) + -1 \cdot \left(y \cdot \left(z - 1\right)\right)\right) - t \]
      4. mul-1-neg99.2%

        \[\leadsto \left(\log y \cdot \left(x + -1\right) + \color{blue}{\left(-y \cdot \left(z - 1\right)\right)}\right) - t \]
      5. unsub-neg99.2%

        \[\leadsto \color{blue}{\left(\log y \cdot \left(x + -1\right) - y \cdot \left(z - 1\right)\right)} - t \]
      6. +-commutative99.2%

        \[\leadsto \left(\log y \cdot \color{blue}{\left(-1 + x\right)} - y \cdot \left(z - 1\right)\right) - t \]
      7. sub-neg99.2%

        \[\leadsto \left(\log y \cdot \left(-1 + x\right) - y \cdot \color{blue}{\left(z + \left(-1\right)\right)}\right) - t \]
      8. metadata-eval99.2%

        \[\leadsto \left(\log y \cdot \left(-1 + x\right) - y \cdot \left(z + \color{blue}{-1}\right)\right) - t \]
      9. +-commutative99.2%

        \[\leadsto \left(\log y \cdot \left(-1 + x\right) - y \cdot \color{blue}{\left(-1 + z\right)}\right) - t \]
    7. Simplified99.2%

      \[\leadsto \color{blue}{\left(\log y \cdot \left(-1 + x\right) - y \cdot \left(-1 + z\right)\right)} - t \]
    8. Taylor expanded in z around 0 89.9%

      \[\leadsto \color{blue}{\left(\log y \cdot \left(x - 1\right) - -1 \cdot y\right)} - t \]
    9. Step-by-step derivation
      1. sub-neg89.9%

        \[\leadsto \left(\log y \cdot \color{blue}{\left(x + \left(-1\right)\right)} - -1 \cdot y\right) - t \]
      2. metadata-eval89.9%

        \[\leadsto \left(\log y \cdot \left(x + \color{blue}{-1}\right) - -1 \cdot y\right) - t \]
      3. +-commutative89.9%

        \[\leadsto \left(\log y \cdot \color{blue}{\left(-1 + x\right)} - -1 \cdot y\right) - t \]
      4. neg-mul-189.9%

        \[\leadsto \left(\log y \cdot \left(-1 + x\right) - \color{blue}{\left(-y\right)}\right) - t \]
    10. Simplified89.9%

      \[\leadsto \color{blue}{\left(\log y \cdot \left(-1 + x\right) - \left(-y\right)\right)} - t \]
  3. Recombined 3 regimes into one program.
  4. Final simplification 96.2%

    \[\leadsto \begin{array}{l} \mathbf{if}\;-1 + x \leq -500000000000:\\ \;\;\;\;\log y \cdot \left(-1 + x\right) - t\\ \mathbf{elif}\;-1 + x \leq 50:\\ \;\;\;\;\left(y \cdot \left(1 - z\right) - \log y\right) - t\\ \mathbf{else}:\\ \;\;\;\;\left(y + \log y \cdot \left(-1 + x\right)\right) - t\\ \end{array} \]
  5. Add Preprocessing

Alternative 8: 99.6% accurate, 1.7× speedup

\[\begin{array}{l} \\ \left(\left(z + -1\right) \cdot \left(y \cdot \left(-1 + y \cdot \left(y \cdot -0.3333333333333333 - 0.5\right)\right)\right) + \log y \cdot \left(-1 + x\right)\right) - t \end{array} \]
(FPCore (x y z t)
 :precision binary64
 (-
  (+
   (* (+ z -1.0) (* y (+ -1.0 (* y (- (* y -0.3333333333333333) 0.5)))))
   (* (log y) (+ -1.0 x)))
  t))
double code(double x, double y, double z, double t) {
	return (((z + -1.0) * (y * (-1.0 + (y * ((y * -0.3333333333333333) - 0.5))))) + (log(y) * (-1.0 + x))) - t;
}
real(8) function code(x, y, z, t)
    real(8), intent (in) :: x
    real(8), intent (in) :: y
    real(8), intent (in) :: z
    real(8), intent (in) :: t
    code = (((z + (-1.0d0)) * (y * ((-1.0d0) + (y * ((y * (-0.3333333333333333d0)) - 0.5d0))))) + (log(y) * ((-1.0d0) + x))) - t
end function
public static double code(double x, double y, double z, double t) {
	return (((z + -1.0) * (y * (-1.0 + (y * ((y * -0.3333333333333333) - 0.5))))) + (Math.log(y) * (-1.0 + x))) - t;
}
def code(x, y, z, t):
	return (((z + -1.0) * (y * (-1.0 + (y * ((y * -0.3333333333333333) - 0.5))))) + (math.log(y) * (-1.0 + x))) - t
function code(x, y, z, t)
	return Float64(Float64(Float64(Float64(z + -1.0) * Float64(y * Float64(-1.0 + Float64(y * Float64(Float64(y * -0.3333333333333333) - 0.5))))) + Float64(log(y) * Float64(-1.0 + x))) - t)
end
function tmp = code(x, y, z, t)
	tmp = (((z + -1.0) * (y * (-1.0 + (y * ((y * -0.3333333333333333) - 0.5))))) + (log(y) * (-1.0 + x))) - t;
end
code[x_, y_, z_, t_] := N[(N[(N[(N[(z + -1.0), $MachinePrecision] * N[(y * N[(-1.0 + N[(y * N[(N[(y * -0.3333333333333333), $MachinePrecision] - 0.5), $MachinePrecision]), $MachinePrecision]), $MachinePrecision]), $MachinePrecision]), $MachinePrecision] + N[(N[Log[y], $MachinePrecision] * N[(-1.0 + x), $MachinePrecision]), $MachinePrecision]), $MachinePrecision] - t), $MachinePrecision]
\begin{array}{l}

\\
\left(\left(z + -1\right) \cdot \left(y \cdot \left(-1 + y \cdot \left(y \cdot -0.3333333333333333 - 0.5\right)\right)\right) + \log y \cdot \left(-1 + x\right)\right) - t
\end{array}
Derivation
  1. Initial program 88.2%

    \[\left(\left(x - 1\right) \cdot \log y + \left(z - 1\right) \cdot \log \left(1 - y\right)\right) - t \]
  2. Add Preprocessing
  3. Taylor expanded in y around 0 99.5%

    \[\leadsto \left(\left(x - 1\right) \cdot \log y + \left(z - 1\right) \cdot \color{blue}{\left(y \cdot \left(y \cdot \left(-0.3333333333333333 \cdot y - 0.5\right) - 1\right)\right)}\right) - t \]
  4. Final simplification99.5%

    \[\leadsto \left(\left(z + -1\right) \cdot \left(y \cdot \left(-1 + y \cdot \left(y \cdot -0.3333333333333333 - 0.5\right)\right)\right) + \log y \cdot \left(-1 + x\right)\right) - t \]
  5. Add Preprocessing
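
The Taylor expansion in step 3 replaces log(1 - y) by the first three terms of its Maclaurin series, written in Horner form; the coefficients -1, -0.5 and -0.3333333333333333 appearing in this alternative are exactly -1, -1/2 and -1/3:

\[\log \left(1 - y\right) = -y - \frac{y^2}{2} - \frac{y^3}{3} - \cdots \approx y \cdot \left(y \cdot \left(-0.3333333333333333 \cdot y - 0.5\right) - 1\right) \]

so the substituted subterm agrees with log(1 - y) to within O(y^4) when y is small.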

Alternative 9: 99.4% accurate, 1.8× speedup?

\[\begin{array}{l} \\ \left(y \cdot \left(\left(z + -1\right) \cdot \left(-1 + y \cdot -0.5\right)\right) + \log y \cdot \left(-1 + x\right)\right) - t \end{array} \]
(FPCore (x y z t)
 :precision binary64
 (- (+ (* y (* (+ z -1.0) (+ -1.0 (* y -0.5)))) (* (log y) (+ -1.0 x))) t))
double code(double x, double y, double z, double t) {
	return ((y * ((z + -1.0) * (-1.0 + (y * -0.5)))) + (log(y) * (-1.0 + x))) - t;
}
real(8) function code(x, y, z, t)
    real(8), intent (in) :: x
    real(8), intent (in) :: y
    real(8), intent (in) :: z
    real(8), intent (in) :: t
    code = ((y * ((z + (-1.0d0)) * ((-1.0d0) + (y * (-0.5d0))))) + (log(y) * ((-1.0d0) + x))) - t
end function
public static double code(double x, double y, double z, double t) {
	return ((y * ((z + -1.0) * (-1.0 + (y * -0.5)))) + (Math.log(y) * (-1.0 + x))) - t;
}
def code(x, y, z, t):
	return ((y * ((z + -1.0) * (-1.0 + (y * -0.5)))) + (math.log(y) * (-1.0 + x))) - t
function code(x, y, z, t)
	return Float64(Float64(Float64(y * Float64(Float64(z + -1.0) * Float64(-1.0 + Float64(y * -0.5)))) + Float64(log(y) * Float64(-1.0 + x))) - t)
end
function tmp = code(x, y, z, t)
	tmp = ((y * ((z + -1.0) * (-1.0 + (y * -0.5)))) + (log(y) * (-1.0 + x))) - t;
end
code[x_, y_, z_, t_] := N[(N[(N[(y * N[(N[(z + -1.0), $MachinePrecision] * N[(-1.0 + N[(y * -0.5), $MachinePrecision]), $MachinePrecision]), $MachinePrecision]), $MachinePrecision] + N[(N[Log[y], $MachinePrecision] * N[(-1.0 + x), $MachinePrecision]), $MachinePrecision]), $MachinePrecision] - t), $MachinePrecision]
\begin{array}{l}

\\
\left(y \cdot \left(\left(z + -1\right) \cdot \left(-1 + y \cdot -0.5\right)\right) + \log y \cdot \left(-1 + x\right)\right) - t
\end{array}
Derivation
  1. Initial program 88.2%

    \[\left(\left(x - 1\right) \cdot \log y + \left(z - 1\right) \cdot \log \left(1 - y\right)\right) - t \]
  2. Add Preprocessing
  3. Taylor expanded in y around 0 99.6%

    \[\leadsto \left(\left(x - 1\right) \cdot \log y + \color{blue}{y \cdot \left(-1 \cdot \left(z - 1\right) + y \cdot \left(-0.5 \cdot \left(z - 1\right) + y \cdot \left(-0.3333333333333333 \cdot \left(z - 1\right) + -0.25 \cdot \left(y \cdot \left(z - 1\right)\right)\right)\right)\right)}\right) - t \]
  4. Taylor expanded in y around 0 99.4%

    \[\leadsto \left(\left(x - 1\right) \cdot \log y + \color{blue}{y \cdot \left(-1 \cdot \left(z - 1\right) + -0.5 \cdot \left(y \cdot \left(z - 1\right)\right)\right)}\right) - t \]
  5. Step-by-step derivation
    1. +-commutative99.4%

      \[\leadsto \left(\left(x - 1\right) \cdot \log y + y \cdot \color{blue}{\left(-0.5 \cdot \left(y \cdot \left(z - 1\right)\right) + -1 \cdot \left(z - 1\right)\right)}\right) - t \]
    2. associate-*r*99.4%

      \[\leadsto \left(\left(x - 1\right) \cdot \log y + y \cdot \left(\color{blue}{\left(-0.5 \cdot y\right) \cdot \left(z - 1\right)} + -1 \cdot \left(z - 1\right)\right)\right) - t \]
    3. distribute-rgt-out99.4%

      \[\leadsto \left(\left(x - 1\right) \cdot \log y + y \cdot \color{blue}{\left(\left(z - 1\right) \cdot \left(-0.5 \cdot y + -1\right)\right)}\right) - t \]
    4. sub-neg99.4%

      \[\leadsto \left(\left(x - 1\right) \cdot \log y + y \cdot \left(\color{blue}{\left(z + \left(-1\right)\right)} \cdot \left(-0.5 \cdot y + -1\right)\right)\right) - t \]
    5. metadata-eval99.4%

      \[\leadsto \left(\left(x - 1\right) \cdot \log y + y \cdot \left(\left(z + \color{blue}{-1}\right) \cdot \left(-0.5 \cdot y + -1\right)\right)\right) - t \]
    6. *-commutative99.4%

      \[\leadsto \left(\left(x - 1\right) \cdot \log y + y \cdot \left(\left(z + -1\right) \cdot \left(\color{blue}{y \cdot -0.5} + -1\right)\right)\right) - t \]
  6. Simplified99.4%

    \[\leadsto \left(\left(x - 1\right) \cdot \log y + \color{blue}{y \cdot \left(\left(z + -1\right) \cdot \left(y \cdot -0.5 + -1\right)\right)}\right) - t \]
  7. Final simplification99.4%

    \[\leadsto \left(y \cdot \left(\left(z + -1\right) \cdot \left(-1 + y \cdot -0.5\right)\right) + \log y \cdot \left(-1 + x\right)\right) - t \]
  8. Add Preprocessing
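
Alternative 9 keeps only the quadratic truncation -y - y^2/2 of log(1 - y), so it targets inputs where y is close to 0. A minimal Python spot check against the original formula (a sketch; original and alternative_9 are direct transcriptions of the two expressions, and the sample values of x, z and t are arbitrary, chosen only for illustration):

import math

def original(x, y, z, t):
	return (((x - 1.0) * math.log(y)) + ((z - 1.0) * math.log(1.0 - y))) - t

def alternative_9(x, y, z, t):
	return ((y * ((z + -1.0) * (-1.0 + (y * -0.5)))) + (math.log(y) * (-1.0 + x))) - t

for y in (1e-3, 1e-6, 1e-9):
	# The two results should track each other for small y; the truncation error
	# of the quadratic series grows as y approaches 1.
	print(y, original(2.5, y, 3.5, 0.25), alternative_9(2.5, y, 3.5, 0.25))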

Alternative 10: 89.6% accurate, 1.8× speedup?

\[\begin{array}{l} \\ \begin{array}{l} \mathbf{if}\;z \leq -5 \cdot 10^{+182}:\\ \;\;\;\;z \cdot \mathsf{log1p}\left(-y\right) - t\\ \mathbf{elif}\;z \leq 2.8 \cdot 10^{+254}:\\ \;\;\;\;\log y \cdot \left(-1 + x\right) - t\\ \mathbf{else}:\\ \;\;\;\;z \cdot \left(\frac{-t}{z} - y\right)\\ \end{array} \end{array} \]
(FPCore (x y z t)
 :precision binary64
 (if (<= z -5e+182)
   (- (* z (log1p (- y))) t)
   (if (<= z 2.8e+254) (- (* (log y) (+ -1.0 x)) t) (* z (- (/ (- t) z) y)))))
double code(double x, double y, double z, double t) {
	double tmp;
	if (z <= -5e+182) {
		tmp = (z * log1p(-y)) - t;
	} else if (z <= 2.8e+254) {
		tmp = (log(y) * (-1.0 + x)) - t;
	} else {
		tmp = z * ((-t / z) - y);
	}
	return tmp;
}
public static double code(double x, double y, double z, double t) {
	double tmp;
	if (z <= -5e+182) {
		tmp = (z * Math.log1p(-y)) - t;
	} else if (z <= 2.8e+254) {
		tmp = (Math.log(y) * (-1.0 + x)) - t;
	} else {
		tmp = z * ((-t / z) - y);
	}
	return tmp;
}
def code(x, y, z, t):
	tmp = 0
	if z <= -5e+182:
		tmp = (z * math.log1p(-y)) - t
	elif z <= 2.8e+254:
		tmp = (math.log(y) * (-1.0 + x)) - t
	else:
		tmp = z * ((-t / z) - y)
	return tmp
function code(x, y, z, t)
	tmp = 0.0
	if (z <= -5e+182)
		tmp = Float64(Float64(z * log1p(Float64(-y))) - t);
	elseif (z <= 2.8e+254)
		tmp = Float64(Float64(log(y) * Float64(-1.0 + x)) - t);
	else
		tmp = Float64(z * Float64(Float64(Float64(-t) / z) - y));
	end
	return tmp
end
code[x_, y_, z_, t_] := If[LessEqual[z, -5*^182], N[(N[(z * N[Log[1 + (-y)], $MachinePrecision]), $MachinePrecision] - t), $MachinePrecision], If[LessEqual[z, 2.8*^254], N[(N[(N[Log[y], $MachinePrecision] * N[(-1.0 + x), $MachinePrecision]), $MachinePrecision] - t), $MachinePrecision], N[(z * N[(N[((-t) / z), $MachinePrecision] - y), $MachinePrecision]), $MachinePrecision]]]
\begin{array}{l}

\\
\begin{array}{l}
\mathbf{if}\;z \leq -5 \cdot 10^{+182}:\\
\;\;\;\;z \cdot \mathsf{log1p}\left(-y\right) - t\\

\mathbf{elif}\;z \leq 2.8 \cdot 10^{+254}:\\
\;\;\;\;\log y \cdot \left(-1 + x\right) - t\\

\mathbf{else}:\\
\;\;\;\;z \cdot \left(\frac{-t}{z} - y\right)\\


\end{array}
\end{array}
Derivation
  1. Split input into 3 regimes
  2. if z < -4.99999999999999973e182

    1. Initial program 69.4%

      \[\left(\left(x - 1\right) \cdot \log y + \left(z - 1\right) \cdot \log \left(1 - y\right)\right) - t \]
    2. Step-by-step derivation
      1. +-commutative69.4%

        \[\leadsto \color{blue}{\left(\left(z - 1\right) \cdot \log \left(1 - y\right) + \left(x - 1\right) \cdot \log y\right)} - t \]
      2. fma-define69.4%

        \[\leadsto \color{blue}{\mathsf{fma}\left(z - 1, \log \left(1 - y\right), \left(x - 1\right) \cdot \log y\right)} - t \]
      3. sub-neg69.4%

        \[\leadsto \mathsf{fma}\left(\color{blue}{z + \left(-1\right)}, \log \left(1 - y\right), \left(x - 1\right) \cdot \log y\right) - t \]
      4. metadata-eval69.4%

        \[\leadsto \mathsf{fma}\left(z + \color{blue}{-1}, \log \left(1 - y\right), \left(x - 1\right) \cdot \log y\right) - t \]
      5. sub-neg69.4%

        \[\leadsto \mathsf{fma}\left(z + -1, \log \color{blue}{\left(1 + \left(-y\right)\right)}, \left(x - 1\right) \cdot \log y\right) - t \]
      6. log1p-define99.7%

        \[\leadsto \mathsf{fma}\left(z + -1, \color{blue}{\mathsf{log1p}\left(-y\right)}, \left(x - 1\right) \cdot \log y\right) - t \]
      7. sub-neg99.7%

        \[\leadsto \mathsf{fma}\left(z + -1, \mathsf{log1p}\left(-y\right), \color{blue}{\left(x + \left(-1\right)\right)} \cdot \log y\right) - t \]
      8. metadata-eval99.7%

        \[\leadsto \mathsf{fma}\left(z + -1, \mathsf{log1p}\left(-y\right), \left(x + \color{blue}{-1}\right) \cdot \log y\right) - t \]
    3. Simplified99.7%

      \[\leadsto \color{blue}{\mathsf{fma}\left(z + -1, \mathsf{log1p}\left(-y\right), \left(x + -1\right) \cdot \log y\right) - t} \]
    4. Add Preprocessing
    5. Taylor expanded in z around inf 69.3%

      \[\leadsto \color{blue}{z \cdot \left(\log \left(1 - y\right) + \left(-1 \cdot \frac{\log \left(1 - y\right)}{z} + \frac{\log y \cdot \left(x - 1\right)}{z}\right)\right)} - t \]
    6. Step-by-step derivation
      1. sub-neg69.3%

        \[\leadsto z \cdot \left(\log \color{blue}{\left(1 + \left(-y\right)\right)} + \left(-1 \cdot \frac{\log \left(1 - y\right)}{z} + \frac{\log y \cdot \left(x - 1\right)}{z}\right)\right) - t \]
      2. log1p-define99.7%

        \[\leadsto z \cdot \left(\color{blue}{\mathsf{log1p}\left(-y\right)} + \left(-1 \cdot \frac{\log \left(1 - y\right)}{z} + \frac{\log y \cdot \left(x - 1\right)}{z}\right)\right) - t \]
      3. +-commutative99.7%

        \[\leadsto z \cdot \left(\mathsf{log1p}\left(-y\right) + \color{blue}{\left(\frac{\log y \cdot \left(x - 1\right)}{z} + -1 \cdot \frac{\log \left(1 - y\right)}{z}\right)}\right) - t \]
      4. sub-neg99.7%

        \[\leadsto z \cdot \left(\mathsf{log1p}\left(-y\right) + \left(\frac{\log y \cdot \color{blue}{\left(x + \left(-1\right)\right)}}{z} + -1 \cdot \frac{\log \left(1 - y\right)}{z}\right)\right) - t \]
      5. metadata-eval99.7%

        \[\leadsto z \cdot \left(\mathsf{log1p}\left(-y\right) + \left(\frac{\log y \cdot \left(x + \color{blue}{-1}\right)}{z} + -1 \cdot \frac{\log \left(1 - y\right)}{z}\right)\right) - t \]
      6. associate-/l*99.7%

        \[\leadsto z \cdot \left(\mathsf{log1p}\left(-y\right) + \left(\color{blue}{\log y \cdot \frac{x + -1}{z}} + -1 \cdot \frac{\log \left(1 - y\right)}{z}\right)\right) - t \]
      7. fma-define99.7%

        \[\leadsto z \cdot \left(\mathsf{log1p}\left(-y\right) + \color{blue}{\mathsf{fma}\left(\log y, \frac{x + -1}{z}, -1 \cdot \frac{\log \left(1 - y\right)}{z}\right)}\right) - t \]
      8. +-commutative99.7%

        \[\leadsto z \cdot \left(\mathsf{log1p}\left(-y\right) + \mathsf{fma}\left(\log y, \frac{\color{blue}{-1 + x}}{z}, -1 \cdot \frac{\log \left(1 - y\right)}{z}\right)\right) - t \]
      9. mul-1-neg99.7%

        \[\leadsto z \cdot \left(\mathsf{log1p}\left(-y\right) + \mathsf{fma}\left(\log y, \frac{-1 + x}{z}, \color{blue}{-\frac{\log \left(1 - y\right)}{z}}\right)\right) - t \]
      10. distribute-neg-frac299.7%

        \[\leadsto z \cdot \left(\mathsf{log1p}\left(-y\right) + \mathsf{fma}\left(\log y, \frac{-1 + x}{z}, \color{blue}{\frac{\log \left(1 - y\right)}{-z}}\right)\right) - t \]
      11. sub-neg99.7%

        \[\leadsto z \cdot \left(\mathsf{log1p}\left(-y\right) + \mathsf{fma}\left(\log y, \frac{-1 + x}{z}, \frac{\log \color{blue}{\left(1 + \left(-y\right)\right)}}{-z}\right)\right) - t \]
      12. log1p-define99.7%

        \[\leadsto z \cdot \left(\mathsf{log1p}\left(-y\right) + \mathsf{fma}\left(\log y, \frac{-1 + x}{z}, \frac{\color{blue}{\mathsf{log1p}\left(-y\right)}}{-z}\right)\right) - t \]
    7. Simplified99.7%

      \[\leadsto \color{blue}{z \cdot \left(\mathsf{log1p}\left(-y\right) + \mathsf{fma}\left(\log y, \frac{-1 + x}{z}, \frac{\mathsf{log1p}\left(-y\right)}{-z}\right)\right)} - t \]
    8. Taylor expanded in z around inf 41.7%

      \[\leadsto \color{blue}{z \cdot \log \left(1 - y\right)} - t \]
    9. Step-by-step derivation
      1. sub-neg41.7%

        \[\leadsto z \cdot \log \color{blue}{\left(1 + \left(-y\right)\right)} - t \]
      2. log1p-undefine72.8%

        \[\leadsto z \cdot \color{blue}{\mathsf{log1p}\left(-y\right)} - t \]
    10. Simplified72.8%

      \[\leadsto \color{blue}{z \cdot \mathsf{log1p}\left(-y\right)} - t \]

    if -4.99999999999999973e182 < z < 2.79999999999999982e254

    1. Initial program 94.4%

      \[\left(\left(x - 1\right) \cdot \log y + \left(z - 1\right) \cdot \log \left(1 - y\right)\right) - t \]
    2. Step-by-step derivation
      1. +-commutative94.4%

        \[\leadsto \color{blue}{\left(\left(z - 1\right) \cdot \log \left(1 - y\right) + \left(x - 1\right) \cdot \log y\right)} - t \]
      2. fma-define94.4%

        \[\leadsto \color{blue}{\mathsf{fma}\left(z - 1, \log \left(1 - y\right), \left(x - 1\right) \cdot \log y\right)} - t \]
      3. sub-neg94.4%

        \[\leadsto \mathsf{fma}\left(\color{blue}{z + \left(-1\right)}, \log \left(1 - y\right), \left(x - 1\right) \cdot \log y\right) - t \]
      4. metadata-eval94.4%

        \[\leadsto \mathsf{fma}\left(z + \color{blue}{-1}, \log \left(1 - y\right), \left(x - 1\right) \cdot \log y\right) - t \]
      5. sub-neg94.4%

        \[\leadsto \mathsf{fma}\left(z + -1, \log \color{blue}{\left(1 + \left(-y\right)\right)}, \left(x - 1\right) \cdot \log y\right) - t \]
      6. log1p-define99.8%

        \[\leadsto \mathsf{fma}\left(z + -1, \color{blue}{\mathsf{log1p}\left(-y\right)}, \left(x - 1\right) \cdot \log y\right) - t \]
      7. sub-neg99.8%

        \[\leadsto \mathsf{fma}\left(z + -1, \mathsf{log1p}\left(-y\right), \color{blue}{\left(x + \left(-1\right)\right)} \cdot \log y\right) - t \]
      8. metadata-eval99.8%

        \[\leadsto \mathsf{fma}\left(z + -1, \mathsf{log1p}\left(-y\right), \left(x + \color{blue}{-1}\right) \cdot \log y\right) - t \]
    3. Simplified99.8%

      \[\leadsto \color{blue}{\mathsf{fma}\left(z + -1, \mathsf{log1p}\left(-y\right), \left(x + -1\right) \cdot \log y\right) - t} \]
    4. Add Preprocessing
    5. Taylor expanded in y around 0 99.7%

      \[\leadsto \color{blue}{\left(-1 \cdot \left(y \cdot \left(z - 1\right)\right) + \log y \cdot \left(x - 1\right)\right)} - t \]
    6. Step-by-step derivation
      1. +-commutative99.7%

        \[\leadsto \color{blue}{\left(\log y \cdot \left(x - 1\right) + -1 \cdot \left(y \cdot \left(z - 1\right)\right)\right)} - t \]
      2. sub-neg99.7%

        \[\leadsto \left(\log y \cdot \color{blue}{\left(x + \left(-1\right)\right)} + -1 \cdot \left(y \cdot \left(z - 1\right)\right)\right) - t \]
      3. metadata-eval99.7%

        \[\leadsto \left(\log y \cdot \left(x + \color{blue}{-1}\right) + -1 \cdot \left(y \cdot \left(z - 1\right)\right)\right) - t \]
      4. mul-1-neg99.7%

        \[\leadsto \left(\log y \cdot \left(x + -1\right) + \color{blue}{\left(-y \cdot \left(z - 1\right)\right)}\right) - t \]
      5. unsub-neg99.7%

        \[\leadsto \color{blue}{\left(\log y \cdot \left(x + -1\right) - y \cdot \left(z - 1\right)\right)} - t \]
      6. +-commutative99.7%

        \[\leadsto \left(\log y \cdot \color{blue}{\left(-1 + x\right)} - y \cdot \left(z - 1\right)\right) - t \]
      7. sub-neg99.7%

        \[\leadsto \left(\log y \cdot \left(-1 + x\right) - y \cdot \color{blue}{\left(z + \left(-1\right)\right)}\right) - t \]
      8. metadata-eval99.7%

        \[\leadsto \left(\log y \cdot \left(-1 + x\right) - y \cdot \left(z + \color{blue}{-1}\right)\right) - t \]
      9. +-commutative99.7%

        \[\leadsto \left(\log y \cdot \left(-1 + x\right) - y \cdot \color{blue}{\left(-1 + z\right)}\right) - t \]
    7. Simplified99.7%

      \[\leadsto \color{blue}{\left(\log y \cdot \left(-1 + x\right) - y \cdot \left(-1 + z\right)\right)} - t \]
    8. Taylor expanded in z around 0 94.3%

      \[\leadsto \color{blue}{\left(\log y \cdot \left(x - 1\right) - -1 \cdot y\right)} - t \]
    9. Step-by-step derivation
      1. sub-neg94.3%

        \[\leadsto \left(\log y \cdot \color{blue}{\left(x + \left(-1\right)\right)} - -1 \cdot y\right) - t \]
      2. metadata-eval94.3%

        \[\leadsto \left(\log y \cdot \left(x + \color{blue}{-1}\right) - -1 \cdot y\right) - t \]
      3. +-commutative94.3%

        \[\leadsto \left(\log y \cdot \color{blue}{\left(-1 + x\right)} - -1 \cdot y\right) - t \]
      4. neg-mul-194.3%

        \[\leadsto \left(\log y \cdot \left(-1 + x\right) - \color{blue}{\left(-y\right)}\right) - t \]
    10. Simplified94.3%

      \[\leadsto \color{blue}{\left(\log y \cdot \left(-1 + x\right) - \left(-y\right)\right)} - t \]
    11. Taylor expanded in y around 0 94.0%

      \[\leadsto \color{blue}{\log y \cdot \left(x - 1\right)} - t \]

    if 2.79999999999999982e254 < z

    1. Initial program 34.7%

      \[\left(\left(x - 1\right) \cdot \log y + \left(z - 1\right) \cdot \log \left(1 - y\right)\right) - t \]
    2. Step-by-step derivation
      1. +-commutative34.7%

        \[\leadsto \color{blue}{\left(\left(z - 1\right) \cdot \log \left(1 - y\right) + \left(x - 1\right) \cdot \log y\right)} - t \]
      2. fma-define34.7%

        \[\leadsto \color{blue}{\mathsf{fma}\left(z - 1, \log \left(1 - y\right), \left(x - 1\right) \cdot \log y\right)} - t \]
      3. sub-neg34.7%

        \[\leadsto \mathsf{fma}\left(\color{blue}{z + \left(-1\right)}, \log \left(1 - y\right), \left(x - 1\right) \cdot \log y\right) - t \]
      4. metadata-eval34.7%

        \[\leadsto \mathsf{fma}\left(z + \color{blue}{-1}, \log \left(1 - y\right), \left(x - 1\right) \cdot \log y\right) - t \]
      5. sub-neg34.7%

        \[\leadsto \mathsf{fma}\left(z + -1, \log \color{blue}{\left(1 + \left(-y\right)\right)}, \left(x - 1\right) \cdot \log y\right) - t \]
      6. log1p-define100.0%

        \[\leadsto \mathsf{fma}\left(z + -1, \color{blue}{\mathsf{log1p}\left(-y\right)}, \left(x - 1\right) \cdot \log y\right) - t \]
      7. sub-neg100.0%

        \[\leadsto \mathsf{fma}\left(z + -1, \mathsf{log1p}\left(-y\right), \color{blue}{\left(x + \left(-1\right)\right)} \cdot \log y\right) - t \]
      8. metadata-eval100.0%

        \[\leadsto \mathsf{fma}\left(z + -1, \mathsf{log1p}\left(-y\right), \left(x + \color{blue}{-1}\right) \cdot \log y\right) - t \]
    3. Simplified100.0%

      \[\leadsto \color{blue}{\mathsf{fma}\left(z + -1, \mathsf{log1p}\left(-y\right), \left(x + -1\right) \cdot \log y\right) - t} \]
    4. Add Preprocessing
    5. Taylor expanded in y around 0 100.0%

      \[\leadsto \color{blue}{\left(-1 \cdot \left(y \cdot \left(z - 1\right)\right) + \log y \cdot \left(x - 1\right)\right)} - t \]
    6. Step-by-step derivation
      1. +-commutative100.0%

        \[\leadsto \color{blue}{\left(\log y \cdot \left(x - 1\right) + -1 \cdot \left(y \cdot \left(z - 1\right)\right)\right)} - t \]
      2. sub-neg100.0%

        \[\leadsto \left(\log y \cdot \color{blue}{\left(x + \left(-1\right)\right)} + -1 \cdot \left(y \cdot \left(z - 1\right)\right)\right) - t \]
      3. metadata-eval100.0%

        \[\leadsto \left(\log y \cdot \left(x + \color{blue}{-1}\right) + -1 \cdot \left(y \cdot \left(z - 1\right)\right)\right) - t \]
      4. mul-1-neg100.0%

        \[\leadsto \left(\log y \cdot \left(x + -1\right) + \color{blue}{\left(-y \cdot \left(z - 1\right)\right)}\right) - t \]
      5. unsub-neg100.0%

        \[\leadsto \color{blue}{\left(\log y \cdot \left(x + -1\right) - y \cdot \left(z - 1\right)\right)} - t \]
      6. +-commutative100.0%

        \[\leadsto \left(\log y \cdot \color{blue}{\left(-1 + x\right)} - y \cdot \left(z - 1\right)\right) - t \]
      7. sub-neg100.0%

        \[\leadsto \left(\log y \cdot \left(-1 + x\right) - y \cdot \color{blue}{\left(z + \left(-1\right)\right)}\right) - t \]
      8. metadata-eval100.0%

        \[\leadsto \left(\log y \cdot \left(-1 + x\right) - y \cdot \left(z + \color{blue}{-1}\right)\right) - t \]
      9. +-commutative100.0%

        \[\leadsto \left(\log y \cdot \left(-1 + x\right) - y \cdot \color{blue}{\left(-1 + z\right)}\right) - t \]
    7. Simplified100.0%

      \[\leadsto \color{blue}{\left(\log y \cdot \left(-1 + x\right) - y \cdot \left(-1 + z\right)\right)} - t \]
    8. Taylor expanded in z around inf 83.9%

      \[\leadsto \color{blue}{-1 \cdot \left(y \cdot z\right)} - t \]
    9. Step-by-step derivation
      1. mul-1-neg83.9%

        \[\leadsto \color{blue}{\left(-y \cdot z\right)} - t \]
      2. distribute-rgt-neg-in83.9%

        \[\leadsto \color{blue}{y \cdot \left(-z\right)} - t \]
    10. Simplified83.9%

      \[\leadsto \color{blue}{y \cdot \left(-z\right)} - t \]
    11. Taylor expanded in z around inf 83.9%

      \[\leadsto \color{blue}{z \cdot \left(-1 \cdot y + -1 \cdot \frac{t}{z}\right)} \]
    12. Step-by-step derivation
      1. neg-mul-183.9%

        \[\leadsto z \cdot \left(\color{blue}{\left(-y\right)} + -1 \cdot \frac{t}{z}\right) \]
      2. +-commutative83.9%

        \[\leadsto z \cdot \color{blue}{\left(-1 \cdot \frac{t}{z} + \left(-y\right)\right)} \]
      3. unsub-neg83.9%

        \[\leadsto z \cdot \color{blue}{\left(-1 \cdot \frac{t}{z} - y\right)} \]
      4. mul-1-neg83.9%

        \[\leadsto z \cdot \left(\color{blue}{\left(-\frac{t}{z}\right)} - y\right) \]
      5. distribute-neg-frac283.9%

        \[\leadsto z \cdot \left(\color{blue}{\frac{t}{-z}} - y\right) \]
    13. Simplified83.9%

      \[\leadsto \color{blue}{z \cdot \left(\frac{t}{-z} - y\right)} \]
  3. Recombined 3 regimes into one program.
  4. Final simplification90.6%

    \[\leadsto \begin{array}{l} \mathbf{if}\;z \leq -5 \cdot 10^{+182}:\\ \;\;\;\;z \cdot \mathsf{log1p}\left(-y\right) - t\\ \mathbf{elif}\;z \leq 2.8 \cdot 10^{+254}:\\ \;\;\;\;\log y \cdot \left(-1 + x\right) - t\\ \mathbf{else}:\\ \;\;\;\;z \cdot \left(\frac{-t}{z} - y\right)\\ \end{array} \]
  5. Add Preprocessing
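
In the huge-z regime (z > 2.8e254) the else branch comes from steps 11-13 of the third regime; in exact arithmetic it is just a regrouping, since

\[z \cdot \left(\frac{-t}{z} - y\right) = -t - y \cdot z \]

so it computes the same quantity as y \cdot (-z) - t, only with t divided by z before the final multiplication; the two forms differ solely in how the floating-point operations are grouped.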

Alternative 11: 87.3% accurate, 1.9× speedup?

\[\begin{array}{l} \\ \begin{array}{l} \mathbf{if}\;x \leq -6.6 \cdot 10^{-21} \lor \neg \left(x \leq 38\right):\\ \;\;\;\;x \cdot \log y - t\\ \mathbf{else}:\\ \;\;\;\;\left(y - \log y\right) - t\\ \end{array} \end{array} \]
(FPCore (x y z t)
 :precision binary64
 (if (or (<= x -6.6e-21) (not (<= x 38.0)))
   (- (* x (log y)) t)
   (- (- y (log y)) t)))
double code(double x, double y, double z, double t) {
	double tmp;
	if ((x <= -6.6e-21) || !(x <= 38.0)) {
		tmp = (x * log(y)) - t;
	} else {
		tmp = (y - log(y)) - t;
	}
	return tmp;
}
real(8) function code(x, y, z, t)
    real(8), intent (in) :: x
    real(8), intent (in) :: y
    real(8), intent (in) :: z
    real(8), intent (in) :: t
    real(8) :: tmp
    if ((x <= (-6.6d-21)) .or. (.not. (x <= 38.0d0))) then
        tmp = (x * log(y)) - t
    else
        tmp = (y - log(y)) - t
    end if
    code = tmp
end function
public static double code(double x, double y, double z, double t) {
	double tmp;
	if ((x <= -6.6e-21) || !(x <= 38.0)) {
		tmp = (x * Math.log(y)) - t;
	} else {
		tmp = (y - Math.log(y)) - t;
	}
	return tmp;
}
def code(x, y, z, t):
	tmp = 0
	if (x <= -6.6e-21) or not (x <= 38.0):
		tmp = (x * math.log(y)) - t
	else:
		tmp = (y - math.log(y)) - t
	return tmp
function code(x, y, z, t)
	tmp = 0.0
	if ((x <= -6.6e-21) || !(x <= 38.0))
		tmp = Float64(Float64(x * log(y)) - t);
	else
		tmp = Float64(Float64(y - log(y)) - t);
	end
	return tmp
end
function tmp_2 = code(x, y, z, t)
	tmp = 0.0;
	if ((x <= -6.6e-21) || ~((x <= 38.0)))
		tmp = (x * log(y)) - t;
	else
		tmp = (y - log(y)) - t;
	end
	tmp_2 = tmp;
end
code[x_, y_, z_, t_] := If[Or[LessEqual[x, -6.6*^-21], N[Not[LessEqual[x, 38.0]], $MachinePrecision]], N[(N[(x * N[Log[y], $MachinePrecision]), $MachinePrecision] - t), $MachinePrecision], N[(N[(y - N[Log[y], $MachinePrecision]), $MachinePrecision] - t), $MachinePrecision]]
\begin{array}{l}

\\
\begin{array}{l}
\mathbf{if}\;x \leq -6.6 \cdot 10^{-21} \lor \neg \left(x \leq 38\right):\\
\;\;\;\;x \cdot \log y - t\\

\mathbf{else}:\\
\;\;\;\;\left(y - \log y\right) - t\\


\end{array}
\end{array}
Derivation
  1. Split input into 2 regimes
  2. if x < -6.60000000000000018e-21 or 38 < x

    1. Initial program 93.6%

      \[\left(\left(x - 1\right) \cdot \log y + \left(z - 1\right) \cdot \log \left(1 - y\right)\right) - t \]
    2. Step-by-step derivation
      1. +-commutative93.6%

        \[\leadsto \color{blue}{\left(\left(z - 1\right) \cdot \log \left(1 - y\right) + \left(x - 1\right) \cdot \log y\right)} - t \]
      2. fma-define93.6%

        \[\leadsto \color{blue}{\mathsf{fma}\left(z - 1, \log \left(1 - y\right), \left(x - 1\right) \cdot \log y\right)} - t \]
      3. sub-neg93.6%

        \[\leadsto \mathsf{fma}\left(\color{blue}{z + \left(-1\right)}, \log \left(1 - y\right), \left(x - 1\right) \cdot \log y\right) - t \]
      4. metadata-eval93.6%

        \[\leadsto \mathsf{fma}\left(z + \color{blue}{-1}, \log \left(1 - y\right), \left(x - 1\right) \cdot \log y\right) - t \]
      5. sub-neg93.6%

        \[\leadsto \mathsf{fma}\left(z + -1, \log \color{blue}{\left(1 + \left(-y\right)\right)}, \left(x - 1\right) \cdot \log y\right) - t \]
      6. log1p-define99.7%

        \[\leadsto \mathsf{fma}\left(z + -1, \color{blue}{\mathsf{log1p}\left(-y\right)}, \left(x - 1\right) \cdot \log y\right) - t \]
      7. sub-neg99.7%

        \[\leadsto \mathsf{fma}\left(z + -1, \mathsf{log1p}\left(-y\right), \color{blue}{\left(x + \left(-1\right)\right)} \cdot \log y\right) - t \]
      8. metadata-eval99.7%

        \[\leadsto \mathsf{fma}\left(z + -1, \mathsf{log1p}\left(-y\right), \left(x + \color{blue}{-1}\right) \cdot \log y\right) - t \]
    3. Simplified99.7%

      \[\leadsto \color{blue}{\mathsf{fma}\left(z + -1, \mathsf{log1p}\left(-y\right), \left(x + -1\right) \cdot \log y\right) - t} \]
    4. Add Preprocessing
    5. Taylor expanded in y around 0 99.4%

      \[\leadsto \color{blue}{\left(-1 \cdot \left(y \cdot \left(z - 1\right)\right) + \log y \cdot \left(x - 1\right)\right)} - t \]
    6. Step-by-step derivation
      1. +-commutative99.4%

        \[\leadsto \color{blue}{\left(\log y \cdot \left(x - 1\right) + -1 \cdot \left(y \cdot \left(z - 1\right)\right)\right)} - t \]
      2. sub-neg99.4%

        \[\leadsto \left(\log y \cdot \color{blue}{\left(x + \left(-1\right)\right)} + -1 \cdot \left(y \cdot \left(z - 1\right)\right)\right) - t \]
      3. metadata-eval99.4%

        \[\leadsto \left(\log y \cdot \left(x + \color{blue}{-1}\right) + -1 \cdot \left(y \cdot \left(z - 1\right)\right)\right) - t \]
      4. mul-1-neg99.4%

        \[\leadsto \left(\log y \cdot \left(x + -1\right) + \color{blue}{\left(-y \cdot \left(z - 1\right)\right)}\right) - t \]
      5. unsub-neg99.4%

        \[\leadsto \color{blue}{\left(\log y \cdot \left(x + -1\right) - y \cdot \left(z - 1\right)\right)} - t \]
      6. +-commutative99.4%

        \[\leadsto \left(\log y \cdot \color{blue}{\left(-1 + x\right)} - y \cdot \left(z - 1\right)\right) - t \]
      7. sub-neg99.4%

        \[\leadsto \left(\log y \cdot \left(-1 + x\right) - y \cdot \color{blue}{\left(z + \left(-1\right)\right)}\right) - t \]
      8. metadata-eval99.4%

        \[\leadsto \left(\log y \cdot \left(-1 + x\right) - y \cdot \left(z + \color{blue}{-1}\right)\right) - t \]
      9. +-commutative99.4%

        \[\leadsto \left(\log y \cdot \left(-1 + x\right) - y \cdot \color{blue}{\left(-1 + z\right)}\right) - t \]
    7. Simplified99.4%

      \[\leadsto \color{blue}{\left(\log y \cdot \left(-1 + x\right) - y \cdot \left(-1 + z\right)\right)} - t \]
    8. Taylor expanded in z around 0 93.0%

      \[\leadsto \color{blue}{\left(\log y \cdot \left(x - 1\right) - -1 \cdot y\right)} - t \]
    9. Step-by-step derivation
      1. sub-neg93.0%

        \[\leadsto \left(\log y \cdot \color{blue}{\left(x + \left(-1\right)\right)} - -1 \cdot y\right) - t \]
      2. metadata-eval93.0%

        \[\leadsto \left(\log y \cdot \left(x + \color{blue}{-1}\right) - -1 \cdot y\right) - t \]
      3. +-commutative93.0%

        \[\leadsto \left(\log y \cdot \color{blue}{\left(-1 + x\right)} - -1 \cdot y\right) - t \]
      4. neg-mul-193.0%

        \[\leadsto \left(\log y \cdot \left(-1 + x\right) - \color{blue}{\left(-y\right)}\right) - t \]
    10. Simplified93.0%

      \[\leadsto \color{blue}{\left(\log y \cdot \left(-1 + x\right) - \left(-y\right)\right)} - t \]
    11. Taylor expanded in x around inf 92.3%

      \[\leadsto \color{blue}{x \cdot \log y} - t \]
    12. Step-by-step derivation
      1. *-commutative92.3%

        \[\leadsto \color{blue}{\log y \cdot x} - t \]
    13. Simplified92.3%

      \[\leadsto \color{blue}{\log y \cdot x} - t \]

    if -6.60000000000000018e-21 < x < 38

    1. Initial program 81.6%

      \[\left(\left(x - 1\right) \cdot \log y + \left(z - 1\right) \cdot \log \left(1 - y\right)\right) - t \]
    2. Step-by-step derivation
      1. +-commutative81.6%

        \[\leadsto \color{blue}{\left(\left(z - 1\right) \cdot \log \left(1 - y\right) + \left(x - 1\right) \cdot \log y\right)} - t \]
      2. fma-define81.6%

        \[\leadsto \color{blue}{\mathsf{fma}\left(z - 1, \log \left(1 - y\right), \left(x - 1\right) \cdot \log y\right)} - t \]
      3. sub-neg81.6%

        \[\leadsto \mathsf{fma}\left(\color{blue}{z + \left(-1\right)}, \log \left(1 - y\right), \left(x - 1\right) \cdot \log y\right) - t \]
      4. metadata-eval81.6%

        \[\leadsto \mathsf{fma}\left(z + \color{blue}{-1}, \log \left(1 - y\right), \left(x - 1\right) \cdot \log y\right) - t \]
      5. sub-neg81.6%

        \[\leadsto \mathsf{fma}\left(z + -1, \log \color{blue}{\left(1 + \left(-y\right)\right)}, \left(x - 1\right) \cdot \log y\right) - t \]
      6. log1p-define100.0%

        \[\leadsto \mathsf{fma}\left(z + -1, \color{blue}{\mathsf{log1p}\left(-y\right)}, \left(x - 1\right) \cdot \log y\right) - t \]
      7. sub-neg100.0%

        \[\leadsto \mathsf{fma}\left(z + -1, \mathsf{log1p}\left(-y\right), \color{blue}{\left(x + \left(-1\right)\right)} \cdot \log y\right) - t \]
      8. metadata-eval100.0%

        \[\leadsto \mathsf{fma}\left(z + -1, \mathsf{log1p}\left(-y\right), \left(x + \color{blue}{-1}\right) \cdot \log y\right) - t \]
    3. Simplified100.0%

      \[\leadsto \color{blue}{\mathsf{fma}\left(z + -1, \mathsf{log1p}\left(-y\right), \left(x + -1\right) \cdot \log y\right) - t} \]
    4. Add Preprocessing
    5. Taylor expanded in y around 0 98.4%

      \[\leadsto \color{blue}{\left(-1 \cdot \left(y \cdot \left(z - 1\right)\right) + \log y \cdot \left(x - 1\right)\right)} - t \]
    6. Step-by-step derivation
      1. +-commutative98.4%

        \[\leadsto \color{blue}{\left(\log y \cdot \left(x - 1\right) + -1 \cdot \left(y \cdot \left(z - 1\right)\right)\right)} - t \]
      2. sub-neg98.4%

        \[\leadsto \left(\log y \cdot \color{blue}{\left(x + \left(-1\right)\right)} + -1 \cdot \left(y \cdot \left(z - 1\right)\right)\right) - t \]
      3. metadata-eval98.4%

        \[\leadsto \left(\log y \cdot \left(x + \color{blue}{-1}\right) + -1 \cdot \left(y \cdot \left(z - 1\right)\right)\right) - t \]
      4. mul-1-neg98.4%

        \[\leadsto \left(\log y \cdot \left(x + -1\right) + \color{blue}{\left(-y \cdot \left(z - 1\right)\right)}\right) - t \]
      5. unsub-neg98.4%

        \[\leadsto \color{blue}{\left(\log y \cdot \left(x + -1\right) - y \cdot \left(z - 1\right)\right)} - t \]
      6. +-commutative98.4%

        \[\leadsto \left(\log y \cdot \color{blue}{\left(-1 + x\right)} - y \cdot \left(z - 1\right)\right) - t \]
      7. sub-neg98.4%

        \[\leadsto \left(\log y \cdot \left(-1 + x\right) - y \cdot \color{blue}{\left(z + \left(-1\right)\right)}\right) - t \]
      8. metadata-eval98.4%

        \[\leadsto \left(\log y \cdot \left(-1 + x\right) - y \cdot \left(z + \color{blue}{-1}\right)\right) - t \]
      9. +-commutative98.4%

        \[\leadsto \left(\log y \cdot \left(-1 + x\right) - y \cdot \color{blue}{\left(-1 + z\right)}\right) - t \]
    7. Simplified98.4%

      \[\leadsto \color{blue}{\left(\log y \cdot \left(-1 + x\right) - y \cdot \left(-1 + z\right)\right)} - t \]
    8. Taylor expanded in z around 0 79.4%

      \[\leadsto \color{blue}{\left(\log y \cdot \left(x - 1\right) - -1 \cdot y\right)} - t \]
    9. Step-by-step derivation
      1. sub-neg79.4%

        \[\leadsto \left(\log y \cdot \color{blue}{\left(x + \left(-1\right)\right)} - -1 \cdot y\right) - t \]
      2. metadata-eval79.4%

        \[\leadsto \left(\log y \cdot \left(x + \color{blue}{-1}\right) - -1 \cdot y\right) - t \]
      3. +-commutative79.4%

        \[\leadsto \left(\log y \cdot \color{blue}{\left(-1 + x\right)} - -1 \cdot y\right) - t \]
      4. neg-mul-179.4%

        \[\leadsto \left(\log y \cdot \left(-1 + x\right) - \color{blue}{\left(-y\right)}\right) - t \]
    10. Simplified79.4%

      \[\leadsto \color{blue}{\left(\log y \cdot \left(-1 + x\right) - \left(-y\right)\right)} - t \]
    11. Taylor expanded in x around 0 79.4%

      \[\leadsto \color{blue}{\left(y + -1 \cdot \log y\right)} - t \]
    12. Step-by-step derivation
      1. mul-1-neg79.4%

        \[\leadsto \left(y + \color{blue}{\left(-\log y\right)}\right) - t \]
      2. unsub-neg79.4%

        \[\leadsto \color{blue}{\left(y - \log y\right)} - t \]
    13. Simplified79.4%

      \[\leadsto \color{blue}{\left(y - \log y\right)} - t \]
  3. Recombined 2 regimes into one program.
  4. Final simplification86.5%

    \[\leadsto \begin{array}{l} \mathbf{if}\;x \leq -6.6 \cdot 10^{-21} \lor \neg \left(x \leq 38\right):\\ \;\;\;\;x \cdot \log y - t\\ \mathbf{else}:\\ \;\;\;\;\left(y - \log y\right) - t\\ \end{array} \]
  5. Add Preprocessing
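
The else branch of Alternative 11 comes from the Taylor expansion in x around 0 in step 11: truncating the expression from step 10 to its constant term in x (that is, evaluating it at x = 0) gives

\[\log y \cdot \left(-1 + 0\right) - \left(-y\right) - t = \left(y - \log y\right) - t \]

which is why x no longer appears in that branch.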

Alternative 12: 77.2% accurate, 1.9× speedup?

\[\begin{array}{l} \\ \begin{array}{l} \mathbf{if}\;x \leq -255000 \lor \neg \left(x \leq 1860\right):\\ \;\;\;\;x \cdot \log y - t\\ \mathbf{else}:\\ \;\;\;\;\left(-t\right) - z \cdot y\\ \end{array} \end{array} \]
(FPCore (x y z t)
 :precision binary64
 (if (or (<= x -255000.0) (not (<= x 1860.0)))
   (- (* x (log y)) t)
   (- (- t) (* z y))))
double code(double x, double y, double z, double t) {
	double tmp;
	if ((x <= -255000.0) || !(x <= 1860.0)) {
		tmp = (x * log(y)) - t;
	} else {
		tmp = -t - (z * y);
	}
	return tmp;
}
real(8) function code(x, y, z, t)
    real(8), intent (in) :: x
    real(8), intent (in) :: y
    real(8), intent (in) :: z
    real(8), intent (in) :: t
    real(8) :: tmp
    if ((x <= (-255000.0d0)) .or. (.not. (x <= 1860.0d0))) then
        tmp = (x * log(y)) - t
    else
        tmp = -t - (z * y)
    end if
    code = tmp
end function
public static double code(double x, double y, double z, double t) {
	double tmp;
	if ((x <= -255000.0) || !(x <= 1860.0)) {
		tmp = (x * Math.log(y)) - t;
	} else {
		tmp = -t - (z * y);
	}
	return tmp;
}
def code(x, y, z, t):
	tmp = 0
	if (x <= -255000.0) or not (x <= 1860.0):
		tmp = (x * math.log(y)) - t
	else:
		tmp = -t - (z * y)
	return tmp
function code(x, y, z, t)
	tmp = 0.0
	if ((x <= -255000.0) || !(x <= 1860.0))
		tmp = Float64(Float64(x * log(y)) - t);
	else
		tmp = Float64(Float64(-t) - Float64(z * y));
	end
	return tmp
end
function tmp_2 = code(x, y, z, t)
	tmp = 0.0;
	if ((x <= -255000.0) || ~((x <= 1860.0)))
		tmp = (x * log(y)) - t;
	else
		tmp = -t - (z * y);
	end
	tmp_2 = tmp;
end
code[x_, y_, z_, t_] := If[Or[LessEqual[x, -255000.0], N[Not[LessEqual[x, 1860.0]], $MachinePrecision]], N[(N[(x * N[Log[y], $MachinePrecision]), $MachinePrecision] - t), $MachinePrecision], N[((-t) - N[(z * y), $MachinePrecision]), $MachinePrecision]]
\begin{array}{l}

\\
\begin{array}{l}
\mathbf{if}\;x \leq -255000 \lor \neg \left(x \leq 1860\right):\\
\;\;\;\;x \cdot \log y - t\\

\mathbf{else}:\\
\;\;\;\;\left(-t\right) - z \cdot y\\


\end{array}
\end{array}
Derivation
  1. Split input into 2 regimes
  2. if x < -255000 or 1860 < x

    1. Initial program 94.9%

      \[\left(\left(x - 1\right) \cdot \log y + \left(z - 1\right) \cdot \log \left(1 - y\right)\right) - t \]
    2. Step-by-step derivation
      1. +-commutative94.9%

        \[\leadsto \color{blue}{\left(\left(z - 1\right) \cdot \log \left(1 - y\right) + \left(x - 1\right) \cdot \log y\right)} - t \]
      2. fma-define94.9%

        \[\leadsto \color{blue}{\mathsf{fma}\left(z - 1, \log \left(1 - y\right), \left(x - 1\right) \cdot \log y\right)} - t \]
      3. sub-neg94.9%

        \[\leadsto \mathsf{fma}\left(\color{blue}{z + \left(-1\right)}, \log \left(1 - y\right), \left(x - 1\right) \cdot \log y\right) - t \]
      4. metadata-eval94.9%

        \[\leadsto \mathsf{fma}\left(z + \color{blue}{-1}, \log \left(1 - y\right), \left(x - 1\right) \cdot \log y\right) - t \]
      5. sub-neg94.9%

        \[\leadsto \mathsf{fma}\left(z + -1, \log \color{blue}{\left(1 + \left(-y\right)\right)}, \left(x - 1\right) \cdot \log y\right) - t \]
      6. log1p-define99.7%

        \[\leadsto \mathsf{fma}\left(z + -1, \color{blue}{\mathsf{log1p}\left(-y\right)}, \left(x - 1\right) \cdot \log y\right) - t \]
      7. sub-neg99.7%

        \[\leadsto \mathsf{fma}\left(z + -1, \mathsf{log1p}\left(-y\right), \color{blue}{\left(x + \left(-1\right)\right)} \cdot \log y\right) - t \]
      8. metadata-eval99.7%

        \[\leadsto \mathsf{fma}\left(z + -1, \mathsf{log1p}\left(-y\right), \left(x + \color{blue}{-1}\right) \cdot \log y\right) - t \]
    3. Simplified99.7%

      \[\leadsto \color{blue}{\mathsf{fma}\left(z + -1, \mathsf{log1p}\left(-y\right), \left(x + -1\right) \cdot \log y\right) - t} \]
    4. Add Preprocessing
    5. Taylor expanded in y around 0 99.4%

      \[\leadsto \color{blue}{\left(-1 \cdot \left(y \cdot \left(z - 1\right)\right) + \log y \cdot \left(x - 1\right)\right)} - t \]
    6. Step-by-step derivation
      1. +-commutative99.4%

        \[\leadsto \color{blue}{\left(\log y \cdot \left(x - 1\right) + -1 \cdot \left(y \cdot \left(z - 1\right)\right)\right)} - t \]
      2. sub-neg99.4%

        \[\leadsto \left(\log y \cdot \color{blue}{\left(x + \left(-1\right)\right)} + -1 \cdot \left(y \cdot \left(z - 1\right)\right)\right) - t \]
      3. metadata-eval99.4%

        \[\leadsto \left(\log y \cdot \left(x + \color{blue}{-1}\right) + -1 \cdot \left(y \cdot \left(z - 1\right)\right)\right) - t \]
      4. mul-1-neg99.4%

        \[\leadsto \left(\log y \cdot \left(x + -1\right) + \color{blue}{\left(-y \cdot \left(z - 1\right)\right)}\right) - t \]
      5. unsub-neg99.4%

        \[\leadsto \color{blue}{\left(\log y \cdot \left(x + -1\right) - y \cdot \left(z - 1\right)\right)} - t \]
      6. +-commutative99.4%

        \[\leadsto \left(\log y \cdot \color{blue}{\left(-1 + x\right)} - y \cdot \left(z - 1\right)\right) - t \]
      7. sub-neg99.4%

        \[\leadsto \left(\log y \cdot \left(-1 + x\right) - y \cdot \color{blue}{\left(z + \left(-1\right)\right)}\right) - t \]
      8. metadata-eval99.4%

        \[\leadsto \left(\log y \cdot \left(-1 + x\right) - y \cdot \left(z + \color{blue}{-1}\right)\right) - t \]
      9. +-commutative99.4%

        \[\leadsto \left(\log y \cdot \left(-1 + x\right) - y \cdot \color{blue}{\left(-1 + z\right)}\right) - t \]
    7. Simplified99.4%

      \[\leadsto \color{blue}{\left(\log y \cdot \left(-1 + x\right) - y \cdot \left(-1 + z\right)\right)} - t \]
    8. Taylor expanded in z around 0 94.3%

      \[\leadsto \color{blue}{\left(\log y \cdot \left(x - 1\right) - -1 \cdot y\right)} - t \]
    9. Step-by-step derivation
      1. sub-neg94.3%

        \[\leadsto \left(\log y \cdot \color{blue}{\left(x + \left(-1\right)\right)} - -1 \cdot y\right) - t \]
      2. metadata-eval94.3%

        \[\leadsto \left(\log y \cdot \left(x + \color{blue}{-1}\right) - -1 \cdot y\right) - t \]
      3. +-commutative94.3%

        \[\leadsto \left(\log y \cdot \color{blue}{\left(-1 + x\right)} - -1 \cdot y\right) - t \]
      4. neg-mul-194.3%

        \[\leadsto \left(\log y \cdot \left(-1 + x\right) - \color{blue}{\left(-y\right)}\right) - t \]
    10. Simplified94.3%

      \[\leadsto \color{blue}{\left(\log y \cdot \left(-1 + x\right) - \left(-y\right)\right)} - t \]
    11. Taylor expanded in x around inf 93.7%

      \[\leadsto \color{blue}{x \cdot \log y} - t \]
    12. Step-by-step derivation
      1. *-commutative93.7%

        \[\leadsto \color{blue}{\log y \cdot x} - t \]
    13. Simplified93.7%

      \[\leadsto \color{blue}{\log y \cdot x} - t \]

    if -255000 < x < 1860

    1. Initial program 80.3%

      \[\left(\left(x - 1\right) \cdot \log y + \left(z - 1\right) \cdot \log \left(1 - y\right)\right) - t \]
    2. Step-by-step derivation
      1. +-commutative80.3%

        \[\leadsto \color{blue}{\left(\left(z - 1\right) \cdot \log \left(1 - y\right) + \left(x - 1\right) \cdot \log y\right)} - t \]
      2. fma-define80.3%

        \[\leadsto \color{blue}{\mathsf{fma}\left(z - 1, \log \left(1 - y\right), \left(x - 1\right) \cdot \log y\right)} - t \]
      3. sub-neg80.3%

        \[\leadsto \mathsf{fma}\left(\color{blue}{z + \left(-1\right)}, \log \left(1 - y\right), \left(x - 1\right) \cdot \log y\right) - t \]
      4. metadata-eval80.3%

        \[\leadsto \mathsf{fma}\left(z + \color{blue}{-1}, \log \left(1 - y\right), \left(x - 1\right) \cdot \log y\right) - t \]
      5. sub-neg80.3%

        \[\leadsto \mathsf{fma}\left(z + -1, \log \color{blue}{\left(1 + \left(-y\right)\right)}, \left(x - 1\right) \cdot \log y\right) - t \]
      6. log1p-define100.0%

        \[\leadsto \mathsf{fma}\left(z + -1, \color{blue}{\mathsf{log1p}\left(-y\right)}, \left(x - 1\right) \cdot \log y\right) - t \]
      7. sub-neg100.0%

        \[\leadsto \mathsf{fma}\left(z + -1, \mathsf{log1p}\left(-y\right), \color{blue}{\left(x + \left(-1\right)\right)} \cdot \log y\right) - t \]
      8. metadata-eval100.0%

        \[\leadsto \mathsf{fma}\left(z + -1, \mathsf{log1p}\left(-y\right), \left(x + \color{blue}{-1}\right) \cdot \log y\right) - t \]
    3. Simplified100.0%

      \[\leadsto \color{blue}{\mathsf{fma}\left(z + -1, \mathsf{log1p}\left(-y\right), \left(x + -1\right) \cdot \log y\right) - t} \]
    4. Add Preprocessing
    5. Taylor expanded in y around 0 98.4%

      \[\leadsto \color{blue}{\left(-1 \cdot \left(y \cdot \left(z - 1\right)\right) + \log y \cdot \left(x - 1\right)\right)} - t \]
    6. Step-by-step derivation
      1. +-commutative98.4%

        \[\leadsto \color{blue}{\left(\log y \cdot \left(x - 1\right) + -1 \cdot \left(y \cdot \left(z - 1\right)\right)\right)} - t \]
      2. sub-neg98.4%

        \[\leadsto \left(\log y \cdot \color{blue}{\left(x + \left(-1\right)\right)} + -1 \cdot \left(y \cdot \left(z - 1\right)\right)\right) - t \]
      3. metadata-eval98.4%

        \[\leadsto \left(\log y \cdot \left(x + \color{blue}{-1}\right) + -1 \cdot \left(y \cdot \left(z - 1\right)\right)\right) - t \]
      4. mul-1-neg98.4%

        \[\leadsto \left(\log y \cdot \left(x + -1\right) + \color{blue}{\left(-y \cdot \left(z - 1\right)\right)}\right) - t \]
      5. unsub-neg98.4%

        \[\leadsto \color{blue}{\left(\log y \cdot \left(x + -1\right) - y \cdot \left(z - 1\right)\right)} - t \]
      6. +-commutative98.4%

        \[\leadsto \left(\log y \cdot \color{blue}{\left(-1 + x\right)} - y \cdot \left(z - 1\right)\right) - t \]
      7. sub-neg98.4%

        \[\leadsto \left(\log y \cdot \left(-1 + x\right) - y \cdot \color{blue}{\left(z + \left(-1\right)\right)}\right) - t \]
      8. metadata-eval98.4%

        \[\leadsto \left(\log y \cdot \left(-1 + x\right) - y \cdot \left(z + \color{blue}{-1}\right)\right) - t \]
      9. +-commutative98.4%

        \[\leadsto \left(\log y \cdot \left(-1 + x\right) - y \cdot \color{blue}{\left(-1 + z\right)}\right) - t \]
    7. Simplified98.4%

      \[\leadsto \color{blue}{\left(\log y \cdot \left(-1 + x\right) - y \cdot \left(-1 + z\right)\right)} - t \]
    8. Taylor expanded in z around inf 64.7%

      \[\leadsto \color{blue}{-1 \cdot \left(y \cdot z\right)} - t \]
    9. Step-by-step derivation
      1. mul-1-neg64.7%

        \[\leadsto \color{blue}{\left(-y \cdot z\right)} - t \]
      2. distribute-rgt-neg-in64.7%

        \[\leadsto \color{blue}{y \cdot \left(-z\right)} - t \]
    10. Simplified64.7%

      \[\leadsto \color{blue}{y \cdot \left(-z\right)} - t \]
  3. Recombined 2 regimes into one program.
  4. Final simplification80.4%

    \[\leadsto \begin{array}{l} \mathbf{if}\;x \leq -255000 \lor \neg \left(x \leq 1860\right):\\ \;\;\;\;x \cdot \log y - t\\ \mathbf{else}:\\ \;\;\;\;\left(-t\right) - z \cdot y\\ \end{array} \]
  5. Add Preprocessing
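
The else branch of Alternative 12 keeps only the leading term of the expansion in z from step 8, discarding both logarithmic terms; the approximation being made is

\[\left(\log y \cdot \left(-1 + x\right) - y \cdot \left(-1 + z\right)\right) - t \approx -1 \cdot \left(y \cdot z\right) - t = \left(-t\right) - z \cdot y \]

The dropped terms are only negligible when y \cdot z dominates them, which is consistent with this alternative's lower overall accuracy (77.2%).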

Alternative 13: 99.1% accurate, 1.9× speedup?

\[\begin{array}{l} \\ \left(\log y \cdot \left(-1 + x\right) + y \cdot \left(1 - z\right)\right) - t \end{array} \]
(FPCore (x y z t)
 :precision binary64
 (- (+ (* (log y) (+ -1.0 x)) (* y (- 1.0 z))) t))
double code(double x, double y, double z, double t) {
	return ((log(y) * (-1.0 + x)) + (y * (1.0 - z))) - t;
}
real(8) function code(x, y, z, t)
    real(8), intent (in) :: x
    real(8), intent (in) :: y
    real(8), intent (in) :: z
    real(8), intent (in) :: t
    code = ((log(y) * ((-1.0d0) + x)) + (y * (1.0d0 - z))) - t
end function
public static double code(double x, double y, double z, double t) {
	return ((Math.log(y) * (-1.0 + x)) + (y * (1.0 - z))) - t;
}
def code(x, y, z, t):
	return ((math.log(y) * (-1.0 + x)) + (y * (1.0 - z))) - t
function code(x, y, z, t)
	return Float64(Float64(Float64(log(y) * Float64(-1.0 + x)) + Float64(y * Float64(1.0 - z))) - t)
end
function tmp = code(x, y, z, t)
	tmp = ((log(y) * (-1.0 + x)) + (y * (1.0 - z))) - t;
end
code[x_, y_, z_, t_] := N[(N[(N[(N[Log[y], $MachinePrecision] * N[(-1.0 + x), $MachinePrecision]), $MachinePrecision] + N[(y * N[(1.0 - z), $MachinePrecision]), $MachinePrecision]), $MachinePrecision] - t), $MachinePrecision]
\begin{array}{l}

\\
\left(\log y \cdot \left(-1 + x\right) + y \cdot \left(1 - z\right)\right) - t
\end{array}
Derivation
  1. Initial program 88.2%

    \[\left(\left(x - 1\right) \cdot \log y + \left(z - 1\right) \cdot \log \left(1 - y\right)\right) - t \]
  2. Step-by-step derivation
    1. +-commutative88.2%

      \[\leadsto \color{blue}{\left(\left(z - 1\right) \cdot \log \left(1 - y\right) + \left(x - 1\right) \cdot \log y\right)} - t \]
    2. fma-define88.2%

      \[\leadsto \color{blue}{\mathsf{fma}\left(z - 1, \log \left(1 - y\right), \left(x - 1\right) \cdot \log y\right)} - t \]
    3. sub-neg88.2%

      \[\leadsto \mathsf{fma}\left(\color{blue}{z + \left(-1\right)}, \log \left(1 - y\right), \left(x - 1\right) \cdot \log y\right) - t \]
    4. metadata-eval88.2%

      \[\leadsto \mathsf{fma}\left(z + \color{blue}{-1}, \log \left(1 - y\right), \left(x - 1\right) \cdot \log y\right) - t \]
    5. sub-neg88.2%

      \[\leadsto \mathsf{fma}\left(z + -1, \log \color{blue}{\left(1 + \left(-y\right)\right)}, \left(x - 1\right) \cdot \log y\right) - t \]
    6. log1p-define99.8%

      \[\leadsto \mathsf{fma}\left(z + -1, \color{blue}{\mathsf{log1p}\left(-y\right)}, \left(x - 1\right) \cdot \log y\right) - t \]
    7. sub-neg99.8%

      \[\leadsto \mathsf{fma}\left(z + -1, \mathsf{log1p}\left(-y\right), \color{blue}{\left(x + \left(-1\right)\right)} \cdot \log y\right) - t \]
    8. metadata-eval99.8%

      \[\leadsto \mathsf{fma}\left(z + -1, \mathsf{log1p}\left(-y\right), \left(x + \color{blue}{-1}\right) \cdot \log y\right) - t \]
  3. Simplified99.8%

    \[\leadsto \color{blue}{\mathsf{fma}\left(z + -1, \mathsf{log1p}\left(-y\right), \left(x + -1\right) \cdot \log y\right) - t} \]
  4. Add Preprocessing
  5. Taylor expanded in y around 0 99.0%

    \[\leadsto \color{blue}{\left(-1 \cdot \left(y \cdot \left(z - 1\right)\right) + \log y \cdot \left(x - 1\right)\right)} - t \]
  6. Step-by-step derivation
    1. +-commutative99.0%

      \[\leadsto \color{blue}{\left(\log y \cdot \left(x - 1\right) + -1 \cdot \left(y \cdot \left(z - 1\right)\right)\right)} - t \]
    2. sub-neg99.0%

      \[\leadsto \left(\log y \cdot \color{blue}{\left(x + \left(-1\right)\right)} + -1 \cdot \left(y \cdot \left(z - 1\right)\right)\right) - t \]
    3. metadata-eval99.0%

      \[\leadsto \left(\log y \cdot \left(x + \color{blue}{-1}\right) + -1 \cdot \left(y \cdot \left(z - 1\right)\right)\right) - t \]
    4. mul-1-neg99.0%

      \[\leadsto \left(\log y \cdot \left(x + -1\right) + \color{blue}{\left(-y \cdot \left(z - 1\right)\right)}\right) - t \]
    5. unsub-neg99.0%

      \[\leadsto \color{blue}{\left(\log y \cdot \left(x + -1\right) - y \cdot \left(z - 1\right)\right)} - t \]
    6. +-commutative99.0%

      \[\leadsto \left(\log y \cdot \color{blue}{\left(-1 + x\right)} - y \cdot \left(z - 1\right)\right) - t \]
    7. sub-neg99.0%

      \[\leadsto \left(\log y \cdot \left(-1 + x\right) - y \cdot \color{blue}{\left(z + \left(-1\right)\right)}\right) - t \]
    8. metadata-eval99.0%

      \[\leadsto \left(\log y \cdot \left(-1 + x\right) - y \cdot \left(z + \color{blue}{-1}\right)\right) - t \]
    9. +-commutative99.0%

      \[\leadsto \left(\log y \cdot \left(-1 + x\right) - y \cdot \color{blue}{\left(-1 + z\right)}\right) - t \]
  7. Simplified99.0%

    \[\leadsto \color{blue}{\left(\log y \cdot \left(-1 + x\right) - y \cdot \left(-1 + z\right)\right)} - t \]
  8. Final simplification99.0%

    \[\leadsto \left(\log y \cdot \left(-1 + x\right) + y \cdot \left(1 - z\right)\right) - t \]
  9. Add Preprocessing
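
Alternative 13 is the branch-free candidate: it never forms 1 - y, so the (z - 1) * log(1 - y) term of the original cannot collapse to zero when y is small enough that 1 - y rounds to exactly 1 in binary64 (below about 2^-54). A small Python illustration (a sketch; the input values are arbitrary and chosen only to exercise that regime):

import math

def original(x, y, z, t):
	return (((x - 1.0) * math.log(y)) + ((z - 1.0) * math.log(1.0 - y))) - t

def alternative_13(x, y, z, t):
	return ((math.log(y) * (-1.0 + x)) + (y * (1.0 - z))) - t

# With y = 1e-20, the expression 1.0 - y rounds to 1.0, so the original's second
# term is computed as zero even though (z - 1) is huge; the rewrite keeps that
# contribution as y * (1 - z), roughly -100 for these inputs.
y = 1e-20
print(original(2.0, y, 1e22, 0.0), alternative_13(2.0, y, 1e22, 0.0))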

Alternative 14: 42.9% accurate, 15.3× speedup?

\[\begin{array}{l} \\ \begin{array}{l} \mathbf{if}\;t \leq -190000000000:\\ \;\;\;\;-t\\ \mathbf{elif}\;t \leq 390000000:\\ \;\;\;\;z \cdot \left(-y\right)\\ \mathbf{else}:\\ \;\;\;\;y - t\\ \end{array} \end{array} \]
(FPCore (x y z t)
 :precision binary64
 (if (<= t -190000000000.0) (- t) (if (<= t 390000000.0) (* z (- y)) (- y t))))
double code(double x, double y, double z, double t) {
	double tmp;
	if (t <= -190000000000.0) {
		tmp = -t;
	} else if (t <= 390000000.0) {
		tmp = z * -y;
	} else {
		tmp = y - t;
	}
	return tmp;
}
real(8) function code(x, y, z, t)
    real(8), intent (in) :: x
    real(8), intent (in) :: y
    real(8), intent (in) :: z
    real(8), intent (in) :: t
    real(8) :: tmp
    if (t <= (-190000000000.0d0)) then
        tmp = -t
    else if (t <= 390000000.0d0) then
        tmp = z * (-y)
    else
        tmp = y - t
    end if
    code = tmp
end function
public static double code(double x, double y, double z, double t) {
	double tmp;
	if (t <= -190000000000.0) {
		tmp = -t;
	} else if (t <= 390000000.0) {
		tmp = z * -y;
	} else {
		tmp = y - t;
	}
	return tmp;
}
def code(x, y, z, t):
	tmp = 0
	if t <= -190000000000.0:
		tmp = -t
	elif t <= 390000000.0:
		tmp = z * -y
	else:
		tmp = y - t
	return tmp
function code(x, y, z, t)
	tmp = 0.0
	if (t <= -190000000000.0)
		tmp = Float64(-t);
	elseif (t <= 390000000.0)
		tmp = Float64(z * Float64(-y));
	else
		tmp = Float64(y - t);
	end
	return tmp
end
function tmp_2 = code(x, y, z, t)
	tmp = 0.0;
	if (t <= -190000000000.0)
		tmp = -t;
	elseif (t <= 390000000.0)
		tmp = z * -y;
	else
		tmp = y - t;
	end
	tmp_2 = tmp;
end
code[x_, y_, z_, t_] := If[LessEqual[t, -190000000000.0], (-t), If[LessEqual[t, 390000000.0], N[(z * (-y)), $MachinePrecision], N[(y - t), $MachinePrecision]]]
\begin{array}{l}

\\
\begin{array}{l}
\mathbf{if}\;t \leq -190000000000:\\
\;\;\;\;-t\\

\mathbf{elif}\;t \leq 390000000:\\
\;\;\;\;z \cdot \left(-y\right)\\

\mathbf{else}:\\
\;\;\;\;y - t\\


\end{array}
\end{array}
Derivation
  1. Split input into 3 regimes
  2. if t < -1.9e11

    1. Initial program 95.7%

      \[\left(\left(x - 1\right) \cdot \log y + \left(z - 1\right) \cdot \log \left(1 - y\right)\right) - t \]
    2. Step-by-step derivation
      1. +-commutative95.7%

        \[\leadsto \color{blue}{\left(\left(z - 1\right) \cdot \log \left(1 - y\right) + \left(x - 1\right) \cdot \log y\right)} - t \]
      2. fma-define95.7%

        \[\leadsto \color{blue}{\mathsf{fma}\left(z - 1, \log \left(1 - y\right), \left(x - 1\right) \cdot \log y\right)} - t \]
      3. sub-neg95.7%

        \[\leadsto \mathsf{fma}\left(\color{blue}{z + \left(-1\right)}, \log \left(1 - y\right), \left(x - 1\right) \cdot \log y\right) - t \]
      4. metadata-eval95.7%

        \[\leadsto \mathsf{fma}\left(z + \color{blue}{-1}, \log \left(1 - y\right), \left(x - 1\right) \cdot \log y\right) - t \]
      5. sub-neg95.7%

        \[\leadsto \mathsf{fma}\left(z + -1, \log \color{blue}{\left(1 + \left(-y\right)\right)}, \left(x - 1\right) \cdot \log y\right) - t \]
      6. log1p-define99.8%

        \[\leadsto \mathsf{fma}\left(z + -1, \color{blue}{\mathsf{log1p}\left(-y\right)}, \left(x - 1\right) \cdot \log y\right) - t \]
      7. sub-neg99.8%

        \[\leadsto \mathsf{fma}\left(z + -1, \mathsf{log1p}\left(-y\right), \color{blue}{\left(x + \left(-1\right)\right)} \cdot \log y\right) - t \]
      8. metadata-eval99.8%

        \[\leadsto \mathsf{fma}\left(z + -1, \mathsf{log1p}\left(-y\right), \left(x + \color{blue}{-1}\right) \cdot \log y\right) - t \]
    3. Simplified99.8%

      \[\leadsto \color{blue}{\mathsf{fma}\left(z + -1, \mathsf{log1p}\left(-y\right), \left(x + -1\right) \cdot \log y\right) - t} \]
    4. Add Preprocessing
    5. Taylor expanded in y around 0 99.6%

      \[\leadsto \color{blue}{\left(-1 \cdot \left(y \cdot \left(z - 1\right)\right) + \log y \cdot \left(x - 1\right)\right)} - t \]
    6. Step-by-step derivation
      1. +-commutative99.6%

        \[\leadsto \color{blue}{\left(\log y \cdot \left(x - 1\right) + -1 \cdot \left(y \cdot \left(z - 1\right)\right)\right)} - t \]
      2. sub-neg99.6%

        \[\leadsto \left(\log y \cdot \color{blue}{\left(x + \left(-1\right)\right)} + -1 \cdot \left(y \cdot \left(z - 1\right)\right)\right) - t \]
      3. metadata-eval99.6%

        \[\leadsto \left(\log y \cdot \left(x + \color{blue}{-1}\right) + -1 \cdot \left(y \cdot \left(z - 1\right)\right)\right) - t \]
      4. mul-1-neg99.6%

        \[\leadsto \left(\log y \cdot \left(x + -1\right) + \color{blue}{\left(-y \cdot \left(z - 1\right)\right)}\right) - t \]
      5. unsub-neg99.6%

        \[\leadsto \color{blue}{\left(\log y \cdot \left(x + -1\right) - y \cdot \left(z - 1\right)\right)} - t \]
      6. +-commutative99.6%

        \[\leadsto \left(\log y \cdot \color{blue}{\left(-1 + x\right)} - y \cdot \left(z - 1\right)\right) - t \]
      7. sub-neg99.6%

        \[\leadsto \left(\log y \cdot \left(-1 + x\right) - y \cdot \color{blue}{\left(z + \left(-1\right)\right)}\right) - t \]
      8. metadata-eval99.6%

        \[\leadsto \left(\log y \cdot \left(-1 + x\right) - y \cdot \left(z + \color{blue}{-1}\right)\right) - t \]
      9. +-commutative99.6%

        \[\leadsto \left(\log y \cdot \left(-1 + x\right) - y \cdot \color{blue}{\left(-1 + z\right)}\right) - t \]
    7. Simplified99.6%

      \[\leadsto \color{blue}{\left(\log y \cdot \left(-1 + x\right) - y \cdot \left(-1 + z\right)\right)} - t \]
    8. Taylor expanded in z around inf 69.9%

      \[\leadsto \color{blue}{-1 \cdot \left(y \cdot z\right)} - t \]
    9. Step-by-step derivation
      1. mul-1-neg69.9%

        \[\leadsto \color{blue}{\left(-y \cdot z\right)} - t \]
      2. distribute-rgt-neg-in69.9%

        \[\leadsto \color{blue}{y \cdot \left(-z\right)} - t \]
    10. Simplified 69.9%

      \[\leadsto \color{blue}{y \cdot \left(-z\right)} - t \]
    11. Taylor expanded in y around 0 65.5%

      \[\leadsto \color{blue}{-1 \cdot t} \]
    12. Step-by-step derivation
      1. neg-mul-1 65.5%

        \[\leadsto \color{blue}{-t} \]
    13. Simplified 65.5%

      \[\leadsto \color{blue}{-t} \]

    if -1.9e11 < t < 3.9e8

    1. Initial program 80.9%

      \[\left(\left(x - 1\right) \cdot \log y + \left(z - 1\right) \cdot \log \left(1 - y\right)\right) - t \]
    2. Step-by-step derivation
      1. +-commutative 80.9%

        \[\leadsto \color{blue}{\left(\left(z - 1\right) \cdot \log \left(1 - y\right) + \left(x - 1\right) \cdot \log y\right)} - t \]
      2. fma-define 80.9%

        \[\leadsto \color{blue}{\mathsf{fma}\left(z - 1, \log \left(1 - y\right), \left(x - 1\right) \cdot \log y\right)} - t \]
      3. sub-neg 80.9%

        \[\leadsto \mathsf{fma}\left(\color{blue}{z + \left(-1\right)}, \log \left(1 - y\right), \left(x - 1\right) \cdot \log y\right) - t \]
      4. metadata-eval 80.9%

        \[\leadsto \mathsf{fma}\left(z + \color{blue}{-1}, \log \left(1 - y\right), \left(x - 1\right) \cdot \log y\right) - t \]
      5. sub-neg 80.9%

        \[\leadsto \mathsf{fma}\left(z + -1, \log \color{blue}{\left(1 + \left(-y\right)\right)}, \left(x - 1\right) \cdot \log y\right) - t \]
      6. log1p-define 99.8%

        \[\leadsto \mathsf{fma}\left(z + -1, \color{blue}{\mathsf{log1p}\left(-y\right)}, \left(x - 1\right) \cdot \log y\right) - t \]
      7. sub-neg 99.8%

        \[\leadsto \mathsf{fma}\left(z + -1, \mathsf{log1p}\left(-y\right), \color{blue}{\left(x + \left(-1\right)\right)} \cdot \log y\right) - t \]
      8. metadata-eval 99.8%

        \[\leadsto \mathsf{fma}\left(z + -1, \mathsf{log1p}\left(-y\right), \left(x + \color{blue}{-1}\right) \cdot \log y\right) - t \]
    3. Simplified 99.8%

      \[\leadsto \color{blue}{\mathsf{fma}\left(z + -1, \mathsf{log1p}\left(-y\right), \left(x + -1\right) \cdot \log y\right) - t} \]
    4. Add Preprocessing
    5. Taylor expanded in y around 0 98.9%

      \[\leadsto \color{blue}{\left(-1 \cdot \left(y \cdot \left(z - 1\right)\right) + \log y \cdot \left(x - 1\right)\right)} - t \]
    6. Step-by-step derivation
      1. +-commutative 98.9%

        \[\leadsto \color{blue}{\left(\log y \cdot \left(x - 1\right) + -1 \cdot \left(y \cdot \left(z - 1\right)\right)\right)} - t \]
      2. sub-neg 98.9%

        \[\leadsto \left(\log y \cdot \color{blue}{\left(x + \left(-1\right)\right)} + -1 \cdot \left(y \cdot \left(z - 1\right)\right)\right) - t \]
      3. metadata-eval 98.9%

        \[\leadsto \left(\log y \cdot \left(x + \color{blue}{-1}\right) + -1 \cdot \left(y \cdot \left(z - 1\right)\right)\right) - t \]
      4. mul-1-neg 98.9%

        \[\leadsto \left(\log y \cdot \left(x + -1\right) + \color{blue}{\left(-y \cdot \left(z - 1\right)\right)}\right) - t \]
      5. unsub-neg 98.9%

        \[\leadsto \color{blue}{\left(\log y \cdot \left(x + -1\right) - y \cdot \left(z - 1\right)\right)} - t \]
      6. +-commutative 98.9%

        \[\leadsto \left(\log y \cdot \color{blue}{\left(-1 + x\right)} - y \cdot \left(z - 1\right)\right) - t \]
      7. sub-neg 98.9%

        \[\leadsto \left(\log y \cdot \left(-1 + x\right) - y \cdot \color{blue}{\left(z + \left(-1\right)\right)}\right) - t \]
      8. metadata-eval 98.9%

        \[\leadsto \left(\log y \cdot \left(-1 + x\right) - y \cdot \left(z + \color{blue}{-1}\right)\right) - t \]
      9. +-commutative 98.9%

        \[\leadsto \left(\log y \cdot \left(-1 + x\right) - y \cdot \color{blue}{\left(-1 + z\right)}\right) - t \]
    7. Simplified 98.9%

      \[\leadsto \color{blue}{\left(\log y \cdot \left(-1 + x\right) - y \cdot \left(-1 + z\right)\right)} - t \]
    8. Taylor expanded in z around inf 22.3%

      \[\leadsto \color{blue}{-1 \cdot \left(y \cdot z\right)} - t \]
    9. Step-by-step derivation
      1. mul-1-neg 22.3%

        \[\leadsto \color{blue}{\left(-y \cdot z\right)} - t \]
      2. distribute-rgt-neg-in 22.3%

        \[\leadsto \color{blue}{y \cdot \left(-z\right)} - t \]
    10. Simplified 22.3%

      \[\leadsto \color{blue}{y \cdot \left(-z\right)} - t \]
    11. Taylor expanded in y around inf 22.2%

      \[\leadsto \color{blue}{-1 \cdot \left(y \cdot z\right)} \]
    12. Step-by-step derivation
      1. mul-1-neg 22.2%

        \[\leadsto \color{blue}{-y \cdot z} \]
      2. distribute-rgt-neg-out 22.2%

        \[\leadsto \color{blue}{y \cdot \left(-z\right)} \]
    13. Simplified 22.2%

      \[\leadsto \color{blue}{y \cdot \left(-z\right)} \]

    if 3.9e8 < t

    1. Initial program 96.0%

      \[\left(\left(x - 1\right) \cdot \log y + \left(z - 1\right) \cdot \log \left(1 - y\right)\right) - t \]
    2. Step-by-step derivation
      1. +-commutative 96.0%

        \[\leadsto \color{blue}{\left(\left(z - 1\right) \cdot \log \left(1 - y\right) + \left(x - 1\right) \cdot \log y\right)} - t \]
      2. fma-define 96.0%

        \[\leadsto \color{blue}{\mathsf{fma}\left(z - 1, \log \left(1 - y\right), \left(x - 1\right) \cdot \log y\right)} - t \]
      3. sub-neg 96.0%

        \[\leadsto \mathsf{fma}\left(\color{blue}{z + \left(-1\right)}, \log \left(1 - y\right), \left(x - 1\right) \cdot \log y\right) - t \]
      4. metadata-eval 96.0%

        \[\leadsto \mathsf{fma}\left(z + \color{blue}{-1}, \log \left(1 - y\right), \left(x - 1\right) \cdot \log y\right) - t \]
      5. sub-neg 96.0%

        \[\leadsto \mathsf{fma}\left(z + -1, \log \color{blue}{\left(1 + \left(-y\right)\right)}, \left(x - 1\right) \cdot \log y\right) - t \]
      6. log1p-define 99.9%

        \[\leadsto \mathsf{fma}\left(z + -1, \color{blue}{\mathsf{log1p}\left(-y\right)}, \left(x - 1\right) \cdot \log y\right) - t \]
      7. sub-neg 99.9%

        \[\leadsto \mathsf{fma}\left(z + -1, \mathsf{log1p}\left(-y\right), \color{blue}{\left(x + \left(-1\right)\right)} \cdot \log y\right) - t \]
      8. metadata-eval 99.9%

        \[\leadsto \mathsf{fma}\left(z + -1, \mathsf{log1p}\left(-y\right), \left(x + \color{blue}{-1}\right) \cdot \log y\right) - t \]
    3. Simplified 99.9%

      \[\leadsto \color{blue}{\mathsf{fma}\left(z + -1, \mathsf{log1p}\left(-y\right), \left(x + -1\right) \cdot \log y\right) - t} \]
    4. Add Preprocessing
    5. Taylor expanded in y around 0 98.5%

      \[\leadsto \color{blue}{\left(-1 \cdot \left(y \cdot \left(z - 1\right)\right) + \log y \cdot \left(x - 1\right)\right)} - t \]
    6. Step-by-step derivation
      1. +-commutative 98.5%

        \[\leadsto \color{blue}{\left(\log y \cdot \left(x - 1\right) + -1 \cdot \left(y \cdot \left(z - 1\right)\right)\right)} - t \]
      2. sub-neg 98.5%

        \[\leadsto \left(\log y \cdot \color{blue}{\left(x + \left(-1\right)\right)} + -1 \cdot \left(y \cdot \left(z - 1\right)\right)\right) - t \]
      3. metadata-eval 98.5%

        \[\leadsto \left(\log y \cdot \left(x + \color{blue}{-1}\right) + -1 \cdot \left(y \cdot \left(z - 1\right)\right)\right) - t \]
      4. mul-1-neg 98.5%

        \[\leadsto \left(\log y \cdot \left(x + -1\right) + \color{blue}{\left(-y \cdot \left(z - 1\right)\right)}\right) - t \]
      5. unsub-neg 98.5%

        \[\leadsto \color{blue}{\left(\log y \cdot \left(x + -1\right) - y \cdot \left(z - 1\right)\right)} - t \]
      6. +-commutative 98.5%

        \[\leadsto \left(\log y \cdot \color{blue}{\left(-1 + x\right)} - y \cdot \left(z - 1\right)\right) - t \]
      7. sub-neg 98.5%

        \[\leadsto \left(\log y \cdot \left(-1 + x\right) - y \cdot \color{blue}{\left(z + \left(-1\right)\right)}\right) - t \]
      8. metadata-eval 98.5%

        \[\leadsto \left(\log y \cdot \left(-1 + x\right) - y \cdot \left(z + \color{blue}{-1}\right)\right) - t \]
      9. +-commutative 98.5%

        \[\leadsto \left(\log y \cdot \left(-1 + x\right) - y \cdot \color{blue}{\left(-1 + z\right)}\right) - t \]
    7. Simplified 98.5%

      \[\leadsto \color{blue}{\left(\log y \cdot \left(-1 + x\right) - y \cdot \left(-1 + z\right)\right)} - t \]
    8. Taylor expanded in z around 0 93.8%

      \[\leadsto \color{blue}{\left(\log y \cdot \left(x - 1\right) - -1 \cdot y\right)} - t \]
    9. Step-by-step derivation
      1. sub-neg 93.8%

        \[\leadsto \left(\log y \cdot \color{blue}{\left(x + \left(-1\right)\right)} - -1 \cdot y\right) - t \]
      2. metadata-eval 93.8%

        \[\leadsto \left(\log y \cdot \left(x + \color{blue}{-1}\right) - -1 \cdot y\right) - t \]
      3. +-commutative 93.8%

        \[\leadsto \left(\log y \cdot \color{blue}{\left(-1 + x\right)} - -1 \cdot y\right) - t \]
      4. neg-mul-1 93.8%

        \[\leadsto \left(\log y \cdot \left(-1 + x\right) - \color{blue}{\left(-y\right)}\right) - t \]
    10. Simplified 93.8%

      \[\leadsto \color{blue}{\left(\log y \cdot \left(-1 + x\right) - \left(-y\right)\right)} - t \]
    11. Taylor expanded in y around inf 67.1%

      \[\leadsto \color{blue}{y} - t \]
  3. Recombined 3 regimes into one program.
  4. Final simplification 43.8% (see the C sketch after this derivation)

    \[\leadsto \begin{array}{l} \mathbf{if}\;t \leq -190000000000:\\ \;\;\;\;-t\\ \mathbf{elif}\;t \leq 390000000:\\ \;\;\;\;z \cdot \left(-y\right)\\ \mathbf{else}:\\ \;\;\;\;y - t\\ \end{array} \]
  5. Add Preprocessing
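
The recombined program in the final simplification above is easiest to read as straight-line code. Below is a minimal C sketch of that three-way branch; the thresholds on t (-1.9e11 and 3.9e8) are the regime boundaries Herbie inferred from its samples for this report, not universal constants, and the helper name code_regimes is ours rather than anything emitted by the tool.

double code_regimes(double x, double y, double z, double t) {
	/* x does not appear in the simplified program; Herbie dropped it entirely. */
	if (t <= -1.9e11)
		return -t;          /* first regime: keep only the -t term */
	else if (t <= 3.9e8)
		return z * (-y);    /* middle regime: only the -y*z truncation survives */
	else
		return y - t;       /* last regime: keep y, drop the logarithmic terms */
}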

Alternative 15: 45.9% accurate, 35.8× speedup

\[\begin{array}{l} \\ \left(-t\right) - z \cdot y \end{array} \]
(FPCore (x y z t) :precision binary64 (- (- t) (* z y)))
double code(double x, double y, double z, double t) {
	return -t - (z * y);
}
real(8) function code(x, y, z, t)
    real(8), intent (in) :: x
    real(8), intent (in) :: y
    real(8), intent (in) :: z
    real(8), intent (in) :: t
    code = -t - (z * y)
end function
public static double code(double x, double y, double z, double t) {
	return -t - (z * y);
}
def code(x, y, z, t):
	return -t - (z * y)
function code(x, y, z, t)
	return Float64(Float64(-t) - Float64(z * y))
end
function tmp = code(x, y, z, t)
	tmp = -t - (z * y);
end
code[x_, y_, z_, t_] := N[((-t) - N[(z * y), $MachinePrecision]), $MachinePrecision]
\begin{array}{l}

\\
\left(-t\right) - z \cdot y
\end{array}
Derivation
  1. Initial program 88.2%

    \[\left(\left(x - 1\right) \cdot \log y + \left(z - 1\right) \cdot \log \left(1 - y\right)\right) - t \]
  2. Step-by-step derivation
    1. +-commutative 88.2%

      \[\leadsto \color{blue}{\left(\left(z - 1\right) \cdot \log \left(1 - y\right) + \left(x - 1\right) \cdot \log y\right)} - t \]
    2. fma-define 88.2%

      \[\leadsto \color{blue}{\mathsf{fma}\left(z - 1, \log \left(1 - y\right), \left(x - 1\right) \cdot \log y\right)} - t \]
    3. sub-neg 88.2%

      \[\leadsto \mathsf{fma}\left(\color{blue}{z + \left(-1\right)}, \log \left(1 - y\right), \left(x - 1\right) \cdot \log y\right) - t \]
    4. metadata-eval 88.2%

      \[\leadsto \mathsf{fma}\left(z + \color{blue}{-1}, \log \left(1 - y\right), \left(x - 1\right) \cdot \log y\right) - t \]
    5. sub-neg 88.2%

      \[\leadsto \mathsf{fma}\left(z + -1, \log \color{blue}{\left(1 + \left(-y\right)\right)}, \left(x - 1\right) \cdot \log y\right) - t \]
    6. log1p-define 99.8%

      \[\leadsto \mathsf{fma}\left(z + -1, \color{blue}{\mathsf{log1p}\left(-y\right)}, \left(x - 1\right) \cdot \log y\right) - t \]
    7. sub-neg 99.8%

      \[\leadsto \mathsf{fma}\left(z + -1, \mathsf{log1p}\left(-y\right), \color{blue}{\left(x + \left(-1\right)\right)} \cdot \log y\right) - t \]
    8. metadata-eval 99.8%

      \[\leadsto \mathsf{fma}\left(z + -1, \mathsf{log1p}\left(-y\right), \left(x + \color{blue}{-1}\right) \cdot \log y\right) - t \]
  3. Simplified 99.8%

    \[\leadsto \color{blue}{\mathsf{fma}\left(z + -1, \mathsf{log1p}\left(-y\right), \left(x + -1\right) \cdot \log y\right) - t} \]
  4. Add Preprocessing
  5. Taylor expanded in y around 0 99.0%

    \[\leadsto \color{blue}{\left(-1 \cdot \left(y \cdot \left(z - 1\right)\right) + \log y \cdot \left(x - 1\right)\right)} - t \]
  6. Step-by-step derivation
    1. +-commutative 99.0%

      \[\leadsto \color{blue}{\left(\log y \cdot \left(x - 1\right) + -1 \cdot \left(y \cdot \left(z - 1\right)\right)\right)} - t \]
    2. sub-neg 99.0%

      \[\leadsto \left(\log y \cdot \color{blue}{\left(x + \left(-1\right)\right)} + -1 \cdot \left(y \cdot \left(z - 1\right)\right)\right) - t \]
    3. metadata-eval 99.0%

      \[\leadsto \left(\log y \cdot \left(x + \color{blue}{-1}\right) + -1 \cdot \left(y \cdot \left(z - 1\right)\right)\right) - t \]
    4. mul-1-neg 99.0%

      \[\leadsto \left(\log y \cdot \left(x + -1\right) + \color{blue}{\left(-y \cdot \left(z - 1\right)\right)}\right) - t \]
    5. unsub-neg 99.0%

      \[\leadsto \color{blue}{\left(\log y \cdot \left(x + -1\right) - y \cdot \left(z - 1\right)\right)} - t \]
    6. +-commutative 99.0%

      \[\leadsto \left(\log y \cdot \color{blue}{\left(-1 + x\right)} - y \cdot \left(z - 1\right)\right) - t \]
    7. sub-neg 99.0%

      \[\leadsto \left(\log y \cdot \left(-1 + x\right) - y \cdot \color{blue}{\left(z + \left(-1\right)\right)}\right) - t \]
    8. metadata-eval 99.0%

      \[\leadsto \left(\log y \cdot \left(-1 + x\right) - y \cdot \left(z + \color{blue}{-1}\right)\right) - t \]
    9. +-commutative 99.0%

      \[\leadsto \left(\log y \cdot \left(-1 + x\right) - y \cdot \color{blue}{\left(-1 + z\right)}\right) - t \]
  7. Simplified 99.0%

    \[\leadsto \color{blue}{\left(\log y \cdot \left(-1 + x\right) - y \cdot \left(-1 + z\right)\right)} - t \]
  8. Taylor expanded in z around inf 46.1%

    \[\leadsto \color{blue}{-1 \cdot \left(y \cdot z\right)} - t \]
  9. Step-by-step derivation
    1. mul-1-neg 46.1%

      \[\leadsto \color{blue}{\left(-y \cdot z\right)} - t \]
    2. distribute-rgt-neg-in 46.1%

      \[\leadsto \color{blue}{y \cdot \left(-z\right)} - t \]
  10. Simplified 46.1%

    \[\leadsto \color{blue}{y \cdot \left(-z\right)} - t \]
  11. Final simplification 46.1%

    \[\leadsto \left(-t\right) - z \cdot y \]
  12. Add Preprocessing
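
Most of the accuracy gain in the derivation above comes from the log1p-define rewrite (step 6 of the step-by-step derivation), which lifts accuracy from 88.2% to 99.8% before the later Taylor truncations trade it away for speed: log1p(-y) avoids the rounding of 1 - y that makes log(1 - y) inaccurate for small y, and fma performs the multiply-add in a single rounding. For reference, here is that 99.8%-accurate intermediate expression written with the standard C99 fma and log1p; the function name is ours, and this intermediate form is not one of the report's emitted alternatives.

#include <math.h>

double code_fma_log1p(double x, double y, double z, double t) {
	/* fma(a, b, c) computes a*b + c with one rounding;
	   log1p(-y) evaluates log(1 - y) accurately when |y| is small. */
	return fma(z - 1.0, log1p(-y), (x - 1.0) * log(y)) - t;
}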

Alternative 16: 35.9% accurate, 71.7× speedup

\[\begin{array}{l} \\ y - t \end{array} \]
(FPCore (x y z t) :precision binary64 (- y t))
double code(double x, double y, double z, double t) {
	return y - t;
}
real(8) function code(x, y, z, t)
    real(8), intent (in) :: x
    real(8), intent (in) :: y
    real(8), intent (in) :: z
    real(8), intent (in) :: t
    code = y - t
end function
public static double code(double x, double y, double z, double t) {
	return y - t;
}
def code(x, y, z, t):
	return y - t
function code(x, y, z, t)
	return Float64(y - t)
end
function tmp = code(x, y, z, t)
	tmp = y - t;
end
code[x_, y_, z_, t_] := N[(y - t), $MachinePrecision]
\begin{array}{l}

\\
y - t
\end{array}
Derivation
  1. Initial program 88.2%

    \[\left(\left(x - 1\right) \cdot \log y + \left(z - 1\right) \cdot \log \left(1 - y\right)\right) - t \]
  2. Step-by-step derivation
    1. +-commutative 88.2%

      \[\leadsto \color{blue}{\left(\left(z - 1\right) \cdot \log \left(1 - y\right) + \left(x - 1\right) \cdot \log y\right)} - t \]
    2. fma-define 88.2%

      \[\leadsto \color{blue}{\mathsf{fma}\left(z - 1, \log \left(1 - y\right), \left(x - 1\right) \cdot \log y\right)} - t \]
    3. sub-neg 88.2%

      \[\leadsto \mathsf{fma}\left(\color{blue}{z + \left(-1\right)}, \log \left(1 - y\right), \left(x - 1\right) \cdot \log y\right) - t \]
    4. metadata-eval 88.2%

      \[\leadsto \mathsf{fma}\left(z + \color{blue}{-1}, \log \left(1 - y\right), \left(x - 1\right) \cdot \log y\right) - t \]
    5. sub-neg 88.2%

      \[\leadsto \mathsf{fma}\left(z + -1, \log \color{blue}{\left(1 + \left(-y\right)\right)}, \left(x - 1\right) \cdot \log y\right) - t \]
    6. log1p-define 99.8%

      \[\leadsto \mathsf{fma}\left(z + -1, \color{blue}{\mathsf{log1p}\left(-y\right)}, \left(x - 1\right) \cdot \log y\right) - t \]
    7. sub-neg 99.8%

      \[\leadsto \mathsf{fma}\left(z + -1, \mathsf{log1p}\left(-y\right), \color{blue}{\left(x + \left(-1\right)\right)} \cdot \log y\right) - t \]
    8. metadata-eval 99.8%

      \[\leadsto \mathsf{fma}\left(z + -1, \mathsf{log1p}\left(-y\right), \left(x + \color{blue}{-1}\right) \cdot \log y\right) - t \]
  3. Simplified 99.8%

    \[\leadsto \color{blue}{\mathsf{fma}\left(z + -1, \mathsf{log1p}\left(-y\right), \left(x + -1\right) \cdot \log y\right) - t} \]
  4. Add Preprocessing
  5. Taylor expanded in y around 0 99.0%

    \[\leadsto \color{blue}{\left(-1 \cdot \left(y \cdot \left(z - 1\right)\right) + \log y \cdot \left(x - 1\right)\right)} - t \]
  6. Step-by-step derivation
    1. +-commutative 99.0%

      \[\leadsto \color{blue}{\left(\log y \cdot \left(x - 1\right) + -1 \cdot \left(y \cdot \left(z - 1\right)\right)\right)} - t \]
    2. sub-neg 99.0%

      \[\leadsto \left(\log y \cdot \color{blue}{\left(x + \left(-1\right)\right)} + -1 \cdot \left(y \cdot \left(z - 1\right)\right)\right) - t \]
    3. metadata-eval 99.0%

      \[\leadsto \left(\log y \cdot \left(x + \color{blue}{-1}\right) + -1 \cdot \left(y \cdot \left(z - 1\right)\right)\right) - t \]
    4. mul-1-neg 99.0%

      \[\leadsto \left(\log y \cdot \left(x + -1\right) + \color{blue}{\left(-y \cdot \left(z - 1\right)\right)}\right) - t \]
    5. unsub-neg 99.0%

      \[\leadsto \color{blue}{\left(\log y \cdot \left(x + -1\right) - y \cdot \left(z - 1\right)\right)} - t \]
    6. +-commutative 99.0%

      \[\leadsto \left(\log y \cdot \color{blue}{\left(-1 + x\right)} - y \cdot \left(z - 1\right)\right) - t \]
    7. sub-neg 99.0%

      \[\leadsto \left(\log y \cdot \left(-1 + x\right) - y \cdot \color{blue}{\left(z + \left(-1\right)\right)}\right) - t \]
    8. metadata-eval 99.0%

      \[\leadsto \left(\log y \cdot \left(-1 + x\right) - y \cdot \left(z + \color{blue}{-1}\right)\right) - t \]
    9. +-commutative 99.0%

      \[\leadsto \left(\log y \cdot \left(-1 + x\right) - y \cdot \color{blue}{\left(-1 + z\right)}\right) - t \]
  7. Simplified 99.0%

    \[\leadsto \color{blue}{\left(\log y \cdot \left(-1 + x\right) - y \cdot \left(-1 + z\right)\right)} - t \]
  8. Taylor expanded in z around 0 86.9%

    \[\leadsto \color{blue}{\left(\log y \cdot \left(x - 1\right) - -1 \cdot y\right)} - t \]
  9. Step-by-step derivation
    1. sub-neg 86.9%

      \[\leadsto \left(\log y \cdot \color{blue}{\left(x + \left(-1\right)\right)} - -1 \cdot y\right) - t \]
    2. metadata-eval 86.9%

      \[\leadsto \left(\log y \cdot \left(x + \color{blue}{-1}\right) - -1 \cdot y\right) - t \]
    3. +-commutative 86.9%

      \[\leadsto \left(\log y \cdot \color{blue}{\left(-1 + x\right)} - -1 \cdot y\right) - t \]
    4. neg-mul-1 86.9%

      \[\leadsto \left(\log y \cdot \left(-1 + x\right) - \color{blue}{\left(-y\right)}\right) - t \]
  10. Simplified 86.9%

    \[\leadsto \color{blue}{\left(\log y \cdot \left(-1 + x\right) - \left(-y\right)\right)} - t \]
  11. Taylor expanded in y around inf 34.1%

    \[\leadsto \color{blue}{y} - t \]
  12. Add Preprocessing
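
The "Taylor expanded in y around 0" step that appears in this and the other derivations replaces \(\log\left(1 - y\right)\) with the leading term of its Maclaurin series; writing out the series makes the dropped terms explicit:

\[\log \left(1 - y\right) = -y - \frac{y^2}{2} - \frac{y^3}{3} - \cdots, \qquad \text{so} \qquad \left(z - 1\right) \cdot \log \left(1 - y\right) \approx -1 \cdot \left(y \cdot \left(z - 1\right)\right) \quad \text{when } \left|y\right| \ll 1. \]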

Alternative 17: 35.6% accurate, 107.5× speedup

\[\begin{array}{l} \\ -t \end{array} \]
(FPCore (x y z t) :precision binary64 (- t))
double code(double x, double y, double z, double t) {
	return -t;
}
real(8) function code(x, y, z, t)
    real(8), intent (in) :: x
    real(8), intent (in) :: y
    real(8), intent (in) :: z
    real(8), intent (in) :: t
    code = -t
end function
public static double code(double x, double y, double z, double t) {
	return -t;
}
def code(x, y, z, t):
	return -t
function code(x, y, z, t)
	return Float64(-t)
end
function tmp = code(x, y, z, t)
	tmp = -t;
end
code[x_, y_, z_, t_] := (-t)
\begin{array}{l}

\\
-t
\end{array}
Derivation
  1. Initial program 88.2%

    \[\left(\left(x - 1\right) \cdot \log y + \left(z - 1\right) \cdot \log \left(1 - y\right)\right) - t \]
  2. Step-by-step derivation
    1. +-commutative 88.2%

      \[\leadsto \color{blue}{\left(\left(z - 1\right) \cdot \log \left(1 - y\right) + \left(x - 1\right) \cdot \log y\right)} - t \]
    2. fma-define 88.2%

      \[\leadsto \color{blue}{\mathsf{fma}\left(z - 1, \log \left(1 - y\right), \left(x - 1\right) \cdot \log y\right)} - t \]
    3. sub-neg 88.2%

      \[\leadsto \mathsf{fma}\left(\color{blue}{z + \left(-1\right)}, \log \left(1 - y\right), \left(x - 1\right) \cdot \log y\right) - t \]
    4. metadata-eval 88.2%

      \[\leadsto \mathsf{fma}\left(z + \color{blue}{-1}, \log \left(1 - y\right), \left(x - 1\right) \cdot \log y\right) - t \]
    5. sub-neg 88.2%

      \[\leadsto \mathsf{fma}\left(z + -1, \log \color{blue}{\left(1 + \left(-y\right)\right)}, \left(x - 1\right) \cdot \log y\right) - t \]
    6. log1p-define 99.8%

      \[\leadsto \mathsf{fma}\left(z + -1, \color{blue}{\mathsf{log1p}\left(-y\right)}, \left(x - 1\right) \cdot \log y\right) - t \]
    7. sub-neg 99.8%

      \[\leadsto \mathsf{fma}\left(z + -1, \mathsf{log1p}\left(-y\right), \color{blue}{\left(x + \left(-1\right)\right)} \cdot \log y\right) - t \]
    8. metadata-eval 99.8%

      \[\leadsto \mathsf{fma}\left(z + -1, \mathsf{log1p}\left(-y\right), \left(x + \color{blue}{-1}\right) \cdot \log y\right) - t \]
  3. Simplified 99.8%

    \[\leadsto \color{blue}{\mathsf{fma}\left(z + -1, \mathsf{log1p}\left(-y\right), \left(x + -1\right) \cdot \log y\right) - t} \]
  4. Add Preprocessing
  5. Taylor expanded in y around 0 99.0%

    \[\leadsto \color{blue}{\left(-1 \cdot \left(y \cdot \left(z - 1\right)\right) + \log y \cdot \left(x - 1\right)\right)} - t \]
  6. Step-by-step derivation
    1. +-commutative 99.0%

      \[\leadsto \color{blue}{\left(\log y \cdot \left(x - 1\right) + -1 \cdot \left(y \cdot \left(z - 1\right)\right)\right)} - t \]
    2. sub-neg 99.0%

      \[\leadsto \left(\log y \cdot \color{blue}{\left(x + \left(-1\right)\right)} + -1 \cdot \left(y \cdot \left(z - 1\right)\right)\right) - t \]
    3. metadata-eval 99.0%

      \[\leadsto \left(\log y \cdot \left(x + \color{blue}{-1}\right) + -1 \cdot \left(y \cdot \left(z - 1\right)\right)\right) - t \]
    4. mul-1-neg 99.0%

      \[\leadsto \left(\log y \cdot \left(x + -1\right) + \color{blue}{\left(-y \cdot \left(z - 1\right)\right)}\right) - t \]
    5. unsub-neg 99.0%

      \[\leadsto \color{blue}{\left(\log y \cdot \left(x + -1\right) - y \cdot \left(z - 1\right)\right)} - t \]
    6. +-commutative 99.0%

      \[\leadsto \left(\log y \cdot \color{blue}{\left(-1 + x\right)} - y \cdot \left(z - 1\right)\right) - t \]
    7. sub-neg 99.0%

      \[\leadsto \left(\log y \cdot \left(-1 + x\right) - y \cdot \color{blue}{\left(z + \left(-1\right)\right)}\right) - t \]
    8. metadata-eval 99.0%

      \[\leadsto \left(\log y \cdot \left(-1 + x\right) - y \cdot \left(z + \color{blue}{-1}\right)\right) - t \]
    9. +-commutative 99.0%

      \[\leadsto \left(\log y \cdot \left(-1 + x\right) - y \cdot \color{blue}{\left(-1 + z\right)}\right) - t \]
  7. Simplified 99.0%

    \[\leadsto \color{blue}{\left(\log y \cdot \left(-1 + x\right) - y \cdot \left(-1 + z\right)\right)} - t \]
  8. Taylor expanded in z around inf 46.1%

    \[\leadsto \color{blue}{-1 \cdot \left(y \cdot z\right)} - t \]
  9. Step-by-step derivation
    1. mul-1-neg 46.1%

      \[\leadsto \color{blue}{\left(-y \cdot z\right)} - t \]
    2. distribute-rgt-neg-in 46.1%

      \[\leadsto \color{blue}{y \cdot \left(-z\right)} - t \]
  10. Simplified 46.1%

    \[\leadsto \color{blue}{y \cdot \left(-z\right)} - t \]
  11. Taylor expanded in y around 0 33.8%

    \[\leadsto \color{blue}{-1 \cdot t} \]
  12. Step-by-step derivation
    1. neg-mul-1 33.8%

      \[\leadsto \color{blue}{-t} \]
  13. Simplified 33.8%

    \[\leadsto \color{blue}{-t} \]
  14. Add Preprocessing
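
Alternative 17 keeps only the -t term, so it is only a reasonable stand-in when the rest of the expression is small relative to t. A quick way to see what the truncation costs at a particular input is to evaluate the original expression and the shortcut side by side; the harness below is our own (the function names and the sample point are arbitrary) and is not part of the report.

#include <math.h>
#include <stdio.h>

static double original(double x, double y, double z, double t) {
	return ((x - 1.0) * log(y) + (z - 1.0) * log(1.0 - y)) - t;
}

static double alt17(double t) {
	return -t;
}

int main(void) {
	/* Arbitrary sample point; vary it to see where -t stops tracking
	   the full expression. */
	double x = 2.5, y = 0.125, z = 3.0, t = 1.0e12;
	printf("original    = %.17g\n", original(x, y, z, t));
	printf("alternative = %.17g\n", alt17(t));
	return 0;
}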

Reproduce

herbie shell --seed 2024086 
(FPCore (x y z t)
  :name "Statistics.Distribution.Beta:$cdensity from math-functions-0.1.5.2"
  :precision binary64
  (- (+ (* (- x 1.0) (log y)) (* (- z 1.0) (log (- 1.0 y)))) t))
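
To rerun this case, start the shell with the seed shown above and paste the FPCore expression at the prompt; Herbie reads FPCore from standard input and prints its improved version back. If your Herbie build also provides the report subcommand (an assumption here; only shell appears in this report), saving the expression to a file lets you regenerate a full HTML report like this one. The file name below is our own:

herbie report --seed 2024086 cdensity.fpcore report-output/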