Hyperbolic arcsine

Percentage Accurate: 18.2% → 100.0%
Time: 9.9s
Alternatives: 10
Speedup: 207.0×

Specification

\[\log \left(x + \sqrt{x \cdot x + 1}\right) \]
(FPCore (x) :precision binary64 (log (+ x (sqrt (+ (* x x) 1.0)))))
double code(double x) {
	return log((x + sqrt(((x * x) + 1.0))));
}
real(8) function code(x)
    real(8), intent (in) :: x
    code = log((x + sqrt(((x * x) + 1.0d0))))
end function
public static double code(double x) {
	return Math.log((x + Math.sqrt(((x * x) + 1.0))));
}
def code(x):
	return math.log((x + math.sqrt(((x * x) + 1.0))))
function code(x)
	return log(Float64(x + sqrt(Float64(Float64(x * x) + 1.0))))
end
function tmp = code(x)
	tmp = log((x + sqrt(((x * x) + 1.0))));
end
code[x_] := N[Log[N[(x + N[Sqrt[N[(N[(x * x), $MachinePrecision] + 1.0), $MachinePrecision]], $MachinePrecision]), $MachinePrecision]], $MachinePrecision]
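Why the specification scores only 18.2%: for negative x of large magnitude, sqrt(x*x + 1) is almost exactly -x, so the addition inside the log cancels nearly all significant bits before the logarithm is taken. A minimal Python sketch of the failure (the helper name naive_asinh is ours; math.asinh serves only as a reference):

import math

def naive_asinh(x):
	# Direct transcription of the specification above.
	return math.log(x + math.sqrt(x * x + 1.0))

x = -1.0e7
# sqrt(x*x + 1) agrees with -x in roughly half of its digits, so the sum
# x + sqrt(...) keeps only a few significant bits before the log.
print(naive_asinh(x))  # only about three correct digits
print(math.asinh(x))   # reference value, about -16.8112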

Sampling outcomes in binary64 precision:

Local Percentage Accuracy

The average percentage accuracy by input value. The horizontal axis shows the value of an input variable (the variable is chosen in the title); the vertical axis shows accuracy, where higher is better. Red represents the original program, while blue represents Herbie's suggestion; the two can be toggled with the buttons below the plot. The line is an average, while the dots represent individual samples.

Accuracy vs Speed

Herbie found 10 alternatives:

The accuracy (vertical axis) and speed (horizontal axis) of each alternative. Up and to the right is better. The red square shows the initial program, and each blue circle shows an alternative. The line shows the best available speed-accuracy tradeoffs.

Initial Program: 18.2% accurate, 1.0× speedup

\[\log \left(x + \sqrt{x \cdot x + 1}\right) \]
(FPCore (x) :precision binary64 (log (+ x (sqrt (+ (* x x) 1.0)))))
double code(double x) {
	return log((x + sqrt(((x * x) + 1.0))));
}
real(8) function code(x)
    real(8), intent (in) :: x
    code = log((x + sqrt(((x * x) + 1.0d0))))
end function
public static double code(double x) {
	return Math.log((x + Math.sqrt(((x * x) + 1.0))));
}
def code(x):
	return math.log((x + math.sqrt(((x * x) + 1.0))))
function code(x)
	return log(Float64(x + sqrt(Float64(Float64(x * x) + 1.0))))
end
function tmp = code(x)
	tmp = log((x + sqrt(((x * x) + 1.0))));
end
code[x_] := N[Log[N[(x + N[Sqrt[N[(N[(x * x), $MachinePrecision] + 1.0), $MachinePrecision]], $MachinePrecision]), $MachinePrecision]], $MachinePrecision]

Alternative 1: 100.0% accurate, 0.6× speedup

\[\begin{array}{l} \mathbf{if}\;x \leq -0.0245:\\ \;\;\;\;-\log \left(\mathsf{hypot}\left(1, x\right) - x\right)\\ \mathbf{elif}\;x \leq 0.024:\\ \;\;\;\;x \cdot \left(1 + {x}^{2} \cdot \left({x}^{2} \cdot \left(0.075 + {x}^{2} \cdot -0.044642857142857144\right) - 0.16666666666666666\right)\right)\\ \mathbf{else}:\\ \;\;\;\;\log \left(x + \mathsf{hypot}\left(1, x\right)\right)\\ \end{array} \]
(FPCore (x)
 :precision binary64
 (if (<= x -0.0245)
   (- (log (- (hypot 1.0 x) x)))
   (if (<= x 0.024)
     (*
      x
      (+
       1.0
       (*
        (pow x 2.0)
        (-
         (* (pow x 2.0) (+ 0.075 (* (pow x 2.0) -0.044642857142857144)))
         0.16666666666666666))))
     (log (+ x (hypot 1.0 x))))))
double code(double x) {
	double tmp;
	if (x <= -0.0245) {
		tmp = -log((hypot(1.0, x) - x));
	} else if (x <= 0.024) {
		tmp = x * (1.0 + (pow(x, 2.0) * ((pow(x, 2.0) * (0.075 + (pow(x, 2.0) * -0.044642857142857144))) - 0.16666666666666666)));
	} else {
		tmp = log((x + hypot(1.0, x)));
	}
	return tmp;
}
public static double code(double x) {
	double tmp;
	if (x <= -0.0245) {
		tmp = -Math.log((Math.hypot(1.0, x) - x));
	} else if (x <= 0.024) {
		tmp = x * (1.0 + (Math.pow(x, 2.0) * ((Math.pow(x, 2.0) * (0.075 + (Math.pow(x, 2.0) * -0.044642857142857144))) - 0.16666666666666666)));
	} else {
		tmp = Math.log((x + Math.hypot(1.0, x)));
	}
	return tmp;
}
def code(x):
	tmp = 0
	if x <= -0.0245:
		tmp = -math.log((math.hypot(1.0, x) - x))
	elif x <= 0.024:
		tmp = x * (1.0 + (math.pow(x, 2.0) * ((math.pow(x, 2.0) * (0.075 + (math.pow(x, 2.0) * -0.044642857142857144))) - 0.16666666666666666)))
	else:
		tmp = math.log((x + math.hypot(1.0, x)))
	return tmp
function code(x)
	tmp = 0.0
	if (x <= -0.0245)
		tmp = Float64(-log(Float64(hypot(1.0, x) - x)));
	elseif (x <= 0.024)
		tmp = Float64(x * Float64(1.0 + Float64((x ^ 2.0) * Float64(Float64((x ^ 2.0) * Float64(0.075 + Float64((x ^ 2.0) * -0.044642857142857144))) - 0.16666666666666666))));
	else
		tmp = log(Float64(x + hypot(1.0, x)));
	end
	return tmp
end
function tmp_2 = code(x)
	tmp = 0.0;
	if (x <= -0.0245)
		tmp = -log((hypot(1.0, x) - x));
	elseif (x <= 0.024)
		tmp = x * (1.0 + ((x ^ 2.0) * (((x ^ 2.0) * (0.075 + ((x ^ 2.0) * -0.044642857142857144))) - 0.16666666666666666)));
	else
		tmp = log((x + hypot(1.0, x)));
	end
	tmp_2 = tmp;
end
code[x_] := If[LessEqual[x, -0.0245], (-N[Log[N[(N[Sqrt[1.0 ^ 2 + x ^ 2], $MachinePrecision] - x), $MachinePrecision]], $MachinePrecision]), If[LessEqual[x, 0.024], N[(x * N[(1.0 + N[(N[Power[x, 2.0], $MachinePrecision] * N[(N[(N[Power[x, 2.0], $MachinePrecision] * N[(0.075 + N[(N[Power[x, 2.0], $MachinePrecision] * -0.044642857142857144), $MachinePrecision]), $MachinePrecision]), $MachinePrecision] - 0.16666666666666666), $MachinePrecision]), $MachinePrecision]), $MachinePrecision]), $MachinePrecision], N[Log[N[(x + N[Sqrt[1.0 ^ 2 + x ^ 2], $MachinePrecision]), $MachinePrecision]], $MachinePrecision]]]
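As a quick sanity check on the listings above, a small Python harness (ours, not part of Herbie's output) compares Alternative 1 against the library asinh across a wide range of magnitudes:

import math

def alt1(x):
	# Alternative 1, transcribed from the Python listing above.
	if x <= -0.0245:
		return -math.log(math.hypot(1.0, x) - x)
	elif x <= 0.024:
		return x * (1.0 + x**2 * (x**2 * (0.075 + x**2 * -0.044642857142857144) - 0.16666666666666666))
	else:
		return math.log(x + math.hypot(1.0, x))

for exp in range(-8, 9, 4):
	for sign in (-1.0, 1.0):
		x = sign * 10.0 ** exp
		rel = abs(alt1(x) - math.asinh(x)) / abs(math.asinh(x))
		print(f"x = {x: .1e}   relative error = {rel:.2e}")
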
Derivation
  1. Split input into 3 regimes
  2. if x < -0.024500000000000001

    1. Initial program (4.0%)

      \[\log \left(x + \sqrt{x \cdot x + 1}\right) \]
    2. Step-by-step derivation
      1. sqr-neg (4.0%)

        \[\leadsto \log \left(x + \sqrt{\color{blue}{\left(-x\right) \cdot \left(-x\right)} + 1}\right) \]
      2. +-commutative (4.0%)

        \[\leadsto \log \left(x + \sqrt{\color{blue}{1 + \left(-x\right) \cdot \left(-x\right)}}\right) \]
      3. sqr-neg (4.0%)

        \[\leadsto \log \left(x + \sqrt{1 + \color{blue}{x \cdot x}}\right) \]
      4. hypot-1-def (5.1%)

        \[\leadsto \log \left(x + \color{blue}{\mathsf{hypot}\left(1, x\right)}\right) \]
    3. Simplified (5.1%)

      \[\leadsto \color{blue}{\log \left(x + \mathsf{hypot}\left(1, x\right)\right)} \]
    4. Add Preprocessing
    5. Step-by-step derivation
      1. flip-+ (4.4%)

        \[\leadsto \log \color{blue}{\left(\frac{x \cdot x - \mathsf{hypot}\left(1, x\right) \cdot \mathsf{hypot}\left(1, x\right)}{x - \mathsf{hypot}\left(1, x\right)}\right)} \]
      2. clear-num (4.4%)

        \[\leadsto \log \color{blue}{\left(\frac{1}{\frac{x - \mathsf{hypot}\left(1, x\right)}{x \cdot x - \mathsf{hypot}\left(1, x\right) \cdot \mathsf{hypot}\left(1, x\right)}}\right)} \]
      3. log-div (2.7%)

        \[\leadsto \color{blue}{\log 1 - \log \left(\frac{x - \mathsf{hypot}\left(1, x\right)}{x \cdot x - \mathsf{hypot}\left(1, x\right) \cdot \mathsf{hypot}\left(1, x\right)}\right)} \]
      4. metadata-eval (2.7%)

        \[\leadsto \color{blue}{0} - \log \left(\frac{x - \mathsf{hypot}\left(1, x\right)}{x \cdot x - \mathsf{hypot}\left(1, x\right) \cdot \mathsf{hypot}\left(1, x\right)}\right) \]
      5. pow2 (2.7%)

        \[\leadsto 0 - \log \left(\frac{x - \mathsf{hypot}\left(1, x\right)}{\color{blue}{{x}^{2}} - \mathsf{hypot}\left(1, x\right) \cdot \mathsf{hypot}\left(1, x\right)}\right) \]
      6. hypot-1-def (3.5%)

        \[\leadsto 0 - \log \left(\frac{x - \mathsf{hypot}\left(1, x\right)}{{x}^{2} - \color{blue}{\sqrt{1 + x \cdot x}} \cdot \mathsf{hypot}\left(1, x\right)}\right) \]
      7. hypot-1-def (2.7%)

        \[\leadsto 0 - \log \left(\frac{x - \mathsf{hypot}\left(1, x\right)}{{x}^{2} - \sqrt{1 + x \cdot x} \cdot \color{blue}{\sqrt{1 + x \cdot x}}}\right) \]
      8. add-sqr-sqrt (3.5%)

        \[\leadsto 0 - \log \left(\frac{x - \mathsf{hypot}\left(1, x\right)}{{x}^{2} - \color{blue}{\left(1 + x \cdot x\right)}}\right) \]
      9. +-commutative (3.5%)

        \[\leadsto 0 - \log \left(\frac{x - \mathsf{hypot}\left(1, x\right)}{{x}^{2} - \color{blue}{\left(x \cdot x + 1\right)}}\right) \]
      10. fma-define (3.5%)

        \[\leadsto 0 - \log \left(\frac{x - \mathsf{hypot}\left(1, x\right)}{{x}^{2} - \color{blue}{\mathsf{fma}\left(x, x, 1\right)}}\right) \]
    6. Applied egg-rr (3.5%)

      \[\leadsto \color{blue}{0 - \log \left(\frac{x - \mathsf{hypot}\left(1, x\right)}{{x}^{2} - \mathsf{fma}\left(x, x, 1\right)}\right)} \]
    7. Step-by-step derivation
      1. neg-sub0 (3.5%)

        \[\leadsto \color{blue}{-\log \left(\frac{x - \mathsf{hypot}\left(1, x\right)}{{x}^{2} - \mathsf{fma}\left(x, x, 1\right)}\right)} \]
      2. div-sub (3.5%)

        \[\leadsto -\log \color{blue}{\left(\frac{x}{{x}^{2} - \mathsf{fma}\left(x, x, 1\right)} - \frac{\mathsf{hypot}\left(1, x\right)}{{x}^{2} - \mathsf{fma}\left(x, x, 1\right)}\right)} \]
      3. fma-undefine (3.5%)

        \[\leadsto -\log \left(\frac{x}{{x}^{2} - \color{blue}{\left(x \cdot x + 1\right)}} - \frac{\mathsf{hypot}\left(1, x\right)}{{x}^{2} - \mathsf{fma}\left(x, x, 1\right)}\right) \]
      4. unpow2 (3.5%)

        \[\leadsto -\log \left(\frac{x}{{x}^{2} - \left(\color{blue}{{x}^{2}} + 1\right)} - \frac{\mathsf{hypot}\left(1, x\right)}{{x}^{2} - \mathsf{fma}\left(x, x, 1\right)}\right) \]
      5. associate--r+ (3.5%)

        \[\leadsto -\log \left(\frac{x}{\color{blue}{\left({x}^{2} - {x}^{2}\right) - 1}} - \frac{\mathsf{hypot}\left(1, x\right)}{{x}^{2} - \mathsf{fma}\left(x, x, 1\right)}\right) \]
      6. +-inverses (3.5%)

        \[\leadsto -\log \left(\frac{x}{\color{blue}{0} - 1} - \frac{\mathsf{hypot}\left(1, x\right)}{{x}^{2} - \mathsf{fma}\left(x, x, 1\right)}\right) \]
      7. metadata-eval (3.5%)

        \[\leadsto -\log \left(\frac{x}{\color{blue}{-1}} - \frac{\mathsf{hypot}\left(1, x\right)}{{x}^{2} - \mathsf{fma}\left(x, x, 1\right)}\right) \]
      8. *-rgt-identity (3.5%)

        \[\leadsto -\log \left(\frac{\color{blue}{x \cdot 1}}{-1} - \frac{\mathsf{hypot}\left(1, x\right)}{{x}^{2} - \mathsf{fma}\left(x, x, 1\right)}\right) \]
      9. associate-/l* (3.5%)

        \[\leadsto -\log \left(\color{blue}{x \cdot \frac{1}{-1}} - \frac{\mathsf{hypot}\left(1, x\right)}{{x}^{2} - \mathsf{fma}\left(x, x, 1\right)}\right) \]
      10. metadata-eval (3.5%)

        \[\leadsto -\log \left(x \cdot \color{blue}{-1} - \frac{\mathsf{hypot}\left(1, x\right)}{{x}^{2} - \mathsf{fma}\left(x, x, 1\right)}\right) \]
      11. fma-undefine (3.5%)

        \[\leadsto -\log \left(x \cdot -1 - \frac{\mathsf{hypot}\left(1, x\right)}{{x}^{2} - \color{blue}{\left(x \cdot x + 1\right)}}\right) \]
      12. unpow2 (3.5%)

        \[\leadsto -\log \left(x \cdot -1 - \frac{\mathsf{hypot}\left(1, x\right)}{{x}^{2} - \left(\color{blue}{{x}^{2}} + 1\right)}\right) \]
      13. associate--r+ (56.1%)

        \[\leadsto -\log \left(x \cdot -1 - \frac{\mathsf{hypot}\left(1, x\right)}{\color{blue}{\left({x}^{2} - {x}^{2}\right) - 1}}\right) \]
      14. +-inverses (100.0%)

        \[\leadsto -\log \left(x \cdot -1 - \frac{\mathsf{hypot}\left(1, x\right)}{\color{blue}{0} - 1}\right) \]
      15. metadata-eval (100.0%)

        \[\leadsto -\log \left(x \cdot -1 - \frac{\mathsf{hypot}\left(1, x\right)}{\color{blue}{-1}}\right) \]
      16. *-rgt-identity (100.0%)

        \[\leadsto -\log \left(x \cdot -1 - \frac{\color{blue}{\mathsf{hypot}\left(1, x\right) \cdot 1}}{-1}\right) \]
      17. associate-/l* (100.0%)

        \[\leadsto -\log \left(x \cdot -1 - \color{blue}{\mathsf{hypot}\left(1, x\right) \cdot \frac{1}{-1}}\right) \]
      18. metadata-eval (100.0%)

        \[\leadsto -\log \left(x \cdot -1 - \mathsf{hypot}\left(1, x\right) \cdot \color{blue}{-1}\right) \]
      19. *-commutative (100.0%)

        \[\leadsto -\log \left(x \cdot -1 - \color{blue}{-1 \cdot \mathsf{hypot}\left(1, x\right)}\right) \]
      20. neg-mul-1 (100.0%)

        \[\leadsto -\log \left(x \cdot -1 - \color{blue}{\left(-\mathsf{hypot}\left(1, x\right)\right)}\right) \]
    8. Simplified (100.0%)

      \[\leadsto \color{blue}{-\log \left(\mathsf{hypot}\left(1, x\right) - x\right)} \]

    if -0.024500000000000001 < x < 0.024

    1. Initial program (10.9%)

      \[\log \left(x + \sqrt{x \cdot x + 1}\right) \]
    2. Step-by-step derivation
      1. sqr-neg (10.9%)

        \[\leadsto \log \left(x + \sqrt{\color{blue}{\left(-x\right) \cdot \left(-x\right)} + 1}\right) \]
      2. +-commutative (10.9%)

        \[\leadsto \log \left(x + \sqrt{\color{blue}{1 + \left(-x\right) \cdot \left(-x\right)}}\right) \]
      3. sqr-neg (10.9%)

        \[\leadsto \log \left(x + \sqrt{1 + \color{blue}{x \cdot x}}\right) \]
      4. hypot-1-def (10.9%)

        \[\leadsto \log \left(x + \color{blue}{\mathsf{hypot}\left(1, x\right)}\right) \]
    3. Simplified (10.9%)

      \[\leadsto \color{blue}{\log \left(x + \mathsf{hypot}\left(1, x\right)\right)} \]
    4. Add Preprocessing
    5. Taylor expanded in x around 0 (100.0%)

      \[\leadsto \color{blue}{x \cdot \left(1 + {x}^{2} \cdot \left({x}^{2} \cdot \left(0.075 + -0.044642857142857144 \cdot {x}^{2}\right) - 0.16666666666666666\right)\right)} \]

    if 0.024 < x

    1. Initial program (56.0%)

      \[\log \left(x + \sqrt{x \cdot x + 1}\right) \]
    2. Step-by-step derivation
      1. sqr-neg (56.0%)

        \[\leadsto \log \left(x + \sqrt{\color{blue}{\left(-x\right) \cdot \left(-x\right)} + 1}\right) \]
      2. +-commutative (56.0%)

        \[\leadsto \log \left(x + \sqrt{\color{blue}{1 + \left(-x\right) \cdot \left(-x\right)}}\right) \]
      3. sqr-neg (56.0%)

        \[\leadsto \log \left(x + \sqrt{1 + \color{blue}{x \cdot x}}\right) \]
      4. hypot-1-def (100.0%)

        \[\leadsto \log \left(x + \color{blue}{\mathsf{hypot}\left(1, x\right)}\right) \]
    3. Simplified (100.0%)

      \[\leadsto \color{blue}{\log \left(x + \mathsf{hypot}\left(1, x\right)\right)} \]
    4. Add Preprocessing
  3. Recombined 3 regimes into one program.
  4. Final simplification (100.0%)

    \[\leadsto \begin{array}{l} \mathbf{if}\;x \leq -0.0245:\\ \;\;\;\;-\log \left(\mathsf{hypot}\left(1, x\right) - x\right)\\ \mathbf{elif}\;x \leq 0.024:\\ \;\;\;\;x \cdot \left(1 + {x}^{2} \cdot \left({x}^{2} \cdot \left(0.075 + {x}^{2} \cdot -0.044642857142857144\right) - 0.16666666666666666\right)\right)\\ \mathbf{else}:\\ \;\;\;\;\log \left(x + \mathsf{hypot}\left(1, x\right)\right)\\ \end{array} \]
  5. Add Preprocessing
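For reference, the middle branch above is the degree-7 Maclaurin series of the hyperbolic arcsine, evaluated in Horner form:

\[\operatorname{asinh}(x) = x - \frac{1}{6}x^{3} + \frac{3}{40}x^{5} - \frac{5}{112}x^{7} + O\left(x^{9}\right) \]

Here 3/40 = 0.075 exactly, while -1/6 and -5/112 round to the binary64 constants -0.16666666666666666 and -0.044642857142857144 that appear in the code.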

Alternative 2: 99.9% accurate, 0.6× speedup

\[\begin{array}{l} \mathbf{if}\;x \leq -0.0076:\\ \;\;\;\;-\log \left(\mathsf{hypot}\left(1, x\right) - x\right)\\ \mathbf{elif}\;x \leq 0.006:\\ \;\;\;\;x + \mathsf{fma}\left({x}^{2}, 0.075, -0.16666666666666666\right) \cdot {x}^{3}\\ \mathbf{else}:\\ \;\;\;\;2 \cdot \log \left(\sqrt{x + \mathsf{hypot}\left(1, x\right)}\right)\\ \end{array} \]
(FPCore (x)
 :precision binary64
 (if (<= x -0.0076)
   (- (log (- (hypot 1.0 x) x)))
   (if (<= x 0.006)
     (+ x (* (fma (pow x 2.0) 0.075 -0.16666666666666666) (pow x 3.0)))
     (* 2.0 (log (sqrt (+ x (hypot 1.0 x))))))))
double code(double x) {
	double tmp;
	if (x <= -0.0076) {
		tmp = -log((hypot(1.0, x) - x));
	} else if (x <= 0.006) {
		tmp = x + (fma(pow(x, 2.0), 0.075, -0.16666666666666666) * pow(x, 3.0));
	} else {
		tmp = 2.0 * log(sqrt((x + hypot(1.0, x))));
	}
	return tmp;
}
function code(x)
	tmp = 0.0
	if (x <= -0.0076)
		tmp = Float64(-log(Float64(hypot(1.0, x) - x)));
	elseif (x <= 0.006)
		tmp = Float64(x + Float64(fma((x ^ 2.0), 0.075, -0.16666666666666666) * (x ^ 3.0)));
	else
		tmp = Float64(2.0 * log(sqrt(Float64(x + hypot(1.0, x)))));
	end
	return tmp
end
code[x_] := If[LessEqual[x, -0.0076], (-N[Log[N[(N[Sqrt[1.0 ^ 2 + x ^ 2], $MachinePrecision] - x), $MachinePrecision]], $MachinePrecision]), If[LessEqual[x, 0.006], N[(x + N[(N[(N[Power[x, 2.0], $MachinePrecision] * 0.075 + -0.16666666666666666), $MachinePrecision] * N[Power[x, 3.0], $MachinePrecision]), $MachinePrecision]), $MachinePrecision], N[(2.0 * N[Log[N[Sqrt[N[(x + N[Sqrt[1.0 ^ 2 + x ^ 2], $MachinePrecision]), $MachinePrecision]], $MachinePrecision]], $MachinePrecision]), $MachinePrecision]]]
Derivation
  1. Split input into 3 regimes
  2. if x < -0.00759999999999999998

    1. Initial program (4.0%)

      \[\log \left(x + \sqrt{x \cdot x + 1}\right) \]
    2. Step-by-step derivation
      1. sqr-neg (4.0%)

        \[\leadsto \log \left(x + \sqrt{\color{blue}{\left(-x\right) \cdot \left(-x\right)} + 1}\right) \]
      2. +-commutative (4.0%)

        \[\leadsto \log \left(x + \sqrt{\color{blue}{1 + \left(-x\right) \cdot \left(-x\right)}}\right) \]
      3. sqr-neg (4.0%)

        \[\leadsto \log \left(x + \sqrt{1 + \color{blue}{x \cdot x}}\right) \]
      4. hypot-1-def (5.1%)

        \[\leadsto \log \left(x + \color{blue}{\mathsf{hypot}\left(1, x\right)}\right) \]
    3. Simplified (5.1%)

      \[\leadsto \color{blue}{\log \left(x + \mathsf{hypot}\left(1, x\right)\right)} \]
    4. Add Preprocessing
    5. Step-by-step derivation
      1. flip-+ (4.4%)

        \[\leadsto \log \color{blue}{\left(\frac{x \cdot x - \mathsf{hypot}\left(1, x\right) \cdot \mathsf{hypot}\left(1, x\right)}{x - \mathsf{hypot}\left(1, x\right)}\right)} \]
      2. clear-num (4.4%)

        \[\leadsto \log \color{blue}{\left(\frac{1}{\frac{x - \mathsf{hypot}\left(1, x\right)}{x \cdot x - \mathsf{hypot}\left(1, x\right) \cdot \mathsf{hypot}\left(1, x\right)}}\right)} \]
      3. log-div (2.7%)

        \[\leadsto \color{blue}{\log 1 - \log \left(\frac{x - \mathsf{hypot}\left(1, x\right)}{x \cdot x - \mathsf{hypot}\left(1, x\right) \cdot \mathsf{hypot}\left(1, x\right)}\right)} \]
      4. metadata-eval (2.7%)

        \[\leadsto \color{blue}{0} - \log \left(\frac{x - \mathsf{hypot}\left(1, x\right)}{x \cdot x - \mathsf{hypot}\left(1, x\right) \cdot \mathsf{hypot}\left(1, x\right)}\right) \]
      5. pow2 (2.7%)

        \[\leadsto 0 - \log \left(\frac{x - \mathsf{hypot}\left(1, x\right)}{\color{blue}{{x}^{2}} - \mathsf{hypot}\left(1, x\right) \cdot \mathsf{hypot}\left(1, x\right)}\right) \]
      6. hypot-1-def (3.5%)

        \[\leadsto 0 - \log \left(\frac{x - \mathsf{hypot}\left(1, x\right)}{{x}^{2} - \color{blue}{\sqrt{1 + x \cdot x}} \cdot \mathsf{hypot}\left(1, x\right)}\right) \]
      7. hypot-1-def (2.7%)

        \[\leadsto 0 - \log \left(\frac{x - \mathsf{hypot}\left(1, x\right)}{{x}^{2} - \sqrt{1 + x \cdot x} \cdot \color{blue}{\sqrt{1 + x \cdot x}}}\right) \]
      8. add-sqr-sqrt (3.5%)

        \[\leadsto 0 - \log \left(\frac{x - \mathsf{hypot}\left(1, x\right)}{{x}^{2} - \color{blue}{\left(1 + x \cdot x\right)}}\right) \]
      9. +-commutative (3.5%)

        \[\leadsto 0 - \log \left(\frac{x - \mathsf{hypot}\left(1, x\right)}{{x}^{2} - \color{blue}{\left(x \cdot x + 1\right)}}\right) \]
      10. fma-define (3.5%)

        \[\leadsto 0 - \log \left(\frac{x - \mathsf{hypot}\left(1, x\right)}{{x}^{2} - \color{blue}{\mathsf{fma}\left(x, x, 1\right)}}\right) \]
    6. Applied egg-rr (3.5%)

      \[\leadsto \color{blue}{0 - \log \left(\frac{x - \mathsf{hypot}\left(1, x\right)}{{x}^{2} - \mathsf{fma}\left(x, x, 1\right)}\right)} \]
    7. Step-by-step derivation
      1. neg-sub0 (3.5%)

        \[\leadsto \color{blue}{-\log \left(\frac{x - \mathsf{hypot}\left(1, x\right)}{{x}^{2} - \mathsf{fma}\left(x, x, 1\right)}\right)} \]
      2. div-sub (3.5%)

        \[\leadsto -\log \color{blue}{\left(\frac{x}{{x}^{2} - \mathsf{fma}\left(x, x, 1\right)} - \frac{\mathsf{hypot}\left(1, x\right)}{{x}^{2} - \mathsf{fma}\left(x, x, 1\right)}\right)} \]
      3. fma-undefine (3.5%)

        \[\leadsto -\log \left(\frac{x}{{x}^{2} - \color{blue}{\left(x \cdot x + 1\right)}} - \frac{\mathsf{hypot}\left(1, x\right)}{{x}^{2} - \mathsf{fma}\left(x, x, 1\right)}\right) \]
      4. unpow2 (3.5%)

        \[\leadsto -\log \left(\frac{x}{{x}^{2} - \left(\color{blue}{{x}^{2}} + 1\right)} - \frac{\mathsf{hypot}\left(1, x\right)}{{x}^{2} - \mathsf{fma}\left(x, x, 1\right)}\right) \]
      5. associate--r+ (3.5%)

        \[\leadsto -\log \left(\frac{x}{\color{blue}{\left({x}^{2} - {x}^{2}\right) - 1}} - \frac{\mathsf{hypot}\left(1, x\right)}{{x}^{2} - \mathsf{fma}\left(x, x, 1\right)}\right) \]
      6. +-inverses (3.5%)

        \[\leadsto -\log \left(\frac{x}{\color{blue}{0} - 1} - \frac{\mathsf{hypot}\left(1, x\right)}{{x}^{2} - \mathsf{fma}\left(x, x, 1\right)}\right) \]
      7. metadata-eval (3.5%)

        \[\leadsto -\log \left(\frac{x}{\color{blue}{-1}} - \frac{\mathsf{hypot}\left(1, x\right)}{{x}^{2} - \mathsf{fma}\left(x, x, 1\right)}\right) \]
      8. *-rgt-identity (3.5%)

        \[\leadsto -\log \left(\frac{\color{blue}{x \cdot 1}}{-1} - \frac{\mathsf{hypot}\left(1, x\right)}{{x}^{2} - \mathsf{fma}\left(x, x, 1\right)}\right) \]
      9. associate-/l* (3.5%)

        \[\leadsto -\log \left(\color{blue}{x \cdot \frac{1}{-1}} - \frac{\mathsf{hypot}\left(1, x\right)}{{x}^{2} - \mathsf{fma}\left(x, x, 1\right)}\right) \]
      10. metadata-eval (3.5%)

        \[\leadsto -\log \left(x \cdot \color{blue}{-1} - \frac{\mathsf{hypot}\left(1, x\right)}{{x}^{2} - \mathsf{fma}\left(x, x, 1\right)}\right) \]
      11. fma-undefine (3.5%)

        \[\leadsto -\log \left(x \cdot -1 - \frac{\mathsf{hypot}\left(1, x\right)}{{x}^{2} - \color{blue}{\left(x \cdot x + 1\right)}}\right) \]
      12. unpow2 (3.5%)

        \[\leadsto -\log \left(x \cdot -1 - \frac{\mathsf{hypot}\left(1, x\right)}{{x}^{2} - \left(\color{blue}{{x}^{2}} + 1\right)}\right) \]
      13. associate--r+ (56.1%)

        \[\leadsto -\log \left(x \cdot -1 - \frac{\mathsf{hypot}\left(1, x\right)}{\color{blue}{\left({x}^{2} - {x}^{2}\right) - 1}}\right) \]
      14. +-inverses (100.0%)

        \[\leadsto -\log \left(x \cdot -1 - \frac{\mathsf{hypot}\left(1, x\right)}{\color{blue}{0} - 1}\right) \]
      15. metadata-eval (100.0%)

        \[\leadsto -\log \left(x \cdot -1 - \frac{\mathsf{hypot}\left(1, x\right)}{\color{blue}{-1}}\right) \]
      16. *-rgt-identity (100.0%)

        \[\leadsto -\log \left(x \cdot -1 - \frac{\color{blue}{\mathsf{hypot}\left(1, x\right) \cdot 1}}{-1}\right) \]
      17. associate-/l* (100.0%)

        \[\leadsto -\log \left(x \cdot -1 - \color{blue}{\mathsf{hypot}\left(1, x\right) \cdot \frac{1}{-1}}\right) \]
      18. metadata-eval (100.0%)

        \[\leadsto -\log \left(x \cdot -1 - \mathsf{hypot}\left(1, x\right) \cdot \color{blue}{-1}\right) \]
      19. *-commutative (100.0%)

        \[\leadsto -\log \left(x \cdot -1 - \color{blue}{-1 \cdot \mathsf{hypot}\left(1, x\right)}\right) \]
      20. neg-mul-1 (100.0%)

        \[\leadsto -\log \left(x \cdot -1 - \color{blue}{\left(-\mathsf{hypot}\left(1, x\right)\right)}\right) \]
    8. Simplified (100.0%)

      \[\leadsto \color{blue}{-\log \left(\mathsf{hypot}\left(1, x\right) - x\right)} \]

    if -0.00759999999999999998 < x < 0.0060000000000000001

    1. Initial program (10.2%)

      \[\log \left(x + \sqrt{x \cdot x + 1}\right) \]
    2. Step-by-step derivation
      1. sqr-neg (10.2%)

        \[\leadsto \log \left(x + \sqrt{\color{blue}{\left(-x\right) \cdot \left(-x\right)} + 1}\right) \]
      2. +-commutative (10.2%)

        \[\leadsto \log \left(x + \sqrt{\color{blue}{1 + \left(-x\right) \cdot \left(-x\right)}}\right) \]
      3. sqr-neg (10.2%)

        \[\leadsto \log \left(x + \sqrt{1 + \color{blue}{x \cdot x}}\right) \]
      4. hypot-1-def (10.2%)

        \[\leadsto \log \left(x + \color{blue}{\mathsf{hypot}\left(1, x\right)}\right) \]
    3. Simplified (10.2%)

      \[\leadsto \color{blue}{\log \left(x + \mathsf{hypot}\left(1, x\right)\right)} \]
    4. Add Preprocessing
    5. Taylor expanded in x around 0 (100.0%)

      \[\leadsto \color{blue}{x \cdot \left(1 + {x}^{2} \cdot \left(0.075 \cdot {x}^{2} - 0.16666666666666666\right)\right)} \]
    6. Step-by-step derivation
      1. distribute-rgt-in (100.0%)

        \[\leadsto \color{blue}{1 \cdot x + \left({x}^{2} \cdot \left(0.075 \cdot {x}^{2} - 0.16666666666666666\right)\right) \cdot x} \]
      2. *-lft-identity (100.0%)

        \[\leadsto \color{blue}{x} + \left({x}^{2} \cdot \left(0.075 \cdot {x}^{2} - 0.16666666666666666\right)\right) \cdot x \]
      3. *-commutative (100.0%)

        \[\leadsto x + \color{blue}{\left(\left(0.075 \cdot {x}^{2} - 0.16666666666666666\right) \cdot {x}^{2}\right)} \cdot x \]
      4. associate-*l* (100.0%)

        \[\leadsto x + \color{blue}{\left(0.075 \cdot {x}^{2} - 0.16666666666666666\right) \cdot \left({x}^{2} \cdot x\right)} \]
      5. *-commutative (100.0%)

        \[\leadsto x + \left(\color{blue}{{x}^{2} \cdot 0.075} - 0.16666666666666666\right) \cdot \left({x}^{2} \cdot x\right) \]
      6. fma-neg (100.0%)

        \[\leadsto x + \color{blue}{\mathsf{fma}\left({x}^{2}, 0.075, -0.16666666666666666\right)} \cdot \left({x}^{2} \cdot x\right) \]
      7. metadata-eval (100.0%)

        \[\leadsto x + \mathsf{fma}\left({x}^{2}, 0.075, \color{blue}{-0.16666666666666666}\right) \cdot \left({x}^{2} \cdot x\right) \]
      8. unpow2 (100.0%)

        \[\leadsto x + \mathsf{fma}\left({x}^{2}, 0.075, -0.16666666666666666\right) \cdot \left(\color{blue}{\left(x \cdot x\right)} \cdot x\right) \]
      9. unpow3 (100.0%)

        \[\leadsto x + \mathsf{fma}\left({x}^{2}, 0.075, -0.16666666666666666\right) \cdot \color{blue}{{x}^{3}} \]
    7. Simplified (100.0%)

      \[\leadsto \color{blue}{x + \mathsf{fma}\left({x}^{2}, 0.075, -0.16666666666666666\right) \cdot {x}^{3}} \]

    if 0.0060000000000000001 < x

    1. Initial program (56.4%)

      \[\log \left(x + \sqrt{x \cdot x + 1}\right) \]
    2. Step-by-step derivation
      1. sqr-neg (56.4%)

        \[\leadsto \log \left(x + \sqrt{\color{blue}{\left(-x\right) \cdot \left(-x\right)} + 1}\right) \]
      2. +-commutative (56.4%)

        \[\leadsto \log \left(x + \sqrt{\color{blue}{1 + \left(-x\right) \cdot \left(-x\right)}}\right) \]
      3. sqr-neg (56.4%)

        \[\leadsto \log \left(x + \sqrt{1 + \color{blue}{x \cdot x}}\right) \]
      4. hypot-1-def (99.9%)

        \[\leadsto \log \left(x + \color{blue}{\mathsf{hypot}\left(1, x\right)}\right) \]
    3. Simplified (99.9%)

      \[\leadsto \color{blue}{\log \left(x + \mathsf{hypot}\left(1, x\right)\right)} \]
    4. Add Preprocessing
    5. Step-by-step derivation
      1. add-sqr-sqrt (99.9%)

        \[\leadsto \log \color{blue}{\left(\sqrt{x + \mathsf{hypot}\left(1, x\right)} \cdot \sqrt{x + \mathsf{hypot}\left(1, x\right)}\right)} \]
      2. pow2 (99.9%)

        \[\leadsto \log \color{blue}{\left({\left(\sqrt{x + \mathsf{hypot}\left(1, x\right)}\right)}^{2}\right)} \]
      3. log-pow (99.9%)

        \[\leadsto \color{blue}{2 \cdot \log \left(\sqrt{x + \mathsf{hypot}\left(1, x\right)}\right)} \]
    6. Applied egg-rr (99.9%)

      \[\leadsto \color{blue}{2 \cdot \log \left(\sqrt{x + \mathsf{hypot}\left(1, x\right)}\right)} \]
  3. Recombined 3 regimes into one program.
  4. Final simplification (100.0%)

    \[\leadsto \begin{array}{l} \mathbf{if}\;x \leq -0.0076:\\ \;\;\;\;-\log \left(\mathsf{hypot}\left(1, x\right) - x\right)\\ \mathbf{elif}\;x \leq 0.006:\\ \;\;\;\;x + \mathsf{fma}\left({x}^{2}, 0.075, -0.16666666666666666\right) \cdot {x}^{3}\\ \mathbf{else}:\\ \;\;\;\;2 \cdot \log \left(\sqrt{x + \mathsf{hypot}\left(1, x\right)}\right)\\ \end{array} \]
  5. Add Preprocessing
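The fma in the middle branch groups the degree-5 series so that the multiply-add is computed with a single rounding; written out (with -0.16666666666666666 the binary64 value nearest -1/6):

\[x + \mathsf{fma}\left({x}^{2}, 0.075, -\tfrac{1}{6}\right) \cdot {x}^{3} = x - \frac{1}{6}x^{3} + \frac{3}{40}x^{5} \]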

Alternative 3: 99.9% accurate, 0.7× speedup

\[\begin{array}{l} \mathbf{if}\;x \leq -0.0076:\\ \;\;\;\;-\log \left(\mathsf{hypot}\left(1, x\right) - x\right)\\ \mathbf{elif}\;x \leq 0.006:\\ \;\;\;\;x \cdot \left(1 + {x}^{2} \cdot \left({x}^{2} \cdot 0.075 - 0.16666666666666666\right)\right)\\ \mathbf{else}:\\ \;\;\;\;2 \cdot \log \left(\sqrt{x + \mathsf{hypot}\left(1, x\right)}\right)\\ \end{array} \]
(FPCore (x)
 :precision binary64
 (if (<= x -0.0076)
   (- (log (- (hypot 1.0 x) x)))
   (if (<= x 0.006)
     (*
      x
      (+ 1.0 (* (pow x 2.0) (- (* (pow x 2.0) 0.075) 0.16666666666666666))))
     (* 2.0 (log (sqrt (+ x (hypot 1.0 x))))))))
double code(double x) {
	double tmp;
	if (x <= -0.0076) {
		tmp = -log((hypot(1.0, x) - x));
	} else if (x <= 0.006) {
		tmp = x * (1.0 + (pow(x, 2.0) * ((pow(x, 2.0) * 0.075) - 0.16666666666666666)));
	} else {
		tmp = 2.0 * log(sqrt((x + hypot(1.0, x))));
	}
	return tmp;
}
public static double code(double x) {
	double tmp;
	if (x <= -0.0076) {
		tmp = -Math.log((Math.hypot(1.0, x) - x));
	} else if (x <= 0.006) {
		tmp = x * (1.0 + (Math.pow(x, 2.0) * ((Math.pow(x, 2.0) * 0.075) - 0.16666666666666666)));
	} else {
		tmp = 2.0 * Math.log(Math.sqrt((x + Math.hypot(1.0, x))));
	}
	return tmp;
}
def code(x):
	tmp = 0
	if x <= -0.0076:
		tmp = -math.log((math.hypot(1.0, x) - x))
	elif x <= 0.006:
		tmp = x * (1.0 + (math.pow(x, 2.0) * ((math.pow(x, 2.0) * 0.075) - 0.16666666666666666)))
	else:
		tmp = 2.0 * math.log(math.sqrt((x + math.hypot(1.0, x))))
	return tmp
function code(x)
	tmp = 0.0
	if (x <= -0.0076)
		tmp = Float64(-log(Float64(hypot(1.0, x) - x)));
	elseif (x <= 0.006)
		tmp = Float64(x * Float64(1.0 + Float64((x ^ 2.0) * Float64(Float64((x ^ 2.0) * 0.075) - 0.16666666666666666))));
	else
		tmp = Float64(2.0 * log(sqrt(Float64(x + hypot(1.0, x)))));
	end
	return tmp
end
function tmp_2 = code(x)
	tmp = 0.0;
	if (x <= -0.0076)
		tmp = -log((hypot(1.0, x) - x));
	elseif (x <= 0.006)
		tmp = x * (1.0 + ((x ^ 2.0) * (((x ^ 2.0) * 0.075) - 0.16666666666666666)));
	else
		tmp = 2.0 * log(sqrt((x + hypot(1.0, x))));
	end
	tmp_2 = tmp;
end
code[x_] := If[LessEqual[x, -0.0076], (-N[Log[N[(N[Sqrt[1.0 ^ 2 + x ^ 2], $MachinePrecision] - x), $MachinePrecision]], $MachinePrecision]), If[LessEqual[x, 0.006], N[(x * N[(1.0 + N[(N[Power[x, 2.0], $MachinePrecision] * N[(N[(N[Power[x, 2.0], $MachinePrecision] * 0.075), $MachinePrecision] - 0.16666666666666666), $MachinePrecision]), $MachinePrecision]), $MachinePrecision]), $MachinePrecision], N[(2.0 * N[Log[N[Sqrt[N[(x + N[Sqrt[1.0 ^ 2 + x ^ 2], $MachinePrecision]), $MachinePrecision]], $MachinePrecision]], $MachinePrecision]), $MachinePrecision]]]
Derivation
  1. Split input into 3 regimes
  2. if x < -0.00759999999999999998

    1. Initial program (4.0%)

      \[\log \left(x + \sqrt{x \cdot x + 1}\right) \]
    2. Step-by-step derivation
      1. sqr-neg (4.0%)

        \[\leadsto \log \left(x + \sqrt{\color{blue}{\left(-x\right) \cdot \left(-x\right)} + 1}\right) \]
      2. +-commutative (4.0%)

        \[\leadsto \log \left(x + \sqrt{\color{blue}{1 + \left(-x\right) \cdot \left(-x\right)}}\right) \]
      3. sqr-neg (4.0%)

        \[\leadsto \log \left(x + \sqrt{1 + \color{blue}{x \cdot x}}\right) \]
      4. hypot-1-def (5.1%)

        \[\leadsto \log \left(x + \color{blue}{\mathsf{hypot}\left(1, x\right)}\right) \]
    3. Simplified (5.1%)

      \[\leadsto \color{blue}{\log \left(x + \mathsf{hypot}\left(1, x\right)\right)} \]
    4. Add Preprocessing
    5. Step-by-step derivation
      1. flip-+ (4.4%)

        \[\leadsto \log \color{blue}{\left(\frac{x \cdot x - \mathsf{hypot}\left(1, x\right) \cdot \mathsf{hypot}\left(1, x\right)}{x - \mathsf{hypot}\left(1, x\right)}\right)} \]
      2. clear-num (4.4%)

        \[\leadsto \log \color{blue}{\left(\frac{1}{\frac{x - \mathsf{hypot}\left(1, x\right)}{x \cdot x - \mathsf{hypot}\left(1, x\right) \cdot \mathsf{hypot}\left(1, x\right)}}\right)} \]
      3. log-div (2.7%)

        \[\leadsto \color{blue}{\log 1 - \log \left(\frac{x - \mathsf{hypot}\left(1, x\right)}{x \cdot x - \mathsf{hypot}\left(1, x\right) \cdot \mathsf{hypot}\left(1, x\right)}\right)} \]
      4. metadata-eval (2.7%)

        \[\leadsto \color{blue}{0} - \log \left(\frac{x - \mathsf{hypot}\left(1, x\right)}{x \cdot x - \mathsf{hypot}\left(1, x\right) \cdot \mathsf{hypot}\left(1, x\right)}\right) \]
      5. pow2 (2.7%)

        \[\leadsto 0 - \log \left(\frac{x - \mathsf{hypot}\left(1, x\right)}{\color{blue}{{x}^{2}} - \mathsf{hypot}\left(1, x\right) \cdot \mathsf{hypot}\left(1, x\right)}\right) \]
      6. hypot-1-def (3.5%)

        \[\leadsto 0 - \log \left(\frac{x - \mathsf{hypot}\left(1, x\right)}{{x}^{2} - \color{blue}{\sqrt{1 + x \cdot x}} \cdot \mathsf{hypot}\left(1, x\right)}\right) \]
      7. hypot-1-def (2.7%)

        \[\leadsto 0 - \log \left(\frac{x - \mathsf{hypot}\left(1, x\right)}{{x}^{2} - \sqrt{1 + x \cdot x} \cdot \color{blue}{\sqrt{1 + x \cdot x}}}\right) \]
      8. add-sqr-sqrt (3.5%)

        \[\leadsto 0 - \log \left(\frac{x - \mathsf{hypot}\left(1, x\right)}{{x}^{2} - \color{blue}{\left(1 + x \cdot x\right)}}\right) \]
      9. +-commutative (3.5%)

        \[\leadsto 0 - \log \left(\frac{x - \mathsf{hypot}\left(1, x\right)}{{x}^{2} - \color{blue}{\left(x \cdot x + 1\right)}}\right) \]
      10. fma-define (3.5%)

        \[\leadsto 0 - \log \left(\frac{x - \mathsf{hypot}\left(1, x\right)}{{x}^{2} - \color{blue}{\mathsf{fma}\left(x, x, 1\right)}}\right) \]
    6. Applied egg-rr (3.5%)

      \[\leadsto \color{blue}{0 - \log \left(\frac{x - \mathsf{hypot}\left(1, x\right)}{{x}^{2} - \mathsf{fma}\left(x, x, 1\right)}\right)} \]
    7. Step-by-step derivation
      1. neg-sub0 (3.5%)

        \[\leadsto \color{blue}{-\log \left(\frac{x - \mathsf{hypot}\left(1, x\right)}{{x}^{2} - \mathsf{fma}\left(x, x, 1\right)}\right)} \]
      2. div-sub (3.5%)

        \[\leadsto -\log \color{blue}{\left(\frac{x}{{x}^{2} - \mathsf{fma}\left(x, x, 1\right)} - \frac{\mathsf{hypot}\left(1, x\right)}{{x}^{2} - \mathsf{fma}\left(x, x, 1\right)}\right)} \]
      3. fma-undefine (3.5%)

        \[\leadsto -\log \left(\frac{x}{{x}^{2} - \color{blue}{\left(x \cdot x + 1\right)}} - \frac{\mathsf{hypot}\left(1, x\right)}{{x}^{2} - \mathsf{fma}\left(x, x, 1\right)}\right) \]
      4. unpow2 (3.5%)

        \[\leadsto -\log \left(\frac{x}{{x}^{2} - \left(\color{blue}{{x}^{2}} + 1\right)} - \frac{\mathsf{hypot}\left(1, x\right)}{{x}^{2} - \mathsf{fma}\left(x, x, 1\right)}\right) \]
      5. associate--r+ (3.5%)

        \[\leadsto -\log \left(\frac{x}{\color{blue}{\left({x}^{2} - {x}^{2}\right) - 1}} - \frac{\mathsf{hypot}\left(1, x\right)}{{x}^{2} - \mathsf{fma}\left(x, x, 1\right)}\right) \]
      6. +-inverses (3.5%)

        \[\leadsto -\log \left(\frac{x}{\color{blue}{0} - 1} - \frac{\mathsf{hypot}\left(1, x\right)}{{x}^{2} - \mathsf{fma}\left(x, x, 1\right)}\right) \]
      7. metadata-eval (3.5%)

        \[\leadsto -\log \left(\frac{x}{\color{blue}{-1}} - \frac{\mathsf{hypot}\left(1, x\right)}{{x}^{2} - \mathsf{fma}\left(x, x, 1\right)}\right) \]
      8. *-rgt-identity (3.5%)

        \[\leadsto -\log \left(\frac{\color{blue}{x \cdot 1}}{-1} - \frac{\mathsf{hypot}\left(1, x\right)}{{x}^{2} - \mathsf{fma}\left(x, x, 1\right)}\right) \]
      9. associate-/l* (3.5%)

        \[\leadsto -\log \left(\color{blue}{x \cdot \frac{1}{-1}} - \frac{\mathsf{hypot}\left(1, x\right)}{{x}^{2} - \mathsf{fma}\left(x, x, 1\right)}\right) \]
      10. metadata-eval (3.5%)

        \[\leadsto -\log \left(x \cdot \color{blue}{-1} - \frac{\mathsf{hypot}\left(1, x\right)}{{x}^{2} - \mathsf{fma}\left(x, x, 1\right)}\right) \]
      11. fma-undefine (3.5%)

        \[\leadsto -\log \left(x \cdot -1 - \frac{\mathsf{hypot}\left(1, x\right)}{{x}^{2} - \color{blue}{\left(x \cdot x + 1\right)}}\right) \]
      12. unpow2 (3.5%)

        \[\leadsto -\log \left(x \cdot -1 - \frac{\mathsf{hypot}\left(1, x\right)}{{x}^{2} - \left(\color{blue}{{x}^{2}} + 1\right)}\right) \]
      13. associate--r+ (56.1%)

        \[\leadsto -\log \left(x \cdot -1 - \frac{\mathsf{hypot}\left(1, x\right)}{\color{blue}{\left({x}^{2} - {x}^{2}\right) - 1}}\right) \]
      14. +-inverses (100.0%)

        \[\leadsto -\log \left(x \cdot -1 - \frac{\mathsf{hypot}\left(1, x\right)}{\color{blue}{0} - 1}\right) \]
      15. metadata-eval (100.0%)

        \[\leadsto -\log \left(x \cdot -1 - \frac{\mathsf{hypot}\left(1, x\right)}{\color{blue}{-1}}\right) \]
      16. *-rgt-identity (100.0%)

        \[\leadsto -\log \left(x \cdot -1 - \frac{\color{blue}{\mathsf{hypot}\left(1, x\right) \cdot 1}}{-1}\right) \]
      17. associate-/l* (100.0%)

        \[\leadsto -\log \left(x \cdot -1 - \color{blue}{\mathsf{hypot}\left(1, x\right) \cdot \frac{1}{-1}}\right) \]
      18. metadata-eval (100.0%)

        \[\leadsto -\log \left(x \cdot -1 - \mathsf{hypot}\left(1, x\right) \cdot \color{blue}{-1}\right) \]
      19. *-commutative (100.0%)

        \[\leadsto -\log \left(x \cdot -1 - \color{blue}{-1 \cdot \mathsf{hypot}\left(1, x\right)}\right) \]
      20. neg-mul-1 (100.0%)

        \[\leadsto -\log \left(x \cdot -1 - \color{blue}{\left(-\mathsf{hypot}\left(1, x\right)\right)}\right) \]
    8. Simplified (100.0%)

      \[\leadsto \color{blue}{-\log \left(\mathsf{hypot}\left(1, x\right) - x\right)} \]

    if -0.00759999999999999998 < x < 0.0060000000000000001

    1. Initial program (10.2%)

      \[\log \left(x + \sqrt{x \cdot x + 1}\right) \]
    2. Step-by-step derivation
      1. sqr-neg (10.2%)

        \[\leadsto \log \left(x + \sqrt{\color{blue}{\left(-x\right) \cdot \left(-x\right)} + 1}\right) \]
      2. +-commutative (10.2%)

        \[\leadsto \log \left(x + \sqrt{\color{blue}{1 + \left(-x\right) \cdot \left(-x\right)}}\right) \]
      3. sqr-neg (10.2%)

        \[\leadsto \log \left(x + \sqrt{1 + \color{blue}{x \cdot x}}\right) \]
      4. hypot-1-def (10.2%)

        \[\leadsto \log \left(x + \color{blue}{\mathsf{hypot}\left(1, x\right)}\right) \]
    3. Simplified (10.2%)

      \[\leadsto \color{blue}{\log \left(x + \mathsf{hypot}\left(1, x\right)\right)} \]
    4. Add Preprocessing
    5. Taylor expanded in x around 0 (100.0%)

      \[\leadsto \color{blue}{x \cdot \left(1 + {x}^{2} \cdot \left(0.075 \cdot {x}^{2} - 0.16666666666666666\right)\right)} \]

    if 0.0060000000000000001 < x

    1. Initial program (56.4%)

      \[\log \left(x + \sqrt{x \cdot x + 1}\right) \]
    2. Step-by-step derivation
      1. sqr-neg (56.4%)

        \[\leadsto \log \left(x + \sqrt{\color{blue}{\left(-x\right) \cdot \left(-x\right)} + 1}\right) \]
      2. +-commutative (56.4%)

        \[\leadsto \log \left(x + \sqrt{\color{blue}{1 + \left(-x\right) \cdot \left(-x\right)}}\right) \]
      3. sqr-neg (56.4%)

        \[\leadsto \log \left(x + \sqrt{1 + \color{blue}{x \cdot x}}\right) \]
      4. hypot-1-def (99.9%)

        \[\leadsto \log \left(x + \color{blue}{\mathsf{hypot}\left(1, x\right)}\right) \]
    3. Simplified (99.9%)

      \[\leadsto \color{blue}{\log \left(x + \mathsf{hypot}\left(1, x\right)\right)} \]
    4. Add Preprocessing
    5. Step-by-step derivation
      1. add-sqr-sqrt (99.9%)

        \[\leadsto \log \color{blue}{\left(\sqrt{x + \mathsf{hypot}\left(1, x\right)} \cdot \sqrt{x + \mathsf{hypot}\left(1, x\right)}\right)} \]
      2. pow2 (99.9%)

        \[\leadsto \log \color{blue}{\left({\left(\sqrt{x + \mathsf{hypot}\left(1, x\right)}\right)}^{2}\right)} \]
      3. log-pow (99.9%)

        \[\leadsto \color{blue}{2 \cdot \log \left(\sqrt{x + \mathsf{hypot}\left(1, x\right)}\right)} \]
    6. Applied egg-rr (99.9%)

      \[\leadsto \color{blue}{2 \cdot \log \left(\sqrt{x + \mathsf{hypot}\left(1, x\right)}\right)} \]
  3. Recombined 3 regimes into one program.
  4. Final simplification (100.0%)

    \[\leadsto \begin{array}{l} \mathbf{if}\;x \leq -0.0076:\\ \;\;\;\;-\log \left(\mathsf{hypot}\left(1, x\right) - x\right)\\ \mathbf{elif}\;x \leq 0.006:\\ \;\;\;\;x \cdot \left(1 + {x}^{2} \cdot \left({x}^{2} \cdot 0.075 - 0.16666666666666666\right)\right)\\ \mathbf{else}:\\ \;\;\;\;2 \cdot \log \left(\sqrt{x + \mathsf{hypot}\left(1, x\right)}\right)\\ \end{array} \]
  5. Add Preprocessing
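The else branch shared by Alternatives 2 and 3 rests on an exact identity for y > 0:

\[\log y = \log \left(\sqrt{y} \cdot \sqrt{y}\right) = 2 \cdot \log \sqrt{y} \]

Any accuracy difference against the plain \(\log \left(x + \mathsf{hypot}\left(1, x\right)\right)\) therefore comes from how sqrt and log round, not from the mathematics.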

Alternative 4: 99.9% accurate, 0.9× speedup

\[\begin{array}{l} \mathbf{if}\;x \leq -0.0076:\\ \;\;\;\;-\log \left(\mathsf{hypot}\left(1, x\right) - x\right)\\ \mathbf{elif}\;x \leq 0.0072:\\ \;\;\;\;x \cdot \left(1 + {x}^{2} \cdot \left({x}^{2} \cdot 0.075 - 0.16666666666666666\right)\right)\\ \mathbf{else}:\\ \;\;\;\;\log \left(x + \mathsf{hypot}\left(1, x\right)\right)\\ \end{array} \]
(FPCore (x)
 :precision binary64
 (if (<= x -0.0076)
   (- (log (- (hypot 1.0 x) x)))
   (if (<= x 0.0072)
     (*
      x
      (+ 1.0 (* (pow x 2.0) (- (* (pow x 2.0) 0.075) 0.16666666666666666))))
     (log (+ x (hypot 1.0 x))))))
double code(double x) {
	double tmp;
	if (x <= -0.0076) {
		tmp = -log((hypot(1.0, x) - x));
	} else if (x <= 0.0072) {
		tmp = x * (1.0 + (pow(x, 2.0) * ((pow(x, 2.0) * 0.075) - 0.16666666666666666)));
	} else {
		tmp = log((x + hypot(1.0, x)));
	}
	return tmp;
}
public static double code(double x) {
	double tmp;
	if (x <= -0.0076) {
		tmp = -Math.log((Math.hypot(1.0, x) - x));
	} else if (x <= 0.0072) {
		tmp = x * (1.0 + (Math.pow(x, 2.0) * ((Math.pow(x, 2.0) * 0.075) - 0.16666666666666666)));
	} else {
		tmp = Math.log((x + Math.hypot(1.0, x)));
	}
	return tmp;
}
def code(x):
	tmp = 0
	if x <= -0.0076:
		tmp = -math.log((math.hypot(1.0, x) - x))
	elif x <= 0.0072:
		tmp = x * (1.0 + (math.pow(x, 2.0) * ((math.pow(x, 2.0) * 0.075) - 0.16666666666666666)))
	else:
		tmp = math.log((x + math.hypot(1.0, x)))
	return tmp
function code(x)
	tmp = 0.0
	if (x <= -0.0076)
		tmp = Float64(-log(Float64(hypot(1.0, x) - x)));
	elseif (x <= 0.0072)
		tmp = Float64(x * Float64(1.0 + Float64((x ^ 2.0) * Float64(Float64((x ^ 2.0) * 0.075) - 0.16666666666666666))));
	else
		tmp = log(Float64(x + hypot(1.0, x)));
	end
	return tmp
end
function tmp_2 = code(x)
	tmp = 0.0;
	if (x <= -0.0076)
		tmp = -log((hypot(1.0, x) - x));
	elseif (x <= 0.0072)
		tmp = x * (1.0 + ((x ^ 2.0) * (((x ^ 2.0) * 0.075) - 0.16666666666666666)));
	else
		tmp = log((x + hypot(1.0, x)));
	end
	tmp_2 = tmp;
end
code[x_] := If[LessEqual[x, -0.0076], (-N[Log[N[(N[Sqrt[1.0 ^ 2 + x ^ 2], $MachinePrecision] - x), $MachinePrecision]], $MachinePrecision]), If[LessEqual[x, 0.0072], N[(x * N[(1.0 + N[(N[Power[x, 2.0], $MachinePrecision] * N[(N[(N[Power[x, 2.0], $MachinePrecision] * 0.075), $MachinePrecision] - 0.16666666666666666), $MachinePrecision]), $MachinePrecision]), $MachinePrecision]), $MachinePrecision], N[Log[N[(x + N[Sqrt[1.0 ^ 2 + x ^ 2], $MachinePrecision]), $MachinePrecision]], $MachinePrecision]]]
Derivation
  1. Split input into 3 regimes
  2. if x < -0.00759999999999999998

    1. Initial program (4.0%)

      \[\log \left(x + \sqrt{x \cdot x + 1}\right) \]
    2. Step-by-step derivation
      1. sqr-neg (4.0%)

        \[\leadsto \log \left(x + \sqrt{\color{blue}{\left(-x\right) \cdot \left(-x\right)} + 1}\right) \]
      2. +-commutative (4.0%)

        \[\leadsto \log \left(x + \sqrt{\color{blue}{1 + \left(-x\right) \cdot \left(-x\right)}}\right) \]
      3. sqr-neg (4.0%)

        \[\leadsto \log \left(x + \sqrt{1 + \color{blue}{x \cdot x}}\right) \]
      4. hypot-1-def (5.1%)

        \[\leadsto \log \left(x + \color{blue}{\mathsf{hypot}\left(1, x\right)}\right) \]
    3. Simplified (5.1%)

      \[\leadsto \color{blue}{\log \left(x + \mathsf{hypot}\left(1, x\right)\right)} \]
    4. Add Preprocessing
    5. Step-by-step derivation
      1. flip-+ (4.4%)

        \[\leadsto \log \color{blue}{\left(\frac{x \cdot x - \mathsf{hypot}\left(1, x\right) \cdot \mathsf{hypot}\left(1, x\right)}{x - \mathsf{hypot}\left(1, x\right)}\right)} \]
      2. clear-num (4.4%)

        \[\leadsto \log \color{blue}{\left(\frac{1}{\frac{x - \mathsf{hypot}\left(1, x\right)}{x \cdot x - \mathsf{hypot}\left(1, x\right) \cdot \mathsf{hypot}\left(1, x\right)}}\right)} \]
      3. log-div (2.7%)

        \[\leadsto \color{blue}{\log 1 - \log \left(\frac{x - \mathsf{hypot}\left(1, x\right)}{x \cdot x - \mathsf{hypot}\left(1, x\right) \cdot \mathsf{hypot}\left(1, x\right)}\right)} \]
      4. metadata-eval (2.7%)

        \[\leadsto \color{blue}{0} - \log \left(\frac{x - \mathsf{hypot}\left(1, x\right)}{x \cdot x - \mathsf{hypot}\left(1, x\right) \cdot \mathsf{hypot}\left(1, x\right)}\right) \]
      5. pow2 (2.7%)

        \[\leadsto 0 - \log \left(\frac{x - \mathsf{hypot}\left(1, x\right)}{\color{blue}{{x}^{2}} - \mathsf{hypot}\left(1, x\right) \cdot \mathsf{hypot}\left(1, x\right)}\right) \]
      6. hypot-1-def (3.5%)

        \[\leadsto 0 - \log \left(\frac{x - \mathsf{hypot}\left(1, x\right)}{{x}^{2} - \color{blue}{\sqrt{1 + x \cdot x}} \cdot \mathsf{hypot}\left(1, x\right)}\right) \]
      7. hypot-1-def (2.7%)

        \[\leadsto 0 - \log \left(\frac{x - \mathsf{hypot}\left(1, x\right)}{{x}^{2} - \sqrt{1 + x \cdot x} \cdot \color{blue}{\sqrt{1 + x \cdot x}}}\right) \]
      8. add-sqr-sqrt (3.5%)

        \[\leadsto 0 - \log \left(\frac{x - \mathsf{hypot}\left(1, x\right)}{{x}^{2} - \color{blue}{\left(1 + x \cdot x\right)}}\right) \]
      9. +-commutative (3.5%)

        \[\leadsto 0 - \log \left(\frac{x - \mathsf{hypot}\left(1, x\right)}{{x}^{2} - \color{blue}{\left(x \cdot x + 1\right)}}\right) \]
      10. fma-define (3.5%)

        \[\leadsto 0 - \log \left(\frac{x - \mathsf{hypot}\left(1, x\right)}{{x}^{2} - \color{blue}{\mathsf{fma}\left(x, x, 1\right)}}\right) \]
    6. Applied egg-rr (3.5%)

      \[\leadsto \color{blue}{0 - \log \left(\frac{x - \mathsf{hypot}\left(1, x\right)}{{x}^{2} - \mathsf{fma}\left(x, x, 1\right)}\right)} \]
    7. Step-by-step derivation
      1. neg-sub0 (3.5%)

        \[\leadsto \color{blue}{-\log \left(\frac{x - \mathsf{hypot}\left(1, x\right)}{{x}^{2} - \mathsf{fma}\left(x, x, 1\right)}\right)} \]
      2. div-sub (3.5%)

        \[\leadsto -\log \color{blue}{\left(\frac{x}{{x}^{2} - \mathsf{fma}\left(x, x, 1\right)} - \frac{\mathsf{hypot}\left(1, x\right)}{{x}^{2} - \mathsf{fma}\left(x, x, 1\right)}\right)} \]
      3. fma-undefine (3.5%)

        \[\leadsto -\log \left(\frac{x}{{x}^{2} - \color{blue}{\left(x \cdot x + 1\right)}} - \frac{\mathsf{hypot}\left(1, x\right)}{{x}^{2} - \mathsf{fma}\left(x, x, 1\right)}\right) \]
      4. unpow2 (3.5%)

        \[\leadsto -\log \left(\frac{x}{{x}^{2} - \left(\color{blue}{{x}^{2}} + 1\right)} - \frac{\mathsf{hypot}\left(1, x\right)}{{x}^{2} - \mathsf{fma}\left(x, x, 1\right)}\right) \]
      5. associate--r+ (3.5%)

        \[\leadsto -\log \left(\frac{x}{\color{blue}{\left({x}^{2} - {x}^{2}\right) - 1}} - \frac{\mathsf{hypot}\left(1, x\right)}{{x}^{2} - \mathsf{fma}\left(x, x, 1\right)}\right) \]
      6. +-inverses (3.5%)

        \[\leadsto -\log \left(\frac{x}{\color{blue}{0} - 1} - \frac{\mathsf{hypot}\left(1, x\right)}{{x}^{2} - \mathsf{fma}\left(x, x, 1\right)}\right) \]
      7. metadata-eval (3.5%)

        \[\leadsto -\log \left(\frac{x}{\color{blue}{-1}} - \frac{\mathsf{hypot}\left(1, x\right)}{{x}^{2} - \mathsf{fma}\left(x, x, 1\right)}\right) \]
      8. *-rgt-identity (3.5%)

        \[\leadsto -\log \left(\frac{\color{blue}{x \cdot 1}}{-1} - \frac{\mathsf{hypot}\left(1, x\right)}{{x}^{2} - \mathsf{fma}\left(x, x, 1\right)}\right) \]
      9. associate-/l* (3.5%)

        \[\leadsto -\log \left(\color{blue}{x \cdot \frac{1}{-1}} - \frac{\mathsf{hypot}\left(1, x\right)}{{x}^{2} - \mathsf{fma}\left(x, x, 1\right)}\right) \]
      10. metadata-eval (3.5%)

        \[\leadsto -\log \left(x \cdot \color{blue}{-1} - \frac{\mathsf{hypot}\left(1, x\right)}{{x}^{2} - \mathsf{fma}\left(x, x, 1\right)}\right) \]
      11. fma-undefine (3.5%)

        \[\leadsto -\log \left(x \cdot -1 - \frac{\mathsf{hypot}\left(1, x\right)}{{x}^{2} - \color{blue}{\left(x \cdot x + 1\right)}}\right) \]
      12. unpow2 (3.5%)

        \[\leadsto -\log \left(x \cdot -1 - \frac{\mathsf{hypot}\left(1, x\right)}{{x}^{2} - \left(\color{blue}{{x}^{2}} + 1\right)}\right) \]
      13. associate--r+ (56.1%)

        \[\leadsto -\log \left(x \cdot -1 - \frac{\mathsf{hypot}\left(1, x\right)}{\color{blue}{\left({x}^{2} - {x}^{2}\right) - 1}}\right) \]
      14. +-inverses (100.0%)

        \[\leadsto -\log \left(x \cdot -1 - \frac{\mathsf{hypot}\left(1, x\right)}{\color{blue}{0} - 1}\right) \]
      15. metadata-eval (100.0%)

        \[\leadsto -\log \left(x \cdot -1 - \frac{\mathsf{hypot}\left(1, x\right)}{\color{blue}{-1}}\right) \]
      16. *-rgt-identity (100.0%)

        \[\leadsto -\log \left(x \cdot -1 - \frac{\color{blue}{\mathsf{hypot}\left(1, x\right) \cdot 1}}{-1}\right) \]
      17. associate-/l* (100.0%)

        \[\leadsto -\log \left(x \cdot -1 - \color{blue}{\mathsf{hypot}\left(1, x\right) \cdot \frac{1}{-1}}\right) \]
      18. metadata-eval (100.0%)

        \[\leadsto -\log \left(x \cdot -1 - \mathsf{hypot}\left(1, x\right) \cdot \color{blue}{-1}\right) \]
      19. *-commutative (100.0%)

        \[\leadsto -\log \left(x \cdot -1 - \color{blue}{-1 \cdot \mathsf{hypot}\left(1, x\right)}\right) \]
      20. neg-mul-1 (100.0%)

        \[\leadsto -\log \left(x \cdot -1 - \color{blue}{\left(-\mathsf{hypot}\left(1, x\right)\right)}\right) \]
    8. Simplified (100.0%)

      \[\leadsto \color{blue}{-\log \left(\mathsf{hypot}\left(1, x\right) - x\right)} \]

    if -0.00759999999999999998 < x < 0.0071999999999999998

    1. Initial program (10.2%)

      \[\log \left(x + \sqrt{x \cdot x + 1}\right) \]
    2. Step-by-step derivation
      1. sqr-neg (10.2%)

        \[\leadsto \log \left(x + \sqrt{\color{blue}{\left(-x\right) \cdot \left(-x\right)} + 1}\right) \]
      2. +-commutative (10.2%)

        \[\leadsto \log \left(x + \sqrt{\color{blue}{1 + \left(-x\right) \cdot \left(-x\right)}}\right) \]
      3. sqr-neg (10.2%)

        \[\leadsto \log \left(x + \sqrt{1 + \color{blue}{x \cdot x}}\right) \]
      4. hypot-1-def (10.2%)

        \[\leadsto \log \left(x + \color{blue}{\mathsf{hypot}\left(1, x\right)}\right) \]
    3. Simplified (10.2%)

      \[\leadsto \color{blue}{\log \left(x + \mathsf{hypot}\left(1, x\right)\right)} \]
    4. Add Preprocessing
    5. Taylor expanded in x around 0 (100.0%)

      \[\leadsto \color{blue}{x \cdot \left(1 + {x}^{2} \cdot \left(0.075 \cdot {x}^{2} - 0.16666666666666666\right)\right)} \]

    if 0.0071999999999999998 < x

    1. Initial program (56.4%)

      \[\log \left(x + \sqrt{x \cdot x + 1}\right) \]
    2. Step-by-step derivation
      1. sqr-neg (56.4%)

        \[\leadsto \log \left(x + \sqrt{\color{blue}{\left(-x\right) \cdot \left(-x\right)} + 1}\right) \]
      2. +-commutative (56.4%)

        \[\leadsto \log \left(x + \sqrt{\color{blue}{1 + \left(-x\right) \cdot \left(-x\right)}}\right) \]
      3. sqr-neg (56.4%)

        \[\leadsto \log \left(x + \sqrt{1 + \color{blue}{x \cdot x}}\right) \]
      4. hypot-1-def (99.9%)

        \[\leadsto \log \left(x + \color{blue}{\mathsf{hypot}\left(1, x\right)}\right) \]
    3. Simplified (99.9%)

      \[\leadsto \color{blue}{\log \left(x + \mathsf{hypot}\left(1, x\right)\right)} \]
    4. Add Preprocessing
  3. Recombined 3 regimes into one program.
  4. Final simplification (100.0%)

    \[\leadsto \begin{array}{l} \mathbf{if}\;x \leq -0.0076:\\ \;\;\;\;-\log \left(\mathsf{hypot}\left(1, x\right) - x\right)\\ \mathbf{elif}\;x \leq 0.0072:\\ \;\;\;\;x \cdot \left(1 + {x}^{2} \cdot \left({x}^{2} \cdot 0.075 - 0.16666666666666666\right)\right)\\ \mathbf{else}:\\ \;\;\;\;\log \left(x + \mathsf{hypot}\left(1, x\right)\right)\\ \end{array} \]
  5. Add Preprocessing
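Alternative 4 keeps Alternative 3's polynomial but swaps the 2·log·sqrt tail for a plain log, trading a little of the measured accuracy pattern for speed (0.9× vs 0.7×). Herbie measures speedups with its own compiled harness; a rough, machine-dependent Python sketch (ours, not Herbie's methodology) for comparing just the two tails:

import timeit

setup = "import math; xs = [0.01 * i + 0.5 for i in range(1000)]"
alt3_tail = "for x in xs: 2.0 * math.log(math.sqrt(x + math.hypot(1.0, x)))"
alt4_tail = "for x in xs: math.log(x + math.hypot(1.0, x))"

# Absolute times are machine-dependent; only the ratio is suggestive,
# and even it need not match Herbie's C-based measurements.
print("alt3 tail:", timeit.timeit(alt3_tail, setup=setup, number=200))
print("alt4 tail:", timeit.timeit(alt4_tail, setup=setup, number=200))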

Alternative 5: 99.6% accurate, 1.0× speedup

\[\begin{array}{l} \mathbf{if}\;x \leq -1.25:\\ \;\;\;\;\log \left(\frac{-0.5}{x}\right)\\ \mathbf{elif}\;x \leq 0.00105:\\ \;\;\;\;x + -0.16666666666666666 \cdot {x}^{3}\\ \mathbf{else}:\\ \;\;\;\;\log \left(x + \mathsf{hypot}\left(1, x\right)\right)\\ \end{array} \]
(FPCore (x)
 :precision binary64
 (if (<= x -1.25)
   (log (/ -0.5 x))
   (if (<= x 0.00105)
     (+ x (* -0.16666666666666666 (pow x 3.0)))
     (log (+ x (hypot 1.0 x))))))
double code(double x) {
	double tmp;
	if (x <= -1.25) {
		tmp = log((-0.5 / x));
	} else if (x <= 0.00105) {
		tmp = x + (-0.16666666666666666 * pow(x, 3.0));
	} else {
		tmp = log((x + hypot(1.0, x)));
	}
	return tmp;
}
public static double code(double x) {
	double tmp;
	if (x <= -1.25) {
		tmp = Math.log((-0.5 / x));
	} else if (x <= 0.00105) {
		tmp = x + (-0.16666666666666666 * Math.pow(x, 3.0));
	} else {
		tmp = Math.log((x + Math.hypot(1.0, x)));
	}
	return tmp;
}
def code(x):
	tmp = 0
	if x <= -1.25:
		tmp = math.log((-0.5 / x))
	elif x <= 0.00105:
		tmp = x + (-0.16666666666666666 * math.pow(x, 3.0))
	else:
		tmp = math.log((x + math.hypot(1.0, x)))
	return tmp
function code(x)
	tmp = 0.0
	if (x <= -1.25)
		tmp = log(Float64(-0.5 / x));
	elseif (x <= 0.00105)
		tmp = Float64(x + Float64(-0.16666666666666666 * (x ^ 3.0)));
	else
		tmp = log(Float64(x + hypot(1.0, x)));
	end
	return tmp
end
function tmp_2 = code(x)
	tmp = 0.0;
	if (x <= -1.25)
		tmp = log((-0.5 / x));
	elseif (x <= 0.00105)
		tmp = x + (-0.16666666666666666 * (x ^ 3.0));
	else
		tmp = log((x + hypot(1.0, x)));
	end
	tmp_2 = tmp;
end
code[x_] := If[LessEqual[x, -1.25], N[Log[N[(-0.5 / x), $MachinePrecision]], $MachinePrecision], If[LessEqual[x, 0.00105], N[(x + N[(-0.16666666666666666 * N[Power[x, 3.0], $MachinePrecision]), $MachinePrecision]), $MachinePrecision], N[Log[N[(x + N[Sqrt[1.0 ^ 2 + x ^ 2], $MachinePrecision]), $MachinePrecision]], $MachinePrecision]]]
Derivation
  1. Split input into 3 regimes
  2. if x < -1.25

    1. Initial program (4.0%)

      \[\log \left(x + \sqrt{x \cdot x + 1}\right) \]
    2. Step-by-step derivation
      1. sqr-neg (4.0%)

        \[\leadsto \log \left(x + \sqrt{\color{blue}{\left(-x\right) \cdot \left(-x\right)} + 1}\right) \]
      2. +-commutative (4.0%)

        \[\leadsto \log \left(x + \sqrt{\color{blue}{1 + \left(-x\right) \cdot \left(-x\right)}}\right) \]
      3. sqr-neg (4.0%)

        \[\leadsto \log \left(x + \sqrt{1 + \color{blue}{x \cdot x}}\right) \]
      4. hypot-1-def (5.1%)

        \[\leadsto \log \left(x + \color{blue}{\mathsf{hypot}\left(1, x\right)}\right) \]
    3. Simplified (5.1%)

      \[\leadsto \color{blue}{\log \left(x + \mathsf{hypot}\left(1, x\right)\right)} \]
    4. Add Preprocessing
    5. Taylor expanded in x around -inf (99.1%)

      \[\leadsto \log \color{blue}{\left(\frac{-0.5}{x}\right)} \]

    if -1.25 < x < 0.00104999999999999994

    1. Initial program (10.2%)

      \[\log \left(x + \sqrt{x \cdot x + 1}\right) \]
    2. Step-by-step derivation
      1. sqr-neg (10.2%)

        \[\leadsto \log \left(x + \sqrt{\color{blue}{\left(-x\right) \cdot \left(-x\right)} + 1}\right) \]
      2. +-commutative (10.2%)

        \[\leadsto \log \left(x + \sqrt{\color{blue}{1 + \left(-x\right) \cdot \left(-x\right)}}\right) \]
      3. sqr-neg (10.2%)

        \[\leadsto \log \left(x + \sqrt{1 + \color{blue}{x \cdot x}}\right) \]
      4. hypot-1-def (10.2%)

        \[\leadsto \log \left(x + \color{blue}{\mathsf{hypot}\left(1, x\right)}\right) \]
    3. Simplified (10.2%)

      \[\leadsto \color{blue}{\log \left(x + \mathsf{hypot}\left(1, x\right)\right)} \]
    4. Add Preprocessing
    5. Taylor expanded in x around 0 (99.8%)

      \[\leadsto \color{blue}{x \cdot \left(1 + -0.16666666666666666 \cdot {x}^{2}\right)} \]
    6. Step-by-step derivation
      1. distribute-rgt-in (99.8%)

        \[\leadsto \color{blue}{1 \cdot x + \left(-0.16666666666666666 \cdot {x}^{2}\right) \cdot x} \]
      2. *-lft-identity (99.8%)

        \[\leadsto \color{blue}{x} + \left(-0.16666666666666666 \cdot {x}^{2}\right) \cdot x \]
      3. associate-*l* (99.8%)

        \[\leadsto x + \color{blue}{-0.16666666666666666 \cdot \left({x}^{2} \cdot x\right)} \]
      4. unpow2 (99.8%)

        \[\leadsto x + -0.16666666666666666 \cdot \left(\color{blue}{\left(x \cdot x\right)} \cdot x\right) \]
      5. unpow3 (99.8%)

        \[\leadsto x + -0.16666666666666666 \cdot \color{blue}{{x}^{3}} \]
    7. Simplified99.8%

      \[\leadsto \color{blue}{x + -0.16666666666666666 \cdot {x}^{3}} \]

    if 0.00104999999999999994 < x

    1. Initial program 56.4%

      \[\log \left(x + \sqrt{x \cdot x + 1}\right) \]
    2. Step-by-step derivation
      1. sqr-neg 56.4%

        \[\leadsto \log \left(x + \sqrt{\color{blue}{\left(-x\right) \cdot \left(-x\right)} + 1}\right) \]
      2. +-commutative 56.4%

        \[\leadsto \log \left(x + \sqrt{\color{blue}{1 + \left(-x\right) \cdot \left(-x\right)}}\right) \]
      3. sqr-neg 56.4%

        \[\leadsto \log \left(x + \sqrt{1 + \color{blue}{x \cdot x}}\right) \]
      4. hypot-1-def 99.9%

        \[\leadsto \log \left(x + \color{blue}{\mathsf{hypot}\left(1, x\right)}\right) \]
    3. Simplified 99.9%

      \[\leadsto \color{blue}{\log \left(x + \mathsf{hypot}\left(1, x\right)\right)} \]
    4. Add Preprocessing
  3. Recombined 3 regimes into one program.
  4. Final simplification 99.7%

    \[\leadsto \begin{array}{l} \mathbf{if}\;x \leq -1.25:\\ \;\;\;\;\log \left(\frac{-0.5}{x}\right)\\ \mathbf{elif}\;x \leq 0.00105:\\ \;\;\;\;x + -0.16666666666666666 \cdot {x}^{3}\\ \mathbf{else}:\\ \;\;\;\;\log \left(x + \mathsf{hypot}\left(1, x\right)\right)\\ \end{array} \]
  5. Add Preprocessing
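
A note on step 5 of the first regime (a sketch of the algebra, not part of Herbie's output): multiplying and dividing by the conjugate shows why the Taylor expansion at -inf takes this form,

\[x + \sqrt{x^{2} + 1} = \frac{1}{\sqrt{x^{2} + 1} - x} \approx \frac{1}{-2x} = \frac{-0.5}{x} \quad \left(x \to -\infty\right) \]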

Alternative 6: 99.9% accurate, 1.0× speedup

\[\begin{array}{l} \\ \begin{array}{l} \mathbf{if}\;x \leq -0.0009:\\ \;\;\;\;-\log \left(\mathsf{hypot}\left(1, x\right) - x\right)\\ \mathbf{elif}\;x \leq 0.00105:\\ \;\;\;\;x + -0.16666666666666666 \cdot {x}^{3}\\ \mathbf{else}:\\ \;\;\;\;\log \left(x + \mathsf{hypot}\left(1, x\right)\right)\\ \end{array} \end{array} \]
(FPCore (x)
 :precision binary64
 (if (<= x -0.0009)
   (- (log (- (hypot 1.0 x) x)))
   (if (<= x 0.00105)
     (+ x (* -0.16666666666666666 (pow x 3.0)))
     (log (+ x (hypot 1.0 x))))))
double code(double x) {
	double tmp;
	if (x <= -0.0009) {
		tmp = -log((hypot(1.0, x) - x));
	} else if (x <= 0.00105) {
		tmp = x + (-0.16666666666666666 * pow(x, 3.0));
	} else {
		tmp = log((x + hypot(1.0, x)));
	}
	return tmp;
}
public static double code(double x) {
	double tmp;
	if (x <= -0.0009) {
		tmp = -Math.log((Math.hypot(1.0, x) - x));
	} else if (x <= 0.00105) {
		tmp = x + (-0.16666666666666666 * Math.pow(x, 3.0));
	} else {
		tmp = Math.log((x + Math.hypot(1.0, x)));
	}
	return tmp;
}
def code(x):
	tmp = 0
	if x <= -0.0009:
		tmp = -math.log((math.hypot(1.0, x) - x))
	elif x <= 0.00105:
		tmp = x + (-0.16666666666666666 * math.pow(x, 3.0))
	else:
		tmp = math.log((x + math.hypot(1.0, x)))
	return tmp
function code(x)
	tmp = 0.0
	if (x <= -0.0009)
		tmp = Float64(-log(Float64(hypot(1.0, x) - x)));
	elseif (x <= 0.00105)
		tmp = Float64(x + Float64(-0.16666666666666666 * (x ^ 3.0)));
	else
		tmp = log(Float64(x + hypot(1.0, x)));
	end
	return tmp
end
function tmp_2 = code(x)
	tmp = 0.0;
	if (x <= -0.0009)
		tmp = -log((hypot(1.0, x) - x));
	elseif (x <= 0.00105)
		tmp = x + (-0.16666666666666666 * (x ^ 3.0));
	else
		tmp = log((x + hypot(1.0, x)));
	end
	tmp_2 = tmp;
end
code[x_] := If[LessEqual[x, -0.0009], (-N[Log[N[(N[Sqrt[1.0 ^ 2 + x ^ 2], $MachinePrecision] - x), $MachinePrecision]], $MachinePrecision]), If[LessEqual[x, 0.00105], N[(x + N[(-0.16666666666666666 * N[Power[x, 3.0], $MachinePrecision]), $MachinePrecision]), $MachinePrecision], N[Log[N[(x + N[Sqrt[1.0 ^ 2 + x ^ 2], $MachinePrecision]), $MachinePrecision]], $MachinePrecision]]]
\begin{array}{l}

\\
\begin{array}{l}
\mathbf{if}\;x \leq -0.0009:\\
\;\;\;\;-\log \left(\mathsf{hypot}\left(1, x\right) - x\right)\\

\mathbf{elif}\;x \leq 0.00105:\\
\;\;\;\;x + -0.16666666666666666 \cdot {x}^{3}\\

\mathbf{else}:\\
\;\;\;\;\log \left(x + \mathsf{hypot}\left(1, x\right)\right)\\


\end{array}
\end{array}
Derivation
  1. Split input into 3 regimes
  2. if x < -8.9999999999999998e-4

    1. Initial program 5.5%

      \[\log \left(x + \sqrt{x \cdot x + 1}\right) \]
    2. Step-by-step derivation
      1. sqr-neg 5.5%

        \[\leadsto \log \left(x + \sqrt{\color{blue}{\left(-x\right) \cdot \left(-x\right)} + 1}\right) \]
      2. +-commutative 5.5%

        \[\leadsto \log \left(x + \sqrt{\color{blue}{1 + \left(-x\right) \cdot \left(-x\right)}}\right) \]
      3. sqr-neg 5.5%

        \[\leadsto \log \left(x + \sqrt{1 + \color{blue}{x \cdot x}}\right) \]
      4. hypot-1-def 6.6%

        \[\leadsto \log \left(x + \color{blue}{\mathsf{hypot}\left(1, x\right)}\right) \]
    3. Simplified 6.6%

      \[\leadsto \color{blue}{\log \left(x + \mathsf{hypot}\left(1, x\right)\right)} \]
    4. Add Preprocessing
    5. Step-by-step derivation
      1. flip-+ 5.8%

        \[\leadsto \log \color{blue}{\left(\frac{x \cdot x - \mathsf{hypot}\left(1, x\right) \cdot \mathsf{hypot}\left(1, x\right)}{x - \mathsf{hypot}\left(1, x\right)}\right)} \]
      2. clear-num 5.8%

        \[\leadsto \log \color{blue}{\left(\frac{1}{\frac{x - \mathsf{hypot}\left(1, x\right)}{x \cdot x - \mathsf{hypot}\left(1, x\right) \cdot \mathsf{hypot}\left(1, x\right)}}\right)} \]
      3. log-div 4.2%

        \[\leadsto \color{blue}{\log 1 - \log \left(\frac{x - \mathsf{hypot}\left(1, x\right)}{x \cdot x - \mathsf{hypot}\left(1, x\right) \cdot \mathsf{hypot}\left(1, x\right)}\right)} \]
      4. metadata-eval 4.2%

        \[\leadsto \color{blue}{0} - \log \left(\frac{x - \mathsf{hypot}\left(1, x\right)}{x \cdot x - \mathsf{hypot}\left(1, x\right) \cdot \mathsf{hypot}\left(1, x\right)}\right) \]
      5. pow2 4.2%

        \[\leadsto 0 - \log \left(\frac{x - \mathsf{hypot}\left(1, x\right)}{\color{blue}{{x}^{2}} - \mathsf{hypot}\left(1, x\right) \cdot \mathsf{hypot}\left(1, x\right)}\right) \]
      6. hypot-1-def 4.9%

        \[\leadsto 0 - \log \left(\frac{x - \mathsf{hypot}\left(1, x\right)}{{x}^{2} - \color{blue}{\sqrt{1 + x \cdot x}} \cdot \mathsf{hypot}\left(1, x\right)}\right) \]
      7. hypot-1-def 4.2%

        \[\leadsto 0 - \log \left(\frac{x - \mathsf{hypot}\left(1, x\right)}{{x}^{2} - \sqrt{1 + x \cdot x} \cdot \color{blue}{\sqrt{1 + x \cdot x}}}\right) \]
      8. add-sqr-sqrt 4.9%

        \[\leadsto 0 - \log \left(\frac{x - \mathsf{hypot}\left(1, x\right)}{{x}^{2} - \color{blue}{\left(1 + x \cdot x\right)}}\right) \]
      9. +-commutative 4.9%

        \[\leadsto 0 - \log \left(\frac{x - \mathsf{hypot}\left(1, x\right)}{{x}^{2} - \color{blue}{\left(x \cdot x + 1\right)}}\right) \]
      10. fma-define 4.9%

        \[\leadsto 0 - \log \left(\frac{x - \mathsf{hypot}\left(1, x\right)}{{x}^{2} - \color{blue}{\mathsf{fma}\left(x, x, 1\right)}}\right) \]
    6. Applied egg-rr 4.9%

      \[\leadsto \color{blue}{0 - \log \left(\frac{x - \mathsf{hypot}\left(1, x\right)}{{x}^{2} - \mathsf{fma}\left(x, x, 1\right)}\right)} \]
    7. Step-by-step derivation
      1. neg-sub0 4.9%

        \[\leadsto \color{blue}{-\log \left(\frac{x - \mathsf{hypot}\left(1, x\right)}{{x}^{2} - \mathsf{fma}\left(x, x, 1\right)}\right)} \]
      2. div-sub 4.9%

        \[\leadsto -\log \color{blue}{\left(\frac{x}{{x}^{2} - \mathsf{fma}\left(x, x, 1\right)} - \frac{\mathsf{hypot}\left(1, x\right)}{{x}^{2} - \mathsf{fma}\left(x, x, 1\right)}\right)} \]
      3. fma-undefine 4.9%

        \[\leadsto -\log \left(\frac{x}{{x}^{2} - \color{blue}{\left(x \cdot x + 1\right)}} - \frac{\mathsf{hypot}\left(1, x\right)}{{x}^{2} - \mathsf{fma}\left(x, x, 1\right)}\right) \]
      4. unpow2 4.9%

        \[\leadsto -\log \left(\frac{x}{{x}^{2} - \left(\color{blue}{{x}^{2}} + 1\right)} - \frac{\mathsf{hypot}\left(1, x\right)}{{x}^{2} - \mathsf{fma}\left(x, x, 1\right)}\right) \]
      5. associate--r+ 4.9%

        \[\leadsto -\log \left(\frac{x}{\color{blue}{\left({x}^{2} - {x}^{2}\right) - 1}} - \frac{\mathsf{hypot}\left(1, x\right)}{{x}^{2} - \mathsf{fma}\left(x, x, 1\right)}\right) \]
      6. +-inverses 4.9%

        \[\leadsto -\log \left(\frac{x}{\color{blue}{0} - 1} - \frac{\mathsf{hypot}\left(1, x\right)}{{x}^{2} - \mathsf{fma}\left(x, x, 1\right)}\right) \]
      7. metadata-eval 4.9%

        \[\leadsto -\log \left(\frac{x}{\color{blue}{-1}} - \frac{\mathsf{hypot}\left(1, x\right)}{{x}^{2} - \mathsf{fma}\left(x, x, 1\right)}\right) \]
      8. *-rgt-identity 4.9%

        \[\leadsto -\log \left(\frac{\color{blue}{x \cdot 1}}{-1} - \frac{\mathsf{hypot}\left(1, x\right)}{{x}^{2} - \mathsf{fma}\left(x, x, 1\right)}\right) \]
      9. associate-/l* 4.9%

        \[\leadsto -\log \left(\color{blue}{x \cdot \frac{1}{-1}} - \frac{\mathsf{hypot}\left(1, x\right)}{{x}^{2} - \mathsf{fma}\left(x, x, 1\right)}\right) \]
      10. metadata-eval 4.9%

        \[\leadsto -\log \left(x \cdot \color{blue}{-1} - \frac{\mathsf{hypot}\left(1, x\right)}{{x}^{2} - \mathsf{fma}\left(x, x, 1\right)}\right) \]
      11. fma-undefine 4.9%

        \[\leadsto -\log \left(x \cdot -1 - \frac{\mathsf{hypot}\left(1, x\right)}{{x}^{2} - \color{blue}{\left(x \cdot x + 1\right)}}\right) \]
      12. unpow2 4.9%

        \[\leadsto -\log \left(x \cdot -1 - \frac{\mathsf{hypot}\left(1, x\right)}{{x}^{2} - \left(\color{blue}{{x}^{2}} + 1\right)}\right) \]
      13. associate--r+ 56.7%

        \[\leadsto -\log \left(x \cdot -1 - \frac{\mathsf{hypot}\left(1, x\right)}{\color{blue}{\left({x}^{2} - {x}^{2}\right) - 1}}\right) \]
      14. +-inverses 99.8%

        \[\leadsto -\log \left(x \cdot -1 - \frac{\mathsf{hypot}\left(1, x\right)}{\color{blue}{0} - 1}\right) \]
      15. metadata-eval 99.8%

        \[\leadsto -\log \left(x \cdot -1 - \frac{\mathsf{hypot}\left(1, x\right)}{\color{blue}{-1}}\right) \]
      16. *-rgt-identity 99.8%

        \[\leadsto -\log \left(x \cdot -1 - \frac{\color{blue}{\mathsf{hypot}\left(1, x\right) \cdot 1}}{-1}\right) \]
      17. associate-/l* 99.8%

        \[\leadsto -\log \left(x \cdot -1 - \color{blue}{\mathsf{hypot}\left(1, x\right) \cdot \frac{1}{-1}}\right) \]
      18. metadata-eval 99.8%

        \[\leadsto -\log \left(x \cdot -1 - \mathsf{hypot}\left(1, x\right) \cdot \color{blue}{-1}\right) \]
      19. *-commutative 99.8%

        \[\leadsto -\log \left(x \cdot -1 - \color{blue}{-1 \cdot \mathsf{hypot}\left(1, x\right)}\right) \]
      20. neg-mul-1 99.8%

        \[\leadsto -\log \left(x \cdot -1 - \color{blue}{\left(-\mathsf{hypot}\left(1, x\right)\right)}\right) \]
    8. Simplified 99.8%

      \[\leadsto \color{blue}{-\log \left(\mathsf{hypot}\left(1, x\right) - x\right)} \]

    if -8.9999999999999998e-4 < x < 0.00104999999999999994

    1. Initial program 9.6%

      \[\log \left(x + \sqrt{x \cdot x + 1}\right) \]
    2. Step-by-step derivation
      1. sqr-neg 9.6%

        \[\leadsto \log \left(x + \sqrt{\color{blue}{\left(-x\right) \cdot \left(-x\right)} + 1}\right) \]
      2. +-commutative 9.6%

        \[\leadsto \log \left(x + \sqrt{\color{blue}{1 + \left(-x\right) \cdot \left(-x\right)}}\right) \]
      3. sqr-neg 9.6%

        \[\leadsto \log \left(x + \sqrt{1 + \color{blue}{x \cdot x}}\right) \]
      4. hypot-1-def 9.6%

        \[\leadsto \log \left(x + \color{blue}{\mathsf{hypot}\left(1, x\right)}\right) \]
    3. Simplified 9.6%

      \[\leadsto \color{blue}{\log \left(x + \mathsf{hypot}\left(1, x\right)\right)} \]
    4. Add Preprocessing
    5. Taylor expanded in x around 0 100.0%

      \[\leadsto \color{blue}{x \cdot \left(1 + -0.16666666666666666 \cdot {x}^{2}\right)} \]
    6. Step-by-step derivation
      1. distribute-rgt-in 100.0%

        \[\leadsto \color{blue}{1 \cdot x + \left(-0.16666666666666666 \cdot {x}^{2}\right) \cdot x} \]
      2. *-lft-identity 100.0%

        \[\leadsto \color{blue}{x} + \left(-0.16666666666666666 \cdot {x}^{2}\right) \cdot x \]
      3. associate-*l* 100.0%

        \[\leadsto x + \color{blue}{-0.16666666666666666 \cdot \left({x}^{2} \cdot x\right)} \]
      4. unpow2 100.0%

        \[\leadsto x + -0.16666666666666666 \cdot \left(\color{blue}{\left(x \cdot x\right)} \cdot x\right) \]
      5. unpow3 100.0%

        \[\leadsto x + -0.16666666666666666 \cdot \color{blue}{{x}^{3}} \]
    7. Simplified 100.0%

      \[\leadsto \color{blue}{x + -0.16666666666666666 \cdot {x}^{3}} \]

    if 0.00104999999999999994 < x

    1. Initial program 56.4%

      \[\log \left(x + \sqrt{x \cdot x + 1}\right) \]
    2. Step-by-step derivation
      1. sqr-neg 56.4%

        \[\leadsto \log \left(x + \sqrt{\color{blue}{\left(-x\right) \cdot \left(-x\right)} + 1}\right) \]
      2. +-commutative 56.4%

        \[\leadsto \log \left(x + \sqrt{\color{blue}{1 + \left(-x\right) \cdot \left(-x\right)}}\right) \]
      3. sqr-neg 56.4%

        \[\leadsto \log \left(x + \sqrt{1 + \color{blue}{x \cdot x}}\right) \]
      4. hypot-1-def 99.9%

        \[\leadsto \log \left(x + \color{blue}{\mathsf{hypot}\left(1, x\right)}\right) \]
    3. Simplified 99.9%

      \[\leadsto \color{blue}{\log \left(x + \mathsf{hypot}\left(1, x\right)\right)} \]
    4. Add Preprocessing
  3. Recombined 3 regimes into one program.
  4. Final simplification 99.9%

    \[\leadsto \begin{array}{l} \mathbf{if}\;x \leq -0.0009:\\ \;\;\;\;-\log \left(\mathsf{hypot}\left(1, x\right) - x\right)\\ \mathbf{elif}\;x \leq 0.00105:\\ \;\;\;\;x + -0.16666666666666666 \cdot {x}^{3}\\ \mathbf{else}:\\ \;\;\;\;\log \left(x + \mathsf{hypot}\left(1, x\right)\right)\\ \end{array} \]
  5. Add Preprocessing
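
To see concretely what the negated-log branch buys, here is a quick numerical check (a hypothetical Python sketch, not generated by Herbie; the names naive and alt6_neg are chosen here for illustration):

import math

def naive(x):
    # original program: log(x + sqrt(x*x + 1))
    return math.log(x + math.sqrt(x * x + 1.0))

def alt6_neg(x):
    # Alternative 6's branch for x <= -0.0009
    return -math.log(math.hypot(1.0, x) - x)

x = -1.0e8
# In naive, x*x + 1 rounds to x*x, sqrt then returns exactly -x, and the
# sum cancels to 0.0, so log raises a "math domain error".
try:
    print(naive(x))
except ValueError:
    print("naive formula cancels all the way to log(0)")
print(alt6_neg(x))    # about -19.1138, no cancellation
print(math.asinh(x))  # library reference; agrees to double precision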

Alternative 7: 99.4% accurate, 1.8× speedup

\[\begin{array}{l} \\ \begin{array}{l} \mathbf{if}\;x \leq -1.25:\\ \;\;\;\;\log \left(\frac{-0.5}{x}\right)\\ \mathbf{elif}\;x \leq 1.3:\\ \;\;\;\;x + -0.16666666666666666 \cdot {x}^{3}\\ \mathbf{else}:\\ \;\;\;\;\log \left(x \cdot 2\right)\\ \end{array} \end{array} \]
(FPCore (x)
 :precision binary64
 (if (<= x -1.25)
   (log (/ -0.5 x))
   (if (<= x 1.3) (+ x (* -0.16666666666666666 (pow x 3.0))) (log (* x 2.0)))))
double code(double x) {
	double tmp;
	if (x <= -1.25) {
		tmp = log((-0.5 / x));
	} else if (x <= 1.3) {
		tmp = x + (-0.16666666666666666 * pow(x, 3.0));
	} else {
		tmp = log((x * 2.0));
	}
	return tmp;
}
real(8) function code(x)
    real(8), intent (in) :: x
    real(8) :: tmp
    if (x <= (-1.25d0)) then
        tmp = log(((-0.5d0) / x))
    else if (x <= 1.3d0) then
        tmp = x + ((-0.16666666666666666d0) * (x ** 3.0d0))
    else
        tmp = log((x * 2.0d0))
    end if
    code = tmp
end function
public static double code(double x) {
	double tmp;
	if (x <= -1.25) {
		tmp = Math.log((-0.5 / x));
	} else if (x <= 1.3) {
		tmp = x + (-0.16666666666666666 * Math.pow(x, 3.0));
	} else {
		tmp = Math.log((x * 2.0));
	}
	return tmp;
}
def code(x):
	tmp = 0
	if x <= -1.25:
		tmp = math.log((-0.5 / x))
	elif x <= 1.3:
		tmp = x + (-0.16666666666666666 * math.pow(x, 3.0))
	else:
		tmp = math.log((x * 2.0))
	return tmp
function code(x)
	tmp = 0.0
	if (x <= -1.25)
		tmp = log(Float64(-0.5 / x));
	elseif (x <= 1.3)
		tmp = Float64(x + Float64(-0.16666666666666666 * (x ^ 3.0)));
	else
		tmp = log(Float64(x * 2.0));
	end
	return tmp
end
function tmp_2 = code(x)
	tmp = 0.0;
	if (x <= -1.25)
		tmp = log((-0.5 / x));
	elseif (x <= 1.3)
		tmp = x + (-0.16666666666666666 * (x ^ 3.0));
	else
		tmp = log((x * 2.0));
	end
	tmp_2 = tmp;
end
code[x_] := If[LessEqual[x, -1.25], N[Log[N[(-0.5 / x), $MachinePrecision]], $MachinePrecision], If[LessEqual[x, 1.3], N[(x + N[(-0.16666666666666666 * N[Power[x, 3.0], $MachinePrecision]), $MachinePrecision]), $MachinePrecision], N[Log[N[(x * 2.0), $MachinePrecision]], $MachinePrecision]]]
\begin{array}{l}

\\
\begin{array}{l}
\mathbf{if}\;x \leq -1.25:\\
\;\;\;\;\log \left(\frac{-0.5}{x}\right)\\

\mathbf{elif}\;x \leq 1.3:\\
\;\;\;\;x + -0.16666666666666666 \cdot {x}^{3}\\

\mathbf{else}:\\
\;\;\;\;\log \left(x \cdot 2\right)\\


\end{array}
\end{array}
Derivation
  1. Split input into 3 regimes
  2. if x < -1.25

    1. Initial program 4.0%

      \[\log \left(x + \sqrt{x \cdot x + 1}\right) \]
    2. Step-by-step derivation
      1. sqr-neg 4.0%

        \[\leadsto \log \left(x + \sqrt{\color{blue}{\left(-x\right) \cdot \left(-x\right)} + 1}\right) \]
      2. +-commutative 4.0%

        \[\leadsto \log \left(x + \sqrt{\color{blue}{1 + \left(-x\right) \cdot \left(-x\right)}}\right) \]
      3. sqr-neg 4.0%

        \[\leadsto \log \left(x + \sqrt{1 + \color{blue}{x \cdot x}}\right) \]
      4. hypot-1-def 5.1%

        \[\leadsto \log \left(x + \color{blue}{\mathsf{hypot}\left(1, x\right)}\right) \]
    3. Simplified 5.1%

      \[\leadsto \color{blue}{\log \left(x + \mathsf{hypot}\left(1, x\right)\right)} \]
    4. Add Preprocessing
    5. Taylor expanded in x around -inf 99.1%

      \[\leadsto \log \color{blue}{\left(\frac{-0.5}{x}\right)} \]

    if -1.25 < x < 1.30000000000000004

    1. Initial program 10.9%

      \[\log \left(x + \sqrt{x \cdot x + 1}\right) \]
    2. Step-by-step derivation
      1. sqr-neg 10.9%

        \[\leadsto \log \left(x + \sqrt{\color{blue}{\left(-x\right) \cdot \left(-x\right)} + 1}\right) \]
      2. +-commutative 10.9%

        \[\leadsto \log \left(x + \sqrt{\color{blue}{1 + \left(-x\right) \cdot \left(-x\right)}}\right) \]
      3. sqr-neg 10.9%

        \[\leadsto \log \left(x + \sqrt{1 + \color{blue}{x \cdot x}}\right) \]
      4. hypot-1-def 10.9%

        \[\leadsto \log \left(x + \color{blue}{\mathsf{hypot}\left(1, x\right)}\right) \]
    3. Simplified 10.9%

      \[\leadsto \color{blue}{\log \left(x + \mathsf{hypot}\left(1, x\right)\right)} \]
    4. Add Preprocessing
    5. Taylor expanded in x around 0 99.5%

      \[\leadsto \color{blue}{x \cdot \left(1 + -0.16666666666666666 \cdot {x}^{2}\right)} \]
    6. Step-by-step derivation
      1. distribute-rgt-in 99.5%

        \[\leadsto \color{blue}{1 \cdot x + \left(-0.16666666666666666 \cdot {x}^{2}\right) \cdot x} \]
      2. *-lft-identity 99.5%

        \[\leadsto \color{blue}{x} + \left(-0.16666666666666666 \cdot {x}^{2}\right) \cdot x \]
      3. associate-*l* 99.5%

        \[\leadsto x + \color{blue}{-0.16666666666666666 \cdot \left({x}^{2} \cdot x\right)} \]
      4. unpow2 99.5%

        \[\leadsto x + -0.16666666666666666 \cdot \left(\color{blue}{\left(x \cdot x\right)} \cdot x\right) \]
      5. unpow3 99.5%

        \[\leadsto x + -0.16666666666666666 \cdot \color{blue}{{x}^{3}} \]
    7. Simplified 99.5%

      \[\leadsto \color{blue}{x + -0.16666666666666666 \cdot {x}^{3}} \]

    if 1.30000000000000004 < x

    1. Initial program 56.0%

      \[\log \left(x + \sqrt{x \cdot x + 1}\right) \]
    2. Step-by-step derivation
      1. sqr-neg 56.0%

        \[\leadsto \log \left(x + \sqrt{\color{blue}{\left(-x\right) \cdot \left(-x\right)} + 1}\right) \]
      2. +-commutative 56.0%

        \[\leadsto \log \left(x + \sqrt{\color{blue}{1 + \left(-x\right) \cdot \left(-x\right)}}\right) \]
      3. sqr-neg 56.0%

        \[\leadsto \log \left(x + \sqrt{1 + \color{blue}{x \cdot x}}\right) \]
      4. hypot-1-def 100.0%

        \[\leadsto \log \left(x + \color{blue}{\mathsf{hypot}\left(1, x\right)}\right) \]
    3. Simplified 100.0%

      \[\leadsto \color{blue}{\log \left(x + \mathsf{hypot}\left(1, x\right)\right)} \]
    4. Add Preprocessing
    5. Taylor expanded in x around inf 97.4%

      \[\leadsto \log \color{blue}{\left(2 \cdot x\right)} \]
    6. Step-by-step derivation
      1. *-commutative 97.4%

        \[\leadsto \log \color{blue}{\left(x \cdot 2\right)} \]
    7. Simplified 97.4%

      \[\leadsto \log \color{blue}{\left(x \cdot 2\right)} \]
  3. Recombined 3 regimes into one program.
  4. Final simplification 98.8%

    \[\leadsto \begin{array}{l} \mathbf{if}\;x \leq -1.25:\\ \;\;\;\;\log \left(\frac{-0.5}{x}\right)\\ \mathbf{elif}\;x \leq 1.3:\\ \;\;\;\;x + -0.16666666666666666 \cdot {x}^{3}\\ \mathbf{else}:\\ \;\;\;\;\log \left(x \cdot 2\right)\\ \end{array} \]
  5. Add Preprocessing
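
For context (a standard series fact, not part of Herbie's output): the cubic used in the middle regime is the truncated Maclaurin series

\[\operatorname{asinh} x = x - \frac{{x}^{3}}{6} + \frac{3 {x}^{5}}{40} - \cdots \]

which converges only for |x| ≤ 1. Stretching the cubic out to x = 1.3 is what buys the 1.8× speedup, at the cost of accuracy near the cutoffs; hence the 99.4% score compared to the 99.7% of Alternative 5, which uses the same cubic only on the narrower interval (-1.25, 0.00105].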

Alternative 8: 99.1% accurate, 1.8× speedup

\[\begin{array}{l} \\ \begin{array}{l} \mathbf{if}\;x \leq -1.25:\\ \;\;\;\;\log \left(\frac{-0.5}{x}\right)\\ \mathbf{elif}\;x \leq 1.3:\\ \;\;\;\;x\\ \mathbf{else}:\\ \;\;\;\;\log \left(x \cdot 2\right)\\ \end{array} \end{array} \]
(FPCore (x)
 :precision binary64
 (if (<= x -1.25) (log (/ -0.5 x)) (if (<= x 1.3) x (log (* x 2.0)))))
double code(double x) {
	double tmp;
	if (x <= -1.25) {
		tmp = log((-0.5 / x));
	} else if (x <= 1.3) {
		tmp = x;
	} else {
		tmp = log((x * 2.0));
	}
	return tmp;
}
real(8) function code(x)
    real(8), intent (in) :: x
    real(8) :: tmp
    if (x <= (-1.25d0)) then
        tmp = log(((-0.5d0) / x))
    else if (x <= 1.3d0) then
        tmp = x
    else
        tmp = log((x * 2.0d0))
    end if
    code = tmp
end function
public static double code(double x) {
	double tmp;
	if (x <= -1.25) {
		tmp = Math.log((-0.5 / x));
	} else if (x <= 1.3) {
		tmp = x;
	} else {
		tmp = Math.log((x * 2.0));
	}
	return tmp;
}
def code(x):
	tmp = 0
	if x <= -1.25:
		tmp = math.log((-0.5 / x))
	elif x <= 1.3:
		tmp = x
	else:
		tmp = math.log((x * 2.0))
	return tmp
function code(x)
	tmp = 0.0
	if (x <= -1.25)
		tmp = log(Float64(-0.5 / x));
	elseif (x <= 1.3)
		tmp = x;
	else
		tmp = log(Float64(x * 2.0));
	end
	return tmp
end
function tmp_2 = code(x)
	tmp = 0.0;
	if (x <= -1.25)
		tmp = log((-0.5 / x));
	elseif (x <= 1.3)
		tmp = x;
	else
		tmp = log((x * 2.0));
	end
	tmp_2 = tmp;
end
code[x_] := If[LessEqual[x, -1.25], N[Log[N[(-0.5 / x), $MachinePrecision]], $MachinePrecision], If[LessEqual[x, 1.3], x, N[Log[N[(x * 2.0), $MachinePrecision]], $MachinePrecision]]]
\begin{array}{l}

\\
\begin{array}{l}
\mathbf{if}\;x \leq -1.25:\\
\;\;\;\;\log \left(\frac{-0.5}{x}\right)\\

\mathbf{elif}\;x \leq 1.3:\\
\;\;\;\;x\\

\mathbf{else}:\\
\;\;\;\;\log \left(x \cdot 2\right)\\


\end{array}
\end{array}
Derivation
  1. Split input into 3 regimes
  2. if x < -1.25

    1. Initial program 4.0%

      \[\log \left(x + \sqrt{x \cdot x + 1}\right) \]
    2. Step-by-step derivation
      1. sqr-neg 4.0%

        \[\leadsto \log \left(x + \sqrt{\color{blue}{\left(-x\right) \cdot \left(-x\right)} + 1}\right) \]
      2. +-commutative 4.0%

        \[\leadsto \log \left(x + \sqrt{\color{blue}{1 + \left(-x\right) \cdot \left(-x\right)}}\right) \]
      3. sqr-neg 4.0%

        \[\leadsto \log \left(x + \sqrt{1 + \color{blue}{x \cdot x}}\right) \]
      4. hypot-1-def 5.1%

        \[\leadsto \log \left(x + \color{blue}{\mathsf{hypot}\left(1, x\right)}\right) \]
    3. Simplified 5.1%

      \[\leadsto \color{blue}{\log \left(x + \mathsf{hypot}\left(1, x\right)\right)} \]
    4. Add Preprocessing
    5. Taylor expanded in x around -inf 99.1%

      \[\leadsto \log \color{blue}{\left(\frac{-0.5}{x}\right)} \]

    if -1.25 < x < 1.30000000000000004

    1. Initial program 10.9%

      \[\log \left(x + \sqrt{x \cdot x + 1}\right) \]
    2. Step-by-step derivation
      1. sqr-neg 10.9%

        \[\leadsto \log \left(x + \sqrt{\color{blue}{\left(-x\right) \cdot \left(-x\right)} + 1}\right) \]
      2. +-commutative 10.9%

        \[\leadsto \log \left(x + \sqrt{\color{blue}{1 + \left(-x\right) \cdot \left(-x\right)}}\right) \]
      3. sqr-neg 10.9%

        \[\leadsto \log \left(x + \sqrt{1 + \color{blue}{x \cdot x}}\right) \]
      4. hypot-1-def 10.9%

        \[\leadsto \log \left(x + \color{blue}{\mathsf{hypot}\left(1, x\right)}\right) \]
    3. Simplified 10.9%

      \[\leadsto \color{blue}{\log \left(x + \mathsf{hypot}\left(1, x\right)\right)} \]
    4. Add Preprocessing
    5. Taylor expanded in x around 0 98.0%

      \[\leadsto \color{blue}{x} \]

    if 1.30000000000000004 < x

    1. Initial program 56.0%

      \[\log \left(x + \sqrt{x \cdot x + 1}\right) \]
    2. Step-by-step derivation
      1. sqr-neg 56.0%

        \[\leadsto \log \left(x + \sqrt{\color{blue}{\left(-x\right) \cdot \left(-x\right)} + 1}\right) \]
      2. +-commutative 56.0%

        \[\leadsto \log \left(x + \sqrt{\color{blue}{1 + \left(-x\right) \cdot \left(-x\right)}}\right) \]
      3. sqr-neg 56.0%

        \[\leadsto \log \left(x + \sqrt{1 + \color{blue}{x \cdot x}}\right) \]
      4. hypot-1-def 100.0%

        \[\leadsto \log \left(x + \color{blue}{\mathsf{hypot}\left(1, x\right)}\right) \]
    3. Simplified 100.0%

      \[\leadsto \color{blue}{\log \left(x + \mathsf{hypot}\left(1, x\right)\right)} \]
    4. Add Preprocessing
    5. Taylor expanded in x around inf 97.4%

      \[\leadsto \log \color{blue}{\left(2 \cdot x\right)} \]
    6. Step-by-step derivation
      1. *-commutative 97.4%

        \[\leadsto \log \color{blue}{\left(x \cdot 2\right)} \]
    7. Simplified 97.4%

      \[\leadsto \log \color{blue}{\left(x \cdot 2\right)} \]
  3. Recombined 3 regimes into one program.
  4. Final simplification 98.1%

    \[\leadsto \begin{array}{l} \mathbf{if}\;x \leq -1.25:\\ \;\;\;\;\log \left(\frac{-0.5}{x}\right)\\ \mathbf{elif}\;x \leq 1.3:\\ \;\;\;\;x\\ \mathbf{else}:\\ \;\;\;\;\log \left(x \cdot 2\right)\\ \end{array} \]
  5. Add Preprocessing
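
Dropping even the cubic term leaves a bare identity in the middle regime; the cost is easy to estimate (a spot check, not part of Herbie's output):

\[\operatorname{asinh} 1.3 = \log \left(1.3 + \sqrt{2.69}\right) \approx 1.078 \]

so at the cutoff the approximation x is off by about 0.22 in absolute terms, which is why this alternative scores 99.1% at the same 1.8× speedup as Alternative 7.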

Alternative 9: 75.4% accurate, 1.9× speedup

\[\begin{array}{l} \\ \begin{array}{l} \mathbf{if}\;x \leq 1.3:\\ \;\;\;\;x\\ \mathbf{else}:\\ \;\;\;\;\log \left(x \cdot 2\right)\\ \end{array} \end{array} \]
(FPCore (x) :precision binary64 (if (<= x 1.3) x (log (* x 2.0))))
double code(double x) {
	double tmp;
	if (x <= 1.3) {
		tmp = x;
	} else {
		tmp = log((x * 2.0));
	}
	return tmp;
}
real(8) function code(x)
    real(8), intent (in) :: x
    real(8) :: tmp
    if (x <= 1.3d0) then
        tmp = x
    else
        tmp = log((x * 2.0d0))
    end if
    code = tmp
end function
public static double code(double x) {
	double tmp;
	if (x <= 1.3) {
		tmp = x;
	} else {
		tmp = Math.log((x * 2.0));
	}
	return tmp;
}
def code(x):
	tmp = 0
	if x <= 1.3:
		tmp = x
	else:
		tmp = math.log((x * 2.0))
	return tmp
function code(x)
	tmp = 0.0
	if (x <= 1.3)
		tmp = x;
	else
		tmp = log(Float64(x * 2.0));
	end
	return tmp
end
function tmp_2 = code(x)
	tmp = 0.0;
	if (x <= 1.3)
		tmp = x;
	else
		tmp = log((x * 2.0));
	end
	tmp_2 = tmp;
end
code[x_] := If[LessEqual[x, 1.3], x, N[Log[N[(x * 2.0), $MachinePrecision]], $MachinePrecision]]
\begin{array}{l}

\\
\begin{array}{l}
\mathbf{if}\;x \leq 1.3:\\
\;\;\;\;x\\

\mathbf{else}:\\
\;\;\;\;\log \left(x \cdot 2\right)\\


\end{array}
\end{array}
Derivation
  1. Split input into 2 regimes
  2. if x < 1.30000000000000004

    1. Initial program 8.7%

      \[\log \left(x + \sqrt{x \cdot x + 1}\right) \]
    2. Step-by-step derivation
      1. sqr-neg 8.7%

        \[\leadsto \log \left(x + \sqrt{\color{blue}{\left(-x\right) \cdot \left(-x\right)} + 1}\right) \]
      2. +-commutative 8.7%

        \[\leadsto \log \left(x + \sqrt{\color{blue}{1 + \left(-x\right) \cdot \left(-x\right)}}\right) \]
      3. sqr-neg 8.7%

        \[\leadsto \log \left(x + \sqrt{1 + \color{blue}{x \cdot x}}\right) \]
      4. hypot-1-def 9.0%

        \[\leadsto \log \left(x + \color{blue}{\mathsf{hypot}\left(1, x\right)}\right) \]
    3. Simplified 9.0%

      \[\leadsto \color{blue}{\log \left(x + \mathsf{hypot}\left(1, x\right)\right)} \]
    4. Add Preprocessing
    5. Taylor expanded in x around 0 68.5%

      \[\leadsto \color{blue}{x} \]

    if 1.30000000000000004 < x

    1. Initial program 56.0%

      \[\log \left(x + \sqrt{x \cdot x + 1}\right) \]
    2. Step-by-step derivation
      1. sqr-neg 56.0%

        \[\leadsto \log \left(x + \sqrt{\color{blue}{\left(-x\right) \cdot \left(-x\right)} + 1}\right) \]
      2. +-commutative 56.0%

        \[\leadsto \log \left(x + \sqrt{\color{blue}{1 + \left(-x\right) \cdot \left(-x\right)}}\right) \]
      3. sqr-neg 56.0%

        \[\leadsto \log \left(x + \sqrt{1 + \color{blue}{x \cdot x}}\right) \]
      4. hypot-1-def 100.0%

        \[\leadsto \log \left(x + \color{blue}{\mathsf{hypot}\left(1, x\right)}\right) \]
    3. Simplified 100.0%

      \[\leadsto \color{blue}{\log \left(x + \mathsf{hypot}\left(1, x\right)\right)} \]
    4. Add Preprocessing
    5. Taylor expanded in x around inf 97.4%

      \[\leadsto \log \color{blue}{\left(2 \cdot x\right)} \]
    6. Step-by-step derivation
      1. *-commutative 97.4%

        \[\leadsto \log \color{blue}{\left(x \cdot 2\right)} \]
    7. Simplified 97.4%

      \[\leadsto \log \color{blue}{\left(x \cdot 2\right)} \]
  3. Recombined 2 regimes into one program.
  4. Final simplification 77.2%

    \[\leadsto \begin{array}{l} \mathbf{if}\;x \leq 1.3:\\ \;\;\;\;x\\ \mathbf{else}:\\ \;\;\;\;\log \left(x \cdot 2\right)\\ \end{array} \]
  5. Add Preprocessing
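
The drop to 75.4% comes from removing the negative regime entirely: every x ≤ 1.3 now uses the identity approximation, which is badly wrong for large negative inputs (a spot check, not part of Herbie's output):

\[\operatorname{asinh} \left(-10^{8}\right) \approx -19.11, \quad \text{whereas this alternative returns } -10^{8} \]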

Alternative 10: 51.8% accurate, 207.0× speedup

\[\begin{array}{l} \\ x \end{array} \]
(FPCore (x) :precision binary64 x)
double code(double x) {
	return x;
}
real(8) function code(x)
    real(8), intent (in) :: x
    code = x
end function
public static double code(double x) {
	return x;
}
def code(x):
	return x
function code(x)
	return x
end
function tmp = code(x)
	tmp = x;
end
code[x_] := x
\begin{array}{l}

\\
x
\end{array}
Derivation
  1. Initial program 22.9%

    \[\log \left(x + \sqrt{x \cdot x + 1}\right) \]
  2. Step-by-step derivation
    1. sqr-neg 22.9%

      \[\leadsto \log \left(x + \sqrt{\color{blue}{\left(-x\right) \cdot \left(-x\right)} + 1}\right) \]
    2. +-commutative 22.9%

      \[\leadsto \log \left(x + \sqrt{\color{blue}{1 + \left(-x\right) \cdot \left(-x\right)}}\right) \]
    3. sqr-neg 22.9%

      \[\leadsto \log \left(x + \sqrt{1 + \color{blue}{x \cdot x}}\right) \]
    4. hypot-1-def 36.4%

      \[\leadsto \log \left(x + \color{blue}{\mathsf{hypot}\left(1, x\right)}\right) \]
  3. Simplified 36.4%

    \[\leadsto \color{blue}{\log \left(x + \mathsf{hypot}\left(1, x\right)\right)} \]
  4. Add Preprocessing
  5. Taylor expanded in x around 0 49.6%

    \[\leadsto \color{blue}{x} \]
  6. Final simplification 49.6%

    \[\leadsto x \]
  7. Add Preprocessing
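
Returning x is less drastic than it looks (standard rounding analysis, not part of the report): since |asinh x - x| ≈ |x|³/6, the identity is correctly rounded whenever the relative error stays below half an ulp,

\[\frac{{x}^{2}}{6} \le 2^{-53} \iff |x| \lesssim 2.6 \times 10^{-8} \]

so the 51.8% average mixes perfect accuracy on tiny inputs with garbage on large ones, in exchange for the 207× speedup.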

Developer target: 30.5% accurate, 1.0× speedup

\[\begin{array}{l} \\ \begin{array}{l} t_0 := \sqrt{x \cdot x + 1}\\ \mathbf{if}\;x < 0:\\ \;\;\;\;\log \left(\frac{-1}{x - t_0}\right)\\ \mathbf{else}:\\ \;\;\;\;\log \left(x + t_0\right)\\ \end{array} \end{array} \]
(FPCore (x)
 :precision binary64
 (let* ((t_0 (sqrt (+ (* x x) 1.0))))
   (if (< x 0.0) (log (/ -1.0 (- x t_0))) (log (+ x t_0)))))
double code(double x) {
	double t_0 = sqrt(((x * x) + 1.0));
	double tmp;
	if (x < 0.0) {
		tmp = log((-1.0 / (x - t_0)));
	} else {
		tmp = log((x + t_0));
	}
	return tmp;
}
real(8) function code(x)
    real(8), intent (in) :: x
    real(8) :: t_0
    real(8) :: tmp
    t_0 = sqrt(((x * x) + 1.0d0))
    if (x < 0.0d0) then
        tmp = log(((-1.0d0) / (x - t_0)))
    else
        tmp = log((x + t_0))
    end if
    code = tmp
end function
public static double code(double x) {
	double t_0 = Math.sqrt(((x * x) + 1.0));
	double tmp;
	if (x < 0.0) {
		tmp = Math.log((-1.0 / (x - t_0)));
	} else {
		tmp = Math.log((x + t_0));
	}
	return tmp;
}
def code(x):
	t_0 = math.sqrt(((x * x) + 1.0))
	tmp = 0
	if x < 0.0:
		tmp = math.log((-1.0 / (x - t_0)))
	else:
		tmp = math.log((x + t_0))
	return tmp
function code(x)
	t_0 = sqrt(Float64(Float64(x * x) + 1.0))
	tmp = 0.0
	if (x < 0.0)
		tmp = log(Float64(-1.0 / Float64(x - t_0)));
	else
		tmp = log(Float64(x + t_0));
	end
	return tmp
end
function tmp_2 = code(x)
	t_0 = sqrt(((x * x) + 1.0));
	tmp = 0.0;
	if (x < 0.0)
		tmp = log((-1.0 / (x - t_0)));
	else
		tmp = log((x + t_0));
	end
	tmp_2 = tmp;
end
code[x_] := Block[{t$95$0 = N[Sqrt[N[(N[(x * x), $MachinePrecision] + 1.0), $MachinePrecision]], $MachinePrecision]}, If[Less[x, 0.0], N[Log[N[(-1.0 / N[(x - t$95$0), $MachinePrecision]), $MachinePrecision]], $MachinePrecision], N[Log[N[(x + t$95$0), $MachinePrecision]], $MachinePrecision]]]
\begin{array}{l}

\\
\begin{array}{l}
t_0 := \sqrt{x \cdot x + 1}\\
\mathbf{if}\;x < 0:\\
\;\;\;\;\log \left(\frac{-1}{x - t_0}\right)\\

\mathbf{else}:\\
\;\;\;\;\log \left(x + t_0\right)\\


\end{array}
\end{array}
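
The developer target's negative branch rests on the conjugate identity (a sketch of the reasoning, not part of the report):

\[\left(x + \sqrt{x^{2} + 1}\right) \left(x - \sqrt{x^{2} + 1}\right) = x^{2} - \left(x^{2} + 1\right) = -1, \quad \text{so} \quad x + \sqrt{x^{2} + 1} = \frac{-1}{x - t_0} \]

For x < 0 the denominator adds two negative quantities, so the cancellation in x + t_0 is avoided. The target nevertheless scores only 30.5%, largely because t_0 computes x · x directly, which overflows once |x| exceeds roughly 1.3 × 10^154; the hypot-based alternatives avoid both problems.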

Reproduce

herbie shell --seed 2024077 
(FPCore (x)
  :name "Hyperbolic arcsine"
  :precision binary64

  :alt
  (if (< x 0.0) (log (/ -1.0 (- x (sqrt (+ (* x x) 1.0))))) (log (+ x (sqrt (+ (* x x) 1.0)))))

  (log (+ x (sqrt (+ (* x x) 1.0)))))
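
As a quick external check of the speed-accuracy tradeoff (a hypothetical Python sketch, not part of the report), one can compare, say, Alternative 7 against the library asinh:

import math

def alt7(x):
    # Alternative 7, transcribed from its Python listing above
    if x <= -1.25:
        return math.log(-0.5 / x)
    elif x <= 1.3:
        return x + (-0.16666666666666666 * math.pow(x, 3.0))
    else:
        return math.log(x * 2.0)

for x in (-1.0e8, -1.0, 1.0e-8, 1.25, 1.0e154):
    rel = abs(alt7(x) - math.asinh(x)) / abs(math.asinh(x))
    print(f"x={x:+.2e}  relative error={rel:.2e}")

# The asymptotic branches are accurate to near machine precision, while
# inputs near the cutoffs (x = -1.0 and x = 1.25 here) see relative errors
# of roughly 5% to 12%, consistent with the regime analysis above.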