Hyperbolic arc-(co)secant

Percentage Accurate: 100.0% → 100.0%
Time: 8.3s
Alternatives: 5
Speedup: N/A×

Specification

?
\[\begin{array}{l} \\ \log \left(\frac{1}{x} + \frac{\sqrt{1 - x \cdot x}}{x}\right) \end{array} \]
(FPCore (x)
 :precision binary64
 (log (+ (/ 1.0 x) (/ (sqrt (- 1.0 (* x x))) x))))
double code(double x) {
	return log(((1.0 / x) + (sqrt((1.0 - (x * x))) / x)));
}
! Computes log(1/x + sqrt(1 - x*x)/x) in double precision (report: hyperbolic arc-(co)secant).
real(8) function code(x)
    real(8), intent (in) :: x
    code = log(((1.0d0 / x) + (sqrt((1.0d0 - (x * x))) / x)))
end function
// Computes log(1/x + sqrt(1 - x*x)/x) in binary64 (report: hyperbolic arc-(co)secant).
public static double code(double x) {
	return Math.log(((1.0 / x) + (Math.sqrt((1.0 - (x * x))) / x)));
}
def code(x):
	"""Compute log(1/x + sqrt(1 - x*x)/x) (report: hyperbolic arc-(co)secant)."""
	inv = 1.0 / x
	root = math.sqrt(1.0 - x * x)
	return math.log(inv + root / x)
# Computes log(1/x + sqrt(1 - x*x)/x); explicit Float64 conversions mirror binary64 semantics.
function code(x)
	return log(Float64(Float64(1.0 / x) + Float64(sqrt(Float64(1.0 - Float64(x * x))) / x)))
end
% Computes log(1/x + sqrt(1 - x*x)/x) (report: hyperbolic arc-(co)secant).
function tmp = code(x)
	tmp = log(((1.0 / x) + (sqrt((1.0 - (x * x))) / x)));
end
code[x_] := N[Log[N[(N[(1.0 / x), $MachinePrecision] + N[(N[Sqrt[N[(1.0 - N[(x * x), $MachinePrecision]), $MachinePrecision]], $MachinePrecision] / x), $MachinePrecision]), $MachinePrecision]], $MachinePrecision]
\begin{array}{l}

\\
\log \left(\frac{1}{x} + \frac{\sqrt{1 - x \cdot x}}{x}\right)
\end{array}

Sampling outcomes in binary64 precision:

Local Percentage Accuracy vs ?

The average percentage accuracy by input value. Horizontal axis shows the value of an input variable; the variable is chosen in the title. Vertical axis is accuracy; higher is better. Red represents the original program, while blue represents Herbie's suggestion. These can be toggled with buttons below the plot. The line is an average, while dots represent individual samples.

Accuracy vs Speed?

Herbie found 5 alternatives:

AlternativeAccuracySpeedup
The accuracy (vertical axis) and speed (horizontal axis) of each alternative. Up and to the right is better. The red square shows the initial program, and each blue circle shows an alternative. The line shows the best available speed-accuracy tradeoffs.

Initial Program: 100.0% accurate, 1.0× speedup?

\[\begin{array}{l} \\ \log \left(\frac{1}{x} + \frac{\sqrt{1 - x \cdot x}}{x}\right) \end{array} \]
(FPCore (x)
 :precision binary64
 (log (+ (/ 1.0 x) (/ (sqrt (- 1.0 (* x x))) x))))
/* Initial program: log(1/x + sqrt(1 - x*x)/x) in binary64, 100.0% accurate. */
double code(double x) {
	return log(((1.0 / x) + (sqrt((1.0 - (x * x))) / x)));
}
! Initial program: log(1/x + sqrt(1 - x*x)/x) in double precision, 100.0% accurate.
real(8) function code(x)
    real(8), intent (in) :: x
    code = log(((1.0d0 / x) + (sqrt((1.0d0 - (x * x))) / x)))
end function
// Initial program: log(1/x + sqrt(1 - x*x)/x) in binary64, 100.0% accurate.
public static double code(double x) {
	return Math.log(((1.0 / x) + (Math.sqrt((1.0 - (x * x))) / x)));
}
def code(x):
	"""Initial program: log(1/x + sqrt(1 - x*x)/x), 100.0% accurate."""
	return math.log(((1.0 / x) + (math.sqrt((1.0 - (x * x))) / x)))
# Initial program: log(1/x + sqrt(1 - x*x)/x); Float64 conversions mirror binary64 semantics.
function code(x)
	return log(Float64(Float64(1.0 / x) + Float64(sqrt(Float64(1.0 - Float64(x * x))) / x)))
end
% Initial program: log(1/x + sqrt(1 - x*x)/x), 100.0% accurate.
function tmp = code(x)
	tmp = log(((1.0 / x) + (sqrt((1.0 - (x * x))) / x)));
end
code[x_] := N[Log[N[(N[(1.0 / x), $MachinePrecision] + N[(N[Sqrt[N[(1.0 - N[(x * x), $MachinePrecision]), $MachinePrecision]], $MachinePrecision] / x), $MachinePrecision]), $MachinePrecision]], $MachinePrecision]
\begin{array}{l}

\\
\log \left(\frac{1}{x} + \frac{\sqrt{1 - x \cdot x}}{x}\right)
\end{array}

Alternative 1: 100.0% accurate, 1.1× speedup?

\[\begin{array}{l} \\ \log \left(\frac{1 + \sqrt{1 - x \cdot x}}{x}\right) \end{array} \]
(FPCore (x) :precision binary64 (log (/ (+ 1.0 (sqrt (- 1.0 (* x x)))) x)))
double code(double x) {
	return log(((1.0 + sqrt((1.0 - (x * x)))) / x));
}
! Alternative 1 (100.0% accurate): log((1 + sqrt(1 - x*x)) / x), single-division rewrite.
real(8) function code(x)
    real(8), intent (in) :: x
    code = log(((1.0d0 + sqrt((1.0d0 - (x * x)))) / x))
end function
// Alternative 1 (100.0% accurate): log((1 + sqrt(1 - x*x)) / x), single-division rewrite.
public static double code(double x) {
	return Math.log(((1.0 + Math.sqrt((1.0 - (x * x)))) / x));
}
def code(x):
	"""Alternative 1 (100.0% accurate): log((1 + sqrt(1 - x*x)) / x)."""
	numerator = 1.0 + math.sqrt(1.0 - x * x)
	return math.log(numerator / x)
# Alternative 1 (100.0% accurate): log((1 + sqrt(1 - x*x)) / x), single-division rewrite.
function code(x)
	return log(Float64(Float64(1.0 + sqrt(Float64(1.0 - Float64(x * x)))) / x))
end
% Alternative 1 (100.0% accurate): log((1 + sqrt(1 - x*x)) / x), single-division rewrite.
function tmp = code(x)
	tmp = log(((1.0 + sqrt((1.0 - (x * x)))) / x));
end
code[x_] := N[Log[N[(N[(1.0 + N[Sqrt[N[(1.0 - N[(x * x), $MachinePrecision]), $MachinePrecision]], $MachinePrecision]), $MachinePrecision] / x), $MachinePrecision]], $MachinePrecision]
\begin{array}{l}

\\
\log \left(\frac{1 + \sqrt{1 - x \cdot x}}{x}\right)
\end{array}
Derivation
  1. Initial program 100.0%

    \[\log \left(\frac{1}{x} + \frac{\sqrt{1 - x \cdot x}}{x}\right) \]
  2. Add Preprocessing
  3. Step-by-step derivation
    1. div-invN/A

      \[\leadsto \log \left(\frac{1}{x} + \color{blue}{\sqrt{1 - x \cdot x} \cdot \frac{1}{x}}\right) \]
    2. distribute-rgt1-inN/A

      \[\leadsto \log \color{blue}{\left(\left(\sqrt{1 - x \cdot x} + 1\right) \cdot \frac{1}{x}\right)} \]
    3. un-div-invN/A

      \[\leadsto \log \color{blue}{\left(\frac{\sqrt{1 - x \cdot x} + 1}{x}\right)} \]
    4. /-lowering-/.f64N/A

      \[\leadsto \log \color{blue}{\left(\frac{\sqrt{1 - x \cdot x} + 1}{x}\right)} \]
    5. +-commutativeN/A

      \[\leadsto \log \left(\frac{\color{blue}{1 + \sqrt{1 - x \cdot x}}}{x}\right) \]
    6. +-lowering-+.f64N/A

      \[\leadsto \log \left(\frac{\color{blue}{1 + \sqrt{1 - x \cdot x}}}{x}\right) \]
    7. rem-square-sqrtN/A

      \[\leadsto \log \left(\frac{1 + \sqrt{\color{blue}{\sqrt{1 - x \cdot x} \cdot \sqrt{1 - x \cdot x}}}}{x}\right) \]
    8. sqrt-lowering-sqrt.f64N/A

      \[\leadsto \log \left(\frac{1 + \color{blue}{\sqrt{\sqrt{1 - x \cdot x} \cdot \sqrt{1 - x \cdot x}}}}{x}\right) \]
    9. rem-square-sqrtN/A

      \[\leadsto \log \left(\frac{1 + \sqrt{\color{blue}{1 - x \cdot x}}}{x}\right) \]
    10. --lowering--.f64N/A

      \[\leadsto \log \left(\frac{1 + \sqrt{\color{blue}{1 - x \cdot x}}}{x}\right) \]
    11. *-lowering-*.f64100.0

      \[\leadsto \log \left(\frac{1 + \sqrt{1 - \color{blue}{x \cdot x}}}{x}\right) \]
  4. Applied egg-rr100.0%

    \[\leadsto \log \color{blue}{\left(\frac{1 + \sqrt{1 - x \cdot x}}{x}\right)} \]
  5. Add Preprocessing

Alternative 2: 99.6% accurate, 1.1× speedup?

\[\begin{array}{l} \\ -\log \left(\frac{1}{\mathsf{fma}\left(x, -0.5, \frac{2}{x}\right)}\right) \end{array} \]
(FPCore (x) :precision binary64 (- (log (/ 1.0 (fma x -0.5 (/ 2.0 x))))))
/* Alternative 2 (~99.6% accurate): -log(1 / fma(x, -0.5, 2/x)), a Taylor-series-based rewrite. */
double code(double x) {
	return -log((1.0 / fma(x, -0.5, (2.0 / x))));
}
# Alternative 2 (~99.6% accurate): -log(1 / fma(x, -0.5, 2/x)), a Taylor-series-based rewrite.
function code(x)
	return Float64(-log(Float64(1.0 / fma(x, -0.5, Float64(2.0 / x)))))
end
code[x_] := (-N[Log[N[(1.0 / N[(x * -0.5 + N[(2.0 / x), $MachinePrecision]), $MachinePrecision]), $MachinePrecision]], $MachinePrecision])
\begin{array}{l}

\\
-\log \left(\frac{1}{\mathsf{fma}\left(x, -0.5, \frac{2}{x}\right)}\right)
\end{array}
Derivation
  1. Initial program 100.0%

    \[\log \left(\frac{1}{x} + \frac{\sqrt{1 - x \cdot x}}{x}\right) \]
  2. Add Preprocessing
  3. Taylor expanded in x around 0

    \[\leadsto \log \color{blue}{\left(\frac{2 + \frac{-1}{2} \cdot {x}^{2}}{x}\right)} \]
  4. Step-by-step derivation
    1. /-lowering-/.f64N/A

      \[\leadsto \log \color{blue}{\left(\frac{2 + \frac{-1}{2} \cdot {x}^{2}}{x}\right)} \]
    2. +-commutativeN/A

      \[\leadsto \log \left(\frac{\color{blue}{\frac{-1}{2} \cdot {x}^{2} + 2}}{x}\right) \]
    3. unpow2N/A

      \[\leadsto \log \left(\frac{\frac{-1}{2} \cdot \color{blue}{\left(x \cdot x\right)} + 2}{x}\right) \]
    4. associate-*r*N/A

      \[\leadsto \log \left(\frac{\color{blue}{\left(\frac{-1}{2} \cdot x\right) \cdot x} + 2}{x}\right) \]
    5. *-commutativeN/A

      \[\leadsto \log \left(\frac{\color{blue}{x \cdot \left(\frac{-1}{2} \cdot x\right)} + 2}{x}\right) \]
    6. metadata-evalN/A

      \[\leadsto \log \left(\frac{x \cdot \left(\color{blue}{\left(\mathsf{neg}\left(\frac{1}{2}\right)\right)} \cdot x\right) + 2}{x}\right) \]
    7. distribute-lft-neg-inN/A

      \[\leadsto \log \left(\frac{x \cdot \color{blue}{\left(\mathsf{neg}\left(\frac{1}{2} \cdot x\right)\right)} + 2}{x}\right) \]
    8. *-commutativeN/A

      \[\leadsto \log \left(\frac{x \cdot \left(\mathsf{neg}\left(\color{blue}{x \cdot \frac{1}{2}}\right)\right) + 2}{x}\right) \]
    9. accelerator-lowering-fma.f64N/A

      \[\leadsto \log \left(\frac{\color{blue}{\mathsf{fma}\left(x, \mathsf{neg}\left(x \cdot \frac{1}{2}\right), 2\right)}}{x}\right) \]
    10. distribute-rgt-neg-inN/A

      \[\leadsto \log \left(\frac{\mathsf{fma}\left(x, \color{blue}{x \cdot \left(\mathsf{neg}\left(\frac{1}{2}\right)\right)}, 2\right)}{x}\right) \]
    11. metadata-evalN/A

      \[\leadsto \log \left(\frac{\mathsf{fma}\left(x, x \cdot \color{blue}{\frac{-1}{2}}, 2\right)}{x}\right) \]
    12. *-lowering-*.f6499.4

      \[\leadsto \log \left(\frac{\mathsf{fma}\left(x, \color{blue}{x \cdot -0.5}, 2\right)}{x}\right) \]
  5. Simplified99.4%

    \[\leadsto \log \color{blue}{\left(\frac{\mathsf{fma}\left(x, x \cdot -0.5, 2\right)}{x}\right)} \]
  6. Step-by-step derivation
    1. frac-2negN/A

      \[\leadsto \log \color{blue}{\left(\frac{\mathsf{neg}\left(\left(x \cdot \left(x \cdot \frac{-1}{2}\right) + 2\right)\right)}{\mathsf{neg}\left(x\right)}\right)} \]
    2. div-invN/A

      \[\leadsto \log \color{blue}{\left(\left(\mathsf{neg}\left(\left(x \cdot \left(x \cdot \frac{-1}{2}\right) + 2\right)\right)\right) \cdot \frac{1}{\mathsf{neg}\left(x\right)}\right)} \]
    3. *-lowering-*.f64N/A

      \[\leadsto \log \color{blue}{\left(\left(\mathsf{neg}\left(\left(x \cdot \left(x \cdot \frac{-1}{2}\right) + 2\right)\right)\right) \cdot \frac{1}{\mathsf{neg}\left(x\right)}\right)} \]
    4. distribute-neg-inN/A

      \[\leadsto \log \left(\color{blue}{\left(\left(\mathsf{neg}\left(x \cdot \left(x \cdot \frac{-1}{2}\right)\right)\right) + \left(\mathsf{neg}\left(2\right)\right)\right)} \cdot \frac{1}{\mathsf{neg}\left(x\right)}\right) \]
    5. distribute-rgt-neg-inN/A

      \[\leadsto \log \left(\left(\color{blue}{x \cdot \left(\mathsf{neg}\left(x \cdot \frac{-1}{2}\right)\right)} + \left(\mathsf{neg}\left(2\right)\right)\right) \cdot \frac{1}{\mathsf{neg}\left(x\right)}\right) \]
    6. metadata-evalN/A

      \[\leadsto \log \left(\left(x \cdot \left(\mathsf{neg}\left(x \cdot \color{blue}{\left(\mathsf{neg}\left(\frac{1}{2}\right)\right)}\right)\right) + \left(\mathsf{neg}\left(2\right)\right)\right) \cdot \frac{1}{\mathsf{neg}\left(x\right)}\right) \]
    7. distribute-rgt-neg-inN/A

      \[\leadsto \log \left(\left(x \cdot \left(\mathsf{neg}\left(\color{blue}{\left(\mathsf{neg}\left(x \cdot \frac{1}{2}\right)\right)}\right)\right) + \left(\mathsf{neg}\left(2\right)\right)\right) \cdot \frac{1}{\mathsf{neg}\left(x\right)}\right) \]
    8. metadata-evalN/A

      \[\leadsto \log \left(\left(x \cdot \left(\mathsf{neg}\left(\left(\mathsf{neg}\left(x \cdot \color{blue}{\frac{1}{2}}\right)\right)\right)\right) + \left(\mathsf{neg}\left(2\right)\right)\right) \cdot \frac{1}{\mathsf{neg}\left(x\right)}\right) \]
    9. div-invN/A

      \[\leadsto \log \left(\left(x \cdot \left(\mathsf{neg}\left(\left(\mathsf{neg}\left(\color{blue}{\frac{x}{2}}\right)\right)\right)\right) + \left(\mathsf{neg}\left(2\right)\right)\right) \cdot \frac{1}{\mathsf{neg}\left(x\right)}\right) \]
    10. distribute-neg-frac2N/A

      \[\leadsto \log \left(\left(x \cdot \left(\mathsf{neg}\left(\color{blue}{\frac{x}{\mathsf{neg}\left(2\right)}}\right)\right) + \left(\mathsf{neg}\left(2\right)\right)\right) \cdot \frac{1}{\mathsf{neg}\left(x\right)}\right) \]
    11. distribute-frac-negN/A

      \[\leadsto \log \left(\left(x \cdot \color{blue}{\frac{\mathsf{neg}\left(x\right)}{\mathsf{neg}\left(2\right)}} + \left(\mathsf{neg}\left(2\right)\right)\right) \cdot \frac{1}{\mathsf{neg}\left(x\right)}\right) \]
    12. frac-2negN/A

      \[\leadsto \log \left(\left(x \cdot \color{blue}{\frac{x}{2}} + \left(\mathsf{neg}\left(2\right)\right)\right) \cdot \frac{1}{\mathsf{neg}\left(x\right)}\right) \]
    13. accelerator-lowering-fma.f64N/A

      \[\leadsto \log \left(\color{blue}{\mathsf{fma}\left(x, \frac{x}{2}, \mathsf{neg}\left(2\right)\right)} \cdot \frac{1}{\mathsf{neg}\left(x\right)}\right) \]
    14. div-invN/A

      \[\leadsto \log \left(\mathsf{fma}\left(x, \color{blue}{x \cdot \frac{1}{2}}, \mathsf{neg}\left(2\right)\right) \cdot \frac{1}{\mathsf{neg}\left(x\right)}\right) \]
    15. metadata-evalN/A

      \[\leadsto \log \left(\mathsf{fma}\left(x, x \cdot \color{blue}{\frac{1}{2}}, \mathsf{neg}\left(2\right)\right) \cdot \frac{1}{\mathsf{neg}\left(x\right)}\right) \]
    16. *-lowering-*.f64N/A

      \[\leadsto \log \left(\mathsf{fma}\left(x, \color{blue}{x \cdot \frac{1}{2}}, \mathsf{neg}\left(2\right)\right) \cdot \frac{1}{\mathsf{neg}\left(x\right)}\right) \]
    17. metadata-evalN/A

      \[\leadsto \log \left(\mathsf{fma}\left(x, x \cdot \frac{1}{2}, \color{blue}{-2}\right) \cdot \frac{1}{\mathsf{neg}\left(x\right)}\right) \]
    18. frac-2negN/A

      \[\leadsto \log \left(\mathsf{fma}\left(x, x \cdot \frac{1}{2}, -2\right) \cdot \color{blue}{\frac{\mathsf{neg}\left(1\right)}{\mathsf{neg}\left(\left(\mathsf{neg}\left(x\right)\right)\right)}}\right) \]
    19. metadata-evalN/A

      \[\leadsto \log \left(\mathsf{fma}\left(x, x \cdot \frac{1}{2}, -2\right) \cdot \frac{\color{blue}{-1}}{\mathsf{neg}\left(\left(\mathsf{neg}\left(x\right)\right)\right)}\right) \]
    20. remove-double-negN/A

      \[\leadsto \log \left(\mathsf{fma}\left(x, x \cdot \frac{1}{2}, -2\right) \cdot \frac{-1}{\color{blue}{x}}\right) \]
    21. /-lowering-/.f6499.4

      \[\leadsto \log \left(\mathsf{fma}\left(x, x \cdot 0.5, -2\right) \cdot \color{blue}{\frac{-1}{x}}\right) \]
  7. Applied egg-rr99.4%

    \[\leadsto \log \color{blue}{\left(\mathsf{fma}\left(x, x \cdot 0.5, -2\right) \cdot \frac{-1}{x}\right)} \]
  8. Taylor expanded in x around inf

    \[\leadsto \log \color{blue}{\left(x \cdot \left(2 \cdot \frac{1}{{x}^{2}} - \frac{1}{2}\right)\right)} \]
  9. Step-by-step derivation
    1. sub-negN/A

      \[\leadsto \log \left(x \cdot \color{blue}{\left(2 \cdot \frac{1}{{x}^{2}} + \left(\mathsf{neg}\left(\frac{1}{2}\right)\right)\right)}\right) \]
    2. metadata-evalN/A

      \[\leadsto \log \left(x \cdot \left(2 \cdot \frac{1}{{x}^{2}} + \color{blue}{\frac{-1}{2}}\right)\right) \]
    3. +-commutativeN/A

      \[\leadsto \log \left(x \cdot \color{blue}{\left(\frac{-1}{2} + 2 \cdot \frac{1}{{x}^{2}}\right)}\right) \]
    4. distribute-lft-inN/A

      \[\leadsto \log \color{blue}{\left(x \cdot \frac{-1}{2} + x \cdot \left(2 \cdot \frac{1}{{x}^{2}}\right)\right)} \]
    5. associate-*r/N/A

      \[\leadsto \log \left(x \cdot \frac{-1}{2} + x \cdot \color{blue}{\frac{2 \cdot 1}{{x}^{2}}}\right) \]
    6. metadata-evalN/A

      \[\leadsto \log \left(x \cdot \frac{-1}{2} + x \cdot \frac{\color{blue}{2}}{{x}^{2}}\right) \]
    7. associate-*r/N/A

      \[\leadsto \log \left(x \cdot \frac{-1}{2} + \color{blue}{\frac{x \cdot 2}{{x}^{2}}}\right) \]
    8. unpow2N/A

      \[\leadsto \log \left(x \cdot \frac{-1}{2} + \frac{x \cdot 2}{\color{blue}{x \cdot x}}\right) \]
    9. times-fracN/A

      \[\leadsto \log \left(x \cdot \frac{-1}{2} + \color{blue}{\frac{x}{x} \cdot \frac{2}{x}}\right) \]
    10. *-inversesN/A

      \[\leadsto \log \left(x \cdot \frac{-1}{2} + \color{blue}{1} \cdot \frac{2}{x}\right) \]
    11. *-lft-identityN/A

      \[\leadsto \log \left(x \cdot \frac{-1}{2} + \color{blue}{\frac{2}{x}}\right) \]
    12. accelerator-lowering-fma.f64N/A

      \[\leadsto \log \color{blue}{\left(\mathsf{fma}\left(x, \frac{-1}{2}, \frac{2}{x}\right)\right)} \]
    13. /-lowering-/.f6499.4

      \[\leadsto \log \left(\mathsf{fma}\left(x, -0.5, \color{blue}{\frac{2}{x}}\right)\right) \]
  10. Simplified99.4%

    \[\leadsto \log \color{blue}{\left(\mathsf{fma}\left(x, -0.5, \frac{2}{x}\right)\right)} \]
  11. Step-by-step derivation
    1. flip-+N/A

      \[\leadsto \log \color{blue}{\left(\frac{\left(x \cdot \frac{-1}{2}\right) \cdot \left(x \cdot \frac{-1}{2}\right) - \frac{2}{x} \cdot \frac{2}{x}}{x \cdot \frac{-1}{2} - \frac{2}{x}}\right)} \]
    2. clear-numN/A

      \[\leadsto \log \color{blue}{\left(\frac{1}{\frac{x \cdot \frac{-1}{2} - \frac{2}{x}}{\left(x \cdot \frac{-1}{2}\right) \cdot \left(x \cdot \frac{-1}{2}\right) - \frac{2}{x} \cdot \frac{2}{x}}}\right)} \]
    3. log-recN/A

      \[\leadsto \color{blue}{\mathsf{neg}\left(\log \left(\frac{x \cdot \frac{-1}{2} - \frac{2}{x}}{\left(x \cdot \frac{-1}{2}\right) \cdot \left(x \cdot \frac{-1}{2}\right) - \frac{2}{x} \cdot \frac{2}{x}}\right)\right)} \]
    4. neg-lowering-neg.f64N/A

      \[\leadsto \color{blue}{\mathsf{neg}\left(\log \left(\frac{x \cdot \frac{-1}{2} - \frac{2}{x}}{\left(x \cdot \frac{-1}{2}\right) \cdot \left(x \cdot \frac{-1}{2}\right) - \frac{2}{x} \cdot \frac{2}{x}}\right)\right)} \]
    5. log-lowering-log.f64N/A

      \[\leadsto \mathsf{neg}\left(\color{blue}{\log \left(\frac{x \cdot \frac{-1}{2} - \frac{2}{x}}{\left(x \cdot \frac{-1}{2}\right) \cdot \left(x \cdot \frac{-1}{2}\right) - \frac{2}{x} \cdot \frac{2}{x}}\right)}\right) \]
    6. clear-numN/A

      \[\leadsto \mathsf{neg}\left(\log \color{blue}{\left(\frac{1}{\frac{\left(x \cdot \frac{-1}{2}\right) \cdot \left(x \cdot \frac{-1}{2}\right) - \frac{2}{x} \cdot \frac{2}{x}}{x \cdot \frac{-1}{2} - \frac{2}{x}}}\right)}\right) \]
    7. flip-+N/A

      \[\leadsto \mathsf{neg}\left(\log \left(\frac{1}{\color{blue}{x \cdot \frac{-1}{2} + \frac{2}{x}}}\right)\right) \]
    8. /-lowering-/.f64N/A

      \[\leadsto \mathsf{neg}\left(\log \color{blue}{\left(\frac{1}{x \cdot \frac{-1}{2} + \frac{2}{x}}\right)}\right) \]
    9. accelerator-lowering-fma.f64N/A

      \[\leadsto \mathsf{neg}\left(\log \left(\frac{1}{\color{blue}{\mathsf{fma}\left(x, \frac{-1}{2}, \frac{2}{x}\right)}}\right)\right) \]
    10. /-lowering-/.f6499.4

      \[\leadsto -\log \left(\frac{1}{\mathsf{fma}\left(x, -0.5, \color{blue}{\frac{2}{x}}\right)}\right) \]
  12. Applied egg-rr99.4%

    \[\leadsto \color{blue}{-\log \left(\frac{1}{\mathsf{fma}\left(x, -0.5, \frac{2}{x}\right)}\right)} \]
  13. Add Preprocessing

Alternative 3: 99.6% accurate, 1.2× speedup?

\[\begin{array}{l} \\ \log \left(\mathsf{fma}\left(x, -0.5, \frac{2}{x}\right)\right) \end{array} \]
(FPCore (x) :precision binary64 (log (fma x -0.5 (/ 2.0 x))))
/* Alternative 3 (~99.6% accurate): log(fma(x, -0.5, 2/x)) — the same series rewrite
 * as Alternative 2 but without the reciprocal and negation. */
double code(double x) {
	return log(fma(x, -0.5, (2.0 / x)));
}
# Alternative 3 (~99.6% accurate): log(fma(x, -0.5, 2/x)), series rewrite without the reciprocal.
function code(x)
	return log(fma(x, -0.5, Float64(2.0 / x)))
end
code[x_] := N[Log[N[(x * -0.5 + N[(2.0 / x), $MachinePrecision]), $MachinePrecision]], $MachinePrecision]
\begin{array}{l}

\\
\log \left(\mathsf{fma}\left(x, -0.5, \frac{2}{x}\right)\right)
\end{array}
Derivation
  1. Initial program 100.0%

    \[\log \left(\frac{1}{x} + \frac{\sqrt{1 - x \cdot x}}{x}\right) \]
  2. Add Preprocessing
  3. Taylor expanded in x around 0

    \[\leadsto \log \color{blue}{\left(\frac{2 + \frac{-1}{2} \cdot {x}^{2}}{x}\right)} \]
  4. Step-by-step derivation
    1. /-lowering-/.f64N/A

      \[\leadsto \log \color{blue}{\left(\frac{2 + \frac{-1}{2} \cdot {x}^{2}}{x}\right)} \]
    2. +-commutativeN/A

      \[\leadsto \log \left(\frac{\color{blue}{\frac{-1}{2} \cdot {x}^{2} + 2}}{x}\right) \]
    3. unpow2N/A

      \[\leadsto \log \left(\frac{\frac{-1}{2} \cdot \color{blue}{\left(x \cdot x\right)} + 2}{x}\right) \]
    4. associate-*r*N/A

      \[\leadsto \log \left(\frac{\color{blue}{\left(\frac{-1}{2} \cdot x\right) \cdot x} + 2}{x}\right) \]
    5. *-commutativeN/A

      \[\leadsto \log \left(\frac{\color{blue}{x \cdot \left(\frac{-1}{2} \cdot x\right)} + 2}{x}\right) \]
    6. metadata-evalN/A

      \[\leadsto \log \left(\frac{x \cdot \left(\color{blue}{\left(\mathsf{neg}\left(\frac{1}{2}\right)\right)} \cdot x\right) + 2}{x}\right) \]
    7. distribute-lft-neg-inN/A

      \[\leadsto \log \left(\frac{x \cdot \color{blue}{\left(\mathsf{neg}\left(\frac{1}{2} \cdot x\right)\right)} + 2}{x}\right) \]
    8. *-commutativeN/A

      \[\leadsto \log \left(\frac{x \cdot \left(\mathsf{neg}\left(\color{blue}{x \cdot \frac{1}{2}}\right)\right) + 2}{x}\right) \]
    9. accelerator-lowering-fma.f64N/A

      \[\leadsto \log \left(\frac{\color{blue}{\mathsf{fma}\left(x, \mathsf{neg}\left(x \cdot \frac{1}{2}\right), 2\right)}}{x}\right) \]
    10. distribute-rgt-neg-inN/A

      \[\leadsto \log \left(\frac{\mathsf{fma}\left(x, \color{blue}{x \cdot \left(\mathsf{neg}\left(\frac{1}{2}\right)\right)}, 2\right)}{x}\right) \]
    11. metadata-evalN/A

      \[\leadsto \log \left(\frac{\mathsf{fma}\left(x, x \cdot \color{blue}{\frac{-1}{2}}, 2\right)}{x}\right) \]
    12. *-lowering-*.f6499.4

      \[\leadsto \log \left(\frac{\mathsf{fma}\left(x, \color{blue}{x \cdot -0.5}, 2\right)}{x}\right) \]
  5. Simplified99.4%

    \[\leadsto \log \color{blue}{\left(\frac{\mathsf{fma}\left(x, x \cdot -0.5, 2\right)}{x}\right)} \]
  6. Step-by-step derivation
    1. frac-2negN/A

      \[\leadsto \log \color{blue}{\left(\frac{\mathsf{neg}\left(\left(x \cdot \left(x \cdot \frac{-1}{2}\right) + 2\right)\right)}{\mathsf{neg}\left(x\right)}\right)} \]
    2. div-invN/A

      \[\leadsto \log \color{blue}{\left(\left(\mathsf{neg}\left(\left(x \cdot \left(x \cdot \frac{-1}{2}\right) + 2\right)\right)\right) \cdot \frac{1}{\mathsf{neg}\left(x\right)}\right)} \]
    3. *-lowering-*.f64N/A

      \[\leadsto \log \color{blue}{\left(\left(\mathsf{neg}\left(\left(x \cdot \left(x \cdot \frac{-1}{2}\right) + 2\right)\right)\right) \cdot \frac{1}{\mathsf{neg}\left(x\right)}\right)} \]
    4. distribute-neg-inN/A

      \[\leadsto \log \left(\color{blue}{\left(\left(\mathsf{neg}\left(x \cdot \left(x \cdot \frac{-1}{2}\right)\right)\right) + \left(\mathsf{neg}\left(2\right)\right)\right)} \cdot \frac{1}{\mathsf{neg}\left(x\right)}\right) \]
    5. distribute-rgt-neg-inN/A

      \[\leadsto \log \left(\left(\color{blue}{x \cdot \left(\mathsf{neg}\left(x \cdot \frac{-1}{2}\right)\right)} + \left(\mathsf{neg}\left(2\right)\right)\right) \cdot \frac{1}{\mathsf{neg}\left(x\right)}\right) \]
    6. metadata-evalN/A

      \[\leadsto \log \left(\left(x \cdot \left(\mathsf{neg}\left(x \cdot \color{blue}{\left(\mathsf{neg}\left(\frac{1}{2}\right)\right)}\right)\right) + \left(\mathsf{neg}\left(2\right)\right)\right) \cdot \frac{1}{\mathsf{neg}\left(x\right)}\right) \]
    7. distribute-rgt-neg-inN/A

      \[\leadsto \log \left(\left(x \cdot \left(\mathsf{neg}\left(\color{blue}{\left(\mathsf{neg}\left(x \cdot \frac{1}{2}\right)\right)}\right)\right) + \left(\mathsf{neg}\left(2\right)\right)\right) \cdot \frac{1}{\mathsf{neg}\left(x\right)}\right) \]
    8. metadata-evalN/A

      \[\leadsto \log \left(\left(x \cdot \left(\mathsf{neg}\left(\left(\mathsf{neg}\left(x \cdot \color{blue}{\frac{1}{2}}\right)\right)\right)\right) + \left(\mathsf{neg}\left(2\right)\right)\right) \cdot \frac{1}{\mathsf{neg}\left(x\right)}\right) \]
    9. div-invN/A

      \[\leadsto \log \left(\left(x \cdot \left(\mathsf{neg}\left(\left(\mathsf{neg}\left(\color{blue}{\frac{x}{2}}\right)\right)\right)\right) + \left(\mathsf{neg}\left(2\right)\right)\right) \cdot \frac{1}{\mathsf{neg}\left(x\right)}\right) \]
    10. distribute-neg-frac2N/A

      \[\leadsto \log \left(\left(x \cdot \left(\mathsf{neg}\left(\color{blue}{\frac{x}{\mathsf{neg}\left(2\right)}}\right)\right) + \left(\mathsf{neg}\left(2\right)\right)\right) \cdot \frac{1}{\mathsf{neg}\left(x\right)}\right) \]
    11. distribute-frac-negN/A

      \[\leadsto \log \left(\left(x \cdot \color{blue}{\frac{\mathsf{neg}\left(x\right)}{\mathsf{neg}\left(2\right)}} + \left(\mathsf{neg}\left(2\right)\right)\right) \cdot \frac{1}{\mathsf{neg}\left(x\right)}\right) \]
    12. frac-2negN/A

      \[\leadsto \log \left(\left(x \cdot \color{blue}{\frac{x}{2}} + \left(\mathsf{neg}\left(2\right)\right)\right) \cdot \frac{1}{\mathsf{neg}\left(x\right)}\right) \]
    13. accelerator-lowering-fma.f64N/A

      \[\leadsto \log \left(\color{blue}{\mathsf{fma}\left(x, \frac{x}{2}, \mathsf{neg}\left(2\right)\right)} \cdot \frac{1}{\mathsf{neg}\left(x\right)}\right) \]
    14. div-invN/A

      \[\leadsto \log \left(\mathsf{fma}\left(x, \color{blue}{x \cdot \frac{1}{2}}, \mathsf{neg}\left(2\right)\right) \cdot \frac{1}{\mathsf{neg}\left(x\right)}\right) \]
    15. metadata-evalN/A

      \[\leadsto \log \left(\mathsf{fma}\left(x, x \cdot \color{blue}{\frac{1}{2}}, \mathsf{neg}\left(2\right)\right) \cdot \frac{1}{\mathsf{neg}\left(x\right)}\right) \]
    16. *-lowering-*.f64N/A

      \[\leadsto \log \left(\mathsf{fma}\left(x, \color{blue}{x \cdot \frac{1}{2}}, \mathsf{neg}\left(2\right)\right) \cdot \frac{1}{\mathsf{neg}\left(x\right)}\right) \]
    17. metadata-evalN/A

      \[\leadsto \log \left(\mathsf{fma}\left(x, x \cdot \frac{1}{2}, \color{blue}{-2}\right) \cdot \frac{1}{\mathsf{neg}\left(x\right)}\right) \]
    18. frac-2negN/A

      \[\leadsto \log \left(\mathsf{fma}\left(x, x \cdot \frac{1}{2}, -2\right) \cdot \color{blue}{\frac{\mathsf{neg}\left(1\right)}{\mathsf{neg}\left(\left(\mathsf{neg}\left(x\right)\right)\right)}}\right) \]
    19. metadata-evalN/A

      \[\leadsto \log \left(\mathsf{fma}\left(x, x \cdot \frac{1}{2}, -2\right) \cdot \frac{\color{blue}{-1}}{\mathsf{neg}\left(\left(\mathsf{neg}\left(x\right)\right)\right)}\right) \]
    20. remove-double-negN/A

      \[\leadsto \log \left(\mathsf{fma}\left(x, x \cdot \frac{1}{2}, -2\right) \cdot \frac{-1}{\color{blue}{x}}\right) \]
    21. /-lowering-/.f6499.4

      \[\leadsto \log \left(\mathsf{fma}\left(x, x \cdot 0.5, -2\right) \cdot \color{blue}{\frac{-1}{x}}\right) \]
  7. Applied egg-rr99.4%

    \[\leadsto \log \color{blue}{\left(\mathsf{fma}\left(x, x \cdot 0.5, -2\right) \cdot \frac{-1}{x}\right)} \]
  8. Taylor expanded in x around inf

    \[\leadsto \log \color{blue}{\left(x \cdot \left(2 \cdot \frac{1}{{x}^{2}} - \frac{1}{2}\right)\right)} \]
  9. Step-by-step derivation
    1. sub-negN/A

      \[\leadsto \log \left(x \cdot \color{blue}{\left(2 \cdot \frac{1}{{x}^{2}} + \left(\mathsf{neg}\left(\frac{1}{2}\right)\right)\right)}\right) \]
    2. metadata-evalN/A

      \[\leadsto \log \left(x \cdot \left(2 \cdot \frac{1}{{x}^{2}} + \color{blue}{\frac{-1}{2}}\right)\right) \]
    3. +-commutativeN/A

      \[\leadsto \log \left(x \cdot \color{blue}{\left(\frac{-1}{2} + 2 \cdot \frac{1}{{x}^{2}}\right)}\right) \]
    4. distribute-lft-inN/A

      \[\leadsto \log \color{blue}{\left(x \cdot \frac{-1}{2} + x \cdot \left(2 \cdot \frac{1}{{x}^{2}}\right)\right)} \]
    5. associate-*r/N/A

      \[\leadsto \log \left(x \cdot \frac{-1}{2} + x \cdot \color{blue}{\frac{2 \cdot 1}{{x}^{2}}}\right) \]
    6. metadata-evalN/A

      \[\leadsto \log \left(x \cdot \frac{-1}{2} + x \cdot \frac{\color{blue}{2}}{{x}^{2}}\right) \]
    7. associate-*r/N/A

      \[\leadsto \log \left(x \cdot \frac{-1}{2} + \color{blue}{\frac{x \cdot 2}{{x}^{2}}}\right) \]
    8. unpow2N/A

      \[\leadsto \log \left(x \cdot \frac{-1}{2} + \frac{x \cdot 2}{\color{blue}{x \cdot x}}\right) \]
    9. times-fracN/A

      \[\leadsto \log \left(x \cdot \frac{-1}{2} + \color{blue}{\frac{x}{x} \cdot \frac{2}{x}}\right) \]
    10. *-inversesN/A

      \[\leadsto \log \left(x \cdot \frac{-1}{2} + \color{blue}{1} \cdot \frac{2}{x}\right) \]
    11. *-lft-identityN/A

      \[\leadsto \log \left(x \cdot \frac{-1}{2} + \color{blue}{\frac{2}{x}}\right) \]
    12. accelerator-lowering-fma.f64N/A

      \[\leadsto \log \color{blue}{\left(\mathsf{fma}\left(x, \frac{-1}{2}, \frac{2}{x}\right)\right)} \]
    13. /-lowering-/.f6499.4

      \[\leadsto \log \left(\mathsf{fma}\left(x, -0.5, \color{blue}{\frac{2}{x}}\right)\right) \]
  10. Simplified99.4%

    \[\leadsto \log \color{blue}{\left(\mathsf{fma}\left(x, -0.5, \frac{2}{x}\right)\right)} \]
  11. Add Preprocessing

Alternative 4: 99.3% accurate, 1.3× speedup?

\[\begin{array}{l} \\ -\log \left(x \cdot 0.5\right) \end{array} \]
(FPCore (x) :precision binary64 (- (log (* x 0.5))))
double code(double x) {
	return -log((x * 0.5));
}
! Alternative 4 (~99.3% accurate): -log(x/2), leading-order approximation for small x.
real(8) function code(x)
    real(8), intent (in) :: x
    code = -log((x * 0.5d0))
end function
// Alternative 4 (~99.3% accurate): -log(x/2), leading-order approximation for small x.
public static double code(double x) {
	return -Math.log((x * 0.5));
}
def code(x):
	"""Alternative 4 (~99.3% accurate): -log(x/2), small-x approximation."""
	half_x = x * 0.5
	return -math.log(half_x)
# Alternative 4 (~99.3% accurate): -log(x/2), leading-order approximation for small x.
function code(x)
	return Float64(-log(Float64(x * 0.5)))
end
% Alternative 4 (~99.3% accurate): -log(x/2), leading-order approximation for small x.
function tmp = code(x)
	tmp = -log((x * 0.5));
end
code[x_] := (-N[Log[N[(x * 0.5), $MachinePrecision]], $MachinePrecision])
\begin{array}{l}

\\
-\log \left(x \cdot 0.5\right)
\end{array}
Derivation
  1. Initial program 100.0%

    \[\log \left(\frac{1}{x} + \frac{\sqrt{1 - x \cdot x}}{x}\right) \]
  2. Add Preprocessing
  3. Taylor expanded in x around 0

    \[\leadsto \log \color{blue}{\left(\frac{2}{x}\right)} \]
  4. Step-by-step derivation
    1. /-lowering-/.f6498.9

      \[\leadsto \log \color{blue}{\left(\frac{2}{x}\right)} \]
  5. Simplified98.9%

    \[\leadsto \log \color{blue}{\left(\frac{2}{x}\right)} \]
  6. Step-by-step derivation
    1. clear-numN/A

      \[\leadsto \log \color{blue}{\left(\frac{1}{\frac{x}{2}}\right)} \]
    2. log-recN/A

      \[\leadsto \color{blue}{\mathsf{neg}\left(\log \left(\frac{x}{2}\right)\right)} \]
    3. neg-lowering-neg.f64N/A

      \[\leadsto \color{blue}{\mathsf{neg}\left(\log \left(\frac{x}{2}\right)\right)} \]
    4. log-lowering-log.f64N/A

      \[\leadsto \mathsf{neg}\left(\color{blue}{\log \left(\frac{x}{2}\right)}\right) \]
    5. div-invN/A

      \[\leadsto \mathsf{neg}\left(\log \color{blue}{\left(x \cdot \frac{1}{2}\right)}\right) \]
    6. metadata-evalN/A

      \[\leadsto \mathsf{neg}\left(\log \left(x \cdot \color{blue}{\frac{1}{2}}\right)\right) \]
    7. *-lowering-*.f6498.9

      \[\leadsto -\log \color{blue}{\left(x \cdot 0.5\right)} \]
  7. Applied egg-rr98.9%

    \[\leadsto \color{blue}{-\log \left(x \cdot 0.5\right)} \]
  8. Add Preprocessing

Alternative 5: 0.0% accurate, 1.4× speedup?

\[\begin{array}{l} \\ 0.5 \cdot \log\left(-1\right) \end{array} \]
(FPCore (x) :precision binary64 (* 0.5 (log -1.0)))
/* Alternative 5 (0.0% accurate): constant 0.5*log(-1). log(-1.0) is a domain error
 * (NaN) for real doubles, so this degenerate alternative never returns a finite value. */
double code(double x) {
	return 0.5 * log(-1.0);
}
! Alternative 5 (0.0% accurate): 0.5 * log(-1). The logarithm of a negative real is a
! domain error, so this degenerate alternative is not usable.
real(8) function code(x)
    real(8), intent (in) :: x
    code = 0.5d0 * log((-1.0d0))
end function
// Alternative 5 (0.0% accurate): 0.5 * Math.log(-1.0). Math.log returns NaN for
// negative arguments, so this degenerate alternative always yields NaN.
public static double code(double x) {
	return 0.5 * Math.log(-1.0);
}
def code(x):
	"""Alternative 5 (0.0% accurate): 0.5 * log(-1).

	Note: math.log raises ValueError for negative arguments, so this
	degenerate alternative fails at runtime in Python.
	"""
	return 0.5 * math.log(-1.0)
# Alternative 5 (0.0% accurate): 0.5 * log(-1.0). Julia's log throws a DomainError
# for negative real arguments, so this degenerate alternative errors at runtime.
function code(x)
	return Float64(0.5 * log(-1.0))
end
% Alternative 5 (0.0% accurate): 0.5 * log(-1). NOTE(review): MATLAB's log of a
% negative real returns a complex value (log(-1) = pi*1i), so tmp is complex here.
function tmp = code(x)
	tmp = 0.5 * log(-1.0);
end
code[x_] := N[(0.5 * N[Log[-1.0], $MachinePrecision]), $MachinePrecision]
\begin{array}{l}

\\
0.5 \cdot \log\left(-1\right)
\end{array}
Derivation
  1. Initial program 100.0%

    \[\log \left(\frac{1}{x} + \frac{\sqrt{1 - x \cdot x}}{x}\right) \]
  2. Add Preprocessing
  3. Taylor expanded in x around inf

    \[\leadsto \log \color{blue}{\left(\sqrt{-1}\right)} \]
  4. Step-by-step derivation
    1. sqrt-lowering-sqrt.f640.0

      \[\leadsto \log \color{blue}{\left(\sqrt{-1}\right)} \]
  5. Simplified0.0%

    \[\leadsto \log \color{blue}{\left(\sqrt{-1}\right)} \]
  6. Step-by-step derivation
    1. pow1/2N/A

      \[\leadsto \log \color{blue}{\left({-1}^{\frac{1}{2}}\right)} \]
    2. pow-to-expN/A

      \[\leadsto \log \color{blue}{\left(e^{\log -1 \cdot \frac{1}{2}}\right)} \]
    3. rem-log-expN/A

      \[\leadsto \color{blue}{\log -1 \cdot \frac{1}{2}} \]
    4. *-lowering-*.f64N/A

      \[\leadsto \color{blue}{\log -1 \cdot \frac{1}{2}} \]
    5. log-lowering-log.f640.0

      \[\leadsto \color{blue}{\log -1} \cdot 0.5 \]
  7. Applied egg-rr0.0%

    \[\leadsto \color{blue}{\log -1 \cdot 0.5} \]
  8. Final simplification0.0%

    \[\leadsto 0.5 \cdot \log -1 \]
  9. Add Preprocessing

Reproduce

?
herbie shell --seed 2024205 
(FPCore (x)
  :name "Hyperbolic arc-(co)secant"
  :precision binary64
  (log (+ (/ 1.0 x) (/ (sqrt (- 1.0 (* x x))) x))))