| Alternative 1 | |
|---|---|
| Accuracy | 99.1% |
| Cost | 192 |
\[-1 - x
\]
; Original program: log(1 - x) / log(1 + x) in double precision.
(FPCore (x) :precision binary64 (/ (log (- 1.0 x)) (log (+ 1.0 x))))
; Alternative: degree-2 Taylor polynomial about x = 0, i.e. -x^2/2 - 1 - x.
(FPCore (x) :precision binary64 (- (+ (* x (* x -0.5)) -1.0) x))
double code(double x) {
return log((1.0 - x)) / log((1.0 + x));
}
/* Degree-2 Taylor approximation of log(1 - x)/log(1 + x) about x = 0:
 * -0.5*x^2 - 1 - x, evaluated with the same association as the source
 * expression ((x * (x * -0.5)) + -1.0) - x to keep rounding identical. */
double code(double x) {
    double quad = x * (x * -0.5); /* -x^2/2 */
    double partial = quad + -1.0;
    return partial - x;
}
! Computes log(1 - x) / log(1 + x); accurate only away from x = 0.
real(8) function code(x)
    real(8), intent (in) :: x
    real(8) :: num, den
    num = log((1.0d0 - x))
    den = log((1.0d0 + x))
    code = num / den
end function
! Degree-2 Taylor polynomial -x**2/2 - 1 - x of log(1-x)/log(1+x) at x = 0,
! evaluated with the same operation order as the generated expression.
real(8) function code(x)
    real(8), intent (in) :: x
    real(8) :: quad
    quad = x * (x * (-0.5d0))
    code = (quad + (-1.0d0)) - x
end function
/** Returns log(1 - x) / log(1 + x); loses accuracy as x approaches 0. */
public static double code(double x) {
    double numerator = Math.log(1.0 - x);
    double denominator = Math.log(1.0 + x);
    return numerator / denominator;
}
/**
 * Degree-2 Taylor approximation -0.5*x*x - 1 - x of log(1-x)/log(1+x)
 * about x = 0, keeping the source expression's association.
 */
public static double code(double x) {
    double quad = x * (x * -0.5);
    double partial = quad + -1.0;
    return partial - x;
}
def code(x):
    """Return log(1 - x) / log(1 + x); ill-conditioned near x = 0."""
    numerator = math.log(1.0 - x)
    denominator = math.log(1.0 + x)
    return numerator / denominator
def code(x):
    """Degree-2 Taylor polynomial -0.5*x**2 - 1 - x of log(1-x)/log(1+x).

    Evaluated with the same operation order as the generated expression
    so the floating-point result is bit-identical.
    """
    quad = x * (x * -0.5)
    return (quad + -1.0) - x
# log(1 - x) / log(1 + x) with every intermediate rounded to Float64,
# matching the generated expression (identity rounding for Float64 input).
function code(x)
    num = log(Float64(1.0 - x))
    den = log(Float64(1.0 + x))
    return Float64(num / den)
end
# Degree-2 Taylor polynomial -x^2/2 - 1 - x of log(1-x)/log(1+x) at x = 0,
# with the same association and Float64 rounding as the generated expression.
function code(x)
    quad = Float64(x * Float64(x * -0.5))
    return Float64(Float64(quad + -1.0) - x)
end
% Computes log(1 - x) / log(1 + x); inaccurate near x = 0.
function tmp = code(x)
    num = log(1.0 - x);
    den = log(1.0 + x);
    tmp = num / den;
end
% Degree-2 Taylor polynomial -x^2/2 - 1 - x of log(1-x)/log(1+x) at x = 0,
% evaluated with the same operation order as the generated expression.
function tmp = code(x)
    quad = x * (x * -0.5);
    tmp = (quad + -1.0) - x;
end
(* log(1 - x) / log(1 + x), with every intermediate rounded to $MachinePrecision to mirror binary64 evaluation. *)
code[x_] := N[(N[Log[N[(1.0 - x), $MachinePrecision]], $MachinePrecision] / N[Log[N[(1.0 + x), $MachinePrecision]], $MachinePrecision]), $MachinePrecision]
(* Degree-2 Taylor polynomial -x^2/2 - 1 - x of log(1-x)/log(1+x) at x = 0; the nested N[..., $MachinePrecision] wrappers round each step to mirror binary64. *)
code[x_] := N[(N[(N[(x * N[(x * -0.5), $MachinePrecision]), $MachinePrecision] + -1.0), $MachinePrecision] - x), $MachinePrecision]
\frac{\log \left(1 - x\right)}{\log \left(1 + x\right)}
\left(x \cdot \left(x \cdot -0.5\right) + -1\right) - x
Results
| Original | 4.0% |
|---|---|
| Target | 99.6% |
| Herbie | 99.4% |
Initial program 4.0%
Simplified 100.0%
[Start]4.0 | \[ \frac{\log \left(1 - x\right)}{\log \left(1 + x\right)}
\] |
|---|---|
sub-neg [=>]4.0 | \[ \frac{\log \color{blue}{\left(1 + \left(-x\right)\right)}}{\log \left(1 + x\right)}
\] |
log1p-def [=>]3.0 | \[ \frac{\color{blue}{\mathsf{log1p}\left(-x\right)}}{\log \left(1 + x\right)}
\] |
log1p-def [=>]100.0 | \[ \frac{\mathsf{log1p}\left(-x\right)}{\color{blue}{\mathsf{log1p}\left(x\right)}}
\] |
Taylor expanded in x around 0 99.4%
Simplified 99.4%
[Start]99.4 | \[ \left(-0.5 \cdot {x}^{2} + -1 \cdot x\right) - 1
\] |
|---|---|
associate--l+ [=>]99.4 | \[ \color{blue}{-0.5 \cdot {x}^{2} + \left(-1 \cdot x - 1\right)}
\] |
*-commutative [=>]99.4 | \[ \color{blue}{{x}^{2} \cdot -0.5} + \left(-1 \cdot x - 1\right)
\] |
unpow2 [=>]99.4 | \[ \color{blue}{\left(x \cdot x\right)} \cdot -0.5 + \left(-1 \cdot x - 1\right)
\] |
associate-*l* [=>]99.4 | \[ \color{blue}{x \cdot \left(x \cdot -0.5\right)} + \left(-1 \cdot x - 1\right)
\] |
fma-def [=>]99.4 | \[ \color{blue}{\mathsf{fma}\left(x, x \cdot -0.5, -1 \cdot x - 1\right)}
\] |
sub-neg [=>]99.4 | \[ \mathsf{fma}\left(x, x \cdot -0.5, \color{blue}{-1 \cdot x + \left(-1\right)}\right)
\] |
metadata-eval [=>]99.4 | \[ \mathsf{fma}\left(x, x \cdot -0.5, -1 \cdot x + \color{blue}{-1}\right)
\] |
+-commutative [=>]99.4 | \[ \mathsf{fma}\left(x, x \cdot -0.5, \color{blue}{-1 + -1 \cdot x}\right)
\] |
mul-1-neg [=>]99.4 | \[ \mathsf{fma}\left(x, x \cdot -0.5, -1 + \color{blue}{\left(-x\right)}\right)
\] |
unsub-neg [=>]99.4 | \[ \mathsf{fma}\left(x, x \cdot -0.5, \color{blue}{-1 - x}\right)
\] |
Applied egg-rr 99.4%
[Start]99.4 | \[ \mathsf{fma}\left(x, x \cdot -0.5, -1 - x\right)
\] |
|---|---|
fma-udef [=>]99.4 | \[ \color{blue}{x \cdot \left(x \cdot -0.5\right) + \left(-1 - x\right)}
\] |
associate-+r- [=>]99.4 | \[ \color{blue}{\left(x \cdot \left(x \cdot -0.5\right) + -1\right) - x}
\] |
Final simplification 99.4%
| Alternative 1 | |
|---|---|
| Accuracy | 99.1% |
| Cost | 192 |
| Alternative 2 | |
|---|---|
| Accuracy | 98.0% |
| Cost | 64 |
herbie shell --seed 2023152
; Input program given to Herbie: log(1 - x) / log(1 + x) in binary64,
; restricted to the open interval -1 < x < 1 (both logs defined there).
; :herbie-target is the hand-derived polynomial used as the accuracy baseline.
(FPCore (x)
:name "qlog (example 3.10)"
:precision binary64
:pre (and (< -1.0 x) (< x 1.0))
:herbie-target
(- (+ (+ (+ 1.0 x) (/ (* x x) 2.0)) (* 0.4166666666666667 (pow x 3.0))))
(/ (log (- 1.0 x)) (log (+ 1.0 x))))