| Alternative 1 | |
|---|---|
| Accuracy | 99.7% |
| Cost | 6848 |
\[x \cdot \mathsf{fma}\left(x \cdot x, -0.12900613773279798, 0.954929658551372\right)
\]
; Original program: a*x - b*x^3 with a = 0.954929658551372, b = 0.12900613773279798, in binary64.
(FPCore (x) :precision binary64 (- (* 0.954929658551372 x) (* 0.12900613773279798 (* (* x x) x))))
; Rewritten alternative: fma(a, x, x^3 * -b) — one fused rounding for the outer add, same math as a*x - b*x^3.
(FPCore (x) :precision binary64 (fma 0.954929658551372 x (* (pow x 3.0) -0.12900613773279798)))
/* Cubic approximation a*x - b*x^3 (a = 0.954929658551372, b = 0.12900613773279798).
 * The operation order matches the generated original exactly, so the
 * double-precision result is bit-identical. */
double code(double x) {
	const double linear_term = 0.954929658551372 * x;
	const double cubic_term = 0.12900613773279798 * ((x * x) * x);
	return linear_term - cubic_term;
}
double code(double x) {
return fma(0.954929658551372, x, (pow(x, 3.0) * -0.12900613773279798));
}
# Cubic approximation a*x - b*x^3; every intermediate is rounded to Float64
# in exactly the same nesting as the generated original, so results match bit-for-bit.
function code(x)
    linear_term = Float64(0.954929658551372 * x)
    x_cubed = Float64(Float64(x * x) * x)
    cubic_term = Float64(0.12900613773279798 * x_cubed)
    return Float64(linear_term - cubic_term)
end
# Cubic approximation as fma(a, x, x^3 * -b): one fused rounding for a*x + cubic_term.
# Same operations and rounding points as the generated one-liner, so results are identical.
function code(x)
    cubic_term = Float64((x ^ 3.0) * -0.12900613773279798)
    return fma(0.954929658551372, x, cubic_term)
end
(* Cubic approximation a*x - b*x^3 (a = 0.954929658551372, b = 0.12900613773279798);
   each intermediate is rounded to $MachinePrecision to mirror binary64 evaluation order. *)
code[x_] := N[(N[(0.954929658551372 * x), $MachinePrecision] - N[(0.12900613773279798 * N[(N[(x * x), $MachinePrecision] * x), $MachinePrecision]), $MachinePrecision]), $MachinePrecision]
(* Fma-style rewrite: a*x + (x^3 * -b) with the outer sum rounded once,
   mirroring the binary64 fma alternative of the same expression. *)
code[x_] := N[(0.954929658551372 * x + N[(N[Power[x, 3.0], $MachinePrecision] * -0.12900613773279798), $MachinePrecision]), $MachinePrecision]
0.954929658551372 \cdot x - 0.12900613773279798 \cdot \left(\left(x \cdot x\right) \cdot x\right)
\mathsf{fma}\left(0.954929658551372, x, {x}^{3} \cdot -0.12900613773279798\right)
Initial program 99.6%
Simplified 99.7%
[Start] 99.6 | \[ 0.954929658551372 \cdot x - 0.12900613773279798 \cdot \left(\left(x \cdot x\right) \cdot x\right)
\] |
|---|---|
fma-neg [=>] 99.6 | \[ \color{blue}{\mathsf{fma}\left(0.954929658551372, x, -0.12900613773279798 \cdot \left(\left(x \cdot x\right) \cdot x\right)\right)}
\] |
distribute-lft-neg-in [=>] 99.6 | \[ \mathsf{fma}\left(0.954929658551372, x, \color{blue}{\left(-0.12900613773279798\right) \cdot \left(\left(x \cdot x\right) \cdot x\right)}\right)
\] |
*-commutative [=>] 99.6 | \[ \mathsf{fma}\left(0.954929658551372, x, \color{blue}{\left(\left(x \cdot x\right) \cdot x\right) \cdot \left(-0.12900613773279798\right)}\right)
\] |
unpow3 [<=] 99.7 | \[ \mathsf{fma}\left(0.954929658551372, x, \color{blue}{{x}^{3}} \cdot \left(-0.12900613773279798\right)\right)
\] |
metadata-eval [=>] 99.7 | \[ \mathsf{fma}\left(0.954929658551372, x, {x}^{3} \cdot \color{blue}{-0.12900613773279798}\right)
\] |
Final simplification 99.7%
| Alternative 1 | |
|---|---|
| Accuracy | 99.7% |
| Cost | 6848 |
| Alternative 2 | |
|---|---|
| Accuracy | 98.1% |
| Cost | 713 |
| Alternative 3 | |
|---|---|
| Accuracy | 99.7% |
| Cost | 576 |
| Alternative 4 | |
|---|---|
| Accuracy | 4.4% |
| Cost | 192 |
| Alternative 5 | |
|---|---|
| Accuracy | 75.0% |
| Cost | 192 |
herbie shell --seed 2023133
; Input specification: cubic approximation a*x - b*x^3
; (a = 0.954929658551372, b = 0.12900613773279798) in binary64.
(FPCore (x)
:name "Rosa's Benchmark"
:precision binary64
(- (* 0.954929658551372 x) (* 0.12900613773279798 (* (* x x) x))))