Average Error: 0.2 → 0.1
Time: 1.9s
Precision: binary64
\[\left(x \cdot x\right) \cdot \left(3 - x \cdot 2\right) \]
\[\mathsf{fma}\left(3, {x}^{2}, -2 \cdot {x}^{3}\right) \]
; Input program: x*x * (3 - 2x) in binary64 (cubic interpolation weight).
(FPCore (x) :precision binary64 (* (* x x) (- 3.0 (* x 2.0))))
; Herbie-rewritten program: fma(3, x^2, -2*x^3) = 3x^2 - 2x^3, with the
; final add fused into a single rounding via fma.
(FPCore (x) :precision binary64 (fma 3.0 (pow x 2.0) (* -2.0 (pow x 3.0))))
/* Cubic interpolation weight: (x * x) * (3 - 2x), all in binary64.
 * Same operation order as the original, split into named steps so the
 * two factors are visible. */
double code(double x) {
	double square = x * x;          /* x^2 */
	double blend = 3.0 - (x * 2.0); /* 3 - 2x */
	return square * blend;
}
double code(double x) {
	return fma(3.0, pow(x, 2.0), (-2.0 * pow(x, 3.0)));
}
# Cubic interpolation weight: (x * x) * (3 - 2x), with every intermediate
# explicitly rounded to Float64 exactly as in the original.
function code(x)
	square = Float64(x * x)
	blend = Float64(3.0 - Float64(x * 2.0))
	return Float64(square * blend)
end
# Herbie-rewritten form: fma(3, x^2, -2*x^3) = 3x^2 - 2x^3, fusing the
# final sum into a single rounding.  Same calls as the original, with the
# cubic term hoisted to a named local.
function code(x)
	cubic = Float64(-2.0 * (x ^ 3.0))
	return fma(3.0, (x ^ 2.0), cubic)
end
(* Original expression: (x*x) * (3 - 2x), each intermediate rounded via N[..., $MachinePrecision]. *)
code[x_] := N[(N[(x * x), $MachinePrecision] * N[(3.0 - N[(x * 2.0), $MachinePrecision]), $MachinePrecision]), $MachinePrecision]
(* Herbie-rewritten expression: 3*x^2 + (-2*x^3); Mathematica has no fma, so the fused form is expressed as a plain sum with per-step rounding. *)
code[x_] := N[(3.0 * N[Power[x, 2.0], $MachinePrecision] + N[(-2.0 * N[Power[x, 3.0], $MachinePrecision]), $MachinePrecision]), $MachinePrecision]
\left(x \cdot x\right) \cdot \left(3 - x \cdot 2\right)
\mathsf{fma}\left(3, {x}^{2}, -2 \cdot {x}^{3}\right)

Error

Bits error versus x

Target

Original: 0.2
Target: 0.2
Herbie: 0.1
\[x \cdot \left(x \cdot \left(3 - x \cdot 2\right)\right) \]

Derivation

  1. Initial program: 0.2

    \[\left(x \cdot x\right) \cdot \left(3 - x \cdot 2\right) \]
  2. Simplified: 0.2

    \[\leadsto \color{blue}{\left(x \cdot x\right) \cdot \mathsf{fma}\left(x, -2, 3\right)} \]
  3. Taylor expanded in x around 0: 0.1

    \[\leadsto \color{blue}{3 \cdot {x}^{2} - 2 \cdot {x}^{3}} \]
  4. Applied fma-neg_binary64: 0.1

    \[\leadsto \color{blue}{\mathsf{fma}\left(3, {x}^{2}, -2 \cdot {x}^{3}\right)} \]
  5. Simplified: 0.1

    \[\leadsto \mathsf{fma}\left(3, {x}^{2}, \color{blue}{-2 \cdot {x}^{3}}\right) \]
  6. Final simplification: 0.1

    \[\leadsto \mathsf{fma}\left(3, {x}^{2}, -2 \cdot {x}^{3}\right) \]

Reproduce

herbie shell --seed 2022131 
; Paste this FPCore into `herbie shell --seed 2022131` to reproduce the report.
(FPCore (x)
  :name "Data.Spline.Key:interpolateKeys from smoothie-0.4.0.2"
  :precision binary64

  ; Reference (target) implementation: x * (x * (3 - 2x))
  :herbie-target
  (* x (* x (- 3.0 (* x 2.0))))

  ; Input expression to improve: (x*x) * (3 - 2x)
  (* (* x x) (- 3.0 (* x 2.0))))