Average Error: 0.3 → 0.2
Time: 8.3s
Precision: binary64
Cost: 13184
\[0.954929658551372 \cdot x - 0.12900613773279798 \cdot \left(\left(x \cdot x\right) \cdot x\right) \]
\[\mathsf{fma}\left(0.954929658551372, x, {x}^{3} \cdot -0.12900613773279798\right) \]
;; Original program: odd cubic c1*x - c2*x^3 with the cube formed as (x*x)*x,
;; evaluated in IEEE binary64.
(FPCore (x)
 :precision binary64
 (- (* 0.954929658551372 x) (* 0.12900613773279798 (* (* x x) x))))
;; Herbie-improved program: fused multiply-add c1*x + (x^3 * -c2);
;; the cube is expressed via pow and the constant sign folded in.
(FPCore (x)
 :precision binary64
 (fma 0.954929658551372 x (* (pow x 3.0) -0.12900613773279798)))
double code(double x) {
	/* Odd cubic c1*x - c2*x^3; the cube keeps the original (x*x)*x pairing
	 * so every rounding step matches the source expression exactly. */
	const double cube = (x * x) * x;
	return 0.954929658551372 * x - 0.12900613773279798 * cube;
}
double code(double x) {
	return fma(0.954929658551372, x, (pow(x, 3.0) * -0.12900613773279798));
}
function code(x)
	# Odd cubic c1*x - c2*x^3; each intermediate is rounded to Float64
	# in exactly the same order as the original expression.
	linear = Float64(0.954929658551372 * x)
	cubic = Float64(Float64(x * x) * x)
	return Float64(linear - Float64(0.12900613773279798 * cubic))
end
function code(x)
	# fma form of the cubic: c1*x + x^3 * (-c2); the tail term is
	# rounded to Float64 before being fused with the linear part.
	tail = Float64((x ^ 3.0) * -0.12900613773279798)
	return fma(0.954929658551372, x, tail)
end
(* Original cubic c1*x - c2*(x*x)*x; every intermediate is rounded to $MachinePrecision via N[]. *)
code[x_] := N[(N[(0.954929658551372 * x), $MachinePrecision] - N[(0.12900613773279798 * N[(N[(x * x), $MachinePrecision] * x), $MachinePrecision]), $MachinePrecision]), $MachinePrecision]
(* fma-style rewrite c1*x + x^3*(-c2); note the outer sum is a single N[] rounding, mimicking a fused multiply-add. *)
code[x_] := N[(0.954929658551372 * x + N[(N[Power[x, 3.0], $MachinePrecision] * -0.12900613773279798), $MachinePrecision]), $MachinePrecision]
0.954929658551372 \cdot x - 0.12900613773279798 \cdot \left(\left(x \cdot x\right) \cdot x\right)
\mathsf{fma}\left(0.954929658551372, x, {x}^{3} \cdot -0.12900613773279798\right)

Error

Derivation

  1. Initial program 0.3

    \[0.954929658551372 \cdot x - 0.12900613773279798 \cdot \left(\left(x \cdot x\right) \cdot x\right) \]
  2. Simplified 0.2

    \[\leadsto \color{blue}{\mathsf{fma}\left(0.954929658551372, x, {x}^{3} \cdot -0.12900613773279798\right)} \]
    Proof

    [Start] 0.3

    \[ 0.954929658551372 \cdot x - 0.12900613773279798 \cdot \left(\left(x \cdot x\right) \cdot x\right) \]

    fma-neg [=>] 0.3

    \[ \color{blue}{\mathsf{fma}\left(0.954929658551372, x, -0.12900613773279798 \cdot \left(\left(x \cdot x\right) \cdot x\right)\right)} \]

    distribute-lft-neg-in [=>] 0.3

    \[ \mathsf{fma}\left(0.954929658551372, x, \color{blue}{\left(-0.12900613773279798\right) \cdot \left(\left(x \cdot x\right) \cdot x\right)}\right) \]

    *-commutative [=>] 0.3

    \[ \mathsf{fma}\left(0.954929658551372, x, \color{blue}{\left(\left(x \cdot x\right) \cdot x\right) \cdot \left(-0.12900613773279798\right)}\right) \]

    unpow3 [<=] 0.2

    \[ \mathsf{fma}\left(0.954929658551372, x, \color{blue}{{x}^{3}} \cdot \left(-0.12900613773279798\right)\right) \]

    metadata-eval [=>] 0.2

    \[ \mathsf{fma}\left(0.954929658551372, x, {x}^{3} \cdot \color{blue}{-0.12900613773279798}\right) \]
  3. Final simplification 0.2

    \[\leadsto \mathsf{fma}\left(0.954929658551372, x, {x}^{3} \cdot -0.12900613773279798\right) \]

Alternatives

Alternative 1
Error 1.2
Cost 713
\[\begin{array}{l} \mathbf{if}\;x \leq -2.7 \lor \neg \left(x \leq 2.7\right):\\ \;\;\;\;x \cdot \left(-0.12900613773279798 \cdot \left(x \cdot x\right)\right)\\ \mathbf{else}:\\ \;\;\;\;0.954929658551372 \cdot x\\ \end{array} \]
Alternative 2
Error 1.2
Cost 712
\[\begin{array}{l} \mathbf{if}\;x \leq -2.7:\\ \;\;\;\;\left(x \cdot x\right) \cdot \left(x \cdot -0.12900613773279798\right)\\ \mathbf{elif}\;x \leq 2.7:\\ \;\;\;\;0.954929658551372 \cdot x\\ \mathbf{else}:\\ \;\;\;\;x \cdot \left(-0.12900613773279798 \cdot \left(x \cdot x\right)\right)\\ \end{array} \]
Alternative 3
Error 0.3
Cost 704
\[0.954929658551372 \cdot x + -0.12900613773279798 \cdot \left(x \cdot \left(x \cdot x\right)\right) \]
Alternative 4
Error 0.2
Cost 576
\[x \cdot \left(0.954929658551372 + -0.12900613773279798 \cdot \left(x \cdot x\right)\right) \]
Alternative 5
Error 16.8
Cost 192
\[0.954929658551372 \cdot x \]

Error

Reproduce

herbie shell --seed 2022356 
;; Benchmark input, reproduced here so `herbie shell` (seed above) can rerun it.
(FPCore (x)
  :name "Rosa's Benchmark"
  :precision binary64
  (- (* 0.954929658551372 x) (* 0.12900613773279798 (* (* x x) x))))