Average Error: 0.1 → 0.1
Time: 16.8s
Precision: 64
\[0.954929658551372 \cdot x - 0.12900613773279798 \cdot \left(\left(x \cdot x\right) \cdot x\right)\]
\[\mathsf{fma}\left(-{x}^{3}, 0.12900613773279798, {x}^{3} \cdot 0.12900613773279798\right) + \mathsf{fma}\left(0.954929658551372, x, \left(-0.12900613773279798\right) \cdot {x}^{3}\right)\]
0.954929658551372 \cdot x - 0.12900613773279798 \cdot \left(\left(x \cdot x\right) \cdot x\right)
\mathsf{fma}\left(-{x}^{3}, 0.12900613773279798, {x}^{3} \cdot 0.12900613773279798\right) + \mathsf{fma}\left(0.954929658551372, x, \left(-0.12900613773279798\right) \cdot {x}^{3}\right)
/* Original (input) program: c1*x - c2*x^3 with the cube formed as (x*x)*x.
 * The grouping of the multiplications is kept exactly as generated, since
 * floating-point results depend on evaluation order. */
double f(double x) {
        double x_cubed = (x * x) * x;               /* (x*x)*x grouping */
        double linear = 0.954929658551372 * x;
        double cubic = 0.12900613773279798 * x_cubed;
        return linear - cubic;
}

double f(double x) {
        double r673882 = x;
        double r673883 = 3.0;
        double r673884 = pow(r673882, r673883);
        double r673885 = -r673884;
        double r673886 = 0.12900613773279798;
        double r673887 = r673884 * r673886;
        double r673888 = fma(r673885, r673886, r673887);
        double r673889 = 0.954929658551372;
        double r673890 = -r673886;
        double r673891 = r673890 * r673884;
        double r673892 = fma(r673889, r673882, r673891);
        double r673893 = r673888 + r673892;
        return r673893;
}

Error

Bits error versus x

Derivation

  1. Initial program 0.1

    \[0.954929658551372 \cdot x - 0.12900613773279798 \cdot \left(\left(x \cdot x\right) \cdot x\right)\]
  2. Using strategy rm
  3. Applied pow1 0.1

    \[\leadsto 0.954929658551372 \cdot x - 0.12900613773279798 \cdot \left(\left(x \cdot x\right) \cdot \color{blue}{{x}^{1}}\right)\]
  4. Applied pow1 0.1

    \[\leadsto 0.954929658551372 \cdot x - 0.12900613773279798 \cdot \left(\left(x \cdot \color{blue}{{x}^{1}}\right) \cdot {x}^{1}\right)\]
  5. Applied pow1 0.1

    \[\leadsto 0.954929658551372 \cdot x - 0.12900613773279798 \cdot \left(\left(\color{blue}{{x}^{1}} \cdot {x}^{1}\right) \cdot {x}^{1}\right)\]
  6. Applied pow-prod-up 0.1

    \[\leadsto 0.954929658551372 \cdot x - 0.12900613773279798 \cdot \left(\color{blue}{{x}^{\left(1 + 1\right)}} \cdot {x}^{1}\right)\]
  7. Applied pow-prod-up 0.1

    \[\leadsto 0.954929658551372 \cdot x - 0.12900613773279798 \cdot \color{blue}{{x}^{\left(\left(1 + 1\right) + 1\right)}}\]
  8. Simplified 0.1

    \[\leadsto 0.954929658551372 \cdot x - 0.12900613773279798 \cdot {x}^{\color{blue}{3}}\]
  9. Using strategy rm
  10. Applied prod-diff 0.1

    \[\leadsto \color{blue}{\mathsf{fma}\left(0.954929658551372, x, -{x}^{3} \cdot 0.12900613773279798\right) + \mathsf{fma}\left(-{x}^{3}, 0.12900613773279798, {x}^{3} \cdot 0.12900613773279798\right)}\]
  11. Final simplification 0.1

    \[\leadsto \mathsf{fma}\left(-{x}^{3}, 0.12900613773279798, {x}^{3} \cdot 0.12900613773279798\right) + \mathsf{fma}\left(0.954929658551372, x, \left(-0.12900613773279798\right) \cdot {x}^{3}\right)\]

Reproduce

herbie shell --seed 2019135 +o rules:numerics
(FPCore (x)
  :name "Rosa's Benchmark"
  (- (* 0.954929658551372 x) (* 0.12900613773279798 (* (* x x) x))))