Average Error: 0.1 → 0.1
Time: 20.3s
Precision: 64
\[0.9549296585513720181381813745247200131416 \cdot x - 0.1290061377327979819096270830414141528308 \cdot \left(\left(x \cdot x\right) \cdot x\right)\]
\[\mathsf{fma}\left(1, 0.9549296585513720181381813745247200131416, \left(x \cdot 0.1290061377327979819096270830414141528308\right) \cdot \left(-x\right)\right) \cdot x + x \cdot \mathsf{fma}\left(-x, x \cdot 0.1290061377327979819096270830414141528308, \left(x \cdot 0.1290061377327979819096270830414141528308\right) \cdot x\right)\]
0.9549296585513720181381813745247200131416 \cdot x - 0.1290061377327979819096270830414141528308 \cdot \left(\left(x \cdot x\right) \cdot x\right)
\mathsf{fma}\left(1, 0.9549296585513720181381813745247200131416, \left(x \cdot 0.1290061377327979819096270830414141528308\right) \cdot \left(-x\right)\right) \cdot x + x \cdot \mathsf{fma}\left(-x, x \cdot 0.1290061377327979819096270830414141528308, \left(x \cdot 0.1290061377327979819096270830414141528308\right) \cdot x\right)
/* Cubic approximation: 0.954929658551372*x - 0.12900613773279798*x^3.
 * The expression mirrors the original temporaries exactly — same
 * multiplication order and associativity — so the double result is
 * bit-identical to the machine-generated version. */
double f(double x) {
        double linear = 0.954929658551372 * x;
        double cube = (x * x) * x;            /* (x*x)*x, not x*(x*x) */
        return linear - 0.12900613773279798 * cube;
}

double f(double x) {
        double r1031578 = 1.0;
        double r1031579 = 0.954929658551372;
        double r1031580 = x;
        double r1031581 = 0.12900613773279798;
        double r1031582 = r1031580 * r1031581;
        double r1031583 = -r1031580;
        double r1031584 = r1031582 * r1031583;
        double r1031585 = fma(r1031578, r1031579, r1031584);
        double r1031586 = r1031585 * r1031580;
        double r1031587 = r1031582 * r1031580;
        double r1031588 = fma(r1031583, r1031582, r1031587);
        double r1031589 = r1031580 * r1031588;
        double r1031590 = r1031586 + r1031589;
        return r1031590;
}

Error

Bits error versus x

Derivation

  1. Initial program 0.1

    \[0.9549296585513720181381813745247200131416 \cdot x - 0.1290061377327979819096270830414141528308 \cdot \left(\left(x \cdot x\right) \cdot x\right)\]
  2. Simplified 0.1

    \[\leadsto \color{blue}{x \cdot \left(0.9549296585513720181381813745247200131416 - \left(0.1290061377327979819096270830414141528308 \cdot x\right) \cdot x\right)}\]
  3. Using strategy rm
  4. Applied *-un-lft-identity 0.1

    \[\leadsto x \cdot \left(\color{blue}{1 \cdot 0.9549296585513720181381813745247200131416} - \left(0.1290061377327979819096270830414141528308 \cdot x\right) \cdot x\right)\]
  5. Applied prod-diff 0.1

    \[\leadsto x \cdot \color{blue}{\left(\mathsf{fma}\left(1, 0.9549296585513720181381813745247200131416, -x \cdot \left(0.1290061377327979819096270830414141528308 \cdot x\right)\right) + \mathsf{fma}\left(-x, 0.1290061377327979819096270830414141528308 \cdot x, x \cdot \left(0.1290061377327979819096270830414141528308 \cdot x\right)\right)\right)}\]
  6. Applied distribute-rgt-in 0.1

    \[\leadsto \color{blue}{\mathsf{fma}\left(1, 0.9549296585513720181381813745247200131416, -x \cdot \left(0.1290061377327979819096270830414141528308 \cdot x\right)\right) \cdot x + \mathsf{fma}\left(-x, 0.1290061377327979819096270830414141528308 \cdot x, x \cdot \left(0.1290061377327979819096270830414141528308 \cdot x\right)\right) \cdot x}\]
  7. Final simplification 0.1

    \[\leadsto \mathsf{fma}\left(1, 0.9549296585513720181381813745247200131416, \left(x \cdot 0.1290061377327979819096270830414141528308\right) \cdot \left(-x\right)\right) \cdot x + x \cdot \mathsf{fma}\left(-x, x \cdot 0.1290061377327979819096270830414141528308, \left(x \cdot 0.1290061377327979819096270830414141528308\right) \cdot x\right)\]

Reproduce

herbie shell --seed 2019169 +o rules:numerics
(FPCore (x)
  :name "Rosa's Benchmark"
  (- (* 0.954929658551372 x) (* 0.12900613773279798 (* (* x x) x))))