Simplified 33.2
\[\leadsto \color{blue}{\left(i \cdot \left(\beta + \left(i + \alpha\right)\right)\right) \cdot \frac{\frac{\mathsf{fma}\left(i, \beta + \left(i + \alpha\right), \alpha \cdot \beta\right)}{\left(\alpha + \mathsf{fma}\left(i, 2, \beta\right)\right) \cdot \left(\alpha + \mathsf{fma}\left(i, 2, \beta\right)\right)}}{\mathsf{fma}\left(\alpha + \mathsf{fma}\left(i, 2, \beta\right), \alpha + \mathsf{fma}\left(i, 2, \beta\right), -1\right)}}
\]
Proof
(*.f64 (*.f64 i (+.f64 beta (+.f64 i alpha))) (/.f64 (/.f64 (fma.f64 i (+.f64 beta (+.f64 i alpha)) (*.f64 alpha beta)) (*.f64 (+.f64 alpha (fma.f64 i 2 beta)) (+.f64 alpha (fma.f64 i 2 beta)))) (fma.f64 (+.f64 alpha (fma.f64 i 2 beta)) (+.f64 alpha (fma.f64 i 2 beta)) -1))): 0 points increase in error, 0 points decrease in error
(*.f64 (*.f64 i (Rewrite<= associate-+l+_binary64 (+.f64 (+.f64 beta i) alpha))) (/.f64 (/.f64 (fma.f64 i (+.f64 beta (+.f64 i alpha)) (*.f64 alpha beta)) (*.f64 (+.f64 alpha (fma.f64 i 2 beta)) (+.f64 alpha (fma.f64 i 2 beta)))) (fma.f64 (+.f64 alpha (fma.f64 i 2 beta)) (+.f64 alpha (fma.f64 i 2 beta)) -1))): 0 points increase in error, 0 points decrease in error
(*.f64 (*.f64 i (Rewrite<= +-commutative_binary64 (+.f64 alpha (+.f64 beta i)))) (/.f64 (/.f64 (fma.f64 i (+.f64 beta (+.f64 i alpha)) (*.f64 alpha beta)) (*.f64 (+.f64 alpha (fma.f64 i 2 beta)) (+.f64 alpha (fma.f64 i 2 beta)))) (fma.f64 (+.f64 alpha (fma.f64 i 2 beta)) (+.f64 alpha (fma.f64 i 2 beta)) -1))): 0 points increase in error, 0 points decrease in error
(*.f64 (*.f64 i (Rewrite<= associate-+l+_binary64 (+.f64 (+.f64 alpha beta) i))) (/.f64 (/.f64 (fma.f64 i (+.f64 beta (+.f64 i alpha)) (*.f64 alpha beta)) (*.f64 (+.f64 alpha (fma.f64 i 2 beta)) (+.f64 alpha (fma.f64 i 2 beta)))) (fma.f64 (+.f64 alpha (fma.f64 i 2 beta)) (+.f64 alpha (fma.f64 i 2 beta)) -1))): 0 points increase in error, 0 points decrease in error
(*.f64 (*.f64 i (+.f64 (+.f64 alpha beta) i)) (/.f64 (/.f64 (fma.f64 i (Rewrite<= associate-+l+_binary64 (+.f64 (+.f64 beta i) alpha)) (*.f64 alpha beta)) (*.f64 (+.f64 alpha (fma.f64 i 2 beta)) (+.f64 alpha (fma.f64 i 2 beta)))) (fma.f64 (+.f64 alpha (fma.f64 i 2 beta)) (+.f64 alpha (fma.f64 i 2 beta)) -1))): 0 points increase in error, 0 points decrease in error
(*.f64 (*.f64 i (+.f64 (+.f64 alpha beta) i)) (/.f64 (/.f64 (fma.f64 i (Rewrite<= +-commutative_binary64 (+.f64 alpha (+.f64 beta i))) (*.f64 alpha beta)) (*.f64 (+.f64 alpha (fma.f64 i 2 beta)) (+.f64 alpha (fma.f64 i 2 beta)))) (fma.f64 (+.f64 alpha (fma.f64 i 2 beta)) (+.f64 alpha (fma.f64 i 2 beta)) -1))): 0 points increase in error, 0 points decrease in error
(*.f64 (*.f64 i (+.f64 (+.f64 alpha beta) i)) (/.f64 (/.f64 (fma.f64 i (Rewrite<= associate-+l+_binary64 (+.f64 (+.f64 alpha beta) i)) (*.f64 alpha beta)) (*.f64 (+.f64 alpha (fma.f64 i 2 beta)) (+.f64 alpha (fma.f64 i 2 beta)))) (fma.f64 (+.f64 alpha (fma.f64 i 2 beta)) (+.f64 alpha (fma.f64 i 2 beta)) -1))): 0 points increase in error, 0 points decrease in error
(*.f64 (*.f64 i (+.f64 (+.f64 alpha beta) i)) (/.f64 (/.f64 (fma.f64 i (+.f64 (+.f64 alpha beta) i) (Rewrite<= *-commutative_binary64 (*.f64 beta alpha))) (*.f64 (+.f64 alpha (fma.f64 i 2 beta)) (+.f64 alpha (fma.f64 i 2 beta)))) (fma.f64 (+.f64 alpha (fma.f64 i 2 beta)) (+.f64 alpha (fma.f64 i 2 beta)) -1))): 0 points increase in error, 0 points decrease in error
(*.f64 (*.f64 i (+.f64 (+.f64 alpha beta) i)) (/.f64 (/.f64 (Rewrite<= fma-def_binary64 (+.f64 (*.f64 i (+.f64 (+.f64 alpha beta) i)) (*.f64 beta alpha))) (*.f64 (+.f64 alpha (fma.f64 i 2 beta)) (+.f64 alpha (fma.f64 i 2 beta)))) (fma.f64 (+.f64 alpha (fma.f64 i 2 beta)) (+.f64 alpha (fma.f64 i 2 beta)) -1))): 0 points increase in error, 0 points decrease in error
(*.f64 (*.f64 i (+.f64 (+.f64 alpha beta) i)) (/.f64 (/.f64 (Rewrite<= +-commutative_binary64 (+.f64 (*.f64 beta alpha) (*.f64 i (+.f64 (+.f64 alpha beta) i)))) (*.f64 (+.f64 alpha (fma.f64 i 2 beta)) (+.f64 alpha (fma.f64 i 2 beta)))) (fma.f64 (+.f64 alpha (fma.f64 i 2 beta)) (+.f64 alpha (fma.f64 i 2 beta)) -1))): 0 points increase in error, 0 points decrease in error
(*.f64 (*.f64 i (+.f64 (+.f64 alpha beta) i)) (/.f64 (/.f64 (+.f64 (*.f64 beta alpha) (*.f64 i (+.f64 (+.f64 alpha beta) i))) (*.f64 (+.f64 alpha (Rewrite<= fma-def_binary64 (+.f64 (*.f64 i 2) beta))) (+.f64 alpha (fma.f64 i 2 beta)))) (fma.f64 (+.f64 alpha (fma.f64 i 2 beta)) (+.f64 alpha (fma.f64 i 2 beta)) -1))): 0 points increase in error, 0 points decrease in error
(*.f64 (*.f64 i (+.f64 (+.f64 alpha beta) i)) (/.f64 (/.f64 (+.f64 (*.f64 beta alpha) (*.f64 i (+.f64 (+.f64 alpha beta) i))) (*.f64 (+.f64 alpha (+.f64 (Rewrite<= *-commutative_binary64 (*.f64 2 i)) beta)) (+.f64 alpha (fma.f64 i 2 beta)))) (fma.f64 (+.f64 alpha (fma.f64 i 2 beta)) (+.f64 alpha (fma.f64 i 2 beta)) -1))): 0 points increase in error, 0 points decrease in error
(*.f64 (*.f64 i (+.f64 (+.f64 alpha beta) i)) (/.f64 (/.f64 (+.f64 (*.f64 beta alpha) (*.f64 i (+.f64 (+.f64 alpha beta) i))) (*.f64 (+.f64 alpha (Rewrite<= +-commutative_binary64 (+.f64 beta (*.f64 2 i)))) (+.f64 alpha (fma.f64 i 2 beta)))) (fma.f64 (+.f64 alpha (fma.f64 i 2 beta)) (+.f64 alpha (fma.f64 i 2 beta)) -1))): 0 points increase in error, 0 points decrease in error
(*.f64 (*.f64 i (+.f64 (+.f64 alpha beta) i)) (/.f64 (/.f64 (+.f64 (*.f64 beta alpha) (*.f64 i (+.f64 (+.f64 alpha beta) i))) (*.f64 (Rewrite<= associate-+l+_binary64 (+.f64 (+.f64 alpha beta) (*.f64 2 i))) (+.f64 alpha (fma.f64 i 2 beta)))) (fma.f64 (+.f64 alpha (fma.f64 i 2 beta)) (+.f64 alpha (fma.f64 i 2 beta)) -1))): 0 points increase in error, 0 points decrease in error
(*.f64 (*.f64 i (+.f64 (+.f64 alpha beta) i)) (/.f64 (/.f64 (+.f64 (*.f64 beta alpha) (*.f64 i (+.f64 (+.f64 alpha beta) i))) (*.f64 (+.f64 (+.f64 alpha beta) (*.f64 2 i)) (+.f64 alpha (Rewrite<= fma-def_binary64 (+.f64 (*.f64 i 2) beta))))) (fma.f64 (+.f64 alpha (fma.f64 i 2 beta)) (+.f64 alpha (fma.f64 i 2 beta)) -1))): 0 points increase in error, 0 points decrease in error
(*.f64 (*.f64 i (+.f64 (+.f64 alpha beta) i)) (/.f64 (/.f64 (+.f64 (*.f64 beta alpha) (*.f64 i (+.f64 (+.f64 alpha beta) i))) (*.f64 (+.f64 (+.f64 alpha beta) (*.f64 2 i)) (+.f64 alpha (+.f64 (Rewrite<= *-commutative_binary64 (*.f64 2 i)) beta)))) (fma.f64 (+.f64 alpha (fma.f64 i 2 beta)) (+.f64 alpha (fma.f64 i 2 beta)) -1))): 0 points increase in error, 0 points decrease in error
(*.f64 (*.f64 i (+.f64 (+.f64 alpha beta) i)) (/.f64 (/.f64 (+.f64 (*.f64 beta alpha) (*.f64 i (+.f64 (+.f64 alpha beta) i))) (*.f64 (+.f64 (+.f64 alpha beta) (*.f64 2 i)) (+.f64 alpha (Rewrite<= +-commutative_binary64 (+.f64 beta (*.f64 2 i)))))) (fma.f64 (+.f64 alpha (fma.f64 i 2 beta)) (+.f64 alpha (fma.f64 i 2 beta)) -1))): 0 points increase in error, 0 points decrease in error
(*.f64 (*.f64 i (+.f64 (+.f64 alpha beta) i)) (/.f64 (/.f64 (+.f64 (*.f64 beta alpha) (*.f64 i (+.f64 (+.f64 alpha beta) i))) (*.f64 (+.f64 (+.f64 alpha beta) (*.f64 2 i)) (Rewrite<= associate-+l+_binary64 (+.f64 (+.f64 alpha beta) (*.f64 2 i))))) (fma.f64 (+.f64 alpha (fma.f64 i 2 beta)) (+.f64 alpha (fma.f64 i 2 beta)) -1))): 0 points increase in error, 0 points decrease in error
(*.f64 (*.f64 i (+.f64 (+.f64 alpha beta) i)) (/.f64 (/.f64 (+.f64 (*.f64 beta alpha) (*.f64 i (+.f64 (+.f64 alpha beta) i))) (*.f64 (+.f64 (+.f64 alpha beta) (*.f64 2 i)) (+.f64 (+.f64 alpha beta) (*.f64 2 i)))) (fma.f64 (+.f64 alpha (Rewrite<= fma-def_binary64 (+.f64 (*.f64 i 2) beta))) (+.f64 alpha (fma.f64 i 2 beta)) -1))): 0 points increase in error, 0 points decrease in error
(*.f64 (*.f64 i (+.f64 (+.f64 alpha beta) i)) (/.f64 (/.f64 (+.f64 (*.f64 beta alpha) (*.f64 i (+.f64 (+.f64 alpha beta) i))) (*.f64 (+.f64 (+.f64 alpha beta) (*.f64 2 i)) (+.f64 (+.f64 alpha beta) (*.f64 2 i)))) (fma.f64 (+.f64 alpha (+.f64 (Rewrite<= *-commutative_binary64 (*.f64 2 i)) beta)) (+.f64 alpha (fma.f64 i 2 beta)) -1))): 0 points increase in error, 0 points decrease in error
(*.f64 (*.f64 i (+.f64 (+.f64 alpha beta) i)) (/.f64 (/.f64 (+.f64 (*.f64 beta alpha) (*.f64 i (+.f64 (+.f64 alpha beta) i))) (*.f64 (+.f64 (+.f64 alpha beta) (*.f64 2 i)) (+.f64 (+.f64 alpha beta) (*.f64 2 i)))) (fma.f64 (+.f64 alpha (Rewrite<= +-commutative_binary64 (+.f64 beta (*.f64 2 i)))) (+.f64 alpha (fma.f64 i 2 beta)) -1))): 0 points increase in error, 0 points decrease in error
(*.f64 (*.f64 i (+.f64 (+.f64 alpha beta) i)) (/.f64 (/.f64 (+.f64 (*.f64 beta alpha) (*.f64 i (+.f64 (+.f64 alpha beta) i))) (*.f64 (+.f64 (+.f64 alpha beta) (*.f64 2 i)) (+.f64 (+.f64 alpha beta) (*.f64 2 i)))) (fma.f64 (Rewrite<= associate-+l+_binary64 (+.f64 (+.f64 alpha beta) (*.f64 2 i))) (+.f64 alpha (fma.f64 i 2 beta)) -1))): 0 points increase in error, 0 points decrease in error
(*.f64 (*.f64 i (+.f64 (+.f64 alpha beta) i)) (/.f64 (/.f64 (+.f64 (*.f64 beta alpha) (*.f64 i (+.f64 (+.f64 alpha beta) i))) (*.f64 (+.f64 (+.f64 alpha beta) (*.f64 2 i)) (+.f64 (+.f64 alpha beta) (*.f64 2 i)))) (fma.f64 (+.f64 (+.f64 alpha beta) (*.f64 2 i)) (+.f64 alpha (Rewrite<= fma-def_binary64 (+.f64 (*.f64 i 2) beta))) -1))): 0 points increase in error, 0 points decrease in error
(*.f64 (*.f64 i (+.f64 (+.f64 alpha beta) i)) (/.f64 (/.f64 (+.f64 (*.f64 beta alpha) (*.f64 i (+.f64 (+.f64 alpha beta) i))) (*.f64 (+.f64 (+.f64 alpha beta) (*.f64 2 i)) (+.f64 (+.f64 alpha beta) (*.f64 2 i)))) (fma.f64 (+.f64 (+.f64 alpha beta) (*.f64 2 i)) (+.f64 alpha (+.f64 (Rewrite<= *-commutative_binary64 (*.f64 2 i)) beta)) -1))): 0 points increase in error, 0 points decrease in error
(*.f64 (*.f64 i (+.f64 (+.f64 alpha beta) i)) (/.f64 (/.f64 (+.f64 (*.f64 beta alpha) (*.f64 i (+.f64 (+.f64 alpha beta) i))) (*.f64 (+.f64 (+.f64 alpha beta) (*.f64 2 i)) (+.f64 (+.f64 alpha beta) (*.f64 2 i)))) (fma.f64 (+.f64 (+.f64 alpha beta) (*.f64 2 i)) (+.f64 alpha (Rewrite<= +-commutative_binary64 (+.f64 beta (*.f64 2 i)))) -1))): 0 points increase in error, 0 points decrease in error
(*.f64 (*.f64 i (+.f64 (+.f64 alpha beta) i)) (/.f64 (/.f64 (+.f64 (*.f64 beta alpha) (*.f64 i (+.f64 (+.f64 alpha beta) i))) (*.f64 (+.f64 (+.f64 alpha beta) (*.f64 2 i)) (+.f64 (+.f64 alpha beta) (*.f64 2 i)))) (fma.f64 (+.f64 (+.f64 alpha beta) (*.f64 2 i)) (Rewrite<= associate-+l+_binary64 (+.f64 (+.f64 alpha beta) (*.f64 2 i))) -1))): 0 points increase in error, 0 points decrease in error
(*.f64 (*.f64 i (+.f64 (+.f64 alpha beta) i)) (/.f64 (/.f64 (+.f64 (*.f64 beta alpha) (*.f64 i (+.f64 (+.f64 alpha beta) i))) (*.f64 (+.f64 (+.f64 alpha beta) (*.f64 2 i)) (+.f64 (+.f64 alpha beta) (*.f64 2 i)))) (fma.f64 (+.f64 (+.f64 alpha beta) (*.f64 2 i)) (+.f64 (+.f64 alpha beta) (*.f64 2 i)) (Rewrite<= metadata-eval (neg.f64 1))))): 0 points increase in error, 0 points decrease in error
(*.f64 (*.f64 i (+.f64 (+.f64 alpha beta) i)) (/.f64 (/.f64 (+.f64 (*.f64 beta alpha) (*.f64 i (+.f64 (+.f64 alpha beta) i))) (*.f64 (+.f64 (+.f64 alpha beta) (*.f64 2 i)) (+.f64 (+.f64 alpha beta) (*.f64 2 i)))) (Rewrite<= fma-neg_binary64 (-.f64 (*.f64 (+.f64 (+.f64 alpha beta) (*.f64 2 i)) (+.f64 (+.f64 alpha beta) (*.f64 2 i))) 1)))): 0 points increase in error, 0 points decrease in error
(*.f64 (*.f64 i (+.f64 (+.f64 alpha beta) i)) (Rewrite<= associate-/r*_binary64 (/.f64 (+.f64 (*.f64 beta alpha) (*.f64 i (+.f64 (+.f64 alpha beta) i))) (*.f64 (*.f64 (+.f64 (+.f64 alpha beta) (*.f64 2 i)) (+.f64 (+.f64 alpha beta) (*.f64 2 i))) (-.f64 (*.f64 (+.f64 (+.f64 alpha beta) (*.f64 2 i)) (+.f64 (+.f64 alpha beta) (*.f64 2 i))) 1))))): 54 points increase in error, 0 points decrease in error
(*.f64 (*.f64 i (+.f64 (+.f64 alpha beta) i)) (/.f64 (+.f64 (*.f64 beta alpha) (*.f64 i (+.f64 (+.f64 alpha beta) i))) (Rewrite<= *-commutative_binary64 (*.f64 (-.f64 (*.f64 (+.f64 (+.f64 alpha beta) (*.f64 2 i)) (+.f64 (+.f64 alpha beta) (*.f64 2 i))) 1) (*.f64 (+.f64 (+.f64 alpha beta) (*.f64 2 i)) (+.f64 (+.f64 alpha beta) (*.f64 2 i))))))): 0 points increase in error, 0 points decrease in error
(Rewrite<= *-commutative_binary64 (*.f64 (/.f64 (+.f64 (*.f64 beta alpha) (*.f64 i (+.f64 (+.f64 alpha beta) i))) (*.f64 (-.f64 (*.f64 (+.f64 (+.f64 alpha beta) (*.f64 2 i)) (+.f64 (+.f64 alpha beta) (*.f64 2 i))) 1) (*.f64 (+.f64 (+.f64 alpha beta) (*.f64 2 i)) (+.f64 (+.f64 alpha beta) (*.f64 2 i))))) (*.f64 i (+.f64 (+.f64 alpha beta) i)))): 0 points increase in error, 0 points decrease in error
(Rewrite=> associate-*l/_binary64 (/.f64 (*.f64 (+.f64 (*.f64 beta alpha) (*.f64 i (+.f64 (+.f64 alpha beta) i))) (*.f64 i (+.f64 (+.f64 alpha beta) i))) (*.f64 (-.f64 (*.f64 (+.f64 (+.f64 alpha beta) (*.f64 2 i)) (+.f64 (+.f64 alpha beta) (*.f64 2 i))) 1) (*.f64 (+.f64 (+.f64 alpha beta) (*.f64 2 i)) (+.f64 (+.f64 alpha beta) (*.f64 2 i)))))): 66 points increase in error, 12 points decrease in error
(/.f64 (Rewrite<= *-commutative_binary64 (*.f64 (*.f64 i (+.f64 (+.f64 alpha beta) i)) (+.f64 (*.f64 beta alpha) (*.f64 i (+.f64 (+.f64 alpha beta) i))))) (*.f64 (-.f64 (*.f64 (+.f64 (+.f64 alpha beta) (*.f64 2 i)) (+.f64 (+.f64 alpha beta) (*.f64 2 i))) 1) (*.f64 (+.f64 (+.f64 alpha beta) (*.f64 2 i)) (+.f64 (+.f64 alpha beta) (*.f64 2 i))))): 0 points increase in error, 0 points decrease in error
(Rewrite<= associate-/l/_binary64 (/.f64 (/.f64 (*.f64 (*.f64 i (+.f64 (+.f64 alpha beta) i)) (+.f64 (*.f64 beta alpha) (*.f64 i (+.f64 (+.f64 alpha beta) i)))) (*.f64 (+.f64 (+.f64 alpha beta) (*.f64 2 i)) (+.f64 (+.f64 alpha beta) (*.f64 2 i)))) (-.f64 (*.f64 (+.f64 (+.f64 alpha beta) (*.f64 2 i)) (+.f64 (+.f64 alpha beta) (*.f64 2 i))) 1))): 3 points increase in error, 7 points decrease in error
Simplified 33.2
\[\leadsto \left(i \cdot \left(\beta + \left(i + \alpha\right)\right)\right) \cdot \frac{\frac{\mathsf{fma}\left(i, \beta + \left(i + \alpha\right), \alpha \cdot \beta\right)}{\left(\alpha + \mathsf{fma}\left(i, 2, \beta\right)\right) \cdot \left(\alpha + \mathsf{fma}\left(i, 2, \beta\right)\right)}}{\color{blue}{\beta \cdot \beta + \left(\left(\alpha + i \cdot 2\right) \cdot \left(\left(\alpha + i \cdot 2\right) + \beta \cdot 2\right) + -1\right)}}
\]
Proof
(+.f64 (*.f64 beta beta) (+.f64 (*.f64 (+.f64 alpha (*.f64 i 2)) (+.f64 (+.f64 alpha (*.f64 i 2)) (*.f64 beta 2))) -1)): 0 points increase in error, 0 points decrease in error
(+.f64 (Rewrite<= unpow2_binary64 (pow.f64 beta 2)) (+.f64 (*.f64 (+.f64 alpha (*.f64 i 2)) (+.f64 (+.f64 alpha (*.f64 i 2)) (*.f64 beta 2))) -1)): 0 points increase in error, 0 points decrease in error
(+.f64 (pow.f64 beta 2) (+.f64 (*.f64 (+.f64 alpha (Rewrite<= *-commutative_binary64 (*.f64 2 i))) (+.f64 (+.f64 alpha (*.f64 i 2)) (*.f64 beta 2))) -1)): 0 points increase in error, 0 points decrease in error
(+.f64 (pow.f64 beta 2) (+.f64 (*.f64 (+.f64 alpha (*.f64 2 i)) (+.f64 (+.f64 alpha (Rewrite<= *-commutative_binary64 (*.f64 2 i))) (*.f64 beta 2))) -1)): 0 points increase in error, 0 points decrease in error
(+.f64 (pow.f64 beta 2) (+.f64 (*.f64 (+.f64 alpha (*.f64 2 i)) (+.f64 (+.f64 alpha (*.f64 2 i)) (Rewrite<= *-commutative_binary64 (*.f64 2 beta)))) -1)): 0 points increase in error, 0 points decrease in error
(+.f64 (pow.f64 beta 2) (+.f64 (Rewrite<= distribute-rgt-out_binary64 (+.f64 (*.f64 (+.f64 alpha (*.f64 2 i)) (+.f64 alpha (*.f64 2 i))) (*.f64 (*.f64 2 beta) (+.f64 alpha (*.f64 2 i))))) -1)): 0 points increase in error, 1 points decrease in error
(+.f64 (pow.f64 beta 2) (+.f64 (+.f64 (Rewrite<= unpow2_binary64 (pow.f64 (+.f64 alpha (*.f64 2 i)) 2)) (*.f64 (*.f64 2 beta) (+.f64 alpha (*.f64 2 i)))) -1)): 0 points increase in error, 0 points decrease in error
(+.f64 (pow.f64 beta 2) (+.f64 (+.f64 (pow.f64 (+.f64 alpha (*.f64 2 i)) 2) (Rewrite<= associate-*r*_binary64 (*.f64 2 (*.f64 beta (+.f64 alpha (*.f64 2 i)))))) -1)): 0 points increase in error, 0 points decrease in error
(+.f64 (pow.f64 beta 2) (+.f64 (+.f64 (pow.f64 (+.f64 alpha (*.f64 2 i)) 2) (*.f64 2 (*.f64 beta (+.f64 alpha (*.f64 2 i))))) (Rewrite<= metadata-eval (neg.f64 1)))): 0 points increase in error, 0 points decrease in error
(+.f64 (pow.f64 beta 2) (Rewrite<= sub-neg_binary64 (-.f64 (+.f64 (pow.f64 (+.f64 alpha (*.f64 2 i)) 2) (*.f64 2 (*.f64 beta (+.f64 alpha (*.f64 2 i))))) 1))): 0 points increase in error, 0 points decrease in error
(Rewrite<= associate--l+_binary64 (-.f64 (+.f64 (pow.f64 beta 2) (+.f64 (pow.f64 (+.f64 alpha (*.f64 2 i)) 2) (*.f64 2 (*.f64 beta (+.f64 alpha (*.f64 2 i)))))) 1)): 0 points increase in error, 0 points decrease in error