Simplified 0.3
\[\leadsto 2 \cdot \color{blue}{\mathsf{fma}\left(y, x, \mathsf{fma}\left(t, z, 0 \cdot \left(i \cdot \left(c \cdot \mathsf{fma}\left(c, b, a\right)\right)\right) - i \cdot \left(c \cdot \mathsf{fma}\left(c, b, a\right)\right)\right)\right)}
\]
Proof
(*.f64 2 (fma.f64 y x (fma.f64 t z (-.f64 (*.f64 0 (*.f64 i (*.f64 c (fma.f64 c b a)))) (*.f64 i (*.f64 c (fma.f64 c b a))))))): 0 points increase in error, 0 points decrease in error
(*.f64 2 (fma.f64 y x (fma.f64 t z (-.f64 (*.f64 (Rewrite<= metadata-eval (+.f64 -1 1)) (*.f64 i (*.f64 c (fma.f64 c b a)))) (*.f64 i (*.f64 c (fma.f64 c b a))))))): 0 points increase in error, 0 points decrease in error
(*.f64 2 (fma.f64 y x (fma.f64 t z (-.f64 (*.f64 (+.f64 -1 1) (*.f64 i (*.f64 c (Rewrite<= fma-def_binary64 (+.f64 (*.f64 c b) a))))) (*.f64 i (*.f64 c (fma.f64 c b a))))))): 0 points increase in error, 0 points decrease in error
(*.f64 2 (fma.f64 y x (fma.f64 t z (-.f64 (*.f64 (+.f64 -1 1) (*.f64 i (*.f64 c (+.f64 (Rewrite=> *-commutative_binary64 (*.f64 b c)) a)))) (*.f64 i (*.f64 c (fma.f64 c b a))))))): 0 points increase in error, 0 points decrease in error
(*.f64 2 (fma.f64 y x (fma.f64 t z (-.f64 (*.f64 (+.f64 -1 1) (*.f64 i (*.f64 c (Rewrite<= fma-udef_binary64 (fma.f64 b c a))))) (*.f64 i (*.f64 c (fma.f64 c b a))))))): 0 points increase in error, 0 points decrease in error
(*.f64 2 (fma.f64 y x (fma.f64 t z (-.f64 (*.f64 (+.f64 -1 1) (Rewrite<= *-commutative_binary64 (*.f64 (*.f64 c (fma.f64 b c a)) i))) (*.f64 i (*.f64 c (fma.f64 c b a))))))): 0 points increase in error, 0 points decrease in error
(*.f64 2 (fma.f64 y x (fma.f64 t z (-.f64 (*.f64 (+.f64 -1 1) (Rewrite<= associate-*r*_binary64 (*.f64 c (*.f64 (fma.f64 b c a) i)))) (*.f64 i (*.f64 c (fma.f64 c b a))))))): 0 points increase in error, 0 points decrease in error
(*.f64 2 (fma.f64 y x (fma.f64 t z (-.f64 (Rewrite<= distribute-lft1-in_binary64 (+.f64 (*.f64 -1 (*.f64 c (*.f64 (fma.f64 b c a) i))) (*.f64 c (*.f64 (fma.f64 b c a) i)))) (*.f64 i (*.f64 c (fma.f64 c b a))))))): 0 points increase in error, 0 points decrease in error
(*.f64 2 (fma.f64 y x (fma.f64 t z (-.f64 (+.f64 (Rewrite=> associate-*r*_binary64 (*.f64 (*.f64 -1 c) (*.f64 (fma.f64 b c a) i))) (*.f64 c (*.f64 (fma.f64 b c a) i))) (*.f64 i (*.f64 c (fma.f64 c b a))))))): 29 points increase in error, 0 points decrease in error
(*.f64 2 (fma.f64 y x (fma.f64 t z (-.f64 (+.f64 (*.f64 (Rewrite<= neg-mul-1_binary64 (neg.f64 c)) (*.f64 (fma.f64 b c a) i)) (*.f64 c (*.f64 (fma.f64 b c a) i))) (*.f64 i (*.f64 c (fma.f64 c b a))))))): 0 points increase in error, 29 points decrease in error
(*.f64 2 (fma.f64 y x (fma.f64 t z (-.f64 (+.f64 (*.f64 (neg.f64 c) (Rewrite=> *-commutative_binary64 (*.f64 i (fma.f64 b c a)))) (*.f64 c (*.f64 (fma.f64 b c a) i))) (*.f64 i (*.f64 c (fma.f64 c b a))))))): 0 points increase in error, 0 points decrease in error
(*.f64 2 (fma.f64 y x (fma.f64 t z (-.f64 (+.f64 (Rewrite<= associate-*l*_binary64 (*.f64 (*.f64 (neg.f64 c) i) (fma.f64 b c a))) (*.f64 c (*.f64 (fma.f64 b c a) i))) (*.f64 i (*.f64 c (fma.f64 c b a))))))): 0 points increase in error, 0 points decrease in error
(*.f64 2 (fma.f64 y x (fma.f64 t z (-.f64 (+.f64 (Rewrite<= *-rgt-identity_binary64 (*.f64 (*.f64 (*.f64 (neg.f64 c) i) (fma.f64 b c a)) 1)) (*.f64 c (*.f64 (fma.f64 b c a) i))) (*.f64 i (*.f64 c (fma.f64 c b a))))))): 0 points increase in error, 0 points decrease in error
(*.f64 2 (fma.f64 y x (fma.f64 t z (-.f64 (Rewrite<= fma-udef_binary64 (fma.f64 (*.f64 (*.f64 (neg.f64 c) i) (fma.f64 b c a)) 1 (*.f64 c (*.f64 (fma.f64 b c a) i)))) (*.f64 i (*.f64 c (fma.f64 c b a))))))): 53 points increase in error, 0 points decrease in error
(*.f64 2 (fma.f64 y x (fma.f64 t z (-.f64 (fma.f64 (*.f64 (*.f64 (neg.f64 c) i) (fma.f64 b c a)) 1 (*.f64 c (*.f64 (fma.f64 b c a) i))) (*.f64 i (*.f64 c (Rewrite<= fma-def_binary64 (+.f64 (*.f64 c b) a)))))))): 6 points increase in error, 53 points decrease in error
(*.f64 2 (fma.f64 y x (fma.f64 t z (-.f64 (fma.f64 (*.f64 (*.f64 (neg.f64 c) i) (fma.f64 b c a)) 1 (*.f64 c (*.f64 (fma.f64 b c a) i))) (*.f64 i (*.f64 c (+.f64 (Rewrite=> *-commutative_binary64 (*.f64 b c)) a))))))): 0 points increase in error, 59 points decrease in error
(*.f64 2 (fma.f64 y x (fma.f64 t z (-.f64 (fma.f64 (*.f64 (*.f64 (neg.f64 c) i) (fma.f64 b c a)) 1 (*.f64 c (*.f64 (fma.f64 b c a) i))) (*.f64 i (*.f64 c (Rewrite<= fma-udef_binary64 (fma.f64 b c a)))))))): 0 points increase in error, 0 points decrease in error
(*.f64 2 (fma.f64 y x (fma.f64 t z (-.f64 (fma.f64 (*.f64 (*.f64 (neg.f64 c) i) (fma.f64 b c a)) 1 (*.f64 c (*.f64 (fma.f64 b c a) i))) (Rewrite<= *-commutative_binary64 (*.f64 (*.f64 c (fma.f64 b c a)) i)))))): 0 points increase in error, 0 points decrease in error
(*.f64 2 (fma.f64 y x (fma.f64 t z (-.f64 (fma.f64 (*.f64 (*.f64 (neg.f64 c) i) (fma.f64 b c a)) 1 (*.f64 c (*.f64 (fma.f64 b c a) i))) (Rewrite<= associate-*r*_binary64 (*.f64 c (*.f64 (fma.f64 b c a) i))))))): 0 points increase in error, 0 points decrease in error
(*.f64 2 (fma.f64 y x (fma.f64 t z (Rewrite<= unsub-neg_binary64 (+.f64 (fma.f64 (*.f64 (*.f64 (neg.f64 c) i) (fma.f64 b c a)) 1 (*.f64 c (*.f64 (fma.f64 b c a) i))) (neg.f64 (*.f64 c (*.f64 (fma.f64 b c a) i)))))))): 0 points increase in error, 0 points decrease in error
(*.f64 2 (fma.f64 y x (fma.f64 t z (+.f64 (fma.f64 (*.f64 (*.f64 (neg.f64 c) i) (fma.f64 b c a)) 1 (*.f64 c (*.f64 (fma.f64 b c a) i))) (Rewrite=> distribute-lft-neg-in_binary64 (*.f64 (neg.f64 c) (*.f64 (fma.f64 b c a) i))))))): 0 points increase in error, 0 points decrease in error
(*.f64 2 (fma.f64 y x (fma.f64 t z (+.f64 (fma.f64 (*.f64 (*.f64 (neg.f64 c) i) (fma.f64 b c a)) 1 (*.f64 c (*.f64 (fma.f64 b c a) i))) (*.f64 (neg.f64 c) (Rewrite=> *-commutative_binary64 (*.f64 i (fma.f64 b c a)))))))): 0 points increase in error, 0 points decrease in error
(*.f64 2 (fma.f64 y x (fma.f64 t z (+.f64 (fma.f64 (*.f64 (*.f64 (neg.f64 c) i) (fma.f64 b c a)) 1 (*.f64 c (*.f64 (fma.f64 b c a) i))) (Rewrite<= associate-*l*_binary64 (*.f64 (*.f64 (neg.f64 c) i) (fma.f64 b c a))))))): 0 points increase in error, 0 points decrease in error
(*.f64 2 (fma.f64 y x (fma.f64 t z (Rewrite<= +-commutative_binary64 (+.f64 (*.f64 (*.f64 (neg.f64 c) i) (fma.f64 b c a)) (fma.f64 (*.f64 (*.f64 (neg.f64 c) i) (fma.f64 b c a)) 1 (*.f64 c (*.f64 (fma.f64 b c a) i)))))))): 31 points increase in error, 0 points decrease in error
(*.f64 2 (fma.f64 y x (Rewrite<= fma-def_binary64 (+.f64 (*.f64 t z) (+.f64 (*.f64 (*.f64 (neg.f64 c) i) (fma.f64 b c a)) (fma.f64 (*.f64 (*.f64 (neg.f64 c) i) (fma.f64 b c a)) 1 (*.f64 c (*.f64 (fma.f64 b c a) i)))))))): 0 points increase in error, 31 points decrease in error
(*.f64 2 (fma.f64 y x (+.f64 (Rewrite<= *-commutative_binary64 (*.f64 z t)) (+.f64 (*.f64 (*.f64 (neg.f64 c) i) (fma.f64 b c a)) (fma.f64 (*.f64 (*.f64 (neg.f64 c) i) (fma.f64 b c a)) 1 (*.f64 c (*.f64 (fma.f64 b c a) i))))))): 0 points increase in error, 0 points decrease in error
(*.f64 2 (fma.f64 y x (Rewrite<= associate-+l+_binary64 (+.f64 (+.f64 (*.f64 z t) (*.f64 (*.f64 (neg.f64 c) i) (fma.f64 b c a))) (fma.f64 (*.f64 (*.f64 (neg.f64 c) i) (fma.f64 b c a)) 1 (*.f64 c (*.f64 (fma.f64 b c a) i))))))): 0 points increase in error, 0 points decrease in error
(*.f64 2 (fma.f64 y x (Rewrite=> +-commutative_binary64 (+.f64 (fma.f64 (*.f64 (*.f64 (neg.f64 c) i) (fma.f64 b c a)) 1 (*.f64 c (*.f64 (fma.f64 b c a) i))) (+.f64 (*.f64 z t) (*.f64 (*.f64 (neg.f64 c) i) (fma.f64 b c a))))))): 0 points increase in error, 0 points decrease in error
(*.f64 2 (fma.f64 y x (+.f64 (Rewrite=> fma-udef_binary64 (+.f64 (*.f64 (*.f64 (*.f64 (neg.f64 c) i) (fma.f64 b c a)) 1) (*.f64 c (*.f64 (fma.f64 b c a) i)))) (+.f64 (*.f64 z t) (*.f64 (*.f64 (neg.f64 c) i) (fma.f64 b c a)))))): 53 points increase in error, 0 points decrease in error
(*.f64 2 (fma.f64 y x (+.f64 (+.f64 (Rewrite=> *-rgt-identity_binary64 (*.f64 (*.f64 (neg.f64 c) i) (fma.f64 b c a))) (*.f64 c (*.f64 (fma.f64 b c a) i))) (+.f64 (*.f64 z t) (*.f64 (*.f64 (neg.f64 c) i) (fma.f64 b c a)))))): 6 points increase in error, 28 points decrease in error
(*.f64 2 (fma.f64 y x (+.f64 (+.f64 (*.f64 (*.f64 (neg.f64 c) i) (fma.f64 b c a)) (*.f64 c (*.f64 (fma.f64 b c a) i))) (+.f64 (*.f64 z t) (Rewrite=> associate-*l*_binary64 (*.f64 (neg.f64 c) (*.f64 i (fma.f64 b c a)))))))): 0 points increase in error, 31 points decrease in error
(*.f64 2 (fma.f64 y x (+.f64 (+.f64 (*.f64 (*.f64 (neg.f64 c) i) (fma.f64 b c a)) (*.f64 c (*.f64 (fma.f64 b c a) i))) (+.f64 (*.f64 z t) (*.f64 (neg.f64 c) (Rewrite<= *-commutative_binary64 (*.f64 (fma.f64 b c a) i))))))): 53 points increase in error, 0 points decrease in error
(*.f64 2 (fma.f64 y x (+.f64 (+.f64 (*.f64 (*.f64 (neg.f64 c) i) (fma.f64 b c a)) (*.f64 c (*.f64 (fma.f64 b c a) i))) (Rewrite<= cancel-sign-sub-inv_binary64 (-.f64 (*.f64 z t) (*.f64 c (*.f64 (fma.f64 b c a) i))))))): 0 points increase in error, 53 points decrease in error
(*.f64 2 (fma.f64 y x (Rewrite=> associate-+l+_binary64 (+.f64 (*.f64 (*.f64 (neg.f64 c) i) (fma.f64 b c a)) (+.f64 (*.f64 c (*.f64 (fma.f64 b c a) i)) (-.f64 (*.f64 z t) (*.f64 c (*.f64 (fma.f64 b c a) i)))))))): 0 points increase in error, 0 points decrease in error
(*.f64 2 (fma.f64 y x (+.f64 (*.f64 (Rewrite=> *-commutative_binary64 (*.f64 i (neg.f64 c))) (fma.f64 b c a)) (+.f64 (*.f64 c (*.f64 (fma.f64 b c a) i)) (-.f64 (*.f64 z t) (*.f64 c (*.f64 (fma.f64 b c a) i))))))): 0 points increase in error, 0 points decrease in error
(*.f64 2 (fma.f64 y x (+.f64 (*.f64 (Rewrite<= distribute-rgt-neg-in_binary64 (neg.f64 (*.f64 i c))) (fma.f64 b c a)) (+.f64 (*.f64 c (*.f64 (fma.f64 b c a) i)) (-.f64 (*.f64 z t) (*.f64 c (*.f64 (fma.f64 b c a) i))))))): 0 points increase in error, 0 points decrease in error
(*.f64 2 (fma.f64 y x (+.f64 (*.f64 (Rewrite=> distribute-lft-neg-in_binary64 (*.f64 (neg.f64 i) c)) (fma.f64 b c a)) (+.f64 (*.f64 c (*.f64 (fma.f64 b c a) i)) (-.f64 (*.f64 z t) (*.f64 c (*.f64 (fma.f64 b c a) i))))))): 30 points increase in error, 0 points decrease in error
(*.f64 2 (fma.f64 y x (+.f64 (Rewrite<= associate-*r*_binary64 (*.f64 (neg.f64 i) (*.f64 c (fma.f64 b c a)))) (+.f64 (*.f64 c (*.f64 (fma.f64 b c a) i)) (-.f64 (*.f64 z t) (*.f64 c (*.f64 (fma.f64 b c a) i))))))): 29 points increase in error, 5 points decrease in error
(*.f64 2 (fma.f64 y x (Rewrite<= associate-+l+_binary64 (+.f64 (+.f64 (*.f64 (neg.f64 i) (*.f64 c (fma.f64 b c a))) (*.f64 c (*.f64 (fma.f64 b c a) i))) (-.f64 (*.f64 z t) (*.f64 c (*.f64 (fma.f64 b c a) i))))))): 0 points increase in error, 54 points decrease in error
(*.f64 2 (fma.f64 y x (+.f64 (Rewrite<= fma-udef_binary64 (fma.f64 (neg.f64 i) (*.f64 c (fma.f64 b c a)) (*.f64 c (*.f64 (fma.f64 b c a) i)))) (-.f64 (*.f64 z t) (*.f64 c (*.f64 (fma.f64 b c a) i)))))): 0 points increase in error, 0 points decrease in error
(*.f64 2 (fma.f64 y x (Rewrite<= +-commutative_binary64 (+.f64 (-.f64 (*.f64 z t) (*.f64 c (*.f64 (fma.f64 b c a) i))) (fma.f64 (neg.f64 i) (*.f64 c (fma.f64 b c a)) (*.f64 c (*.f64 (fma.f64 b c a) i))))))): 0 points increase in error, 0 points decrease in error
(*.f64 2 (Rewrite<= fma-def_binary64 (+.f64 (*.f64 y x) (+.f64 (-.f64 (*.f64 z t) (*.f64 c (*.f64 (fma.f64 b c a) i))) (fma.f64 (neg.f64 i) (*.f64 c (fma.f64 b c a)) (*.f64 c (*.f64 (fma.f64 b c a) i))))))): 0 points increase in error, 0 points decrease in error
(*.f64 2 (+.f64 (Rewrite=> *-commutative_binary64 (*.f64 x y)) (+.f64 (-.f64 (*.f64 z t) (*.f64 c (*.f64 (fma.f64 b c a) i))) (fma.f64 (neg.f64 i) (*.f64 c (fma.f64 b c a)) (*.f64 c (*.f64 (fma.f64 b c a) i)))))): 0 points increase in error, 0 points decrease in error
(*.f64 2 (+.f64 (*.f64 x y) (Rewrite=> associate-+l-_binary64 (-.f64 (*.f64 z t) (-.f64 (*.f64 c (*.f64 (fma.f64 b c a) i)) (fma.f64 (neg.f64 i) (*.f64 c (fma.f64 b c a)) (*.f64 c (*.f64 (fma.f64 b c a) i)))))))): 0 points increase in error, 0 points decrease in error
(*.f64 2 (Rewrite=> associate-+r-_binary64 (-.f64 (+.f64 (*.f64 x y) (*.f64 z t)) (-.f64 (*.f64 c (*.f64 (fma.f64 b c a) i)) (fma.f64 (neg.f64 i) (*.f64 c (fma.f64 b c a)) (*.f64 c (*.f64 (fma.f64 b c a) i))))))): 0 points increase in error, 0 points decrease in error
(*.f64 2 (-.f64 (Rewrite<= fma-udef_binary64 (fma.f64 x y (*.f64 z t))) (-.f64 (*.f64 c (*.f64 (fma.f64 b c a) i)) (fma.f64 (neg.f64 i) (*.f64 c (fma.f64 b c a)) (*.f64 c (*.f64 (fma.f64 b c a) i)))))): 0 points increase in error, 0 points decrease in error
(*.f64 2 (Rewrite<= associate-+l-_binary64 (+.f64 (-.f64 (fma.f64 x y (*.f64 z t)) (*.f64 c (*.f64 (fma.f64 b c a) i))) (fma.f64 (neg.f64 i) (*.f64 c (fma.f64 b c a)) (*.f64 c (*.f64 (fma.f64 b c a) i)))))): 0 points increase in error, 0 points decrease in error
(*.f64 2 (+.f64 (-.f64 (fma.f64 x y (*.f64 z t)) (*.f64 c (*.f64 (fma.f64 b c a) i))) (Rewrite=> fma-udef_binary64 (+.f64 (*.f64 (neg.f64 i) (*.f64 c (fma.f64 b c a))) (*.f64 c (*.f64 (fma.f64 b c a) i)))))): 0 points increase in error, 0 points decrease in error
(*.f64 2 (Rewrite=> associate-+r+_binary64 (+.f64 (+.f64 (-.f64 (fma.f64 x y (*.f64 z t)) (*.f64 c (*.f64 (fma.f64 b c a) i))) (*.f64 (neg.f64 i) (*.f64 c (fma.f64 b c a)))) (*.f64 c (*.f64 (fma.f64 b c a) i))))): 39 points increase in error, 0 points decrease in error
(*.f64 2 (+.f64 (+.f64 (-.f64 (fma.f64 x y (*.f64 z t)) (*.f64 c (*.f64 (fma.f64 b c a) i))) (*.f64 (neg.f64 i) (Rewrite=> *-commutative_binary64 (*.f64 (fma.f64 b c a) c)))) (*.f64 c (*.f64 (fma.f64 b c a) i)))): 0 points increase in error, 39 points decrease in error
(*.f64 2 (+.f64 (+.f64 (-.f64 (fma.f64 x y (*.f64 z t)) (*.f64 c (*.f64 (fma.f64 b c a) i))) (Rewrite=> associate-*r*_binary64 (*.f64 (*.f64 (neg.f64 i) (fma.f64 b c a)) c))) (*.f64 c (*.f64 (fma.f64 b c a) i)))): 0 points increase in error, 0 points decrease in error
(*.f64 2 (+.f64 (+.f64 (-.f64 (fma.f64 x y (*.f64 z t)) (*.f64 c (*.f64 (fma.f64 b c a) i))) (*.f64 (Rewrite<= *-commutative_binary64 (*.f64 (fma.f64 b c a) (neg.f64 i))) c)) (*.f64 c (*.f64 (fma.f64 b c a) i)))): 0 points increase in error, 0 points decrease in error
(*.f64 2 (+.f64 (+.f64 (-.f64 (fma.f64 x y (*.f64 z t)) (*.f64 c (*.f64 (fma.f64 b c a) i))) (*.f64 (Rewrite<= distribute-rgt-neg-in_binary64 (neg.f64 (*.f64 (fma.f64 b c a) i))) c)) (*.f64 c (*.f64 (fma.f64 b c a) i)))): 28 points increase in error, 0 points decrease in error
(*.f64 2 (Rewrite<= associate-+r+_binary64 (+.f64 (-.f64 (fma.f64 x y (*.f64 z t)) (*.f64 c (*.f64 (fma.f64 b c a) i))) (+.f64 (*.f64 (neg.f64 (*.f64 (fma.f64 b c a) i)) c) (*.f64 c (*.f64 (fma.f64 b c a) i)))))): 0 points increase in error, 28 points decrease in error
(*.f64 2 (+.f64 (-.f64 (fma.f64 x y (*.f64 z t)) (*.f64 c (*.f64 (fma.f64 b c a) i))) (Rewrite<= fma-udef_binary64 (fma.f64 (neg.f64 (*.f64 (fma.f64 b c a) i)) c (*.f64 c (*.f64 (fma.f64 b c a) i)))))): 0 points increase in error, 0 points decrease in error
(*.f64 2 (+.f64 (Rewrite=> cancel-sign-sub-inv_binary64 (+.f64 (fma.f64 x y (*.f64 z t)) (*.f64 (neg.f64 c) (*.f64 (fma.f64 b c a) i)))) (fma.f64 (neg.f64 (*.f64 (fma.f64 b c a) i)) c (*.f64 c (*.f64 (fma.f64 b c a) i))))): 0 points increase in error, 0 points decrease in error
(*.f64 2 (+.f64 (+.f64 (fma.f64 x y (*.f64 z t)) (*.f64 (neg.f64 c) (Rewrite=> *-commutative_binary64 (*.f64 i (fma.f64 b c a))))) (fma.f64 (neg.f64 (*.f64 (fma.f64 b c a) i)) c (*.f64 c (*.f64 (fma.f64 b c a) i))))): 0 points increase in error, 0 points decrease in error
(*.f64 2 (+.f64 (+.f64 (fma.f64 x y (*.f64 z t)) (Rewrite<= associate-*l*_binary64 (*.f64 (*.f64 (neg.f64 c) i) (fma.f64 b c a)))) (fma.f64 (neg.f64 (*.f64 (fma.f64 b c a) i)) c (*.f64 c (*.f64 (fma.f64 b c a) i))))): 0 points increase in error, 0 points decrease in error
(*.f64 2 (Rewrite<= associate-+r+_binary64 (+.f64 (fma.f64 x y (*.f64 z t)) (+.f64 (*.f64 (*.f64 (neg.f64 c) i) (fma.f64 b c a)) (fma.f64 (neg.f64 (*.f64 (fma.f64 b c a) i)) c (*.f64 c (*.f64 (fma.f64 b c a) i))))))): 0 points increase in error, 0 points decrease in error