Simplified 25.1
\[\leadsto \color{blue}{\frac{\mathsf{fma}\left(t, a \cdot -4, \frac{\mathsf{fma}\left(x, 9 \cdot y, b\right)}{z}\right)}{c}}
\]
Proof
(/.f64 (fma.f64 t (*.f64 a -4) (/.f64 (fma.f64 x (*.f64 9 y) b) z)) c): 0 points increase in error, 0 points decrease in error
(/.f64 (fma.f64 t (*.f64 a (Rewrite<= metadata-eval (neg.f64 4))) (/.f64 (fma.f64 x (*.f64 9 y) b) z)) c): 0 points increase in error, 0 points decrease in error
(/.f64 (fma.f64 t (Rewrite<= distribute-rgt-neg-in_binary64 (neg.f64 (*.f64 a 4))) (/.f64 (fma.f64 x (*.f64 9 y) b) z)) c): 0 points increase in error, 0 points decrease in error
(/.f64 (fma.f64 t (Rewrite<= distribute-lft-neg-out_binary64 (*.f64 (neg.f64 a) 4)) (/.f64 (fma.f64 x (*.f64 9 y) b) z)) c): 0 points increase in error, 0 points decrease in error
(/.f64 (fma.f64 t (Rewrite=> *-commutative_binary64 (*.f64 4 (neg.f64 a))) (/.f64 (fma.f64 x (*.f64 9 y) b) z)) c): 0 points increase in error, 0 points decrease in error
(/.f64 (fma.f64 t (*.f64 4 (neg.f64 a)) (/.f64 (Rewrite<= fma-def_binary64 (+.f64 (*.f64 x (*.f64 9 y)) b)) z)) c): 2 points increase in error, 0 points decrease in error
(/.f64 (fma.f64 t (*.f64 4 (neg.f64 a)) (/.f64 (+.f64 (Rewrite<= associate-*l*_binary64 (*.f64 (*.f64 x 9) y)) b) z)) c): 5 points increase in error, 8 points decrease in error
(/.f64 (fma.f64 t (*.f64 4 (neg.f64 a)) (/.f64 (Rewrite<= +-commutative_binary64 (+.f64 b (*.f64 (*.f64 x 9) y))) z)) c): 0 points increase in error, 0 points decrease in error
(/.f64 (Rewrite<= fma-def_binary64 (+.f64 (*.f64 t (*.f64 4 (neg.f64 a))) (/.f64 (+.f64 b (*.f64 (*.f64 x 9) y)) z))) c): 2 points increase in error, 0 points decrease in error
(/.f64 (+.f64 (Rewrite<= associate-*l*_binary64 (*.f64 (*.f64 t 4) (neg.f64 a))) (/.f64 (+.f64 b (*.f64 (*.f64 x 9) y)) z)) c): 0 points increase in error, 0 points decrease in error
(/.f64 (+.f64 (*.f64 (Rewrite<= *-commutative_binary64 (*.f64 4 t)) (neg.f64 a)) (/.f64 (+.f64 b (*.f64 (*.f64 x 9) y)) z)) c): 0 points increase in error, 0 points decrease in error
(/.f64 (+.f64 (Rewrite=> distribute-rgt-neg-out_binary64 (neg.f64 (*.f64 (*.f64 4 t) a))) (/.f64 (+.f64 b (*.f64 (*.f64 x 9) y)) z)) c): 0 points increase in error, 0 points decrease in error
(/.f64 (Rewrite<= +-commutative_binary64 (+.f64 (/.f64 (+.f64 b (*.f64 (*.f64 x 9) y)) z) (neg.f64 (*.f64 (*.f64 4 t) a)))) c): 0 points increase in error, 0 points decrease in error
(/.f64 (Rewrite=> unsub-neg_binary64 (-.f64 (/.f64 (+.f64 b (*.f64 (*.f64 x 9) y)) z) (*.f64 (*.f64 4 t) a))) c): 0 points increase in error, 0 points decrease in error
(/.f64 (-.f64 (/.f64 (+.f64 b (*.f64 (*.f64 x 9) y)) z) (Rewrite=> associate-*l*_binary64 (*.f64 4 (*.f64 t a)))) c): 0 points increase in error, 0 points decrease in error
(/.f64 (-.f64 (/.f64 (+.f64 b (*.f64 (*.f64 x 9) y)) z) (*.f64 (Rewrite<= metadata-eval (/.f64 4 1)) (*.f64 t a))) c): 0 points increase in error, 0 points decrease in error
(/.f64 (-.f64 (/.f64 (+.f64 b (*.f64 (*.f64 x 9) y)) z) (*.f64 (/.f64 4 (Rewrite<= *-inverses_binary64 (/.f64 z z))) (*.f64 t a))) c): 0 points increase in error, 0 points decrease in error
(/.f64 (-.f64 (/.f64 (+.f64 b (*.f64 (*.f64 x 9) y)) z) (*.f64 (Rewrite<= associate-/l*_binary64 (/.f64 (*.f64 4 z) z)) (*.f64 t a))) c): 0 points increase in error, 0 points decrease in error
(/.f64 (-.f64 (/.f64 (+.f64 b (*.f64 (*.f64 x 9) y)) z) (*.f64 (/.f64 (Rewrite<= *-commutative_binary64 (*.f64 z 4)) z) (*.f64 t a))) c): 0 points increase in error, 0 points decrease in error
(/.f64 (-.f64 (/.f64 (+.f64 b (*.f64 (*.f64 x 9) y)) z) (Rewrite<= associate-/r/_binary64 (/.f64 (*.f64 z 4) (/.f64 z (*.f64 t a))))) c): 24 points increase in error, 0 points decrease in error
(/.f64 (-.f64 (/.f64 (+.f64 b (*.f64 (*.f64 x 9) y)) z) (Rewrite<= associate-/l*_binary64 (/.f64 (*.f64 (*.f64 z 4) (*.f64 t a)) z))) c): 20 points increase in error, 25 points decrease in error
(/.f64 (-.f64 (/.f64 (+.f64 b (*.f64 (*.f64 x 9) y)) z) (/.f64 (Rewrite<= associate-*l*_binary64 (*.f64 (*.f64 (*.f64 z 4) t) a)) z)) c): 29 points increase in error, 2 points decrease in error
(/.f64 (Rewrite<= div-sub_binary64 (/.f64 (-.f64 (+.f64 b (*.f64 (*.f64 x 9) y)) (*.f64 (*.f64 (*.f64 z 4) t) a)) z)) c): 0 points increase in error, 1 point decrease in error
(/.f64 (/.f64 (Rewrite<= associate-+r-_binary64 (+.f64 b (-.f64 (*.f64 (*.f64 x 9) y) (*.f64 (*.f64 (*.f64 z 4) t) a)))) z) c): 0 points increase in error, 0 points decrease in error
(/.f64 (/.f64 (Rewrite<= +-commutative_binary64 (+.f64 (-.f64 (*.f64 (*.f64 x 9) y) (*.f64 (*.f64 (*.f64 z 4) t) a)) b)) z) c): 0 points increase in error, 0 points decrease in error
(Rewrite<= associate-/r*_binary64 (/.f64 (+.f64 (-.f64 (*.f64 (*.f64 x 9) y) (*.f64 (*.f64 (*.f64 z 4) t) a)) b) (*.f64 z c))): 42 points increase in error, 41 points decrease in error
Simplified 13.3
\[\leadsto \color{blue}{\mathsf{fma}\left(9, \frac{y}{\frac{z}{\frac{x}{c}}}, \mathsf{fma}\left(-4, \frac{t}{\frac{c}{a}}, \frac{b}{z \cdot c}\right)\right)}
\]
Proof
(fma.f64 9 (/.f64 y (/.f64 z (/.f64 x c))) (fma.f64 -4 (/.f64 t (/.f64 c a)) (/.f64 b (*.f64 z c)))): 0 points increase in error, 0 points decrease in error
(fma.f64 9 (/.f64 y (Rewrite<= associate-/l*_binary64 (/.f64 (*.f64 z c) x))) (fma.f64 -4 (/.f64 t (/.f64 c a)) (/.f64 b (*.f64 z c)))): 17 points increase in error, 18 points decrease in error
(fma.f64 9 (/.f64 y (/.f64 (Rewrite<= *-commutative_binary64 (*.f64 c z)) x)) (fma.f64 -4 (/.f64 t (/.f64 c a)) (/.f64 b (*.f64 z c)))): 0 points increase in error, 0 points decrease in error
(fma.f64 9 (Rewrite<= associate-/l*_binary64 (/.f64 (*.f64 y x) (*.f64 c z))) (fma.f64 -4 (/.f64 t (/.f64 c a)) (/.f64 b (*.f64 z c)))): 23 points increase in error, 14 points decrease in error
(fma.f64 9 (/.f64 (*.f64 y x) (*.f64 c z)) (fma.f64 -4 (Rewrite<= associate-/l*_binary64 (/.f64 (*.f64 t a) c)) (/.f64 b (*.f64 z c)))): 24 points increase in error, 31 points decrease in error
(fma.f64 9 (/.f64 (*.f64 y x) (*.f64 c z)) (fma.f64 -4 (/.f64 (Rewrite<= *-commutative_binary64 (*.f64 a t)) c) (/.f64 b (*.f64 z c)))): 0 points increase in error, 0 points decrease in error
(fma.f64 9 (/.f64 (*.f64 y x) (*.f64 c z)) (fma.f64 -4 (/.f64 (*.f64 a t) c) (/.f64 b (Rewrite<= *-commutative_binary64 (*.f64 c z))))): 0 points increase in error, 0 points decrease in error
(fma.f64 9 (/.f64 (*.f64 y x) (*.f64 c z)) (Rewrite<= fma-def_binary64 (+.f64 (*.f64 -4 (/.f64 (*.f64 a t) c)) (/.f64 b (*.f64 c z))))): 0 points increase in error, 0 points decrease in error
(fma.f64 9 (/.f64 (*.f64 y x) (*.f64 c z)) (Rewrite<= +-commutative_binary64 (+.f64 (/.f64 b (*.f64 c z)) (*.f64 -4 (/.f64 (*.f64 a t) c))))): 0 points increase in error, 0 points decrease in error
(Rewrite<= fma-def_binary64 (+.f64 (*.f64 9 (/.f64 (*.f64 y x) (*.f64 c z))) (+.f64 (/.f64 b (*.f64 c z)) (*.f64 -4 (/.f64 (*.f64 a t) c))))): 5 points increase in error, 0 points decrease in error
(Rewrite<= +-commutative_binary64 (+.f64 (+.f64 (/.f64 b (*.f64 c z)) (*.f64 -4 (/.f64 (*.f64 a t) c))) (*.f64 9 (/.f64 (*.f64 y x) (*.f64 c z))))): 0 points increase in error, 0 points decrease in error
(Rewrite<= associate-+r+_binary64 (+.f64 (/.f64 b (*.f64 c z)) (+.f64 (*.f64 -4 (/.f64 (*.f64 a t) c)) (*.f64 9 (/.f64 (*.f64 y x) (*.f64 c z)))))): 0 points increase in error, 0 points decrease in error