Simplified 63.8
\[\leadsto \color{blue}{\frac{\mathsf{fma}\left(y, z - b, \mathsf{fma}\left(y + t, a, x \cdot z\right)\right)}{x + \left(y + t\right)}}
\]
Proof
(/.f64 (fma.f64 y (-.f64 z b) (fma.f64 (+.f64 y t) a (*.f64 x z))) (+.f64 x (+.f64 y t))): 0 points increase in error, 0 points decrease in error
(/.f64 (fma.f64 y (-.f64 z b) (fma.f64 (Rewrite<= +-commutative_binary64 (+.f64 t y)) a (*.f64 x z))) (+.f64 x (+.f64 y t))): 0 points increase in error, 0 points decrease in error
(/.f64 (fma.f64 y (-.f64 z b) (fma.f64 (+.f64 t y) a (Rewrite<= *-commutative_binary64 (*.f64 z x)))) (+.f64 x (+.f64 y t))): 0 points increase in error, 0 points decrease in error
(/.f64 (fma.f64 y (-.f64 z b) (Rewrite<= fma-def_binary64 (+.f64 (*.f64 (+.f64 t y) a) (*.f64 z x)))) (+.f64 x (+.f64 y t))): 0 points increase in error, 0 points decrease in error
(/.f64 (fma.f64 y (-.f64 z b) (Rewrite<= +-commutative_binary64 (+.f64 (*.f64 z x) (*.f64 (+.f64 t y) a)))) (+.f64 x (+.f64 y t))): 0 points increase in error, 0 points decrease in error
(/.f64 (Rewrite<= fma-def_binary64 (+.f64 (*.f64 y (-.f64 z b)) (+.f64 (*.f64 z x) (*.f64 (+.f64 t y) a)))) (+.f64 x (+.f64 y t))): 0 points increase in error, 0 points decrease in error
(/.f64 (+.f64 (Rewrite<= distribute-lft-out--_binary64 (-.f64 (*.f64 y z) (*.f64 y b))) (+.f64 (*.f64 z x) (*.f64 (+.f64 t y) a))) (+.f64 x (+.f64 y t))): 1 point increase in error, 0 points decrease in error
(/.f64 (+.f64 (Rewrite<= unsub-neg_binary64 (+.f64 (*.f64 y z) (neg.f64 (*.f64 y b)))) (+.f64 (*.f64 z x) (*.f64 (+.f64 t y) a))) (+.f64 x (+.f64 y t))): 0 points increase in error, 0 points decrease in error
(/.f64 (+.f64 (Rewrite=> +-commutative_binary64 (+.f64 (neg.f64 (*.f64 y b)) (*.f64 y z))) (+.f64 (*.f64 z x) (*.f64 (+.f64 t y) a))) (+.f64 x (+.f64 y t))): 0 points increase in error, 0 points decrease in error
(/.f64 (Rewrite<= associate-+r+_binary64 (+.f64 (neg.f64 (*.f64 y b)) (+.f64 (*.f64 y z) (+.f64 (*.f64 z x) (*.f64 (+.f64 t y) a))))) (+.f64 x (+.f64 y t))): 0 points increase in error, 0 points decrease in error
(/.f64 (+.f64 (neg.f64 (*.f64 y b)) (+.f64 (*.f64 y z) (+.f64 (Rewrite=> *-commutative_binary64 (*.f64 x z)) (*.f64 (+.f64 t y) a)))) (+.f64 x (+.f64 y t))): 0 points increase in error, 0 points decrease in error
(/.f64 (+.f64 (neg.f64 (*.f64 y b)) (Rewrite<= associate-+l+_binary64 (+.f64 (+.f64 (*.f64 y z) (*.f64 x z)) (*.f64 (+.f64 t y) a)))) (+.f64 x (+.f64 y t))): 0 points increase in error, 0 points decrease in error
(/.f64 (+.f64 (neg.f64 (*.f64 y b)) (+.f64 (Rewrite<= distribute-rgt-in_binary64 (*.f64 z (+.f64 y x))) (*.f64 (+.f64 t y) a))) (+.f64 x (+.f64 y t))): 1 point increase in error, 0 points decrease in error
(/.f64 (+.f64 (neg.f64 (*.f64 y b)) (+.f64 (*.f64 z (Rewrite<= +-commutative_binary64 (+.f64 x y))) (*.f64 (+.f64 t y) a))) (+.f64 x (+.f64 y t))): 0 points increase in error, 0 points decrease in error
(/.f64 (+.f64 (neg.f64 (*.f64 y b)) (+.f64 (Rewrite<= *-commutative_binary64 (*.f64 (+.f64 x y) z)) (*.f64 (+.f64 t y) a))) (+.f64 x (+.f64 y t))): 0 points increase in error, 0 points decrease in error
(/.f64 (Rewrite<= +-commutative_binary64 (+.f64 (+.f64 (*.f64 (+.f64 x y) z) (*.f64 (+.f64 t y) a)) (neg.f64 (*.f64 y b)))) (+.f64 x (+.f64 y t))): 0 points increase in error, 0 points decrease in error
(/.f64 (Rewrite<= sub-neg_binary64 (-.f64 (+.f64 (*.f64 (+.f64 x y) z) (*.f64 (+.f64 t y) a)) (*.f64 y b))) (+.f64 x (+.f64 y t))): 0 points increase in error, 0 points decrease in error
(/.f64 (-.f64 (+.f64 (*.f64 (+.f64 x y) z) (*.f64 (+.f64 t y) a)) (*.f64 y b)) (+.f64 x (Rewrite<= +-commutative_binary64 (+.f64 t y)))): 0 points increase in error, 0 points decrease in error
(/.f64 (-.f64 (+.f64 (*.f64 (+.f64 x y) z) (*.f64 (+.f64 t y) a)) (*.f64 y b)) (Rewrite<= associate-+l+_binary64 (+.f64 (+.f64 x t) y))): 0 points increase in error, 0 points decrease in error
Simplified 0.3
\[\leadsto \color{blue}{\frac{a}{x + \left(t + y\right)} \cdot \left(t + y\right) + \left(\frac{z - b}{\frac{x + \left(t + y\right)}{y}} + \frac{z}{x + \left(t + y\right)} \cdot x\right)}
\]
Proof
(+.f64 (*.f64 (/.f64 a (+.f64 x (+.f64 t y))) (+.f64 t y)) (+.f64 (/.f64 (-.f64 z b) (/.f64 (+.f64 x (+.f64 t y)) y)) (*.f64 (/.f64 z (+.f64 x (+.f64 t y))) x))): 0 points increase in error, 0 points decrease in error
(+.f64 (*.f64 (/.f64 a (Rewrite<= associate-+l+_binary64 (+.f64 (+.f64 x t) y))) (+.f64 t y)) (+.f64 (/.f64 (-.f64 z b) (/.f64 (+.f64 x (+.f64 t y)) y)) (*.f64 (/.f64 z (+.f64 x (+.f64 t y))) x))): 0 points increase in error, 0 points decrease in error
(+.f64 (*.f64 (/.f64 a (+.f64 (Rewrite<= +-commutative_binary64 (+.f64 t x)) y)) (+.f64 t y)) (+.f64 (/.f64 (-.f64 z b) (/.f64 (+.f64 x (+.f64 t y)) y)) (*.f64 (/.f64 z (+.f64 x (+.f64 t y))) x))): 0 points increase in error, 0 points decrease in error
(+.f64 (*.f64 (/.f64 a (Rewrite<= +-commutative_binary64 (+.f64 y (+.f64 t x)))) (+.f64 t y)) (+.f64 (/.f64 (-.f64 z b) (/.f64 (+.f64 x (+.f64 t y)) y)) (*.f64 (/.f64 z (+.f64 x (+.f64 t y))) x))): 0 points increase in error, 0 points decrease in error
(+.f64 (*.f64 (/.f64 a (+.f64 y (+.f64 t x))) (Rewrite<= +-commutative_binary64 (+.f64 y t))) (+.f64 (/.f64 (-.f64 z b) (/.f64 (+.f64 x (+.f64 t y)) y)) (*.f64 (/.f64 z (+.f64 x (+.f64 t y))) x))): 0 points increase in error, 0 points decrease in error
(+.f64 (Rewrite<= associate-/r/_binary64 (/.f64 a (/.f64 (+.f64 y (+.f64 t x)) (+.f64 y t)))) (+.f64 (/.f64 (-.f64 z b) (/.f64 (+.f64 x (+.f64 t y)) y)) (*.f64 (/.f64 z (+.f64 x (+.f64 t y))) x))): 5 points increase in error, 25 points decrease in error
(+.f64 (Rewrite<= associate-/l*_binary64 (/.f64 (*.f64 a (+.f64 y t)) (+.f64 y (+.f64 t x)))) (+.f64 (/.f64 (-.f64 z b) (/.f64 (+.f64 x (+.f64 t y)) y)) (*.f64 (/.f64 z (+.f64 x (+.f64 t y))) x))): 58 points increase in error, 6 points decrease in error
(+.f64 (/.f64 (*.f64 a (+.f64 y t)) (+.f64 y (+.f64 t x))) (+.f64 (/.f64 (-.f64 z b) (/.f64 (Rewrite<= associate-+l+_binary64 (+.f64 (+.f64 x t) y)) y)) (*.f64 (/.f64 z (+.f64 x (+.f64 t y))) x))): 0 points increase in error, 0 points decrease in error
(+.f64 (/.f64 (*.f64 a (+.f64 y t)) (+.f64 y (+.f64 t x))) (+.f64 (/.f64 (-.f64 z b) (/.f64 (+.f64 (Rewrite<= +-commutative_binary64 (+.f64 t x)) y) y)) (*.f64 (/.f64 z (+.f64 x (+.f64 t y))) x))): 0 points increase in error, 0 points decrease in error
(+.f64 (/.f64 (*.f64 a (+.f64 y t)) (+.f64 y (+.f64 t x))) (+.f64 (/.f64 (-.f64 z b) (/.f64 (Rewrite<= +-commutative_binary64 (+.f64 y (+.f64 t x))) y)) (*.f64 (/.f64 z (+.f64 x (+.f64 t y))) x))): 0 points increase in error, 0 points decrease in error
(+.f64 (/.f64 (*.f64 a (+.f64 y t)) (+.f64 y (+.f64 t x))) (+.f64 (Rewrite=> div-sub_binary64 (-.f64 (/.f64 z (/.f64 (+.f64 y (+.f64 t x)) y)) (/.f64 b (/.f64 (+.f64 y (+.f64 t x)) y)))) (*.f64 (/.f64 z (+.f64 x (+.f64 t y))) x))): 0 points increase in error, 0 points decrease in error
(+.f64 (/.f64 (*.f64 a (+.f64 y t)) (+.f64 y (+.f64 t x))) (+.f64 (-.f64 (Rewrite<= associate-/l*_binary64 (/.f64 (*.f64 z y) (+.f64 y (+.f64 t x)))) (/.f64 b (/.f64 (+.f64 y (+.f64 t x)) y))) (*.f64 (/.f64 z (+.f64 x (+.f64 t y))) x))): 17 points increase in error, 0 points decrease in error
(+.f64 (/.f64 (*.f64 a (+.f64 y t)) (+.f64 y (+.f64 t x))) (+.f64 (-.f64 (/.f64 (Rewrite<= *-commutative_binary64 (*.f64 y z)) (+.f64 y (+.f64 t x))) (/.f64 b (/.f64 (+.f64 y (+.f64 t x)) y))) (*.f64 (/.f64 z (+.f64 x (+.f64 t y))) x))): 0 points increase in error, 0 points decrease in error
(+.f64 (/.f64 (*.f64 a (+.f64 y t)) (+.f64 y (+.f64 t x))) (+.f64 (-.f64 (/.f64 (*.f64 y z) (+.f64 y (+.f64 t x))) (Rewrite<= associate-/l*_binary64 (/.f64 (*.f64 b y) (+.f64 y (+.f64 t x))))) (*.f64 (/.f64 z (+.f64 x (+.f64 t y))) x))): 23 points increase in error, 4 points decrease in error
(+.f64 (/.f64 (*.f64 a (+.f64 y t)) (+.f64 y (+.f64 t x))) (+.f64 (-.f64 (/.f64 (*.f64 y z) (+.f64 y (+.f64 t x))) (/.f64 (Rewrite<= *-commutative_binary64 (*.f64 y b)) (+.f64 y (+.f64 t x)))) (*.f64 (/.f64 z (+.f64 x (+.f64 t y))) x))): 0 points increase in error, 0 points decrease in error
(+.f64 (/.f64 (*.f64 a (+.f64 y t)) (+.f64 y (+.f64 t x))) (+.f64 (Rewrite<= unsub-neg_binary64 (+.f64 (/.f64 (*.f64 y z) (+.f64 y (+.f64 t x))) (neg.f64 (/.f64 (*.f64 y b) (+.f64 y (+.f64 t x)))))) (*.f64 (/.f64 z (+.f64 x (+.f64 t y))) x))): 0 points increase in error, 0 points decrease in error
(+.f64 (/.f64 (*.f64 a (+.f64 y t)) (+.f64 y (+.f64 t x))) (+.f64 (+.f64 (/.f64 (*.f64 y z) (+.f64 y (+.f64 t x))) (Rewrite<= mul-1-neg_binary64 (*.f64 -1 (/.f64 (*.f64 y b) (+.f64 y (+.f64 t x)))))) (*.f64 (/.f64 z (+.f64 x (+.f64 t y))) x))): 0 points increase in error, 0 points decrease in error
(+.f64 (/.f64 (*.f64 a (+.f64 y t)) (+.f64 y (+.f64 t x))) (+.f64 (Rewrite<= +-commutative_binary64 (+.f64 (*.f64 -1 (/.f64 (*.f64 y b) (+.f64 y (+.f64 t x)))) (/.f64 (*.f64 y z) (+.f64 y (+.f64 t x))))) (*.f64 (/.f64 z (+.f64 x (+.f64 t y))) x))): 0 points increase in error, 0 points decrease in error
(+.f64 (/.f64 (*.f64 a (+.f64 y t)) (+.f64 y (+.f64 t x))) (+.f64 (+.f64 (*.f64 -1 (/.f64 (*.f64 y b) (+.f64 y (+.f64 t x)))) (/.f64 (*.f64 y z) (+.f64 y (+.f64 t x)))) (*.f64 (/.f64 z (Rewrite<= associate-+l+_binary64 (+.f64 (+.f64 x t) y))) x))): 0 points increase in error, 0 points decrease in error
(+.f64 (/.f64 (*.f64 a (+.f64 y t)) (+.f64 y (+.f64 t x))) (+.f64 (+.f64 (*.f64 -1 (/.f64 (*.f64 y b) (+.f64 y (+.f64 t x)))) (/.f64 (*.f64 y z) (+.f64 y (+.f64 t x)))) (*.f64 (/.f64 z (+.f64 (Rewrite<= +-commutative_binary64 (+.f64 t x)) y)) x))): 0 points increase in error, 0 points decrease in error
(+.f64 (/.f64 (*.f64 a (+.f64 y t)) (+.f64 y (+.f64 t x))) (+.f64 (+.f64 (*.f64 -1 (/.f64 (*.f64 y b) (+.f64 y (+.f64 t x)))) (/.f64 (*.f64 y z) (+.f64 y (+.f64 t x)))) (*.f64 (/.f64 z (Rewrite<= +-commutative_binary64 (+.f64 y (+.f64 t x)))) x))): 0 points increase in error, 0 points decrease in error
(+.f64 (/.f64 (*.f64 a (+.f64 y t)) (+.f64 y (+.f64 t x))) (+.f64 (+.f64 (*.f64 -1 (/.f64 (*.f64 y b) (+.f64 y (+.f64 t x)))) (/.f64 (*.f64 y z) (+.f64 y (+.f64 t x)))) (Rewrite<= associate-/r/_binary64 (/.f64 z (/.f64 (+.f64 y (+.f64 t x)) x))))): 4 points increase in error, 14 points decrease in error
(+.f64 (/.f64 (*.f64 a (+.f64 y t)) (+.f64 y (+.f64 t x))) (+.f64 (+.f64 (*.f64 -1 (/.f64 (*.f64 y b) (+.f64 y (+.f64 t x)))) (/.f64 (*.f64 y z) (+.f64 y (+.f64 t x)))) (Rewrite<= associate-/l*_binary64 (/.f64 (*.f64 z x) (+.f64 y (+.f64 t x)))))): 27 points increase in error, 3 points decrease in error
(Rewrite<= associate-+l+_binary64 (+.f64 (+.f64 (/.f64 (*.f64 a (+.f64 y t)) (+.f64 y (+.f64 t x))) (+.f64 (*.f64 -1 (/.f64 (*.f64 y b) (+.f64 y (+.f64 t x)))) (/.f64 (*.f64 y z) (+.f64 y (+.f64 t x))))) (/.f64 (*.f64 z x) (+.f64 y (+.f64 t x))))): 0 points increase in error, 0 points decrease in error
(Rewrite<= +-commutative_binary64 (+.f64 (/.f64 (*.f64 z x) (+.f64 y (+.f64 t x))) (+.f64 (/.f64 (*.f64 a (+.f64 y t)) (+.f64 y (+.f64 t x))) (+.f64 (*.f64 -1 (/.f64 (*.f64 y b) (+.f64 y (+.f64 t x)))) (/.f64 (*.f64 y z) (+.f64 y (+.f64 t x))))))): 0 points increase in error, 0 points decrease in error