Simplified 62.8%
\[\leadsto \color{blue}{\frac{\mathsf{fma}\left(y, z - b, \mathsf{fma}\left(y + t, a, x \cdot z\right)\right)}{x + \left(y + t\right)}}
\]
Proof
(/.f64 (fma.f64 y (-.f64 z b) (fma.f64 (+.f64 y t) a (*.f64 x z))) (+.f64 x (+.f64 y t))): 0 points increase in error, 0 points decrease in error
(/.f64 (fma.f64 y (-.f64 z b) (fma.f64 (Rewrite<= +-commutative_binary64 (+.f64 t y)) a (*.f64 x z))) (+.f64 x (+.f64 y t))): 0 points increase in error, 0 points decrease in error
(/.f64 (fma.f64 y (-.f64 z b) (fma.f64 (+.f64 t y) a (Rewrite<= *-commutative_binary64 (*.f64 z x)))) (+.f64 x (+.f64 y t))): 0 points increase in error, 0 points decrease in error
(/.f64 (fma.f64 y (-.f64 z b) (Rewrite<= fma-def_binary64 (+.f64 (*.f64 (+.f64 t y) a) (*.f64 z x)))) (+.f64 x (+.f64 y t))): 0 points increase in error, 0 points decrease in error
(/.f64 (fma.f64 y (-.f64 z b) (Rewrite<= +-commutative_binary64 (+.f64 (*.f64 z x) (*.f64 (+.f64 t y) a)))) (+.f64 x (+.f64 y t))): 0 points increase in error, 0 points decrease in error
(/.f64 (Rewrite<= fma-def_binary64 (+.f64 (*.f64 y (-.f64 z b)) (+.f64 (*.f64 z x) (*.f64 (+.f64 t y) a)))) (+.f64 x (+.f64 y t))): 2 points increase in error, 0 points decrease in error
(/.f64 (+.f64 (Rewrite<= distribute-lft-out--_binary64 (-.f64 (*.f64 y z) (*.f64 y b))) (+.f64 (*.f64 z x) (*.f64 (+.f64 t y) a))) (+.f64 x (+.f64 y t))): 1 point increase in error, 0 points decrease in error
(/.f64 (+.f64 (Rewrite<= unsub-neg_binary64 (+.f64 (*.f64 y z) (neg.f64 (*.f64 y b)))) (+.f64 (*.f64 z x) (*.f64 (+.f64 t y) a))) (+.f64 x (+.f64 y t))): 0 points increase in error, 0 points decrease in error
(/.f64 (+.f64 (Rewrite=> +-commutative_binary64 (+.f64 (neg.f64 (*.f64 y b)) (*.f64 y z))) (+.f64 (*.f64 z x) (*.f64 (+.f64 t y) a))) (+.f64 x (+.f64 y t))): 0 points increase in error, 0 points decrease in error
(/.f64 (Rewrite<= associate-+r+_binary64 (+.f64 (neg.f64 (*.f64 y b)) (+.f64 (*.f64 y z) (+.f64 (*.f64 z x) (*.f64 (+.f64 t y) a))))) (+.f64 x (+.f64 y t))): 0 points increase in error, 0 points decrease in error
(/.f64 (+.f64 (neg.f64 (*.f64 y b)) (+.f64 (*.f64 y z) (+.f64 (Rewrite=> *-commutative_binary64 (*.f64 x z)) (*.f64 (+.f64 t y) a)))) (+.f64 x (+.f64 y t))): 0 points increase in error, 0 points decrease in error
(/.f64 (+.f64 (neg.f64 (*.f64 y b)) (Rewrite<= associate-+l+_binary64 (+.f64 (+.f64 (*.f64 y z) (*.f64 x z)) (*.f64 (+.f64 t y) a)))) (+.f64 x (+.f64 y t))): 0 points increase in error, 0 points decrease in error
(/.f64 (+.f64 (neg.f64 (*.f64 y b)) (+.f64 (Rewrite<= distribute-rgt-in_binary64 (*.f64 z (+.f64 y x))) (*.f64 (+.f64 t y) a))) (+.f64 x (+.f64 y t))): 0 points increase in error, 1 point decrease in error
(/.f64 (+.f64 (neg.f64 (*.f64 y b)) (+.f64 (*.f64 z (Rewrite<= +-commutative_binary64 (+.f64 x y))) (*.f64 (+.f64 t y) a))) (+.f64 x (+.f64 y t))): 0 points increase in error, 0 points decrease in error
(/.f64 (+.f64 (neg.f64 (*.f64 y b)) (+.f64 (Rewrite<= *-commutative_binary64 (*.f64 (+.f64 x y) z)) (*.f64 (+.f64 t y) a))) (+.f64 x (+.f64 y t))): 0 points increase in error, 0 points decrease in error
(/.f64 (Rewrite<= +-commutative_binary64 (+.f64 (+.f64 (*.f64 (+.f64 x y) z) (*.f64 (+.f64 t y) a)) (neg.f64 (*.f64 y b)))) (+.f64 x (+.f64 y t))): 0 points increase in error, 0 points decrease in error
(/.f64 (Rewrite<= sub-neg_binary64 (-.f64 (+.f64 (*.f64 (+.f64 x y) z) (*.f64 (+.f64 t y) a)) (*.f64 y b))) (+.f64 x (+.f64 y t))): 0 points increase in error, 0 points decrease in error
(/.f64 (-.f64 (+.f64 (*.f64 (+.f64 x y) z) (*.f64 (+.f64 t y) a)) (*.f64 y b)) (+.f64 x (Rewrite<= +-commutative_binary64 (+.f64 t y)))): 0 points increase in error, 0 points decrease in error
(/.f64 (-.f64 (+.f64 (*.f64 (+.f64 x y) z) (*.f64 (+.f64 t y) a)) (*.f64 y b)) (Rewrite<= associate-+l+_binary64 (+.f64 (+.f64 x t) y))): 0 points increase in error, 0 points decrease in error
Simplified 0.8%
\[\leadsto \color{blue}{\frac{z - b}{x + \left(t + y\right)} \cdot y + \mathsf{fma}\left(a, \frac{y}{x + \left(t + y\right)} + \frac{t}{x + \left(t + y\right)}, \frac{x}{x + \left(t + y\right)} \cdot z\right)}
\]
Proof
(+.f64 (*.f64 (/.f64 (-.f64 z b) (+.f64 x (+.f64 t y))) y) (fma.f64 a (+.f64 (/.f64 y (+.f64 x (+.f64 t y))) (/.f64 t (+.f64 x (+.f64 t y)))) (*.f64 (/.f64 x (+.f64 x (+.f64 t y))) z))): 0 points increase in error, 0 points decrease in error
(+.f64 (*.f64 (/.f64 (-.f64 z b) (Rewrite<= associate-+l+_binary64 (+.f64 (+.f64 x t) y))) y) (fma.f64 a (+.f64 (/.f64 y (+.f64 x (+.f64 t y))) (/.f64 t (+.f64 x (+.f64 t y)))) (*.f64 (/.f64 x (+.f64 x (+.f64 t y))) z))): 0 points increase in error, 0 points decrease in error
(+.f64 (*.f64 (/.f64 (-.f64 z b) (+.f64 (Rewrite<= +-commutative_binary64 (+.f64 t x)) y)) y) (fma.f64 a (+.f64 (/.f64 y (+.f64 x (+.f64 t y))) (/.f64 t (+.f64 x (+.f64 t y)))) (*.f64 (/.f64 x (+.f64 x (+.f64 t y))) z))): 0 points increase in error, 0 points decrease in error
(+.f64 (*.f64 (/.f64 (-.f64 z b) (Rewrite<= +-commutative_binary64 (+.f64 y (+.f64 t x)))) y) (fma.f64 a (+.f64 (/.f64 y (+.f64 x (+.f64 t y))) (/.f64 t (+.f64 x (+.f64 t y)))) (*.f64 (/.f64 x (+.f64 x (+.f64 t y))) z))): 0 points increase in error, 0 points decrease in error
(+.f64 (Rewrite<= associate-/r/_binary64 (/.f64 (-.f64 z b) (/.f64 (+.f64 y (+.f64 t x)) y))) (fma.f64 a (+.f64 (/.f64 y (+.f64 x (+.f64 t y))) (/.f64 t (+.f64 x (+.f64 t y)))) (*.f64 (/.f64 x (+.f64 x (+.f64 t y))) z))): 10 points increase in error, 16 points decrease in error
(+.f64 (Rewrite<= associate-/l*_binary64 (/.f64 (*.f64 (-.f64 z b) y) (+.f64 y (+.f64 t x)))) (fma.f64 a (+.f64 (/.f64 y (+.f64 x (+.f64 t y))) (/.f64 t (+.f64 x (+.f64 t y)))) (*.f64 (/.f64 x (+.f64 x (+.f64 t y))) z))): 49 points increase in error, 12 points decrease in error
(+.f64 (/.f64 (*.f64 (-.f64 z b) y) (+.f64 y (+.f64 t x))) (fma.f64 a (+.f64 (/.f64 y (Rewrite<= associate-+l+_binary64 (+.f64 (+.f64 x t) y))) (/.f64 t (+.f64 x (+.f64 t y)))) (*.f64 (/.f64 x (+.f64 x (+.f64 t y))) z))): 0 points increase in error, 0 points decrease in error
(+.f64 (/.f64 (*.f64 (-.f64 z b) y) (+.f64 y (+.f64 t x))) (fma.f64 a (+.f64 (/.f64 y (+.f64 (Rewrite<= +-commutative_binary64 (+.f64 t x)) y)) (/.f64 t (+.f64 x (+.f64 t y)))) (*.f64 (/.f64 x (+.f64 x (+.f64 t y))) z))): 0 points increase in error, 0 points decrease in error
(+.f64 (/.f64 (*.f64 (-.f64 z b) y) (+.f64 y (+.f64 t x))) (fma.f64 a (+.f64 (/.f64 y (Rewrite<= +-commutative_binary64 (+.f64 y (+.f64 t x)))) (/.f64 t (+.f64 x (+.f64 t y)))) (*.f64 (/.f64 x (+.f64 x (+.f64 t y))) z))): 0 points increase in error, 0 points decrease in error
(+.f64 (/.f64 (*.f64 (-.f64 z b) y) (+.f64 y (+.f64 t x))) (fma.f64 a (+.f64 (/.f64 y (+.f64 y (+.f64 t x))) (/.f64 t (Rewrite<= associate-+l+_binary64 (+.f64 (+.f64 x t) y)))) (*.f64 (/.f64 x (+.f64 x (+.f64 t y))) z))): 0 points increase in error, 0 points decrease in error
(+.f64 (/.f64 (*.f64 (-.f64 z b) y) (+.f64 y (+.f64 t x))) (fma.f64 a (+.f64 (/.f64 y (+.f64 y (+.f64 t x))) (/.f64 t (+.f64 (Rewrite<= +-commutative_binary64 (+.f64 t x)) y))) (*.f64 (/.f64 x (+.f64 x (+.f64 t y))) z))): 0 points increase in error, 0 points decrease in error
(+.f64 (/.f64 (*.f64 (-.f64 z b) y) (+.f64 y (+.f64 t x))) (fma.f64 a (+.f64 (/.f64 y (+.f64 y (+.f64 t x))) (/.f64 t (Rewrite<= +-commutative_binary64 (+.f64 y (+.f64 t x))))) (*.f64 (/.f64 x (+.f64 x (+.f64 t y))) z))): 0 points increase in error, 0 points decrease in error
(+.f64 (/.f64 (*.f64 (-.f64 z b) y) (+.f64 y (+.f64 t x))) (fma.f64 a (+.f64 (/.f64 y (+.f64 y (+.f64 t x))) (/.f64 t (+.f64 y (+.f64 t x)))) (*.f64 (/.f64 x (Rewrite<= associate-+l+_binary64 (+.f64 (+.f64 x t) y))) z))): 0 points increase in error, 0 points decrease in error
(+.f64 (/.f64 (*.f64 (-.f64 z b) y) (+.f64 y (+.f64 t x))) (fma.f64 a (+.f64 (/.f64 y (+.f64 y (+.f64 t x))) (/.f64 t (+.f64 y (+.f64 t x)))) (*.f64 (/.f64 x (+.f64 (Rewrite<= +-commutative_binary64 (+.f64 t x)) y)) z))): 0 points increase in error, 0 points decrease in error
(+.f64 (/.f64 (*.f64 (-.f64 z b) y) (+.f64 y (+.f64 t x))) (fma.f64 a (+.f64 (/.f64 y (+.f64 y (+.f64 t x))) (/.f64 t (+.f64 y (+.f64 t x)))) (*.f64 (/.f64 x (Rewrite<= +-commutative_binary64 (+.f64 y (+.f64 t x)))) z))): 0 points increase in error, 0 points decrease in error
(+.f64 (/.f64 (*.f64 (-.f64 z b) y) (+.f64 y (+.f64 t x))) (fma.f64 a (+.f64 (/.f64 y (+.f64 y (+.f64 t x))) (/.f64 t (+.f64 y (+.f64 t x)))) (Rewrite<= associate-/r/_binary64 (/.f64 x (/.f64 (+.f64 y (+.f64 t x)) z))))): 16 points increase in error, 5 points decrease in error
(+.f64 (/.f64 (*.f64 (-.f64 z b) y) (+.f64 y (+.f64 t x))) (fma.f64 a (+.f64 (/.f64 y (+.f64 y (+.f64 t x))) (/.f64 t (+.f64 y (+.f64 t x)))) (Rewrite<= associate-/l*_binary64 (/.f64 (*.f64 x z) (+.f64 y (+.f64 t x)))))): 28 points increase in error, 14 points decrease in error
(+.f64 (/.f64 (*.f64 (-.f64 z b) y) (+.f64 y (+.f64 t x))) (fma.f64 a (+.f64 (/.f64 y (+.f64 y (+.f64 t x))) (/.f64 t (+.f64 y (+.f64 t x)))) (/.f64 (Rewrite<= *-commutative_binary64 (*.f64 z x)) (+.f64 y (+.f64 t x))))): 0 points increase in error, 0 points decrease in error
(+.f64 (/.f64 (*.f64 (-.f64 z b) y) (+.f64 y (+.f64 t x))) (Rewrite<= fma-def_binary64 (+.f64 (*.f64 a (+.f64 (/.f64 y (+.f64 y (+.f64 t x))) (/.f64 t (+.f64 y (+.f64 t x))))) (/.f64 (*.f64 z x) (+.f64 y (+.f64 t x)))))): 0 points increase in error, 0 points decrease in error
(+.f64 (/.f64 (*.f64 (-.f64 z b) y) (+.f64 y (+.f64 t x))) (Rewrite<= +-commutative_binary64 (+.f64 (/.f64 (*.f64 z x) (+.f64 y (+.f64 t x))) (*.f64 a (+.f64 (/.f64 y (+.f64 y (+.f64 t x))) (/.f64 t (+.f64 y (+.f64 t x)))))))): 0 points increase in error, 0 points decrease in error