| Alternative 1 | |
|---|---|
| Accuracy | 99.2% |
| Cost | 13632 |
\[\left(\left(x \cdot \log y - \log y\right) - y \cdot \left(z + -1\right)\right) - t
\]
(FPCore (x y z t) :precision binary64 (- (+ (* (- x 1.0) (log y)) (* (- z 1.0) (log (- 1.0 y)))) t))
(FPCore (x y z t) :precision binary64 (- (fma (+ x -1.0) (log y) (* (log1p (- y)) (+ z -1.0))) t))
// Initial program
double code(double x, double y, double z, double t) {
return (((x - 1.0) * log(y)) + ((z - 1.0) * log((1.0 - y)))) - t;
}
// Alternative 1: log1p(-y) evaluates log(1 - y) accurately for small y,
// and fma computes (x - 1) * log(y) + ... with a single rounding
double code(double x, double y, double z, double t) {
return fma((x + -1.0), log(y), (log1p(-y) * (z + -1.0))) - t;
}
# Initial program
function code(x, y, z, t) return Float64(Float64(Float64(Float64(x - 1.0) * log(y)) + Float64(Float64(z - 1.0) * log(Float64(1.0 - y)))) - t) end
# Alternative 1
function code(x, y, z, t) return Float64(fma(Float64(x + -1.0), log(y), Float64(log1p(Float64(-y)) * Float64(z + -1.0))) - t) end
(* Initial program *)
code[x_, y_, z_, t_] := N[(N[(N[(N[(x - 1.0), $MachinePrecision] * N[Log[y], $MachinePrecision]), $MachinePrecision] + N[(N[(z - 1.0), $MachinePrecision] * N[Log[N[(1.0 - y), $MachinePrecision]], $MachinePrecision]), $MachinePrecision]), $MachinePrecision] - t), $MachinePrecision]
(* Alternative 1 *)
code[x_, y_, z_, t_] := N[(N[(N[(x + -1.0), $MachinePrecision] * N[Log[y], $MachinePrecision] + N[(N[Log[1 + (-y)], $MachinePrecision] * N[(z + -1.0), $MachinePrecision]), $MachinePrecision]), $MachinePrecision] - t), $MachinePrecision]
\[ \left(\left(x - 1\right) \cdot \log y + \left(z - 1\right) \cdot \log \left(1 - y\right)\right) - t
\]
\[ \mathsf{fma}\left(x + -1, \log y, \mathsf{log1p}\left(-y\right) \cdot \left(z + -1\right)\right) - t
\]
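Both C versions above are named `code` in the report; the sketch below renames them so they can be compared in one program. It is a minimal check, not part of the Herbie output: the sample point (x = 1, y = 1e-17, z = 2, t = 0) is chosen only to make the rounding loss in log(1 - y) visible.

```c
#include <math.h>
#include <stdio.h>

/* Initial program: ((x - 1) * log(y) + (z - 1) * log(1 - y)) - t */
static double code_orig(double x, double y, double z, double t) {
    return (((x - 1.0) * log(y)) + ((z - 1.0) * log(1.0 - y))) - t;
}

/* Alternative 1: fma((x - 1), log(y), log1p(-y) * (z - 1)) - t */
static double code_alt(double x, double y, double z, double t) {
    return fma(x + -1.0, log(y), log1p(-y) * (z + -1.0)) - t;
}

int main(void) {
    /* With x = 1 the first term is exactly zero, so the log(1 - y) term
       dominates.  For y = 1e-17, 1.0 - y rounds to 1.0, so the original
       returns 0, while the rewrite returns roughly -1e-17. */
    double x = 1.0, y = 1e-17, z = 2.0, t = 0.0;
    printf("original: %.17e\n", code_orig(x, y, z, t));
    printf("rewrite:  %.17e\n", code_alt(x, y, z, t));
    return 0;
}
```

Compiled with `cc demo.c -lm`, the original prints 0 while the rewrite prints roughly -1e-17, which matches the exact value of the expression at that point.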
Initial program 91.4%
Simplified 99.8%
| Step | Accuracy | Expression |
|---|---|---|
| [Start] | 91.4% | \( \left(\left(x - 1\right) \cdot \log y + \left(z - 1\right) \cdot \log \left(1 - y\right)\right) - t \) |
| cancel-sign-sub [<=] | 91.4% | \( \color{blue}{\left(\left(x - 1\right) \cdot \log y - \left(-\left(z - 1\right)\right) \cdot \log \left(1 - y\right)\right)} - t \) |
| distribute-lft-neg-in [<=] | 91.4% | \( \left(\left(x - 1\right) \cdot \log y - \color{blue}{\left(-\left(z - 1\right) \cdot \log \left(1 - y\right)\right)}\right) - t \) |
| fma-neg [=>] | 91.4% | \( \color{blue}{\mathsf{fma}\left(x - 1, \log y, -\left(-\left(z - 1\right) \cdot \log \left(1 - y\right)\right)\right)} - t \) |
| remove-double-neg [=>] | 91.4% | \( \mathsf{fma}\left(x - 1, \log y, \color{blue}{\left(z - 1\right) \cdot \log \left(1 - y\right)}\right) - t \) |
| sub-neg [=>] | 91.4% | \( \mathsf{fma}\left(x - 1, \log y, \left(z - 1\right) \cdot \log \color{blue}{\left(1 + \left(-y\right)\right)}\right) - t \) |
| log1p-def [=>] | 99.8% | \( \mathsf{fma}\left(x - 1, \log y, \left(z - 1\right) \cdot \color{blue}{\mathsf{log1p}\left(-y\right)}\right) - t \) |
Final simplification 99.8%
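The measured accuracy only moves at the last step: log1p-def replaces log(1 + (-y)) with log1p(-y). The reason, a standard rounding argument rather than something stated in the report, is that forming 1 - y in binary64 discards the low bits of a small y before the logarithm sees them; for example, with y = 10^{-17}:

\[
\mathrm{fl}\left(1 - y\right) = 1
\;\Longrightarrow\;
\log \mathrm{fl}\left(1 - y\right) = 0,
\qquad
\mathsf{log1p}\left(-y\right) \approx -y - \frac{y^2}{2} - \cdots \approx -1 \times 10^{-17}
\]

The earlier fma-related steps leave the accuracy at 91.4%; they only restructure the expression so that the final multiply-add can be fused.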
| Alternative | Accuracy | Cost |
|---|---|---|
| 1 | 99.2% | 13632 |
| 2 | 88.3% | 7497 |
| 3 | 80.6% | 7497 |
| 4 | 99.1% | 7104 |
| 5 | 70.1% | 6985 |
| 6 | 44.9% | 6980 |
| 7 | 36.9% | 6921 |
| 8 | 36.9% | 6857 |
| 9 | 45.9% | 704 |
| 10 | 35.8% | 520 |
| 11 | 45.9% | 448 |
| 12 | 45.7% | 384 |
| 13 | 35.4% | 128 |
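Cost is Herbie's unitless estimate of how expensive each expression is to evaluate (larger means slower); the lower-cost alternatives further down the table trade accuracy for cheaper code.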
herbie shell --seed 2023159
(FPCore (x y z t)
:name "Statistics.Distribution.Beta:$cdensity from math-functions-0.1.5.2"
:precision binary64
(- (+ (* (- x 1.0) (log y)) (* (- z 1.0) (log (- 1.0 y)))) t))
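To reproduce this run, start the shell with the command and seed above (this assumes a local Herbie installation of a matching version) and paste in the FPCore expression; the shell reads FPCore input and prints Herbie's result for it.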