
; Computes ((alpha+beta)*(beta-alpha) / (t*(t+2)) + 1) / 2 with t = alpha + beta + 2*i, in binary64.
(FPCore (alpha beta i) :precision binary64 (let* ((t_0 (+ (+ alpha beta) (* 2.0 i)))) (/ (+ (/ (/ (* (+ alpha beta) (- beta alpha)) t_0) (+ t_0 2.0)) 1.0) 2.0)))
/* Evaluates ((a+b)*(b-a) / (t*(t+2)) + 1) / 2 with t = a + b + 2*i.
 * The floating-point operation tree is kept identical to the reference
 * FPCore expression; only common subexpressions are named, which does
 * not change any rounding. */
double code(double alpha, double beta, double i) {
    double sum = alpha + beta;             /* a + b (reused twice below) */
    double t_0 = sum + 2.0 * i;            /* t = a + b + 2i */
    double numer = sum * (beta - alpha);   /* (a+b)(b-a) = b^2 - a^2 */
    double inner = (numer / t_0) / (t_0 + 2.0);
    return (inner + 1.0) / 2.0;
}
! Evaluates ((alpha+beta)*(beta-alpha) / (t*(t+2)) + 1) / 2 with
! t = alpha + beta + 2*i, entirely in double precision (real(8)).
! The division order mirrors the reference FPCore expression exactly.
real(8) function code(alpha, beta, i)
real(8), intent (in) :: alpha
real(8), intent (in) :: beta
real(8), intent (in) :: i
real(8) :: t_0
! d0 suffixes keep every literal in double precision.
t_0 = (alpha + beta) + (2.0d0 * i)
code = (((((alpha + beta) * (beta - alpha)) / t_0) / (t_0 + 2.0d0)) + 1.0d0) / 2.0d0
end function
/**
 * Evaluates ((a+b)*(b-a) / (t*(t+2)) + 1) / 2 with t = a + b + 2*i.
 * Common subexpressions are named for readability; the floating-point
 * operation tree matches the reference expression, so results are
 * bit-identical.
 */
public static double code(double alpha, double beta, double i) {
    double sum = alpha + beta;            // a + b (reused twice below)
    double t0 = sum + 2.0 * i;            // t = a + b + 2i
    double numer = sum * (beta - alpha);  // (a+b)(b-a) = b^2 - a^2
    double inner = (numer / t0) / (t0 + 2.0);
    return (inner + 1.0) / 2.0;
}
def code(alpha, beta, i):
    """Evaluate ((alpha+beta)*(beta-alpha) / (t*(t+2)) + 1) / 2 with t = alpha + beta + 2*i.

    Fix: the original had the assignment and return jammed onto one line
    after the colon, which is a SyntaxError in Python.
    """
    t_0 = (alpha + beta) + (2.0 * i)
    return (((((alpha + beta) * (beta - alpha)) / t_0) / (t_0 + 2.0)) + 1.0) / 2.0
# Evaluates ((a+b)*(b-a) / (t*(t+2)) + 1) / 2 with t = a + b + 2*i,
# forcing every intermediate to Float64.
# Fix: the original placed all statements on one line with no separators,
# which is invalid Julia syntax.
function code(alpha, beta, i)
    t_0 = Float64(Float64(alpha + beta) + Float64(2.0 * i))
    return Float64(Float64(Float64(Float64(Float64(Float64(alpha + beta) * Float64(beta - alpha)) / t_0) / Float64(t_0 + 2.0)) + 1.0) / 2.0)
end
% Evaluates ((a+b)*(b-a) / (t*(t+2)) + 1) / 2 with t = a + b + 2*i.
% Fix: the original fused the function line and its statements onto a
% single line, which MATLAB does not accept.
function tmp = code(alpha, beta, i)
    t_0 = (alpha + beta) + (2.0 * i);
    tmp = (((((alpha + beta) * (beta - alpha)) / t_0) / (t_0 + 2.0)) + 1.0) / 2.0;
end
(* Evaluates ((alpha+beta)*(beta-alpha) / (t*(t+2)) + 1) / 2 with t = alpha + beta + 2*i, rounding every intermediate to $MachinePrecision. *)
code[alpha_, beta_, i_] := Block[{t$95$0 = N[(N[(alpha + beta), $MachinePrecision] + N[(2.0 * i), $MachinePrecision]), $MachinePrecision]}, N[(N[(N[(N[(N[(N[(alpha + beta), $MachinePrecision] * N[(beta - alpha), $MachinePrecision]), $MachinePrecision] / t$95$0), $MachinePrecision] / N[(t$95$0 + 2.0), $MachinePrecision]), $MachinePrecision] + 1.0), $MachinePrecision] / 2.0), $MachinePrecision]]
\begin{array}{l}
\\
\begin{array}{l}
t_0 := \left(\alpha + \beta\right) + 2 \cdot i\\
\frac{\frac{\frac{\left(\alpha + \beta\right) \cdot \left(\beta - \alpha\right)}{t_0}}{t_0 + 2} + 1}{2}
\end{array}
\end{array}
Sampling outcomes in binary64 precision:
Herbie found 1 alternative:
| Alternative | Accuracy | Speedup |
|---|---|---|
; Input program, repeated in the alternatives listing: ((alpha+beta)*(beta-alpha) / (t*(t+2)) + 1) / 2 with t = alpha + beta + 2*i.
(FPCore (alpha beta i) :precision binary64 (let* ((t_0 (+ (+ alpha beta) (* 2.0 i)))) (/ (+ (/ (/ (* (+ alpha beta) (- beta alpha)) t_0) (+ t_0 2.0)) 1.0) 2.0)))
/* Evaluates ((a+b)*(b-a) / (t*(t+2)) + 1) / 2 with t = a + b + 2*i,
 * following the FPCore expression's operation order exactly. */
double code(double alpha, double beta, double i) {
double t_0 = (alpha + beta) + (2.0 * i);
return (((((alpha + beta) * (beta - alpha)) / t_0) / (t_0 + 2.0)) + 1.0) / 2.0;
}
! Evaluates ((alpha+beta)*(beta-alpha) / (t*(t+2)) + 1) / 2 with
! t = alpha + beta + 2*i, entirely in double precision (real(8)).
real(8) function code(alpha, beta, i)
real(8), intent (in) :: alpha
real(8), intent (in) :: beta
real(8), intent (in) :: i
real(8) :: t_0
! d0 suffixes keep every literal in double precision.
t_0 = (alpha + beta) + (2.0d0 * i)
code = (((((alpha + beta) * (beta - alpha)) / t_0) / (t_0 + 2.0d0)) + 1.0d0) / 2.0d0
end function
/**
 * Evaluates ((a+b)*(b-a) / (t*(t+2)) + 1) / 2 with t = a + b + 2*i,
 * following the FPCore expression's operation order exactly.
 */
public static double code(double alpha, double beta, double i) {
double t_0 = (alpha + beta) + (2.0 * i);
return (((((alpha + beta) * (beta - alpha)) / t_0) / (t_0 + 2.0)) + 1.0) / 2.0;
}
def code(alpha, beta, i):
    """Evaluate ((alpha+beta)*(beta-alpha) / (t*(t+2)) + 1) / 2 with t = alpha + beta + 2*i.

    Fix: the original had the assignment and return jammed onto one line
    after the colon, which is a SyntaxError in Python.
    """
    t_0 = (alpha + beta) + (2.0 * i)
    return (((((alpha + beta) * (beta - alpha)) / t_0) / (t_0 + 2.0)) + 1.0) / 2.0
# Evaluates ((a+b)*(b-a) / (t*(t+2)) + 1) / 2 with t = a + b + 2*i,
# forcing every intermediate to Float64.
# Fix: the original placed all statements on one line with no separators,
# which is invalid Julia syntax.
function code(alpha, beta, i)
    t_0 = Float64(Float64(alpha + beta) + Float64(2.0 * i))
    return Float64(Float64(Float64(Float64(Float64(Float64(alpha + beta) * Float64(beta - alpha)) / t_0) / Float64(t_0 + 2.0)) + 1.0) / 2.0)
end
% Evaluates ((a+b)*(b-a) / (t*(t+2)) + 1) / 2 with t = a + b + 2*i.
% Fix: the original fused the function line and its statements onto a
% single line, which MATLAB does not accept.
function tmp = code(alpha, beta, i)
    t_0 = (alpha + beta) + (2.0 * i);
    tmp = (((((alpha + beta) * (beta - alpha)) / t_0) / (t_0 + 2.0)) + 1.0) / 2.0;
end
(* Evaluates ((alpha+beta)*(beta-alpha) / (t*(t+2)) + 1) / 2 with t = alpha + beta + 2*i, rounding every intermediate to $MachinePrecision. *)
code[alpha_, beta_, i_] := Block[{t$95$0 = N[(N[(alpha + beta), $MachinePrecision] + N[(2.0 * i), $MachinePrecision]), $MachinePrecision]}, N[(N[(N[(N[(N[(N[(alpha + beta), $MachinePrecision] * N[(beta - alpha), $MachinePrecision]), $MachinePrecision] / t$95$0), $MachinePrecision] / N[(t$95$0 + 2.0), $MachinePrecision]), $MachinePrecision] + 1.0), $MachinePrecision] / 2.0), $MachinePrecision]]
\begin{array}{l}
\\
\begin{array}{l}
t_0 := \left(\alpha + \beta\right) + 2 \cdot i\\
\frac{\frac{\frac{\left(\alpha + \beta\right) \cdot \left(\beta - \alpha\right)}{t_0}}{t_0 + 2} + 1}{2}
\end{array}
\end{array}
; Alternative: same value as the input program with operands reordered — (1 + (beta-alpha)*(beta+alpha) / (t*(t+2))) / 2 with t = 2*i + (beta + alpha).
(FPCore (alpha beta i) :precision binary64 (let* ((t_0 (+ (* i 2.0) (+ beta alpha)))) (/ (+ 1.0 (/ (/ (* (- beta alpha) (+ beta alpha)) t_0) (+ t_0 2.0))) 2.0)))
/* Alternative form: (1 + (b-a)*(b+a) / (t*(t+2))) / 2 with t = 2*i + (b + a),
 * following its FPCore expression's operation order exactly. */
double code(double alpha, double beta, double i) {
double t_0 = (i * 2.0) + (beta + alpha);
return (1.0 + ((((beta - alpha) * (beta + alpha)) / t_0) / (t_0 + 2.0))) / 2.0;
}
! Alternative form: (1 + (beta-alpha)*(beta+alpha) / (t*(t+2))) / 2 with
! t = 2*i + (beta + alpha), entirely in double precision (real(8)).
real(8) function code(alpha, beta, i)
real(8), intent (in) :: alpha
real(8), intent (in) :: beta
real(8), intent (in) :: i
real(8) :: t_0
! d0 suffixes keep every literal in double precision.
t_0 = (i * 2.0d0) + (beta + alpha)
code = (1.0d0 + ((((beta - alpha) * (beta + alpha)) / t_0) / (t_0 + 2.0d0))) / 2.0d0
end function
/**
 * Alternative form: (1 + (b-a)*(b+a) / (t*(t+2))) / 2 with t = 2*i + (b + a),
 * following its FPCore expression's operation order exactly.
 */
public static double code(double alpha, double beta, double i) {
double t_0 = (i * 2.0) + (beta + alpha);
return (1.0 + ((((beta - alpha) * (beta + alpha)) / t_0) / (t_0 + 2.0))) / 2.0;
}
def code(alpha, beta, i):
    """Evaluate (1 + (beta-alpha)*(beta+alpha) / (t*(t+2))) / 2 with t = 2*i + (beta + alpha).

    Fix: the original had the assignment and return jammed onto one line
    after the colon, which is a SyntaxError in Python.
    """
    t_0 = (i * 2.0) + (beta + alpha)
    return (1.0 + ((((beta - alpha) * (beta + alpha)) / t_0) / (t_0 + 2.0))) / 2.0
# Evaluates (1 + (b-a)*(b+a) / (t*(t+2))) / 2 with t = 2*i + (b + a),
# forcing every intermediate to Float64.
# Fix: the original placed all statements on one line with no separators,
# which is invalid Julia syntax.
function code(alpha, beta, i)
    t_0 = Float64(Float64(i * 2.0) + Float64(beta + alpha))
    return Float64(Float64(1.0 + Float64(Float64(Float64(Float64(beta - alpha) * Float64(beta + alpha)) / t_0) / Float64(t_0 + 2.0))) / 2.0)
end
% Evaluates (1 + (b-a)*(b+a) / (t*(t+2))) / 2 with t = 2*i + (b + a).
% Fix: the original fused the function line and its statements onto a
% single line, which MATLAB does not accept.
function tmp = code(alpha, beta, i)
    t_0 = (i * 2.0) + (beta + alpha);
    tmp = (1.0 + ((((beta - alpha) * (beta + alpha)) / t_0) / (t_0 + 2.0))) / 2.0;
end
(* Evaluates (1 + (beta-alpha)*(beta+alpha) / (t*(t+2))) / 2 with t = 2*i + (beta + alpha), rounding every intermediate to $MachinePrecision. *)
code[alpha_, beta_, i_] := Block[{t$95$0 = N[(N[(i * 2.0), $MachinePrecision] + N[(beta + alpha), $MachinePrecision]), $MachinePrecision]}, N[(N[(1.0 + N[(N[(N[(N[(beta - alpha), $MachinePrecision] * N[(beta + alpha), $MachinePrecision]), $MachinePrecision] / t$95$0), $MachinePrecision] / N[(t$95$0 + 2.0), $MachinePrecision]), $MachinePrecision]), $MachinePrecision] / 2.0), $MachinePrecision]]
\begin{array}{l}
\\
\begin{array}{l}
t_0 := i \cdot 2 + \left(\beta + \alpha\right)\\
\frac{1 + \frac{\frac{\left(\beta - \alpha\right) \cdot \left(\beta + \alpha\right)}{t_0}}{t_0 + 2}}{2}
\end{array}
\end{array}
Initial program: 58.5%
Final simplification: 58.5%
herbie shell --seed 2024337
; Original benchmark: ((alpha+beta)*(beta-alpha) / (t*(t+2)) + 1) / 2 with t = alpha + beta + 2*i,
; restricted by :pre to alpha > -1, beta > -1, i > 0.
(FPCore (alpha beta i)
:name "Octave 3.8, jcobi/2"
:precision binary64
:pre (and (and (> alpha -1.0) (> beta -1.0)) (> i 0.0))
(/ (+ (/ (/ (* (+ alpha beta) (- beta alpha)) (+ (+ alpha beta) (* 2.0 i))) (+ (+ (+ alpha beta) (* 2.0 i)) 2.0)) 1.0) 2.0))