Maksimov and Kolovsky, Equation (4)

Percentage Accurate: 86.5% → 100.0%
Time: 4.9s
Alternatives: 13
Speedup: 1.2×

Specification

?
\[\left(J \cdot \left(e^{\ell} - e^{-\ell}\right)\right) \cdot \cos \left(\frac{K}{2}\right) + U \]
(FPCore (J l K U)
 :precision binary64
 (+ (* (* J (- (exp l) (exp (- l)))) (cos (/ K 2.0))) U))
double code(double J, double l, double K, double U) {
	return ((J * (exp(l) - exp(-l))) * cos((K / 2.0))) + U;
}
! NaN-aware fmax/fmin wrappers (Herbie-generated support module).
! If exactly one argument is NaN, the other argument is returned;
! the bare MAX/MIN intrinsics leave that case processor-dependent.
module fmin_fmax_functions
    implicit none
    private
    public fmax
    public fmin

    ! Generic dispatch over argument kinds: 88 = both real(8),
    ! 44 = both real(4), 84/48 = mixed kinds (result is real(8)).
    interface fmax
        module procedure fmax88
        module procedure fmax44
        module procedure fmax84
        module procedure fmax48
    end interface
    interface fmin
        module procedure fmin88
        module procedure fmin44
        module procedure fmin84
        module procedure fmin48
    end interface
contains
    ! x /= x is true only when x is NaN, so the nested merges read:
    ! x NaN -> y; else y NaN -> x; else max(x, y).
    real(8) function fmax88(x, y) result (res)
        real(8), intent (in) :: x
        real(8), intent (in) :: y
        res = merge(y, merge(x, max(x, y), y /= y), x /= x)
    end function
    real(4) function fmax44(x, y) result (res)
        real(4), intent (in) :: x
        real(4), intent (in) :: y
        res = merge(y, merge(x, max(x, y), y /= y), x /= x)
    end function
    ! Mixed kinds: the real(4) argument is promoted with dble() so the
    ! selection and result are both double precision.
    real(8) function fmax84(x, y) result(res)
        real(8), intent (in) :: x
        real(4), intent (in) :: y
        res = merge(dble(y), merge(x, max(x, dble(y)), y /= y), x /= x)
    end function
    real(8) function fmax48(x, y) result(res)
        real(4), intent (in) :: x
        real(8), intent (in) :: y
        res = merge(y, merge(dble(x), max(dble(x), y), y /= y), x /= x)
    end function
    ! fmin* mirror fmax* with min in place of max.
    real(8) function fmin88(x, y) result (res)
        real(8), intent (in) :: x
        real(8), intent (in) :: y
        res = merge(y, merge(x, min(x, y), y /= y), x /= x)
    end function
    real(4) function fmin44(x, y) result (res)
        real(4), intent (in) :: x
        real(4), intent (in) :: y
        res = merge(y, merge(x, min(x, y), y /= y), x /= x)
    end function
    real(8) function fmin84(x, y) result(res)
        real(8), intent (in) :: x
        real(4), intent (in) :: y
        res = merge(dble(y), merge(x, min(x, dble(y)), y /= y), x /= x)
    end function
    real(8) function fmin48(x, y) result(res)
        real(4), intent (in) :: x
        real(8), intent (in) :: y
        res = merge(y, merge(dble(x), min(dble(x), y), y /= y), x /= x)
    end function
end module

!> Evaluate the initial Herbie program in double precision:
!>   j * (exp(l) - exp(-l)) * cos(k/2) + u
!> (Maksimov and Kolovsky, Equation (4)).
!> Kept in the exp-difference form so the result matches the report's
!> "initial program" exactly; the fma/sinh alternatives are more accurate.
real(8) function code(j, l, k, u)
    implicit none
    real(8), intent (in) :: j
    real(8), intent (in) :: l
    real(8), intent (in) :: k
    real(8), intent (in) :: u
    code = ((j * (exp(l) - exp(-l))) * cos((k / 2.0d0))) + u
end function
/** Initial Herbie program: J * (e^l - e^-l) * cos(K/2) + U, in doubles. */
public static double code(double J, double l, double K, double U) {
	final double expDiff = Math.exp(l) - Math.exp(-l);
	final double halfAngleCos = Math.cos(K / 2.0);
	return (J * expDiff) * halfAngleCos + U;
}
def code(J, l, K, U):
	# Initial Herbie program: J * (e^l - e^-l) * cos(K/2) + U.
	# Operation order matches the FPCore listing, so results are bit-identical.
	exp_diff = math.exp(l) - math.exp(-l)
	half_angle_cos = math.cos(K / 2.0)
	return J * exp_diff * half_angle_cos + U
function code(J, l, K, U)
	# Initial Herbie program: J * (e^l - e^-l) * cos(K/2) + U,
	# with every intermediate forced to Float64 exactly as generated.
	diff = Float64(exp(l) - exp(Float64(-l)))
	scaled = Float64(J * diff)
	modulated = Float64(scaled * cos(Float64(K / 2.0)))
	return Float64(modulated + U)
end
function tmp = code(J, l, K, U)
	% Initial Herbie program: J * (exp(l) - exp(-l)) * cos(K/2) + U.
	expDiff = exp(l) - exp(-l);
	tmp = (J * expDiff) * cos(K / 2.0) + U;
end
code[J_, l_, K_, U_] := N[(N[(N[(J * N[(N[Exp[l], $MachinePrecision] - N[Exp[(-l)], $MachinePrecision]), $MachinePrecision]), $MachinePrecision] * N[Cos[N[(K / 2.0), $MachinePrecision]], $MachinePrecision]), $MachinePrecision] + U), $MachinePrecision]
\left(J \cdot \left(e^{\ell} - e^{-\ell}\right)\right) \cdot \cos \left(\frac{K}{2}\right) + U

Local Percentage Accuracy vs ?

The average percentage accuracy by input value. Horizontal axis shows the value of an input variable; the variable is chosen in the title. Vertical axis is accuracy; higher is better. Red represents the original program, while blue represents Herbie's suggestion. These can be toggled with buttons below the plot. The line is an average, while dots represent individual samples.

Accuracy vs Speed?

Herbie found 13 alternatives:

AlternativeAccuracySpeedup
The accuracy (vertical axis) and speed (horizontal axis) of each alternative. Up and to the right is better. The red square shows the initial program, and each blue circle shows an alternative. The line shows the best available speed-accuracy tradeoffs.

Initial Program: 86.5% accurate, 1.0× speedup?

\[\left(J \cdot \left(e^{\ell} - e^{-\ell}\right)\right) \cdot \cos \left(\frac{K}{2}\right) + U \]
(FPCore (J l K U)
 :precision binary64
 (+ (* (* J (- (exp l) (exp (- l)))) (cos (/ K 2.0))) U))
/* Initial program (86.5% accurate per Herbie): J*(exp(l)-exp(-l))*cos(K/2) + U.
   The sinh/fma alternatives below reach 100.0%. */
double code(double J, double l, double K, double U) {
	return ((J * (exp(l) - exp(-l))) * cos((K / 2.0))) + U;
}
! NaN-aware fmax/fmin wrappers (Herbie-generated support module, repeated
! verbatim for the "Initial Program" listing).
! If exactly one argument is NaN, the other argument is returned.
module fmin_fmax_functions
    implicit none
    private
    public fmax
    public fmin

    ! Generic dispatch over argument kinds: 88 = both real(8),
    ! 44 = both real(4), 84/48 = mixed kinds (result is real(8)).
    interface fmax
        module procedure fmax88
        module procedure fmax44
        module procedure fmax84
        module procedure fmax48
    end interface
    interface fmin
        module procedure fmin88
        module procedure fmin44
        module procedure fmin84
        module procedure fmin48
    end interface
contains
    ! x /= x is true only when x is NaN, so the nested merges read:
    ! x NaN -> y; else y NaN -> x; else max(x, y).
    real(8) function fmax88(x, y) result (res)
        real(8), intent (in) :: x
        real(8), intent (in) :: y
        res = merge(y, merge(x, max(x, y), y /= y), x /= x)
    end function
    real(4) function fmax44(x, y) result (res)
        real(4), intent (in) :: x
        real(4), intent (in) :: y
        res = merge(y, merge(x, max(x, y), y /= y), x /= x)
    end function
    ! Mixed kinds promote the real(4) argument via dble().
    real(8) function fmax84(x, y) result(res)
        real(8), intent (in) :: x
        real(4), intent (in) :: y
        res = merge(dble(y), merge(x, max(x, dble(y)), y /= y), x /= x)
    end function
    real(8) function fmax48(x, y) result(res)
        real(4), intent (in) :: x
        real(8), intent (in) :: y
        res = merge(y, merge(dble(x), max(dble(x), y), y /= y), x /= x)
    end function
    ! fmin* mirror fmax* with min in place of max.
    real(8) function fmin88(x, y) result (res)
        real(8), intent (in) :: x
        real(8), intent (in) :: y
        res = merge(y, merge(x, min(x, y), y /= y), x /= x)
    end function
    real(4) function fmin44(x, y) result (res)
        real(4), intent (in) :: x
        real(4), intent (in) :: y
        res = merge(y, merge(x, min(x, y), y /= y), x /= x)
    end function
    real(8) function fmin84(x, y) result(res)
        real(8), intent (in) :: x
        real(4), intent (in) :: y
        res = merge(dble(y), merge(x, min(x, dble(y)), y /= y), x /= x)
    end function
    real(8) function fmin48(x, y) result(res)
        real(4), intent (in) :: x
        real(8), intent (in) :: y
        res = merge(y, merge(dble(x), min(dble(x), y), y /= y), x /= x)
    end function
end module

!> Initial program: j*(exp(l)-exp(-l))*cos(k/2) + u in real(8).
!> NOTE(review): no implicit none here, and fmin_fmax_functions is
!> imported but unused in this particular expression.
real(8) function code(j, l, k, u)
use fmin_fmax_functions
    real(8), intent (in) :: j
    real(8), intent (in) :: l
    real(8), intent (in) :: k
    real(8), intent (in) :: u
    code = ((j * (exp(l) - exp(-l))) * cos((k / 2.0d0))) + u
end function
// Initial program (86.5% accurate per Herbie): J*(e^l - e^-l)*cos(K/2) + U.
public static double code(double J, double l, double K, double U) {
	return ((J * (Math.exp(l) - Math.exp(-l))) * Math.cos((K / 2.0))) + U;
}
def code(J, l, K, U):
	# Initial program (86.5% accurate per Herbie): J*(e^l - e^-l)*cos(K/2) + U.
	return ((J * (math.exp(l) - math.exp(-l))) * math.cos((K / 2.0))) + U
function code(J, l, K, U)
	# Initial program: J*(e^l - e^-l)*cos(K/2) + U, forced to Float64 at every step.
	return Float64(Float64(Float64(J * Float64(exp(l) - exp(Float64(-l)))) * cos(Float64(K / 2.0))) + U)
end
function tmp = code(J, l, K, U)
	% Initial program: J*(exp(l)-exp(-l))*cos(K/2) + U.
	tmp = ((J * (exp(l) - exp(-l))) * cos((K / 2.0))) + U;
end
code[J_, l_, K_, U_] := N[(N[(N[(J * N[(N[Exp[l], $MachinePrecision] - N[Exp[(-l)], $MachinePrecision]), $MachinePrecision]), $MachinePrecision] * N[Cos[N[(K / 2.0), $MachinePrecision]], $MachinePrecision]), $MachinePrecision] + U), $MachinePrecision]
\left(J \cdot \left(e^{\ell} - e^{-\ell}\right)\right) \cdot \cos \left(\frac{K}{2}\right) + U

Alternative 1: 100.0% accurate, 1.2× speedup?

\[\mathsf{fma}\left(\left(J + J\right) \cdot \sinh \ell, \cos \left(0.5 \cdot K\right), U\right) \]
(FPCore (J l K U)
 :precision binary64
 (fma (* (+ J J) (sinh l)) (cos (* 0.5 K)) U))
/* Alternative 1 (100.0% accurate, 1.2x speedup per Herbie):
   fma((J+J)*sinh(l), cos(0.5*K), U).
   The derivation rewrites exp(l)-exp(-l) as 2*sinh(l) (step 10, sinh-undef)
   and fuses the final multiply-add into a single fma rounding. */
double code(double J, double l, double K, double U) {
	return fma(((J + J) * sinh(l)), cos((0.5 * K)), U);
}
function code(J, l, K, U)
	# Alternative 1: fma((J+J)*sinh(l), cos(0.5*K), U); 100.0% accurate per Herbie.
	return fma(Float64(Float64(J + J) * sinh(l)), cos(Float64(0.5 * K)), U)
end
code[J_, l_, K_, U_] := N[(N[(N[(J + J), $MachinePrecision] * N[Sinh[l], $MachinePrecision]), $MachinePrecision] * N[Cos[N[(0.5 * K), $MachinePrecision]], $MachinePrecision] + U), $MachinePrecision]
\mathsf{fma}\left(\left(J + J\right) \cdot \sinh \ell, \cos \left(0.5 \cdot K\right), U\right)
Derivation
  1. Initial program 86.5%

    \[\left(J \cdot \left(e^{\ell} - e^{-\ell}\right)\right) \cdot \cos \left(\frac{K}{2}\right) + U \]
  2. Step-by-step derivation
    1. lift-+.f64N/A

      \[\leadsto \color{blue}{\left(J \cdot \left(e^{\ell} - e^{-\ell}\right)\right) \cdot \cos \left(\frac{K}{2}\right) + U} \]
    2. lift-*.f64N/A

      \[\leadsto \color{blue}{\left(J \cdot \left(e^{\ell} - e^{-\ell}\right)\right) \cdot \cos \left(\frac{K}{2}\right)} + U \]
    3. *-commutativeN/A

      \[\leadsto \color{blue}{\cos \left(\frac{K}{2}\right) \cdot \left(J \cdot \left(e^{\ell} - e^{-\ell}\right)\right)} + U \]
    4. lift-*.f64N/A

      \[\leadsto \cos \left(\frac{K}{2}\right) \cdot \color{blue}{\left(J \cdot \left(e^{\ell} - e^{-\ell}\right)\right)} + U \]
    5. *-commutativeN/A

      \[\leadsto \cos \left(\frac{K}{2}\right) \cdot \color{blue}{\left(\left(e^{\ell} - e^{-\ell}\right) \cdot J\right)} + U \]
    6. lift--.f64N/A

      \[\leadsto \cos \left(\frac{K}{2}\right) \cdot \left(\color{blue}{\left(e^{\ell} - e^{-\ell}\right)} \cdot J\right) + U \]
    7. lift-exp.f64N/A

      \[\leadsto \cos \left(\frac{K}{2}\right) \cdot \left(\left(\color{blue}{e^{\ell}} - e^{-\ell}\right) \cdot J\right) + U \]
    8. lift-exp.f64N/A

      \[\leadsto \cos \left(\frac{K}{2}\right) \cdot \left(\left(e^{\ell} - \color{blue}{e^{-\ell}}\right) \cdot J\right) + U \]
    9. lift-neg.f64N/A

      \[\leadsto \cos \left(\frac{K}{2}\right) \cdot \left(\left(e^{\ell} - e^{\color{blue}{\mathsf{neg}\left(\ell\right)}}\right) \cdot J\right) + U \]
    10. sinh-undefN/A

      \[\leadsto \cos \left(\frac{K}{2}\right) \cdot \left(\color{blue}{\left(2 \cdot \sinh \ell\right)} \cdot J\right) + U \]
    11. associate-*l*N/A

      \[\leadsto \cos \left(\frac{K}{2}\right) \cdot \color{blue}{\left(2 \cdot \left(\sinh \ell \cdot J\right)\right)} + U \]
    12. associate-*r*N/A

      \[\leadsto \color{blue}{\left(\cos \left(\frac{K}{2}\right) \cdot 2\right) \cdot \left(\sinh \ell \cdot J\right)} + U \]
    13. lower-fma.f64N/A

      \[\leadsto \color{blue}{\mathsf{fma}\left(\cos \left(\frac{K}{2}\right) \cdot 2, \sinh \ell \cdot J, U\right)} \]
  3. Applied rewrites100.0%

    \[\leadsto \color{blue}{\mathsf{fma}\left(\cos \left(-0.5 \cdot K\right) \cdot 2, \sinh \ell \cdot J, U\right)} \]
  4. Step-by-step derivation
    1. lift-fma.f64N/A

      \[\leadsto \color{blue}{\left(\cos \left(\frac{-1}{2} \cdot K\right) \cdot 2\right) \cdot \left(\sinh \ell \cdot J\right) + U} \]
    2. lift-*.f64N/A

      \[\leadsto \left(\cos \left(\frac{-1}{2} \cdot K\right) \cdot 2\right) \cdot \color{blue}{\left(\sinh \ell \cdot J\right)} + U \]
    3. associate-*r*N/A

      \[\leadsto \color{blue}{\left(\left(\cos \left(\frac{-1}{2} \cdot K\right) \cdot 2\right) \cdot \sinh \ell\right) \cdot J} + U \]
  5. Applied rewrites100.0%

    \[\leadsto \color{blue}{\mathsf{fma}\left(\left(J + J\right) \cdot \sinh \ell, \cos \left(0.5 \cdot K\right), U\right)} \]
  6. Add Preprocessing

Alternative 2: 100.0% accurate, 1.2× speedup?

\[\mathsf{fma}\left(\cos \left(-0.5 \cdot K\right) \cdot \left(J + J\right), \sinh \ell, U\right) \]
(FPCore (J l K U)
 :precision binary64
 (fma (* (cos (* -0.5 K)) (+ J J)) (sinh l) U))
/* Alternative 2 (100.0% accurate, 1.2x speedup per Herbie):
   fma(cos(-0.5*K)*(J+J), sinh(l), U) — same sinh rewrite as Alternative 1
   with the factors regrouped (cos(-x) == cos(x)). */
double code(double J, double l, double K, double U) {
	return fma((cos((-0.5 * K)) * (J + J)), sinh(l), U);
}
function code(J, l, K, U)
	# Alternative 2: fma(cos(-0.5*K)*(J+J), sinh(l), U); 100.0% accurate per Herbie.
	return fma(Float64(cos(Float64(-0.5 * K)) * Float64(J + J)), sinh(l), U)
end
code[J_, l_, K_, U_] := N[(N[(N[Cos[N[(-0.5 * K), $MachinePrecision]], $MachinePrecision] * N[(J + J), $MachinePrecision]), $MachinePrecision] * N[Sinh[l], $MachinePrecision] + U), $MachinePrecision]
\mathsf{fma}\left(\cos \left(-0.5 \cdot K\right) \cdot \left(J + J\right), \sinh \ell, U\right)
Derivation
  1. Initial program 86.5%

    \[\left(J \cdot \left(e^{\ell} - e^{-\ell}\right)\right) \cdot \cos \left(\frac{K}{2}\right) + U \]
  2. Step-by-step derivation
    1. lift-+.f64N/A

      \[\leadsto \color{blue}{\left(J \cdot \left(e^{\ell} - e^{-\ell}\right)\right) \cdot \cos \left(\frac{K}{2}\right) + U} \]
    2. lift-*.f64N/A

      \[\leadsto \color{blue}{\left(J \cdot \left(e^{\ell} - e^{-\ell}\right)\right) \cdot \cos \left(\frac{K}{2}\right)} + U \]
    3. *-commutativeN/A

      \[\leadsto \color{blue}{\cos \left(\frac{K}{2}\right) \cdot \left(J \cdot \left(e^{\ell} - e^{-\ell}\right)\right)} + U \]
    4. lift-*.f64N/A

      \[\leadsto \cos \left(\frac{K}{2}\right) \cdot \color{blue}{\left(J \cdot \left(e^{\ell} - e^{-\ell}\right)\right)} + U \]
    5. *-commutativeN/A

      \[\leadsto \cos \left(\frac{K}{2}\right) \cdot \color{blue}{\left(\left(e^{\ell} - e^{-\ell}\right) \cdot J\right)} + U \]
    6. lift--.f64N/A

      \[\leadsto \cos \left(\frac{K}{2}\right) \cdot \left(\color{blue}{\left(e^{\ell} - e^{-\ell}\right)} \cdot J\right) + U \]
    7. lift-exp.f64N/A

      \[\leadsto \cos \left(\frac{K}{2}\right) \cdot \left(\left(\color{blue}{e^{\ell}} - e^{-\ell}\right) \cdot J\right) + U \]
    8. lift-exp.f64N/A

      \[\leadsto \cos \left(\frac{K}{2}\right) \cdot \left(\left(e^{\ell} - \color{blue}{e^{-\ell}}\right) \cdot J\right) + U \]
    9. lift-neg.f64N/A

      \[\leadsto \cos \left(\frac{K}{2}\right) \cdot \left(\left(e^{\ell} - e^{\color{blue}{\mathsf{neg}\left(\ell\right)}}\right) \cdot J\right) + U \]
    10. sinh-undefN/A

      \[\leadsto \cos \left(\frac{K}{2}\right) \cdot \left(\color{blue}{\left(2 \cdot \sinh \ell\right)} \cdot J\right) + U \]
    11. associate-*l*N/A

      \[\leadsto \cos \left(\frac{K}{2}\right) \cdot \color{blue}{\left(2 \cdot \left(\sinh \ell \cdot J\right)\right)} + U \]
    12. associate-*r*N/A

      \[\leadsto \color{blue}{\left(\cos \left(\frac{K}{2}\right) \cdot 2\right) \cdot \left(\sinh \ell \cdot J\right)} + U \]
    13. lower-fma.f64N/A

      \[\leadsto \color{blue}{\mathsf{fma}\left(\cos \left(\frac{K}{2}\right) \cdot 2, \sinh \ell \cdot J, U\right)} \]
  3. Applied rewrites100.0%

    \[\leadsto \color{blue}{\mathsf{fma}\left(\cos \left(-0.5 \cdot K\right) \cdot 2, \sinh \ell \cdot J, U\right)} \]
  4. Step-by-step derivation
    1. lift-fma.f64N/A

      \[\leadsto \color{blue}{\left(\cos \left(\frac{-1}{2} \cdot K\right) \cdot 2\right) \cdot \left(\sinh \ell \cdot J\right) + U} \]
    2. lift-*.f64N/A

      \[\leadsto \left(\cos \left(\frac{-1}{2} \cdot K\right) \cdot 2\right) \cdot \color{blue}{\left(\sinh \ell \cdot J\right)} + U \]
    3. associate-*r*N/A

      \[\leadsto \color{blue}{\left(\left(\cos \left(\frac{-1}{2} \cdot K\right) \cdot 2\right) \cdot \sinh \ell\right) \cdot J} + U \]
  5. Applied rewrites100.0%

    \[\leadsto \color{blue}{\mathsf{fma}\left(\left(J + J\right) \cdot \sinh \ell, \cos \left(0.5 \cdot K\right), U\right)} \]
  6. Step-by-step derivation
    1. lift-fma.f64N/A

      \[\leadsto \color{blue}{\left(\left(J + J\right) \cdot \sinh \ell\right) \cdot \cos \left(\frac{1}{2} \cdot K\right) + U} \]
    2. *-commutativeN/A

      \[\leadsto \color{blue}{\cos \left(\frac{1}{2} \cdot K\right) \cdot \left(\left(J + J\right) \cdot \sinh \ell\right)} + U \]
    3. lift-*.f64N/A

      \[\leadsto \cos \left(\frac{1}{2} \cdot K\right) \cdot \color{blue}{\left(\left(J + J\right) \cdot \sinh \ell\right)} + U \]
    4. associate-*r*N/A

      \[\leadsto \color{blue}{\left(\cos \left(\frac{1}{2} \cdot K\right) \cdot \left(J + J\right)\right) \cdot \sinh \ell} + U \]
    5. lower-fma.f64N/A

      \[\leadsto \color{blue}{\mathsf{fma}\left(\cos \left(\frac{1}{2} \cdot K\right) \cdot \left(J + J\right), \sinh \ell, U\right)} \]
    6. lift-cos.f64N/A

      \[\leadsto \mathsf{fma}\left(\color{blue}{\cos \left(\frac{1}{2} \cdot K\right)} \cdot \left(J + J\right), \sinh \ell, U\right) \]
    7. lift-*.f64N/A

      \[\leadsto \mathsf{fma}\left(\cos \color{blue}{\left(\frac{1}{2} \cdot K\right)} \cdot \left(J + J\right), \sinh \ell, U\right) \]
    8. metadata-evalN/A

      \[\leadsto \mathsf{fma}\left(\cos \left(\color{blue}{\left(\mathsf{neg}\left(\frac{-1}{2}\right)\right)} \cdot K\right) \cdot \left(J + J\right), \sinh \ell, U\right) \]
    9. distribute-lft-neg-inN/A

      \[\leadsto \mathsf{fma}\left(\cos \color{blue}{\left(\mathsf{neg}\left(\frac{-1}{2} \cdot K\right)\right)} \cdot \left(J + J\right), \sinh \ell, U\right) \]
    10. lift-*.f64N/A

      \[\leadsto \mathsf{fma}\left(\cos \left(\mathsf{neg}\left(\color{blue}{\frac{-1}{2} \cdot K}\right)\right) \cdot \left(J + J\right), \sinh \ell, U\right) \]
    11. cos-neg-revN/A

      \[\leadsto \mathsf{fma}\left(\color{blue}{\cos \left(\frac{-1}{2} \cdot K\right)} \cdot \left(J + J\right), \sinh \ell, U\right) \]
    12. lift-cos.f64N/A

      \[\leadsto \mathsf{fma}\left(\color{blue}{\cos \left(\frac{-1}{2} \cdot K\right)} \cdot \left(J + J\right), \sinh \ell, U\right) \]
    13. lower-*.f64100.0%

      \[\leadsto \mathsf{fma}\left(\color{blue}{\cos \left(-0.5 \cdot K\right) \cdot \left(J + J\right)}, \sinh \ell, U\right) \]
  7. Applied rewrites100.0%

    \[\leadsto \color{blue}{\mathsf{fma}\left(\cos \left(-0.5 \cdot K\right) \cdot \left(J + J\right), \sinh \ell, U\right)} \]
  8. Add Preprocessing

Alternative 3: 88.4% accurate, 0.6× speedup?

\[\begin{array}{l} t_0 := \cos \left(\frac{K}{2}\right)\\ \mathbf{if}\;t\_0 \leq -0.56:\\ \;\;\;\;\left(J \cdot \left(e^{\ell} - e^{-\ell}\right)\right) \cdot \left(1 + -0.125 \cdot {K}^{2}\right) + U\\ \mathbf{elif}\;t\_0 \leq 0.02:\\ \;\;\;\;\mathsf{fma}\left(\ell, \cos \left(-0.5 \cdot K\right) \cdot \left(J + J\right), U\right)\\ \mathbf{else}:\\ \;\;\;\;\mathsf{fma}\left(J + J, \sinh \ell, U\right)\\ \end{array} \]
(FPCore (J l K U)
 :precision binary64
 (let* ((t_0 (cos (/ K 2.0))))
   (if (<= t_0 -0.56)
     (+ (* (* J (- (exp l) (exp (- l)))) (+ 1.0 (* -0.125 (pow K 2.0)))) U)
     (if (<= t_0 0.02)
       (fma l (* (cos (* -0.5 K)) (+ J J)) U)
       (fma (+ J J) (sinh l) U)))))
/* Alternative 3 (88.4% accurate, 0.6x speedup per Herbie):
   three regimes selected by t_0 = cos(K/2) (thresholds chosen by Herbie's
   regime inference). */
double code(double J, double l, double K, double U) {
	double t_0 = cos((K / 2.0));
	double tmp;
	if (t_0 <= -0.56) {
		/* cos(K/2) replaced by its Taylor polynomial 1 - K^2/8 (expansion in K around 0) */
		tmp = ((J * (exp(l) - exp(-l))) * (1.0 + (-0.125 * pow(K, 2.0)))) + U;
	} else if (t_0 <= 0.02) {
		/* sinh(l) linearized to l (Taylor expansion in l around 0), fused with fma */
		tmp = fma(l, (cos((-0.5 * K)) * (J + J)), U);
	} else {
		/* cos factor dropped (Taylor expansion in K around 0): fma(2J, sinh(l), U) */
		tmp = fma((J + J), sinh(l), U);
	}
	return tmp;
}
function code(J, l, K, U)
	# Alternative 3: three regimes selected by t_0 = cos(K/2); 88.4% accurate per Herbie.
	t_0 = cos(Float64(K / 2.0))
	tmp = 0.0
	if (t_0 <= -0.56)
		# cos(K/2) replaced by its Taylor polynomial 1 - K^2/8 (expansion in K around 0)
		tmp = Float64(Float64(Float64(J * Float64(exp(l) - exp(Float64(-l)))) * Float64(1.0 + Float64(-0.125 * (K ^ 2.0)))) + U);
	elseif (t_0 <= 0.02)
		# sinh(l) linearized to l (Taylor expansion in l around 0), fused with fma
		tmp = fma(l, Float64(cos(Float64(-0.5 * K)) * Float64(J + J)), U);
	else
		# cos factor dropped (Taylor expansion in K around 0)
		tmp = fma(Float64(J + J), sinh(l), U);
	end
	return tmp
end
code[J_, l_, K_, U_] := Block[{t$95$0 = N[Cos[N[(K / 2.0), $MachinePrecision]], $MachinePrecision]}, If[LessEqual[t$95$0, -0.56], N[(N[(N[(J * N[(N[Exp[l], $MachinePrecision] - N[Exp[(-l)], $MachinePrecision]), $MachinePrecision]), $MachinePrecision] * N[(1.0 + N[(-0.125 * N[Power[K, 2.0], $MachinePrecision]), $MachinePrecision]), $MachinePrecision]), $MachinePrecision] + U), $MachinePrecision], If[LessEqual[t$95$0, 0.02], N[(l * N[(N[Cos[N[(-0.5 * K), $MachinePrecision]], $MachinePrecision] * N[(J + J), $MachinePrecision]), $MachinePrecision] + U), $MachinePrecision], N[(N[(J + J), $MachinePrecision] * N[Sinh[l], $MachinePrecision] + U), $MachinePrecision]]]]
\begin{array}{l}
t_0 := \cos \left(\frac{K}{2}\right)\\
\mathbf{if}\;t\_0 \leq -0.56:\\
\;\;\;\;\left(J \cdot \left(e^{\ell} - e^{-\ell}\right)\right) \cdot \left(1 + -0.125 \cdot {K}^{2}\right) + U\\

\mathbf{elif}\;t\_0 \leq 0.02:\\
\;\;\;\;\mathsf{fma}\left(\ell, \cos \left(-0.5 \cdot K\right) \cdot \left(J + J\right), U\right)\\

\mathbf{else}:\\
\;\;\;\;\mathsf{fma}\left(J + J, \sinh \ell, U\right)\\


\end{array}
Derivation
  1. Split input into 3 regimes
  2. if (cos.f64 (/.f64 K #s(literal 2 binary64))) < -0.56000000000000005

    1. Initial program 86.5%

      \[\left(J \cdot \left(e^{\ell} - e^{-\ell}\right)\right) \cdot \cos \left(\frac{K}{2}\right) + U \]
    2. Taylor expanded in K around 0

      \[\leadsto \left(J \cdot \left(e^{\ell} - e^{-\ell}\right)\right) \cdot \color{blue}{\left(1 + \frac{-1}{8} \cdot {K}^{2}\right)} + U \]
    3. Step-by-step derivation
      1. lower-+.f64N/A

        \[\leadsto \left(J \cdot \left(e^{\ell} - e^{-\ell}\right)\right) \cdot \left(1 + \color{blue}{\frac{-1}{8} \cdot {K}^{2}}\right) + U \]
      2. lower-*.f64N/A

        \[\leadsto \left(J \cdot \left(e^{\ell} - e^{-\ell}\right)\right) \cdot \left(1 + \frac{-1}{8} \cdot \color{blue}{{K}^{2}}\right) + U \]
      3. lower-pow.f6464.8%

        \[\leadsto \left(J \cdot \left(e^{\ell} - e^{-\ell}\right)\right) \cdot \left(1 + -0.125 \cdot {K}^{\color{blue}{2}}\right) + U \]
    4. Applied rewrites64.8%

      \[\leadsto \left(J \cdot \left(e^{\ell} - e^{-\ell}\right)\right) \cdot \color{blue}{\left(1 + -0.125 \cdot {K}^{2}\right)} + U \]

    if -0.56000000000000005 < (cos.f64 (/.f64 K #s(literal 2 binary64))) < 0.0200000000000000004

    1. Initial program 86.5%

      \[\left(J \cdot \left(e^{\ell} - e^{-\ell}\right)\right) \cdot \cos \left(\frac{K}{2}\right) + U \]
    2. Step-by-step derivation
      1. lift-+.f64N/A

        \[\leadsto \color{blue}{\left(J \cdot \left(e^{\ell} - e^{-\ell}\right)\right) \cdot \cos \left(\frac{K}{2}\right) + U} \]
      2. lift-*.f64N/A

        \[\leadsto \color{blue}{\left(J \cdot \left(e^{\ell} - e^{-\ell}\right)\right) \cdot \cos \left(\frac{K}{2}\right)} + U \]
      3. *-commutativeN/A

        \[\leadsto \color{blue}{\cos \left(\frac{K}{2}\right) \cdot \left(J \cdot \left(e^{\ell} - e^{-\ell}\right)\right)} + U \]
      4. lift-*.f64N/A

        \[\leadsto \cos \left(\frac{K}{2}\right) \cdot \color{blue}{\left(J \cdot \left(e^{\ell} - e^{-\ell}\right)\right)} + U \]
      5. *-commutativeN/A

        \[\leadsto \cos \left(\frac{K}{2}\right) \cdot \color{blue}{\left(\left(e^{\ell} - e^{-\ell}\right) \cdot J\right)} + U \]
      6. lift--.f64N/A

        \[\leadsto \cos \left(\frac{K}{2}\right) \cdot \left(\color{blue}{\left(e^{\ell} - e^{-\ell}\right)} \cdot J\right) + U \]
      7. lift-exp.f64N/A

        \[\leadsto \cos \left(\frac{K}{2}\right) \cdot \left(\left(\color{blue}{e^{\ell}} - e^{-\ell}\right) \cdot J\right) + U \]
      8. lift-exp.f64N/A

        \[\leadsto \cos \left(\frac{K}{2}\right) \cdot \left(\left(e^{\ell} - \color{blue}{e^{-\ell}}\right) \cdot J\right) + U \]
      9. lift-neg.f64N/A

        \[\leadsto \cos \left(\frac{K}{2}\right) \cdot \left(\left(e^{\ell} - e^{\color{blue}{\mathsf{neg}\left(\ell\right)}}\right) \cdot J\right) + U \]
      10. sinh-undefN/A

        \[\leadsto \cos \left(\frac{K}{2}\right) \cdot \left(\color{blue}{\left(2 \cdot \sinh \ell\right)} \cdot J\right) + U \]
      11. associate-*l*N/A

        \[\leadsto \cos \left(\frac{K}{2}\right) \cdot \color{blue}{\left(2 \cdot \left(\sinh \ell \cdot J\right)\right)} + U \]
      12. associate-*r*N/A

        \[\leadsto \color{blue}{\left(\cos \left(\frac{K}{2}\right) \cdot 2\right) \cdot \left(\sinh \ell \cdot J\right)} + U \]
      13. lower-fma.f64N/A

        \[\leadsto \color{blue}{\mathsf{fma}\left(\cos \left(\frac{K}{2}\right) \cdot 2, \sinh \ell \cdot J, U\right)} \]
    3. Applied rewrites100.0%

      \[\leadsto \color{blue}{\mathsf{fma}\left(\cos \left(-0.5 \cdot K\right) \cdot 2, \sinh \ell \cdot J, U\right)} \]
    4. Taylor expanded in l around 0

      \[\leadsto \mathsf{fma}\left(\cos \left(-0.5 \cdot K\right) \cdot 2, \color{blue}{\ell} \cdot J, U\right) \]
    5. Step-by-step derivation
      1. Applied rewrites63.5%

        \[\leadsto \mathsf{fma}\left(\cos \left(-0.5 \cdot K\right) \cdot 2, \color{blue}{\ell} \cdot J, U\right) \]
      2. Step-by-step derivation
        1. lift-fma.f64N/A

          \[\leadsto \color{blue}{\left(\cos \left(\frac{-1}{2} \cdot K\right) \cdot 2\right) \cdot \left(\ell \cdot J\right) + U} \]
        2. add-flipN/A

          \[\leadsto \color{blue}{\left(\cos \left(\frac{-1}{2} \cdot K\right) \cdot 2\right) \cdot \left(\ell \cdot J\right) - \left(\mathsf{neg}\left(U\right)\right)} \]
        3. sub-flipN/A

          \[\leadsto \color{blue}{\left(\cos \left(\frac{-1}{2} \cdot K\right) \cdot 2\right) \cdot \left(\ell \cdot J\right) + \left(\mathsf{neg}\left(\left(\mathsf{neg}\left(U\right)\right)\right)\right)} \]
      3. Applied rewrites63.5%

        \[\leadsto \color{blue}{\mathsf{fma}\left(\ell, \cos \left(-0.5 \cdot K\right) \cdot \left(J + J\right), U\right)} \]

      if 0.0200000000000000004 < (cos.f64 (/.f64 K #s(literal 2 binary64)))

      1. Initial program 86.5%

        \[\left(J \cdot \left(e^{\ell} - e^{-\ell}\right)\right) \cdot \cos \left(\frac{K}{2}\right) + U \]
      2. Taylor expanded in K around 0

        \[\leadsto \color{blue}{U + J \cdot \left(e^{\ell} - e^{\mathsf{neg}\left(\ell\right)}\right)} \]
      3. Step-by-step derivation
        1. lower-+.f64N/A

          \[\leadsto U + \color{blue}{J \cdot \left(e^{\ell} - e^{\mathsf{neg}\left(\ell\right)}\right)} \]
        2. lower-*.f64N/A

          \[\leadsto U + J \cdot \color{blue}{\left(e^{\ell} - e^{\mathsf{neg}\left(\ell\right)}\right)} \]
        3. lower--.f64N/A

          \[\leadsto U + J \cdot \left(e^{\ell} - \color{blue}{e^{\mathsf{neg}\left(\ell\right)}}\right) \]
        4. lower-exp.f64N/A

          \[\leadsto U + J \cdot \left(e^{\ell} - e^{\color{blue}{\mathsf{neg}\left(\ell\right)}}\right) \]
        5. lower-exp.f64N/A

          \[\leadsto U + J \cdot \left(e^{\ell} - e^{\mathsf{neg}\left(\ell\right)}\right) \]
        6. lower-neg.f6472.9%

          \[\leadsto U + J \cdot \left(e^{\ell} - e^{-\ell}\right) \]
      4. Applied rewrites72.9%

        \[\leadsto \color{blue}{U + J \cdot \left(e^{\ell} - e^{-\ell}\right)} \]
      5. Step-by-step derivation
        1. lift-+.f64N/A

          \[\leadsto U + \color{blue}{J \cdot \left(e^{\ell} - e^{-\ell}\right)} \]
        2. +-commutativeN/A

          \[\leadsto J \cdot \left(e^{\ell} - e^{-\ell}\right) + \color{blue}{U} \]
        3. lift-*.f64N/A

          \[\leadsto J \cdot \left(e^{\ell} - e^{-\ell}\right) + U \]
        4. lift--.f64N/A

          \[\leadsto J \cdot \left(e^{\ell} - e^{-\ell}\right) + U \]
        5. lift-exp.f64N/A

          \[\leadsto J \cdot \left(e^{\ell} - e^{-\ell}\right) + U \]
        6. lift-exp.f64N/A

          \[\leadsto J \cdot \left(e^{\ell} - e^{-\ell}\right) + U \]
        7. lift-neg.f64N/A

          \[\leadsto J \cdot \left(e^{\ell} - e^{\mathsf{neg}\left(\ell\right)}\right) + U \]
        8. sinh-undefN/A

          \[\leadsto J \cdot \left(2 \cdot \sinh \ell\right) + U \]
        9. lift-sinh.f64N/A

          \[\leadsto J \cdot \left(2 \cdot \sinh \ell\right) + U \]
        10. associate-*r*N/A

          \[\leadsto \left(J \cdot 2\right) \cdot \sinh \ell + U \]
        11. *-commutativeN/A

          \[\leadsto \left(2 \cdot J\right) \cdot \sinh \ell + U \]
        12. count-2N/A

          \[\leadsto \left(J + J\right) \cdot \sinh \ell + U \]
        13. lift-+.f64N/A

          \[\leadsto \left(J + J\right) \cdot \sinh \ell + U \]
        14. lower-fma.f6480.2%

          \[\leadsto \mathsf{fma}\left(J + J, \color{blue}{\sinh \ell}, U\right) \]
      6. Applied rewrites80.2%

        \[\leadsto \mathsf{fma}\left(J + J, \color{blue}{\sinh \ell}, U\right) \]
    6. Recombined 3 regimes into one program.
    7. Add Preprocessing

    Alternative 4: 87.8% accurate, 0.4× speedup?

    \[\begin{array}{l} t_0 := \left(J \cdot \left(e^{\ell} - e^{-\ell}\right)\right) \cdot \cos \left(\frac{K}{2}\right) + U\\ \mathbf{if}\;t\_0 \leq -\infty:\\ \;\;\;\;\mathsf{fma}\left(\left(J + J\right) \cdot \sinh \ell, \mathsf{fma}\left(K \cdot K, -0.125, 1\right), U\right)\\ \mathbf{elif}\;t\_0 \leq 2 \cdot 10^{+111}:\\ \;\;\;\;\mathsf{fma}\left(\ell \cdot \left(J + J\right), \cos \left(-0.5 \cdot K\right), U\right)\\ \mathbf{else}:\\ \;\;\;\;\mathsf{fma}\left(J + J, \sinh \ell, U\right)\\ \end{array} \]
    (FPCore (J l K U)
     :precision binary64
     (let* ((t_0 (+ (* (* J (- (exp l) (exp (- l)))) (cos (/ K 2.0))) U)))
       (if (<= t_0 (- INFINITY))
         (fma (* (+ J J) (sinh l)) (fma (* K K) -0.125 1.0) U)
         (if (<= t_0 2e+111)
           (fma (* l (+ J J)) (cos (* -0.5 K)) U)
           (fma (+ J J) (sinh l) U)))))
    /* Alternative 4 (87.8% accurate, 0.4x speedup per Herbie):
       three regimes selected by t_0 = the full original expression.
       The first branch runs only when t_0 evaluates to -infinity. */
    double code(double J, double l, double K, double U) {
    	double t_0 = ((J * (exp(l) - exp(-l))) * cos((K / 2.0))) + U;
    	double tmp;
    	if (t_0 <= -((double) INFINITY)) {
    		/* cos(K/2) as fma(K*K, -0.125, 1.0), i.e. the 1 - K^2/8 Taylor polynomial */
    		tmp = fma(((J + J) * sinh(l)), fma((K * K), -0.125, 1.0), U);
    	} else if (t_0 <= 2e+111) {
    		/* sinh(l) linearized to l (Taylor expansion in l around 0) */
    		tmp = fma((l * (J + J)), cos((-0.5 * K)), U);
    	} else {
    		tmp = fma((J + J), sinh(l), U);
    	}
    	return tmp;
    }
    
    function code(J, l, K, U)
    	# Alternative 4: regimes selected by t_0 = the full original expression; 87.8% accurate per Herbie.
    	t_0 = Float64(Float64(Float64(J * Float64(exp(l) - exp(Float64(-l)))) * cos(Float64(K / 2.0))) + U)
    	tmp = 0.0
    	if (t_0 <= Float64(-Inf))
    		# cos(K/2) as fma(K*K, -0.125, 1.0), i.e. the 1 - K^2/8 Taylor polynomial
    		tmp = fma(Float64(Float64(J + J) * sinh(l)), fma(Float64(K * K), -0.125, 1.0), U);
    	elseif (t_0 <= 2e+111)
    		# sinh(l) linearized to l (Taylor expansion in l around 0)
    		tmp = fma(Float64(l * Float64(J + J)), cos(Float64(-0.5 * K)), U);
    	else
    		tmp = fma(Float64(J + J), sinh(l), U);
    	end
    	return tmp
    end
    
    code[J_, l_, K_, U_] := Block[{t$95$0 = N[(N[(N[(J * N[(N[Exp[l], $MachinePrecision] - N[Exp[(-l)], $MachinePrecision]), $MachinePrecision]), $MachinePrecision] * N[Cos[N[(K / 2.0), $MachinePrecision]], $MachinePrecision]), $MachinePrecision] + U), $MachinePrecision]}, If[LessEqual[t$95$0, (-Infinity)], N[(N[(N[(J + J), $MachinePrecision] * N[Sinh[l], $MachinePrecision]), $MachinePrecision] * N[(N[(K * K), $MachinePrecision] * -0.125 + 1.0), $MachinePrecision] + U), $MachinePrecision], If[LessEqual[t$95$0, 2e+111], N[(N[(l * N[(J + J), $MachinePrecision]), $MachinePrecision] * N[Cos[N[(-0.5 * K), $MachinePrecision]], $MachinePrecision] + U), $MachinePrecision], N[(N[(J + J), $MachinePrecision] * N[Sinh[l], $MachinePrecision] + U), $MachinePrecision]]]]
    
    \begin{array}{l}
    t_0 := \left(J \cdot \left(e^{\ell} - e^{-\ell}\right)\right) \cdot \cos \left(\frac{K}{2}\right) + U\\
    \mathbf{if}\;t\_0 \leq -\infty:\\
    \;\;\;\;\mathsf{fma}\left(\left(J + J\right) \cdot \sinh \ell, \mathsf{fma}\left(K \cdot K, -0.125, 1\right), U\right)\\
    
    \mathbf{elif}\;t\_0 \leq 2 \cdot 10^{+111}:\\
    \;\;\;\;\mathsf{fma}\left(\ell \cdot \left(J + J\right), \cos \left(-0.5 \cdot K\right), U\right)\\
    
    \mathbf{else}:\\
    \;\;\;\;\mathsf{fma}\left(J + J, \sinh \ell, U\right)\\
    
    
    \end{array}
    
    Derivation
    1. Split input into 3 regimes
    2. if (+.f64 (*.f64 (*.f64 J (-.f64 (exp.f64 l) (exp.f64 (neg.f64 l)))) (cos.f64 (/.f64 K #s(literal 2 binary64)))) U) < -inf.0

      1. Initial program 86.5%

        \[\left(J \cdot \left(e^{\ell} - e^{-\ell}\right)\right) \cdot \cos \left(\frac{K}{2}\right) + U \]
      2. Taylor expanded in K around 0

        \[\leadsto \left(J \cdot \left(e^{\ell} - e^{-\ell}\right)\right) \cdot \color{blue}{\left(1 + \frac{-1}{8} \cdot {K}^{2}\right)} + U \]
      3. Step-by-step derivation
        1. lower-+.f64N/A

          \[\leadsto \left(J \cdot \left(e^{\ell} - e^{-\ell}\right)\right) \cdot \left(1 + \color{blue}{\frac{-1}{8} \cdot {K}^{2}}\right) + U \]
        2. lower-*.f64N/A

          \[\leadsto \left(J \cdot \left(e^{\ell} - e^{-\ell}\right)\right) \cdot \left(1 + \frac{-1}{8} \cdot \color{blue}{{K}^{2}}\right) + U \]
        3. lower-pow.f6464.8%

          \[\leadsto \left(J \cdot \left(e^{\ell} - e^{-\ell}\right)\right) \cdot \left(1 + -0.125 \cdot {K}^{\color{blue}{2}}\right) + U \]
      4. Applied rewrites64.8%

        \[\leadsto \left(J \cdot \left(e^{\ell} - e^{-\ell}\right)\right) \cdot \color{blue}{\left(1 + -0.125 \cdot {K}^{2}\right)} + U \]
      5. Step-by-step derivation
        1. lift-+.f64N/A

          \[\leadsto \color{blue}{\left(J \cdot \left(e^{\ell} - e^{-\ell}\right)\right) \cdot \left(1 + \frac{-1}{8} \cdot {K}^{2}\right) + U} \]
        2. lift-*.f64N/A

          \[\leadsto \color{blue}{\left(J \cdot \left(e^{\ell} - e^{-\ell}\right)\right) \cdot \left(1 + \frac{-1}{8} \cdot {K}^{2}\right)} + U \]
        3. lower-fma.f64 — 64.8%

          \[\leadsto \color{blue}{\mathsf{fma}\left(J \cdot \left(e^{\ell} - e^{-\ell}\right), 1 + -0.125 \cdot {K}^{2}, U\right)} \]
      6. Applied rewrites — 69.5%

        \[\leadsto \color{blue}{\mathsf{fma}\left(\left(J + J\right) \cdot \sinh \ell, \mathsf{fma}\left(K \cdot K, -0.125, 1\right), U\right)} \]

      if -inf.0 < (+.f64 (*.f64 (*.f64 J (-.f64 (exp.f64 l) (exp.f64 (neg.f64 l)))) (cos.f64 (/.f64 K #s(literal 2 binary64)))) U) < 1.99999999999999991e111

      1. Initial program 86.5%

        \[\left(J \cdot \left(e^{\ell} - e^{-\ell}\right)\right) \cdot \cos \left(\frac{K}{2}\right) + U \]
      2. Step-by-step derivation
        1. lift-+.f64N/A

          \[\leadsto \color{blue}{\left(J \cdot \left(e^{\ell} - e^{-\ell}\right)\right) \cdot \cos \left(\frac{K}{2}\right) + U} \]
        2. lift-*.f64N/A

          \[\leadsto \color{blue}{\left(J \cdot \left(e^{\ell} - e^{-\ell}\right)\right) \cdot \cos \left(\frac{K}{2}\right)} + U \]
        3. *-commutativeN/A

          \[\leadsto \color{blue}{\cos \left(\frac{K}{2}\right) \cdot \left(J \cdot \left(e^{\ell} - e^{-\ell}\right)\right)} + U \]
        4. lift-*.f64N/A

          \[\leadsto \cos \left(\frac{K}{2}\right) \cdot \color{blue}{\left(J \cdot \left(e^{\ell} - e^{-\ell}\right)\right)} + U \]
        5. *-commutativeN/A

          \[\leadsto \cos \left(\frac{K}{2}\right) \cdot \color{blue}{\left(\left(e^{\ell} - e^{-\ell}\right) \cdot J\right)} + U \]
        6. lift--.f64N/A

          \[\leadsto \cos \left(\frac{K}{2}\right) \cdot \left(\color{blue}{\left(e^{\ell} - e^{-\ell}\right)} \cdot J\right) + U \]
        7. lift-exp.f64N/A

          \[\leadsto \cos \left(\frac{K}{2}\right) \cdot \left(\left(\color{blue}{e^{\ell}} - e^{-\ell}\right) \cdot J\right) + U \]
        8. lift-exp.f64N/A

          \[\leadsto \cos \left(\frac{K}{2}\right) \cdot \left(\left(e^{\ell} - \color{blue}{e^{-\ell}}\right) \cdot J\right) + U \]
        9. lift-neg.f64N/A

          \[\leadsto \cos \left(\frac{K}{2}\right) \cdot \left(\left(e^{\ell} - e^{\color{blue}{\mathsf{neg}\left(\ell\right)}}\right) \cdot J\right) + U \]
        10. sinh-undefN/A

          \[\leadsto \cos \left(\frac{K}{2}\right) \cdot \left(\color{blue}{\left(2 \cdot \sinh \ell\right)} \cdot J\right) + U \]
        11. associate-*l*N/A

          \[\leadsto \cos \left(\frac{K}{2}\right) \cdot \color{blue}{\left(2 \cdot \left(\sinh \ell \cdot J\right)\right)} + U \]
        12. associate-*r*N/A

          \[\leadsto \color{blue}{\left(\cos \left(\frac{K}{2}\right) \cdot 2\right) \cdot \left(\sinh \ell \cdot J\right)} + U \]
        13. lower-fma.f64N/A

          \[\leadsto \color{blue}{\mathsf{fma}\left(\cos \left(\frac{K}{2}\right) \cdot 2, \sinh \ell \cdot J, U\right)} \]
      3. Applied rewrites100.0%

        \[\leadsto \color{blue}{\mathsf{fma}\left(\cos \left(-0.5 \cdot K\right) \cdot 2, \sinh \ell \cdot J, U\right)} \]
      4. Taylor expanded in l around 0

        \[\leadsto \mathsf{fma}\left(\cos \left(-0.5 \cdot K\right) \cdot 2, \color{blue}{\ell} \cdot J, U\right) \]
      5. Step-by-step derivation
        1. Applied rewrites63.5%

          \[\leadsto \mathsf{fma}\left(\cos \left(-0.5 \cdot K\right) \cdot 2, \color{blue}{\ell} \cdot J, U\right) \]
        2. Step-by-step derivation
          1. lift-fma.f64N/A

            \[\leadsto \color{blue}{\left(\cos \left(\frac{-1}{2} \cdot K\right) \cdot 2\right) \cdot \left(\ell \cdot J\right) + U} \]
          2. add-flipN/A

            \[\leadsto \color{blue}{\left(\cos \left(\frac{-1}{2} \cdot K\right) \cdot 2\right) \cdot \left(\ell \cdot J\right) - \left(\mathsf{neg}\left(U\right)\right)} \]
          3. sub-flipN/A

            \[\leadsto \color{blue}{\left(\cos \left(\frac{-1}{2} \cdot K\right) \cdot 2\right) \cdot \left(\ell \cdot J\right) + \left(\mathsf{neg}\left(\left(\mathsf{neg}\left(U\right)\right)\right)\right)} \]
          4. lift-*.f64N/A

            \[\leadsto \color{blue}{\left(\cos \left(\frac{-1}{2} \cdot K\right) \cdot 2\right)} \cdot \left(\ell \cdot J\right) + \left(\mathsf{neg}\left(\left(\mathsf{neg}\left(U\right)\right)\right)\right) \]
          5. associate-*l*N/A

            \[\leadsto \color{blue}{\cos \left(\frac{-1}{2} \cdot K\right) \cdot \left(2 \cdot \left(\ell \cdot J\right)\right)} + \left(\mathsf{neg}\left(\left(\mathsf{neg}\left(U\right)\right)\right)\right) \]
          6. *-commutativeN/A

            \[\leadsto \color{blue}{\left(2 \cdot \left(\ell \cdot J\right)\right) \cdot \cos \left(\frac{-1}{2} \cdot K\right)} + \left(\mathsf{neg}\left(\left(\mathsf{neg}\left(U\right)\right)\right)\right) \]
          7. remove-double-negN/A

            \[\leadsto \left(2 \cdot \left(\ell \cdot J\right)\right) \cdot \cos \left(\frac{-1}{2} \cdot K\right) + \color{blue}{U} \]
          8. lower-fma.f64N/A

            \[\leadsto \color{blue}{\mathsf{fma}\left(2 \cdot \left(\ell \cdot J\right), \cos \left(\frac{-1}{2} \cdot K\right), U\right)} \]
          9. count-2-revN/A

            \[\leadsto \mathsf{fma}\left(\color{blue}{\ell \cdot J + \ell \cdot J}, \cos \left(\frac{-1}{2} \cdot K\right), U\right) \]
          10. lift-*.f64N/A

            \[\leadsto \mathsf{fma}\left(\color{blue}{\ell \cdot J} + \ell \cdot J, \cos \left(\frac{-1}{2} \cdot K\right), U\right) \]
          11. lift-*.f64N/A

            \[\leadsto \mathsf{fma}\left(\ell \cdot J + \color{blue}{\ell \cdot J}, \cos \left(\frac{-1}{2} \cdot K\right), U\right) \]
          12. distribute-lft-outN/A

            \[\leadsto \mathsf{fma}\left(\color{blue}{\ell \cdot \left(J + J\right)}, \cos \left(\frac{-1}{2} \cdot K\right), U\right) \]
          13. lift-+.f64N/A

            \[\leadsto \mathsf{fma}\left(\ell \cdot \color{blue}{\left(J + J\right)}, \cos \left(\frac{-1}{2} \cdot K\right), U\right) \]
          14. lower-*.f6463.5%

            \[\leadsto \mathsf{fma}\left(\color{blue}{\ell \cdot \left(J + J\right)}, \cos \left(-0.5 \cdot K\right), U\right) \]
        3. Applied rewrites63.5%

          \[\leadsto \color{blue}{\mathsf{fma}\left(\ell \cdot \left(J + J\right), \cos \left(-0.5 \cdot K\right), U\right)} \]

        if 1.99999999999999991e111 < (+.f64 (*.f64 (*.f64 J (-.f64 (exp.f64 l) (exp.f64 (neg.f64 l)))) (cos.f64 (/.f64 K #s(literal 2 binary64)))) U)

        1. Initial program 86.5%

          \[\left(J \cdot \left(e^{\ell} - e^{-\ell}\right)\right) \cdot \cos \left(\frac{K}{2}\right) + U \]
        2. Taylor expanded in K around 0

          \[\leadsto \color{blue}{U + J \cdot \left(e^{\ell} - e^{\mathsf{neg}\left(\ell\right)}\right)} \]
        3. Step-by-step derivation
          1. lower-+.f64N/A

            \[\leadsto U + \color{blue}{J \cdot \left(e^{\ell} - e^{\mathsf{neg}\left(\ell\right)}\right)} \]
          2. lower-*.f64N/A

            \[\leadsto U + J \cdot \color{blue}{\left(e^{\ell} - e^{\mathsf{neg}\left(\ell\right)}\right)} \]
          3. lower--.f64N/A

            \[\leadsto U + J \cdot \left(e^{\ell} - \color{blue}{e^{\mathsf{neg}\left(\ell\right)}}\right) \]
          4. lower-exp.f64N/A

            \[\leadsto U + J \cdot \left(e^{\ell} - e^{\color{blue}{\mathsf{neg}\left(\ell\right)}}\right) \]
          5. lower-exp.f64N/A

            \[\leadsto U + J \cdot \left(e^{\ell} - e^{\mathsf{neg}\left(\ell\right)}\right) \]
          6. lower-neg.f6472.9%

            \[\leadsto U + J \cdot \left(e^{\ell} - e^{-\ell}\right) \]
        4. Applied rewrites72.9%

          \[\leadsto \color{blue}{U + J \cdot \left(e^{\ell} - e^{-\ell}\right)} \]
        5. Step-by-step derivation
          1. lift-+.f64N/A

            \[\leadsto U + \color{blue}{J \cdot \left(e^{\ell} - e^{-\ell}\right)} \]
          2. +-commutativeN/A

            \[\leadsto J \cdot \left(e^{\ell} - e^{-\ell}\right) + \color{blue}{U} \]
          3. lift-*.f64N/A

            \[\leadsto J \cdot \left(e^{\ell} - e^{-\ell}\right) + U \]
          4. lift--.f64N/A

            \[\leadsto J \cdot \left(e^{\ell} - e^{-\ell}\right) + U \]
          5. lift-exp.f64N/A

            \[\leadsto J \cdot \left(e^{\ell} - e^{-\ell}\right) + U \]
          6. lift-exp.f64N/A

            \[\leadsto J \cdot \left(e^{\ell} - e^{-\ell}\right) + U \]
          7. lift-neg.f64N/A

            \[\leadsto J \cdot \left(e^{\ell} - e^{\mathsf{neg}\left(\ell\right)}\right) + U \]
          8. sinh-undefN/A

            \[\leadsto J \cdot \left(2 \cdot \sinh \ell\right) + U \]
          9. lift-sinh.f64N/A

            \[\leadsto J \cdot \left(2 \cdot \sinh \ell\right) + U \]
          10. associate-*r*N/A

            \[\leadsto \left(J \cdot 2\right) \cdot \sinh \ell + U \]
          11. *-commutativeN/A

            \[\leadsto \left(2 \cdot J\right) \cdot \sinh \ell + U \]
          12. count-2N/A

            \[\leadsto \left(J + J\right) \cdot \sinh \ell + U \]
          13. lift-+.f64N/A

            \[\leadsto \left(J + J\right) \cdot \sinh \ell + U \]
          14. lower-fma.f6480.2%

            \[\leadsto \mathsf{fma}\left(J + J, \color{blue}{\sinh \ell}, U\right) \]
        6. Applied rewrites80.2%

          \[\leadsto \mathsf{fma}\left(J + J, \color{blue}{\sinh \ell}, U\right) \]
      6. Recombined 3 regimes into one program.
      7. Add Preprocessing

      Alternative 5: 87.7% accurate, 0.4× speedup?

      \[\begin{array}{l} t_0 := \left(J \cdot \left(e^{\ell} - e^{-\ell}\right)\right) \cdot \cos \left(\frac{K}{2}\right) + U\\ \mathbf{if}\;t\_0 \leq -\infty:\\ \;\;\;\;\mathsf{fma}\left(\left(J + J\right) \cdot \sinh \ell, \mathsf{fma}\left(K \cdot K, -0.125, 1\right), U\right)\\ \mathbf{elif}\;t\_0 \leq 2 \cdot 10^{+111}:\\ \;\;\;\;\mathsf{fma}\left(\ell, \cos \left(-0.5 \cdot K\right) \cdot \left(J + J\right), U\right)\\ \mathbf{else}:\\ \;\;\;\;\mathsf{fma}\left(J + J, \sinh \ell, U\right)\\ \end{array} \]
      ; Alternative 5: Eq. (4) split into three regimes on the naively evaluated value t_0.
      ; Regime 1 (t_0 = -inf): sinh rewrite with the quadratic cosine series fma(K*K, -1/8, 1).
      ; Regime 2 (t_0 <= 2e111): Taylor expansion in l, e^l - e^-l ~ 2l.
      ; Regime 3 (t_0 large): drop the cosine factor entirely.
      (FPCore (J l K U)
       :precision binary64
       (let* ((t_0 (+ (* (* J (- (exp l) (exp (- l)))) (cos (/ K 2.0))) U)))
         (if (<= t_0 (- INFINITY))
           (fma (* (+ J J) (sinh l)) (fma (* K K) -0.125 1.0) U)
           (if (<= t_0 2e+111)
             (fma l (* (cos (* -0.5 K)) (+ J J)) U)
             (fma (+ J J) (sinh l) U)))))
      double code(double J, double l, double K, double U) {
      	double t_0 = ((J * (exp(l) - exp(-l))) * cos((K / 2.0))) + U;
      	double tmp;
      	if (t_0 <= -((double) INFINITY)) {
      		tmp = fma(((J + J) * sinh(l)), fma((K * K), -0.125, 1.0), U);
      	} else if (t_0 <= 2e+111) {
      		tmp = fma(l, (cos((-0.5 * K)) * (J + J)), U);
      	} else {
      		tmp = fma((J + J), sinh(l), U);
      	}
      	return tmp;
      }
      
      # Alternative 5 of Eq. (4); the naive value picks one of three regimes.
      # Every Float64(...) rounding step from the generated form is preserved.
      function code(J, l, K, U)
      	naive = Float64(Float64(Float64(J * Float64(exp(l) - exp(Float64(-l)))) * cos(Float64(K / 2.0))) + U)
      	if naive <= Float64(-Inf)
      		# sinh rewrite with the quadratic cosine series
      		return fma(Float64(Float64(J + J) * sinh(l)), fma(Float64(K * K), -0.125, 1.0), U)
      	elseif naive <= 2e+111
      		# Taylor expansion in l: e^l - e^-l ~ 2l
      		return fma(l, Float64(cos(Float64(-0.5 * K)) * Float64(J + J)), U)
      	end
      	# huge-magnitude regime: cosine factor dropped
      	return fma(Float64(J + J), sinh(l), U)
      end
      
      (* Alternative 5 in Mathematica: same three-regime split on t$95$0, the naively
         evaluated Eq. (4); middle regime uses l*(J+J)*Cos[-K/2] + U. *)
      code[J_, l_, K_, U_] := Block[{t$95$0 = N[(N[(N[(J * N[(N[Exp[l], $MachinePrecision] - N[Exp[(-l)], $MachinePrecision]), $MachinePrecision]), $MachinePrecision] * N[Cos[N[(K / 2.0), $MachinePrecision]], $MachinePrecision]), $MachinePrecision] + U), $MachinePrecision]}, If[LessEqual[t$95$0, (-Infinity)], N[(N[(N[(J + J), $MachinePrecision] * N[Sinh[l], $MachinePrecision]), $MachinePrecision] * N[(N[(K * K), $MachinePrecision] * -0.125 + 1.0), $MachinePrecision] + U), $MachinePrecision], If[LessEqual[t$95$0, 2e+111], N[(l * N[(N[Cos[N[(-0.5 * K), $MachinePrecision]], $MachinePrecision] * N[(J + J), $MachinePrecision]), $MachinePrecision] + U), $MachinePrecision], N[(N[(J + J), $MachinePrecision] * N[Sinh[l], $MachinePrecision] + U), $MachinePrecision]]]]
      
      \begin{array}{l}
      t_0 := \left(J \cdot \left(e^{\ell} - e^{-\ell}\right)\right) \cdot \cos \left(\frac{K}{2}\right) + U\\
      \mathbf{if}\;t\_0 \leq -\infty:\\
      \;\;\;\;\mathsf{fma}\left(\left(J + J\right) \cdot \sinh \ell, \mathsf{fma}\left(K \cdot K, -0.125, 1\right), U\right)\\
      
      \mathbf{elif}\;t\_0 \leq 2 \cdot 10^{+111}:\\
      \;\;\;\;\mathsf{fma}\left(\ell, \cos \left(-0.5 \cdot K\right) \cdot \left(J + J\right), U\right)\\
      
      \mathbf{else}:\\
      \;\;\;\;\mathsf{fma}\left(J + J, \sinh \ell, U\right)\\
      
      
      \end{array}
      
      Derivation
      1. Split input into 3 regimes
      2. if (+.f64 (*.f64 (*.f64 J (-.f64 (exp.f64 l) (exp.f64 (neg.f64 l)))) (cos.f64 (/.f64 K #s(literal 2 binary64)))) U) < -inf.0

        1. Initial program 86.5%

          \[\left(J \cdot \left(e^{\ell} - e^{-\ell}\right)\right) \cdot \cos \left(\frac{K}{2}\right) + U \]
        2. Taylor expanded in K around 0

          \[\leadsto \left(J \cdot \left(e^{\ell} - e^{-\ell}\right)\right) \cdot \color{blue}{\left(1 + \frac{-1}{8} \cdot {K}^{2}\right)} + U \]
        3. Step-by-step derivation
          1. lower-+.f64N/A

            \[\leadsto \left(J \cdot \left(e^{\ell} - e^{-\ell}\right)\right) \cdot \left(1 + \color{blue}{\frac{-1}{8} \cdot {K}^{2}}\right) + U \]
          2. lower-*.f64N/A

            \[\leadsto \left(J \cdot \left(e^{\ell} - e^{-\ell}\right)\right) \cdot \left(1 + \frac{-1}{8} \cdot \color{blue}{{K}^{2}}\right) + U \]
          3. lower-pow.f6464.8%

            \[\leadsto \left(J \cdot \left(e^{\ell} - e^{-\ell}\right)\right) \cdot \left(1 + -0.125 \cdot {K}^{\color{blue}{2}}\right) + U \]
        4. Applied rewrites64.8%

          \[\leadsto \left(J \cdot \left(e^{\ell} - e^{-\ell}\right)\right) \cdot \color{blue}{\left(1 + -0.125 \cdot {K}^{2}\right)} + U \]
        5. Step-by-step derivation
          1. lift-+.f64N/A

            \[\leadsto \color{blue}{\left(J \cdot \left(e^{\ell} - e^{-\ell}\right)\right) \cdot \left(1 + \frac{-1}{8} \cdot {K}^{2}\right) + U} \]
          2. lift-*.f64N/A

            \[\leadsto \color{blue}{\left(J \cdot \left(e^{\ell} - e^{-\ell}\right)\right) \cdot \left(1 + \frac{-1}{8} \cdot {K}^{2}\right)} + U \]
          3. lower-fma.f6464.8%

            \[\leadsto \color{blue}{\mathsf{fma}\left(J \cdot \left(e^{\ell} - e^{-\ell}\right), 1 + -0.125 \cdot {K}^{2}, U\right)} \]
        6. Applied rewrites69.5%

          \[\leadsto \color{blue}{\mathsf{fma}\left(\left(J + J\right) \cdot \sinh \ell, \mathsf{fma}\left(K \cdot K, -0.125, 1\right), U\right)} \]

        if -inf.0 < (+.f64 (*.f64 (*.f64 J (-.f64 (exp.f64 l) (exp.f64 (neg.f64 l)))) (cos.f64 (/.f64 K #s(literal 2 binary64)))) U) < 1.99999999999999991e111

        1. Initial program 86.5%

          \[\left(J \cdot \left(e^{\ell} - e^{-\ell}\right)\right) \cdot \cos \left(\frac{K}{2}\right) + U \]
        2. Step-by-step derivation
          1. lift-+.f64N/A

            \[\leadsto \color{blue}{\left(J \cdot \left(e^{\ell} - e^{-\ell}\right)\right) \cdot \cos \left(\frac{K}{2}\right) + U} \]
          2. lift-*.f64N/A

            \[\leadsto \color{blue}{\left(J \cdot \left(e^{\ell} - e^{-\ell}\right)\right) \cdot \cos \left(\frac{K}{2}\right)} + U \]
          3. *-commutativeN/A

            \[\leadsto \color{blue}{\cos \left(\frac{K}{2}\right) \cdot \left(J \cdot \left(e^{\ell} - e^{-\ell}\right)\right)} + U \]
          4. lift-*.f64N/A

            \[\leadsto \cos \left(\frac{K}{2}\right) \cdot \color{blue}{\left(J \cdot \left(e^{\ell} - e^{-\ell}\right)\right)} + U \]
          5. *-commutativeN/A

            \[\leadsto \cos \left(\frac{K}{2}\right) \cdot \color{blue}{\left(\left(e^{\ell} - e^{-\ell}\right) \cdot J\right)} + U \]
          6. lift--.f64N/A

            \[\leadsto \cos \left(\frac{K}{2}\right) \cdot \left(\color{blue}{\left(e^{\ell} - e^{-\ell}\right)} \cdot J\right) + U \]
          7. lift-exp.f64N/A

            \[\leadsto \cos \left(\frac{K}{2}\right) \cdot \left(\left(\color{blue}{e^{\ell}} - e^{-\ell}\right) \cdot J\right) + U \]
          8. lift-exp.f64N/A

            \[\leadsto \cos \left(\frac{K}{2}\right) \cdot \left(\left(e^{\ell} - \color{blue}{e^{-\ell}}\right) \cdot J\right) + U \]
          9. lift-neg.f64N/A

            \[\leadsto \cos \left(\frac{K}{2}\right) \cdot \left(\left(e^{\ell} - e^{\color{blue}{\mathsf{neg}\left(\ell\right)}}\right) \cdot J\right) + U \]
          10. sinh-undefN/A

            \[\leadsto \cos \left(\frac{K}{2}\right) \cdot \left(\color{blue}{\left(2 \cdot \sinh \ell\right)} \cdot J\right) + U \]
          11. associate-*l*N/A

            \[\leadsto \cos \left(\frac{K}{2}\right) \cdot \color{blue}{\left(2 \cdot \left(\sinh \ell \cdot J\right)\right)} + U \]
          12. associate-*r*N/A

            \[\leadsto \color{blue}{\left(\cos \left(\frac{K}{2}\right) \cdot 2\right) \cdot \left(\sinh \ell \cdot J\right)} + U \]
          13. lower-fma.f64N/A

            \[\leadsto \color{blue}{\mathsf{fma}\left(\cos \left(\frac{K}{2}\right) \cdot 2, \sinh \ell \cdot J, U\right)} \]
        3. Applied rewrites100.0%

          \[\leadsto \color{blue}{\mathsf{fma}\left(\cos \left(-0.5 \cdot K\right) \cdot 2, \sinh \ell \cdot J, U\right)} \]
        4. Taylor expanded in l around 0

          \[\leadsto \mathsf{fma}\left(\cos \left(-0.5 \cdot K\right) \cdot 2, \color{blue}{\ell} \cdot J, U\right) \]
        5. Step-by-step derivation
          1. Applied rewrites63.5%

            \[\leadsto \mathsf{fma}\left(\cos \left(-0.5 \cdot K\right) \cdot 2, \color{blue}{\ell} \cdot J, U\right) \]
          2. Step-by-step derivation
            1. lift-fma.f64N/A

              \[\leadsto \color{blue}{\left(\cos \left(\frac{-1}{2} \cdot K\right) \cdot 2\right) \cdot \left(\ell \cdot J\right) + U} \]
            2. add-flipN/A

              \[\leadsto \color{blue}{\left(\cos \left(\frac{-1}{2} \cdot K\right) \cdot 2\right) \cdot \left(\ell \cdot J\right) - \left(\mathsf{neg}\left(U\right)\right)} \]
            3. sub-flipN/A

              \[\leadsto \color{blue}{\left(\cos \left(\frac{-1}{2} \cdot K\right) \cdot 2\right) \cdot \left(\ell \cdot J\right) + \left(\mathsf{neg}\left(\left(\mathsf{neg}\left(U\right)\right)\right)\right)} \]
          3. Applied rewrites63.5%

            \[\leadsto \color{blue}{\mathsf{fma}\left(\ell, \cos \left(-0.5 \cdot K\right) \cdot \left(J + J\right), U\right)} \]

          if 1.99999999999999991e111 < (+.f64 (*.f64 (*.f64 J (-.f64 (exp.f64 l) (exp.f64 (neg.f64 l)))) (cos.f64 (/.f64 K #s(literal 2 binary64)))) U)

          1. Initial program 86.5%

            \[\left(J \cdot \left(e^{\ell} - e^{-\ell}\right)\right) \cdot \cos \left(\frac{K}{2}\right) + U \]
          2. Taylor expanded in K around 0

            \[\leadsto \color{blue}{U + J \cdot \left(e^{\ell} - e^{\mathsf{neg}\left(\ell\right)}\right)} \]
          3. Step-by-step derivation
            1. lower-+.f64N/A

              \[\leadsto U + \color{blue}{J \cdot \left(e^{\ell} - e^{\mathsf{neg}\left(\ell\right)}\right)} \]
            2. lower-*.f64N/A

              \[\leadsto U + J \cdot \color{blue}{\left(e^{\ell} - e^{\mathsf{neg}\left(\ell\right)}\right)} \]
            3. lower--.f64N/A

              \[\leadsto U + J \cdot \left(e^{\ell} - \color{blue}{e^{\mathsf{neg}\left(\ell\right)}}\right) \]
            4. lower-exp.f64N/A

              \[\leadsto U + J \cdot \left(e^{\ell} - e^{\color{blue}{\mathsf{neg}\left(\ell\right)}}\right) \]
            5. lower-exp.f64N/A

              \[\leadsto U + J \cdot \left(e^{\ell} - e^{\mathsf{neg}\left(\ell\right)}\right) \]
            6. lower-neg.f6472.9%

              \[\leadsto U + J \cdot \left(e^{\ell} - e^{-\ell}\right) \]
          4. Applied rewrites72.9%

            \[\leadsto \color{blue}{U + J \cdot \left(e^{\ell} - e^{-\ell}\right)} \]
          5. Step-by-step derivation
            1. lift-+.f64N/A

              \[\leadsto U + \color{blue}{J \cdot \left(e^{\ell} - e^{-\ell}\right)} \]
            2. +-commutativeN/A

              \[\leadsto J \cdot \left(e^{\ell} - e^{-\ell}\right) + \color{blue}{U} \]
            3. lift-*.f64N/A

              \[\leadsto J \cdot \left(e^{\ell} - e^{-\ell}\right) + U \]
            4. lift--.f64N/A

              \[\leadsto J \cdot \left(e^{\ell} - e^{-\ell}\right) + U \]
            5. lift-exp.f64N/A

              \[\leadsto J \cdot \left(e^{\ell} - e^{-\ell}\right) + U \]
            6. lift-exp.f64N/A

              \[\leadsto J \cdot \left(e^{\ell} - e^{-\ell}\right) + U \]
            7. lift-neg.f64N/A

              \[\leadsto J \cdot \left(e^{\ell} - e^{\mathsf{neg}\left(\ell\right)}\right) + U \]
            8. sinh-undefN/A

              \[\leadsto J \cdot \left(2 \cdot \sinh \ell\right) + U \]
            9. lift-sinh.f64N/A

              \[\leadsto J \cdot \left(2 \cdot \sinh \ell\right) + U \]
            10. associate-*r*N/A

              \[\leadsto \left(J \cdot 2\right) \cdot \sinh \ell + U \]
            11. *-commutativeN/A

              \[\leadsto \left(2 \cdot J\right) \cdot \sinh \ell + U \]
            12. count-2N/A

              \[\leadsto \left(J + J\right) \cdot \sinh \ell + U \]
            13. lift-+.f64N/A

              \[\leadsto \left(J + J\right) \cdot \sinh \ell + U \]
            14. lower-fma.f6480.2%

              \[\leadsto \mathsf{fma}\left(J + J, \color{blue}{\sinh \ell}, U\right) \]
          6. Applied rewrites80.2%

            \[\leadsto \mathsf{fma}\left(J + J, \color{blue}{\sinh \ell}, U\right) \]
        6. Recombined 3 regimes into one program.
        7. Add Preprocessing

        Alternative 6: 86.3% accurate, 1.0× speedup?

        \[\begin{array}{l} \mathbf{if}\;\cos \left(\frac{K}{2}\right) \leq -0.04:\\ \;\;\;\;\mathsf{fma}\left(J + J, \mathsf{fma}\left(-0.125, K \cdot K, 1\right) \cdot \sinh \ell, U\right)\\ \mathbf{else}:\\ \;\;\;\;\mathsf{fma}\left(J + J, \sinh \ell, U\right)\\ \end{array} \]
        ; Alternative 6: two regimes split on cos(K/2).
        ; When cos(K/2) <= -0.04 keep the quadratic cosine correction fma(-1/8, K*K, 1);
        ; otherwise use the bare sinh rewrite fma(2J, sinh l, U).
        (FPCore (J l K U)
         :precision binary64
         (if (<= (cos (/ K 2.0)) -0.04)
           (fma (+ J J) (* (fma -0.125 (* K K) 1.0) (sinh l)) U)
           (fma (+ J J) (sinh l) U)))
        double code(double J, double l, double K, double U) {
        	double tmp;
        	if (cos((K / 2.0)) <= -0.04) {
        		tmp = fma((J + J), (fma(-0.125, (K * K), 1.0) * sinh(l)), U);
        	} else {
        		tmp = fma((J + J), sinh(l), U);
        	}
        	return tmp;
        }
        
        # Alternative 6 of Eq. (4): two regimes split on cos(K/2).
        # two_j = Float64(J + J) appears identically in both generated branches,
        # so hoisting it preserves every rounding step.
        function code(J, l, K, U)
        	two_j = Float64(J + J)
        	if cos(Float64(K / 2.0)) <= -0.04
        		# keep the quadratic cosine correction fma(-1/8, K^2, 1)
        		return fma(two_j, Float64(fma(-0.125, Float64(K * K), 1.0) * sinh(l)), U)
        	end
        	return fma(two_j, sinh(l), U)
        end
        
        (* Alternative 6 in Mathematica: two regimes split on Cos[K/2] <= -0.04;
           first branch keeps the quadratic cosine correction, second drops it. *)
        code[J_, l_, K_, U_] := If[LessEqual[N[Cos[N[(K / 2.0), $MachinePrecision]], $MachinePrecision], -0.04], N[(N[(J + J), $MachinePrecision] * N[(N[(-0.125 * N[(K * K), $MachinePrecision] + 1.0), $MachinePrecision] * N[Sinh[l], $MachinePrecision]), $MachinePrecision] + U), $MachinePrecision], N[(N[(J + J), $MachinePrecision] * N[Sinh[l], $MachinePrecision] + U), $MachinePrecision]]
        
        \begin{array}{l}
        \mathbf{if}\;\cos \left(\frac{K}{2}\right) \leq -0.04:\\
        \;\;\;\;\mathsf{fma}\left(J + J, \mathsf{fma}\left(-0.125, K \cdot K, 1\right) \cdot \sinh \ell, U\right)\\
        
        \mathbf{else}:\\
        \;\;\;\;\mathsf{fma}\left(J + J, \sinh \ell, U\right)\\
        
        
        \end{array}
        
        Derivation
        1. Split input into 2 regimes
        2. if (cos.f64 (/.f64 K #s(literal 2 binary64))) < -0.0400000000000000008

          1. Initial program 86.5%

            \[\left(J \cdot \left(e^{\ell} - e^{-\ell}\right)\right) \cdot \cos \left(\frac{K}{2}\right) + U \]
          2. Taylor expanded in K around 0

            \[\leadsto \left(J \cdot \left(e^{\ell} - e^{-\ell}\right)\right) \cdot \color{blue}{\left(1 + \frac{-1}{8} \cdot {K}^{2}\right)} + U \]
          3. Step-by-step derivation
            1. lower-+.f64N/A

              \[\leadsto \left(J \cdot \left(e^{\ell} - e^{-\ell}\right)\right) \cdot \left(1 + \color{blue}{\frac{-1}{8} \cdot {K}^{2}}\right) + U \]
            2. lower-*.f64N/A

              \[\leadsto \left(J \cdot \left(e^{\ell} - e^{-\ell}\right)\right) \cdot \left(1 + \frac{-1}{8} \cdot \color{blue}{{K}^{2}}\right) + U \]
            3. lower-pow.f6464.8%

              \[\leadsto \left(J \cdot \left(e^{\ell} - e^{-\ell}\right)\right) \cdot \left(1 + -0.125 \cdot {K}^{\color{blue}{2}}\right) + U \]
          4. Applied rewrites64.8%

            \[\leadsto \left(J \cdot \left(e^{\ell} - e^{-\ell}\right)\right) \cdot \color{blue}{\left(1 + -0.125 \cdot {K}^{2}\right)} + U \]
          5. Step-by-step derivation
            1. lift-+.f64N/A

              \[\leadsto \color{blue}{\left(J \cdot \left(e^{\ell} - e^{-\ell}\right)\right) \cdot \left(1 + \frac{-1}{8} \cdot {K}^{2}\right) + U} \]
            2. lift-*.f64N/A

              \[\leadsto \color{blue}{\left(J \cdot \left(e^{\ell} - e^{-\ell}\right)\right) \cdot \left(1 + \frac{-1}{8} \cdot {K}^{2}\right)} + U \]
            3. lower-fma.f6464.8%

              \[\leadsto \color{blue}{\mathsf{fma}\left(J \cdot \left(e^{\ell} - e^{-\ell}\right), 1 + -0.125 \cdot {K}^{2}, U\right)} \]
          6. Applied rewrites69.5%

            \[\leadsto \color{blue}{\mathsf{fma}\left(\left(J + J\right) \cdot \sinh \ell, \mathsf{fma}\left(K \cdot K, -0.125, 1\right), U\right)} \]
          7. Step-by-step derivation
            1. lift-fma.f64N/A

              \[\leadsto \color{blue}{\left(\left(J + J\right) \cdot \sinh \ell\right) \cdot \mathsf{fma}\left(K \cdot K, \frac{-1}{8}, 1\right) + U} \]
            2. lift-*.f64N/A

              \[\leadsto \color{blue}{\left(\left(J + J\right) \cdot \sinh \ell\right)} \cdot \mathsf{fma}\left(K \cdot K, \frac{-1}{8}, 1\right) + U \]
            3. associate-*l*N/A

              \[\leadsto \color{blue}{\left(J + J\right) \cdot \left(\sinh \ell \cdot \mathsf{fma}\left(K \cdot K, \frac{-1}{8}, 1\right)\right)} + U \]
            4. lower-fma.f64N/A

              \[\leadsto \color{blue}{\mathsf{fma}\left(J + J, \sinh \ell \cdot \mathsf{fma}\left(K \cdot K, \frac{-1}{8}, 1\right), U\right)} \]
            5. *-commutativeN/A

              \[\leadsto \mathsf{fma}\left(J + J, \color{blue}{\mathsf{fma}\left(K \cdot K, \frac{-1}{8}, 1\right) \cdot \sinh \ell}, U\right) \]
            6. lower-*.f6469.2%

              \[\leadsto \mathsf{fma}\left(J + J, \color{blue}{\mathsf{fma}\left(K \cdot K, -0.125, 1\right) \cdot \sinh \ell}, U\right) \]
            7. lift-fma.f64N/A

              \[\leadsto \mathsf{fma}\left(J + J, \left(\left(K \cdot K\right) \cdot \frac{-1}{8} + \color{blue}{1}\right) \cdot \sinh \ell, U\right) \]
            8. *-commutativeN/A

              \[\leadsto \mathsf{fma}\left(J + J, \left(\frac{-1}{8} \cdot \left(K \cdot K\right) + 1\right) \cdot \sinh \ell, U\right) \]
            9. lower-fma.f6469.2%

              \[\leadsto \mathsf{fma}\left(J + J, \mathsf{fma}\left(-0.125, \color{blue}{K \cdot K}, 1\right) \cdot \sinh \ell, U\right) \]
          8. Applied rewrites69.2%

            \[\leadsto \color{blue}{\mathsf{fma}\left(J + J, \mathsf{fma}\left(-0.125, K \cdot K, 1\right) \cdot \sinh \ell, U\right)} \]

          if -0.0400000000000000008 < (cos.f64 (/.f64 K #s(literal 2 binary64)))

          1. Initial program 86.5%

            \[\left(J \cdot \left(e^{\ell} - e^{-\ell}\right)\right) \cdot \cos \left(\frac{K}{2}\right) + U \]
          2. Taylor expanded in K around 0

            \[\leadsto \color{blue}{U + J \cdot \left(e^{\ell} - e^{\mathsf{neg}\left(\ell\right)}\right)} \]
          3. Step-by-step derivation
            1. lower-+.f64N/A

              \[\leadsto U + \color{blue}{J \cdot \left(e^{\ell} - e^{\mathsf{neg}\left(\ell\right)}\right)} \]
            2. lower-*.f64N/A

              \[\leadsto U + J \cdot \color{blue}{\left(e^{\ell} - e^{\mathsf{neg}\left(\ell\right)}\right)} \]
            3. lower--.f64N/A

              \[\leadsto U + J \cdot \left(e^{\ell} - \color{blue}{e^{\mathsf{neg}\left(\ell\right)}}\right) \]
            4. lower-exp.f64N/A

              \[\leadsto U + J \cdot \left(e^{\ell} - e^{\color{blue}{\mathsf{neg}\left(\ell\right)}}\right) \]
            5. lower-exp.f64N/A

              \[\leadsto U + J \cdot \left(e^{\ell} - e^{\mathsf{neg}\left(\ell\right)}\right) \]
            6. lower-neg.f6472.9%

              \[\leadsto U + J \cdot \left(e^{\ell} - e^{-\ell}\right) \]
          4. Applied rewrites72.9%

            \[\leadsto \color{blue}{U + J \cdot \left(e^{\ell} - e^{-\ell}\right)} \]
          5. Step-by-step derivation
            1. lift-+.f64N/A

              \[\leadsto U + \color{blue}{J \cdot \left(e^{\ell} - e^{-\ell}\right)} \]
            2. +-commutativeN/A

              \[\leadsto J \cdot \left(e^{\ell} - e^{-\ell}\right) + \color{blue}{U} \]
            3. lift-*.f64N/A

              \[\leadsto J \cdot \left(e^{\ell} - e^{-\ell}\right) + U \]
            4. lift--.f64N/A

              \[\leadsto J \cdot \left(e^{\ell} - e^{-\ell}\right) + U \]
            5. lift-exp.f64N/A

              \[\leadsto J \cdot \left(e^{\ell} - e^{-\ell}\right) + U \]
            6. lift-exp.f64N/A

              \[\leadsto J \cdot \left(e^{\ell} - e^{-\ell}\right) + U \]
            7. lift-neg.f64N/A

              \[\leadsto J \cdot \left(e^{\ell} - e^{\mathsf{neg}\left(\ell\right)}\right) + U \]
            8. sinh-undefN/A

              \[\leadsto J \cdot \left(2 \cdot \sinh \ell\right) + U \]
            9. lift-sinh.f64N/A

              \[\leadsto J \cdot \left(2 \cdot \sinh \ell\right) + U \]
            10. associate-*r*N/A

              \[\leadsto \left(J \cdot 2\right) \cdot \sinh \ell + U \]
            11. *-commutativeN/A

              \[\leadsto \left(2 \cdot J\right) \cdot \sinh \ell + U \]
            12. count-2N/A

              \[\leadsto \left(J + J\right) \cdot \sinh \ell + U \]
            13. lift-+.f64N/A

              \[\leadsto \left(J + J\right) \cdot \sinh \ell + U \]
            14. lower-fma.f6480.2%

              \[\leadsto \mathsf{fma}\left(J + J, \color{blue}{\sinh \ell}, U\right) \]
          6. Applied rewrites80.2%

            \[\leadsto \mathsf{fma}\left(J + J, \color{blue}{\sinh \ell}, U\right) \]
        3. Recombined 2 regimes into one program.
        4. Add Preprocessing

        Alternative 7: 86.3% accurate, 1.0× speedup?

        \[\begin{array}{l} \mathbf{if}\;\cos \left(\frac{K}{2}\right) \leq -0.04:\\ \;\;\;\;\mathsf{fma}\left(\left(J + J\right) \cdot \sinh \ell, \mathsf{fma}\left(K \cdot K, -0.125, 1\right), U\right)\\ \mathbf{else}:\\ \;\;\;\;\mathsf{fma}\left(J + J, \sinh \ell, U\right)\\ \end{array} \]
        ; Alternative 7: like Alternative 6 but with the fma arguments associated
        ; differently in the first branch: (2J * sinh l) is rounded before the fma.
        (FPCore (J l K U)
         :precision binary64
         (if (<= (cos (/ K 2.0)) -0.04)
           (fma (* (+ J J) (sinh l)) (fma (* K K) -0.125 1.0) U)
           (fma (+ J J) (sinh l) U)))
        double code(double J, double l, double K, double U) {
        	double tmp;
        	if (cos((K / 2.0)) <= -0.04) {
        		tmp = fma(((J + J) * sinh(l)), fma((K * K), -0.125, 1.0), U);
        	} else {
        		tmp = fma((J + J), sinh(l), U);
        	}
        	return tmp;
        }
        
        # Alternative 7 of Eq. (4): split on cos(K/2); the first branch rounds
        # Float64(two_j * sinh(l)) before the outer fma, matching the generated form.
        function code(J, l, K, U)
        	two_j = Float64(J + J)
        	if cos(Float64(K / 2.0)) <= -0.04
        		return fma(Float64(two_j * sinh(l)), fma(Float64(K * K), -0.125, 1.0), U)
        	end
        	return fma(two_j, sinh(l), U)
        end
        
        (* Alternative 7 in Mathematica: two regimes split on Cos[K/2] <= -0.04;
           first branch rounds (J+J)*Sinh[l] before applying the cosine correction. *)
        code[J_, l_, K_, U_] := If[LessEqual[N[Cos[N[(K / 2.0), $MachinePrecision]], $MachinePrecision], -0.04], N[(N[(N[(J + J), $MachinePrecision] * N[Sinh[l], $MachinePrecision]), $MachinePrecision] * N[(N[(K * K), $MachinePrecision] * -0.125 + 1.0), $MachinePrecision] + U), $MachinePrecision], N[(N[(J + J), $MachinePrecision] * N[Sinh[l], $MachinePrecision] + U), $MachinePrecision]]
        
        \begin{array}{l}
        \mathbf{if}\;\cos \left(\frac{K}{2}\right) \leq -0.04:\\
        \;\;\;\;\mathsf{fma}\left(\left(J + J\right) \cdot \sinh \ell, \mathsf{fma}\left(K \cdot K, -0.125, 1\right), U\right)\\
        
        \mathbf{else}:\\
        \;\;\;\;\mathsf{fma}\left(J + J, \sinh \ell, U\right)\\
        
        
        \end{array}
        
        Derivation
        1. Split input into 2 regimes
        2. if (cos.f64 (/.f64 K #s(literal 2 binary64))) < -0.0400000000000000008

          1. Initial program 86.5%

            \[\left(J \cdot \left(e^{\ell} - e^{-\ell}\right)\right) \cdot \cos \left(\frac{K}{2}\right) + U \]
          2. Taylor expanded in K around 0

            \[\leadsto \left(J \cdot \left(e^{\ell} - e^{-\ell}\right)\right) \cdot \color{blue}{\left(1 + \frac{-1}{8} \cdot {K}^{2}\right)} + U \]
          3. Step-by-step derivation
            1. lower-+.f64N/A

              \[\leadsto \left(J \cdot \left(e^{\ell} - e^{-\ell}\right)\right) \cdot \left(1 + \color{blue}{\frac{-1}{8} \cdot {K}^{2}}\right) + U \]
            2. lower-*.f64N/A

              \[\leadsto \left(J \cdot \left(e^{\ell} - e^{-\ell}\right)\right) \cdot \left(1 + \frac{-1}{8} \cdot \color{blue}{{K}^{2}}\right) + U \]
            3. lower-pow.f64 64.8%

              \[\leadsto \left(J \cdot \left(e^{\ell} - e^{-\ell}\right)\right) \cdot \left(1 + -0.125 \cdot {K}^{\color{blue}{2}}\right) + U \]
          4. Applied rewrites 64.8%

            \[\leadsto \left(J \cdot \left(e^{\ell} - e^{-\ell}\right)\right) \cdot \color{blue}{\left(1 + -0.125 \cdot {K}^{2}\right)} + U \]
          5. Step-by-step derivation
            1. lift-+.f64N/A

              \[\leadsto \color{blue}{\left(J \cdot \left(e^{\ell} - e^{-\ell}\right)\right) \cdot \left(1 + \frac{-1}{8} \cdot {K}^{2}\right) + U} \]
            2. lift-*.f64N/A

              \[\leadsto \color{blue}{\left(J \cdot \left(e^{\ell} - e^{-\ell}\right)\right) \cdot \left(1 + \frac{-1}{8} \cdot {K}^{2}\right)} + U \]
            3. lower-fma.f64 64.8%

              \[\leadsto \color{blue}{\mathsf{fma}\left(J \cdot \left(e^{\ell} - e^{-\ell}\right), 1 + -0.125 \cdot {K}^{2}, U\right)} \]
          6. Applied rewrites 69.5%

            \[\leadsto \color{blue}{\mathsf{fma}\left(\left(J + J\right) \cdot \sinh \ell, \mathsf{fma}\left(K \cdot K, -0.125, 1\right), U\right)} \]

          if -0.0400000000000000008 < (cos.f64 (/.f64 K #s(literal 2 binary64)))

          1. Initial program 86.5%

            \[\left(J \cdot \left(e^{\ell} - e^{-\ell}\right)\right) \cdot \cos \left(\frac{K}{2}\right) + U \]
          2. Taylor expanded in K around 0

            \[\leadsto \color{blue}{U + J \cdot \left(e^{\ell} - e^{\mathsf{neg}\left(\ell\right)}\right)} \]
          3. Step-by-step derivation
            1. lower-+.f64N/A

              \[\leadsto U + \color{blue}{J \cdot \left(e^{\ell} - e^{\mathsf{neg}\left(\ell\right)}\right)} \]
            2. lower-*.f64N/A

              \[\leadsto U + J \cdot \color{blue}{\left(e^{\ell} - e^{\mathsf{neg}\left(\ell\right)}\right)} \]
            3. lower--.f64N/A

              \[\leadsto U + J \cdot \left(e^{\ell} - \color{blue}{e^{\mathsf{neg}\left(\ell\right)}}\right) \]
            4. lower-exp.f64N/A

              \[\leadsto U + J \cdot \left(e^{\ell} - e^{\color{blue}{\mathsf{neg}\left(\ell\right)}}\right) \]
            5. lower-exp.f64N/A

              \[\leadsto U + J \cdot \left(e^{\ell} - e^{\mathsf{neg}\left(\ell\right)}\right) \]
            6. lower-neg.f64 72.9%

              \[\leadsto U + J \cdot \left(e^{\ell} - e^{-\ell}\right) \]
          4. Applied rewrites 72.9%

            \[\leadsto \color{blue}{U + J \cdot \left(e^{\ell} - e^{-\ell}\right)} \]
          5. Step-by-step derivation
            1. lift-+.f64N/A

              \[\leadsto U + \color{blue}{J \cdot \left(e^{\ell} - e^{-\ell}\right)} \]
            2. +-commutativeN/A

              \[\leadsto J \cdot \left(e^{\ell} - e^{-\ell}\right) + \color{blue}{U} \]
            3. lift-*.f64N/A

              \[\leadsto J \cdot \left(e^{\ell} - e^{-\ell}\right) + U \]
            4. lift--.f64N/A

              \[\leadsto J \cdot \left(e^{\ell} - e^{-\ell}\right) + U \]
            5. lift-exp.f64N/A

              \[\leadsto J \cdot \left(e^{\ell} - e^{-\ell}\right) + U \]
            6. lift-exp.f64N/A

              \[\leadsto J \cdot \left(e^{\ell} - e^{-\ell}\right) + U \]
            7. lift-neg.f64N/A

              \[\leadsto J \cdot \left(e^{\ell} - e^{\mathsf{neg}\left(\ell\right)}\right) + U \]
            8. sinh-undefN/A

              \[\leadsto J \cdot \left(2 \cdot \sinh \ell\right) + U \]
            9. lift-sinh.f64N/A

              \[\leadsto J \cdot \left(2 \cdot \sinh \ell\right) + U \]
            10. associate-*r*N/A

              \[\leadsto \left(J \cdot 2\right) \cdot \sinh \ell + U \]
            11. *-commutativeN/A

              \[\leadsto \left(2 \cdot J\right) \cdot \sinh \ell + U \]
            12. count-2N/A

              \[\leadsto \left(J + J\right) \cdot \sinh \ell + U \]
            13. lift-+.f64N/A

              \[\leadsto \left(J + J\right) \cdot \sinh \ell + U \]
            14. lower-fma.f64 80.2%

              \[\leadsto \mathsf{fma}\left(J + J, \color{blue}{\sinh \ell}, U\right) \]
          6. Applied rewrites 80.2%

            \[\leadsto \mathsf{fma}\left(J + J, \color{blue}{\sinh \ell}, U\right) \]
        3. Recombined 2 regimes into one program.
        4. Add Preprocessing

        Alternative 8: 85.6% accurate, 1.1× speedup?

        \[\begin{array}{l} \mathbf{if}\;\cos \left(\frac{K}{2}\right) \leq -0.04:\\ \;\;\;\;\mathsf{fma}\left(J + J, \mathsf{fma}\left(-0.125, K \cdot K, 1\right) \cdot \ell, U\right)\\ \mathbf{else}:\\ \;\;\;\;\mathsf{fma}\left(J + J, \sinh \ell, U\right)\\ \end{array} \]
        (FPCore (J l K U)
         :precision binary64
         (if (<= (cos (/ K 2.0)) -0.04)
           (fma (+ J J) (* (fma -0.125 (* K K) 1.0) l) U)
           (fma (+ J J) (sinh l) U)))
        double code(double J, double l, double K, double U) {
        	double tmp;
        	if (cos((K / 2.0)) <= -0.04) {
        		tmp = fma((J + J), (fma(-0.125, (K * K), 1.0) * l), U);
        	} else {
        		tmp = fma((J + J), sinh(l), U);
        	}
        	return tmp;
        }
        
        function code(J, l, K, U)
        	tmp = 0.0
        	if (cos(Float64(K / 2.0)) <= -0.04)
        		tmp = fma(Float64(J + J), Float64(fma(-0.125, Float64(K * K), 1.0) * l), U);
        	else
        		tmp = fma(Float64(J + J), sinh(l), U);
        	end
        	return tmp
        end
        
        code[J_, l_, K_, U_] := If[LessEqual[N[Cos[N[(K / 2.0), $MachinePrecision]], $MachinePrecision], -0.04], N[(N[(J + J), $MachinePrecision] * N[(N[(-0.125 * N[(K * K), $MachinePrecision] + 1.0), $MachinePrecision] * l), $MachinePrecision] + U), $MachinePrecision], N[(N[(J + J), $MachinePrecision] * N[Sinh[l], $MachinePrecision] + U), $MachinePrecision]]
        
        \begin{array}{l}
        \mathbf{if}\;\cos \left(\frac{K}{2}\right) \leq -0.04:\\
        \;\;\;\;\mathsf{fma}\left(J + J, \mathsf{fma}\left(-0.125, K \cdot K, 1\right) \cdot \ell, U\right)\\
        
        \mathbf{else}:\\
        \;\;\;\;\mathsf{fma}\left(J + J, \sinh \ell, U\right)\\
        
        
        \end{array}
        
        Derivation
        1. Split input into 2 regimes
        2. if (cos.f64 (/.f64 K #s(literal 2 binary64))) < -0.0400000000000000008

          1. Initial program 86.5%

            \[\left(J \cdot \left(e^{\ell} - e^{-\ell}\right)\right) \cdot \cos \left(\frac{K}{2}\right) + U \]
          2. Taylor expanded in K around 0

            \[\leadsto \left(J \cdot \left(e^{\ell} - e^{-\ell}\right)\right) \cdot \color{blue}{\left(1 + \frac{-1}{8} \cdot {K}^{2}\right)} + U \]
          3. Step-by-step derivation
            1. lower-+.f64N/A

              \[\leadsto \left(J \cdot \left(e^{\ell} - e^{-\ell}\right)\right) \cdot \left(1 + \color{blue}{\frac{-1}{8} \cdot {K}^{2}}\right) + U \]
            2. lower-*.f64N/A

              \[\leadsto \left(J \cdot \left(e^{\ell} - e^{-\ell}\right)\right) \cdot \left(1 + \frac{-1}{8} \cdot \color{blue}{{K}^{2}}\right) + U \]
            3. lower-pow.f64 64.8%

              \[\leadsto \left(J \cdot \left(e^{\ell} - e^{-\ell}\right)\right) \cdot \left(1 + -0.125 \cdot {K}^{\color{blue}{2}}\right) + U \]
          4. Applied rewrites 64.8%

            \[\leadsto \left(J \cdot \left(e^{\ell} - e^{-\ell}\right)\right) \cdot \color{blue}{\left(1 + -0.125 \cdot {K}^{2}\right)} + U \]
          5. Step-by-step derivation
            1. lift-+.f64N/A

              \[\leadsto \color{blue}{\left(J \cdot \left(e^{\ell} - e^{-\ell}\right)\right) \cdot \left(1 + \frac{-1}{8} \cdot {K}^{2}\right) + U} \]
            2. lift-*.f64N/A

              \[\leadsto \color{blue}{\left(J \cdot \left(e^{\ell} - e^{-\ell}\right)\right) \cdot \left(1 + \frac{-1}{8} \cdot {K}^{2}\right)} + U \]
            3. lower-fma.f64 64.8%

              \[\leadsto \color{blue}{\mathsf{fma}\left(J \cdot \left(e^{\ell} - e^{-\ell}\right), 1 + -0.125 \cdot {K}^{2}, U\right)} \]
          6. Applied rewrites 69.5%

            \[\leadsto \color{blue}{\mathsf{fma}\left(\left(J + J\right) \cdot \sinh \ell, \mathsf{fma}\left(K \cdot K, -0.125, 1\right), U\right)} \]
          7. Step-by-step derivation
            1. lift-fma.f64N/A

              \[\leadsto \color{blue}{\left(\left(J + J\right) \cdot \sinh \ell\right) \cdot \mathsf{fma}\left(K \cdot K, \frac{-1}{8}, 1\right) + U} \]
            2. lift-*.f64N/A

              \[\leadsto \color{blue}{\left(\left(J + J\right) \cdot \sinh \ell\right)} \cdot \mathsf{fma}\left(K \cdot K, \frac{-1}{8}, 1\right) + U \]
            3. associate-*l*N/A

              \[\leadsto \color{blue}{\left(J + J\right) \cdot \left(\sinh \ell \cdot \mathsf{fma}\left(K \cdot K, \frac{-1}{8}, 1\right)\right)} + U \]
            4. lower-fma.f64N/A

              \[\leadsto \color{blue}{\mathsf{fma}\left(J + J, \sinh \ell \cdot \mathsf{fma}\left(K \cdot K, \frac{-1}{8}, 1\right), U\right)} \]
            5. *-commutativeN/A

              \[\leadsto \mathsf{fma}\left(J + J, \color{blue}{\mathsf{fma}\left(K \cdot K, \frac{-1}{8}, 1\right) \cdot \sinh \ell}, U\right) \]
            6. lower-*.f64 69.2%

              \[\leadsto \mathsf{fma}\left(J + J, \color{blue}{\mathsf{fma}\left(K \cdot K, -0.125, 1\right) \cdot \sinh \ell}, U\right) \]
            7. lift-fma.f64N/A

              \[\leadsto \mathsf{fma}\left(J + J, \left(\left(K \cdot K\right) \cdot \frac{-1}{8} + \color{blue}{1}\right) \cdot \sinh \ell, U\right) \]
            8. *-commutativeN/A

              \[\leadsto \mathsf{fma}\left(J + J, \left(\frac{-1}{8} \cdot \left(K \cdot K\right) + 1\right) \cdot \sinh \ell, U\right) \]
            9. lower-fma.f64 69.2%

              \[\leadsto \mathsf{fma}\left(J + J, \mathsf{fma}\left(-0.125, \color{blue}{K \cdot K}, 1\right) \cdot \sinh \ell, U\right) \]
          8. Applied rewrites 69.2%

            \[\leadsto \color{blue}{\mathsf{fma}\left(J + J, \mathsf{fma}\left(-0.125, K \cdot K, 1\right) \cdot \sinh \ell, U\right)} \]
          9. Taylor expanded in l around 0

            \[\leadsto \mathsf{fma}\left(J + J, \mathsf{fma}\left(-0.125, K \cdot K, 1\right) \cdot \color{blue}{\ell}, U\right) \]
          10. Step-by-step derivation
            1. Applied rewrites 48.9%

              \[\leadsto \mathsf{fma}\left(J + J, \mathsf{fma}\left(-0.125, K \cdot K, 1\right) \cdot \color{blue}{\ell}, U\right) \]

            if -0.0400000000000000008 < (cos.f64 (/.f64 K #s(literal 2 binary64)))

            1. Initial program 86.5%

              \[\left(J \cdot \left(e^{\ell} - e^{-\ell}\right)\right) \cdot \cos \left(\frac{K}{2}\right) + U \]
            2. Taylor expanded in K around 0

              \[\leadsto \color{blue}{U + J \cdot \left(e^{\ell} - e^{\mathsf{neg}\left(\ell\right)}\right)} \]
            3. Step-by-step derivation
              1. lower-+.f64N/A

                \[\leadsto U + \color{blue}{J \cdot \left(e^{\ell} - e^{\mathsf{neg}\left(\ell\right)}\right)} \]
              2. lower-*.f64N/A

                \[\leadsto U + J \cdot \color{blue}{\left(e^{\ell} - e^{\mathsf{neg}\left(\ell\right)}\right)} \]
              3. lower--.f64N/A

                \[\leadsto U + J \cdot \left(e^{\ell} - \color{blue}{e^{\mathsf{neg}\left(\ell\right)}}\right) \]
              4. lower-exp.f64N/A

                \[\leadsto U + J \cdot \left(e^{\ell} - e^{\color{blue}{\mathsf{neg}\left(\ell\right)}}\right) \]
              5. lower-exp.f64N/A

                \[\leadsto U + J \cdot \left(e^{\ell} - e^{\mathsf{neg}\left(\ell\right)}\right) \]
              6. lower-neg.f64 72.9%

                \[\leadsto U + J \cdot \left(e^{\ell} - e^{-\ell}\right) \]
            4. Applied rewrites 72.9%

              \[\leadsto \color{blue}{U + J \cdot \left(e^{\ell} - e^{-\ell}\right)} \]
            5. Step-by-step derivation
              1. lift-+.f64N/A

                \[\leadsto U + \color{blue}{J \cdot \left(e^{\ell} - e^{-\ell}\right)} \]
              2. +-commutativeN/A

                \[\leadsto J \cdot \left(e^{\ell} - e^{-\ell}\right) + \color{blue}{U} \]
              3. lift-*.f64N/A

                \[\leadsto J \cdot \left(e^{\ell} - e^{-\ell}\right) + U \]
              4. lift--.f64N/A

                \[\leadsto J \cdot \left(e^{\ell} - e^{-\ell}\right) + U \]
              5. lift-exp.f64N/A

                \[\leadsto J \cdot \left(e^{\ell} - e^{-\ell}\right) + U \]
              6. lift-exp.f64N/A

                \[\leadsto J \cdot \left(e^{\ell} - e^{-\ell}\right) + U \]
              7. lift-neg.f64N/A

                \[\leadsto J \cdot \left(e^{\ell} - e^{\mathsf{neg}\left(\ell\right)}\right) + U \]
              8. sinh-undefN/A

                \[\leadsto J \cdot \left(2 \cdot \sinh \ell\right) + U \]
              9. lift-sinh.f64N/A

                \[\leadsto J \cdot \left(2 \cdot \sinh \ell\right) + U \]
              10. associate-*r*N/A

                \[\leadsto \left(J \cdot 2\right) \cdot \sinh \ell + U \]
              11. *-commutativeN/A

                \[\leadsto \left(2 \cdot J\right) \cdot \sinh \ell + U \]
              12. count-2N/A

                \[\leadsto \left(J + J\right) \cdot \sinh \ell + U \]
              13. lift-+.f64N/A

                \[\leadsto \left(J + J\right) \cdot \sinh \ell + U \]
              14. lower-fma.f64 80.2%

                \[\leadsto \mathsf{fma}\left(J + J, \color{blue}{\sinh \ell}, U\right) \]
            6. Applied rewrites 80.2%

              \[\leadsto \mathsf{fma}\left(J + J, \color{blue}{\sinh \ell}, U\right) \]
          11. Recombined 2 regimes into one program.
          12. Add Preprocessing

          Alternative 9: 70.4% accurate, 2.5× speedup?

          \[\begin{array}{l} \mathbf{if}\;\ell \leq -11:\\ \;\;\;\;U + J \cdot \left(1 - e^{-\ell}\right)\\ \mathbf{elif}\;\ell \leq 1.34 \cdot 10^{+26}:\\ \;\;\;\;\mathsf{fma}\left(2, J \cdot \ell, U\right)\\ \mathbf{else}:\\ \;\;\;\;\mathsf{fma}\left(J + J, \mathsf{fma}\left(-0.125, K \cdot K, 1\right) \cdot \ell, U\right)\\ \end{array} \]
          (FPCore (J l K U)
           :precision binary64
           (if (<= l -11.0)
             (+ U (* J (- 1.0 (exp (- l)))))
             (if (<= l 1.34e+26)
               (fma 2.0 (* J l) U)
               (fma (+ J J) (* (fma -0.125 (* K K) 1.0) l) U))))
          double code(double J, double l, double K, double U) {
          	double tmp;
          	if (l <= -11.0) {
          		tmp = U + (J * (1.0 - exp(-l)));
          	} else if (l <= 1.34e+26) {
          		tmp = fma(2.0, (J * l), U);
          	} else {
          		tmp = fma((J + J), (fma(-0.125, (K * K), 1.0) * l), U);
          	}
          	return tmp;
          }
          
          function code(J, l, K, U)
          	tmp = 0.0
          	if (l <= -11.0)
          		tmp = Float64(U + Float64(J * Float64(1.0 - exp(Float64(-l)))));
          	elseif (l <= 1.34e+26)
          		tmp = fma(2.0, Float64(J * l), U);
          	else
          		tmp = fma(Float64(J + J), Float64(fma(-0.125, Float64(K * K), 1.0) * l), U);
          	end
          	return tmp
          end
          
          code[J_, l_, K_, U_] := If[LessEqual[l, -11.0], N[(U + N[(J * N[(1.0 - N[Exp[(-l)], $MachinePrecision]), $MachinePrecision]), $MachinePrecision]), $MachinePrecision], If[LessEqual[l, 1.34e+26], N[(2.0 * N[(J * l), $MachinePrecision] + U), $MachinePrecision], N[(N[(J + J), $MachinePrecision] * N[(N[(-0.125 * N[(K * K), $MachinePrecision] + 1.0), $MachinePrecision] * l), $MachinePrecision] + U), $MachinePrecision]]]
          
          \begin{array}{l}
          \mathbf{if}\;\ell \leq -11:\\
          \;\;\;\;U + J \cdot \left(1 - e^{-\ell}\right)\\
          
          \mathbf{elif}\;\ell \leq 1.34 \cdot 10^{+26}:\\
          \;\;\;\;\mathsf{fma}\left(2, J \cdot \ell, U\right)\\
          
          \mathbf{else}:\\
          \;\;\;\;\mathsf{fma}\left(J + J, \mathsf{fma}\left(-0.125, K \cdot K, 1\right) \cdot \ell, U\right)\\
          
          
          \end{array}
          
          Derivation
          1. Split input into 3 regimes
          2. if l < -11

            1. Initial program 86.5%

              \[\left(J \cdot \left(e^{\ell} - e^{-\ell}\right)\right) \cdot \cos \left(\frac{K}{2}\right) + U \]
            2. Taylor expanded in K around 0

              \[\leadsto \color{blue}{U + J \cdot \left(e^{\ell} - e^{\mathsf{neg}\left(\ell\right)}\right)} \]
            3. Step-by-step derivation
              1. lower-+.f64N/A

                \[\leadsto U + \color{blue}{J \cdot \left(e^{\ell} - e^{\mathsf{neg}\left(\ell\right)}\right)} \]
              2. lower-*.f64N/A

                \[\leadsto U + J \cdot \color{blue}{\left(e^{\ell} - e^{\mathsf{neg}\left(\ell\right)}\right)} \]
              3. lower--.f64N/A

                \[\leadsto U + J \cdot \left(e^{\ell} - \color{blue}{e^{\mathsf{neg}\left(\ell\right)}}\right) \]
              4. lower-exp.f64N/A

                \[\leadsto U + J \cdot \left(e^{\ell} - e^{\color{blue}{\mathsf{neg}\left(\ell\right)}}\right) \]
              5. lower-exp.f64N/A

                \[\leadsto U + J \cdot \left(e^{\ell} - e^{\mathsf{neg}\left(\ell\right)}\right) \]
              6. lower-neg.f64 72.9%

                \[\leadsto U + J \cdot \left(e^{\ell} - e^{-\ell}\right) \]
            4. Applied rewrites 72.9%

              \[\leadsto \color{blue}{U + J \cdot \left(e^{\ell} - e^{-\ell}\right)} \]
            5. Taylor expanded in l around 0

              \[\leadsto U + J \cdot \left(1 - e^{\color{blue}{-\ell}}\right) \]
            6. Step-by-step derivation
              1. Applied rewrites 55.3%

                \[\leadsto U + J \cdot \left(1 - e^{\color{blue}{-\ell}}\right) \]

              if -11 < l < 1.34000000000000007e26

              1. Initial program 86.5%

                \[\left(J \cdot \left(e^{\ell} - e^{-\ell}\right)\right) \cdot \cos \left(\frac{K}{2}\right) + U \]
              2. Step-by-step derivation
                1. lift-+.f64N/A

                  \[\leadsto \color{blue}{\left(J \cdot \left(e^{\ell} - e^{-\ell}\right)\right) \cdot \cos \left(\frac{K}{2}\right) + U} \]
                2. lift-*.f64N/A

                  \[\leadsto \color{blue}{\left(J \cdot \left(e^{\ell} - e^{-\ell}\right)\right) \cdot \cos \left(\frac{K}{2}\right)} + U \]
                3. *-commutativeN/A

                  \[\leadsto \color{blue}{\cos \left(\frac{K}{2}\right) \cdot \left(J \cdot \left(e^{\ell} - e^{-\ell}\right)\right)} + U \]
                4. lift-*.f64N/A

                  \[\leadsto \cos \left(\frac{K}{2}\right) \cdot \color{blue}{\left(J \cdot \left(e^{\ell} - e^{-\ell}\right)\right)} + U \]
                5. *-commutativeN/A

                  \[\leadsto \cos \left(\frac{K}{2}\right) \cdot \color{blue}{\left(\left(e^{\ell} - e^{-\ell}\right) \cdot J\right)} + U \]
                6. lift--.f64N/A

                  \[\leadsto \cos \left(\frac{K}{2}\right) \cdot \left(\color{blue}{\left(e^{\ell} - e^{-\ell}\right)} \cdot J\right) + U \]
                7. lift-exp.f64N/A

                  \[\leadsto \cos \left(\frac{K}{2}\right) \cdot \left(\left(\color{blue}{e^{\ell}} - e^{-\ell}\right) \cdot J\right) + U \]
                8. lift-exp.f64N/A

                  \[\leadsto \cos \left(\frac{K}{2}\right) \cdot \left(\left(e^{\ell} - \color{blue}{e^{-\ell}}\right) \cdot J\right) + U \]
                9. lift-neg.f64N/A

                  \[\leadsto \cos \left(\frac{K}{2}\right) \cdot \left(\left(e^{\ell} - e^{\color{blue}{\mathsf{neg}\left(\ell\right)}}\right) \cdot J\right) + U \]
                10. sinh-undefN/A

                  \[\leadsto \cos \left(\frac{K}{2}\right) \cdot \left(\color{blue}{\left(2 \cdot \sinh \ell\right)} \cdot J\right) + U \]
                11. associate-*l*N/A

                  \[\leadsto \cos \left(\frac{K}{2}\right) \cdot \color{blue}{\left(2 \cdot \left(\sinh \ell \cdot J\right)\right)} + U \]
                12. associate-*r*N/A

                  \[\leadsto \color{blue}{\left(\cos \left(\frac{K}{2}\right) \cdot 2\right) \cdot \left(\sinh \ell \cdot J\right)} + U \]
                13. lower-fma.f64N/A

                  \[\leadsto \color{blue}{\mathsf{fma}\left(\cos \left(\frac{K}{2}\right) \cdot 2, \sinh \ell \cdot J, U\right)} \]
              3. Applied rewrites 100.0%

                \[\leadsto \color{blue}{\mathsf{fma}\left(\cos \left(-0.5 \cdot K\right) \cdot 2, \sinh \ell \cdot J, U\right)} \]
              4. Taylor expanded in K around 0

                \[\leadsto \mathsf{fma}\left(\color{blue}{2}, \sinh \ell \cdot J, U\right) \]
              5. Step-by-step derivation
                1. Applied rewrites 80.2%

                  \[\leadsto \mathsf{fma}\left(\color{blue}{2}, \sinh \ell \cdot J, U\right) \]
                2. Taylor expanded in l around 0

                  \[\leadsto \mathsf{fma}\left(2, \color{blue}{J \cdot \ell}, U\right) \]
                3. Step-by-step derivation
                  1. lower-*.f64 53.4%

                    \[\leadsto \mathsf{fma}\left(2, J \cdot \color{blue}{\ell}, U\right) \]
                4. Applied rewrites 53.4%

                  \[\leadsto \mathsf{fma}\left(2, \color{blue}{J \cdot \ell}, U\right) \]

                if 1.34000000000000007e26 < l

                1. Initial program 86.5%

                  \[\left(J \cdot \left(e^{\ell} - e^{-\ell}\right)\right) \cdot \cos \left(\frac{K}{2}\right) + U \]
                2. Taylor expanded in K around 0

                  \[\leadsto \left(J \cdot \left(e^{\ell} - e^{-\ell}\right)\right) \cdot \color{blue}{\left(1 + \frac{-1}{8} \cdot {K}^{2}\right)} + U \]
                3. Step-by-step derivation
                  1. lower-+.f64N/A

                    \[\leadsto \left(J \cdot \left(e^{\ell} - e^{-\ell}\right)\right) \cdot \left(1 + \color{blue}{\frac{-1}{8} \cdot {K}^{2}}\right) + U \]
                  2. lower-*.f64N/A

                    \[\leadsto \left(J \cdot \left(e^{\ell} - e^{-\ell}\right)\right) \cdot \left(1 + \frac{-1}{8} \cdot \color{blue}{{K}^{2}}\right) + U \]
                  3. lower-pow.f64 64.8%

                    \[\leadsto \left(J \cdot \left(e^{\ell} - e^{-\ell}\right)\right) \cdot \left(1 + -0.125 \cdot {K}^{\color{blue}{2}}\right) + U \]
                4. Applied rewrites 64.8%

                  \[\leadsto \left(J \cdot \left(e^{\ell} - e^{-\ell}\right)\right) \cdot \color{blue}{\left(1 + -0.125 \cdot {K}^{2}\right)} + U \]
                5. Step-by-step derivation
                  1. lift-+.f64N/A

                    \[\leadsto \color{blue}{\left(J \cdot \left(e^{\ell} - e^{-\ell}\right)\right) \cdot \left(1 + \frac{-1}{8} \cdot {K}^{2}\right) + U} \]
                  2. lift-*.f64N/A

                    \[\leadsto \color{blue}{\left(J \cdot \left(e^{\ell} - e^{-\ell}\right)\right) \cdot \left(1 + \frac{-1}{8} \cdot {K}^{2}\right)} + U \]
                  3. lower-fma.f64 64.8%

                    \[\leadsto \color{blue}{\mathsf{fma}\left(J \cdot \left(e^{\ell} - e^{-\ell}\right), 1 + -0.125 \cdot {K}^{2}, U\right)} \]
                6. Applied rewrites 69.5%

                  \[\leadsto \color{blue}{\mathsf{fma}\left(\left(J + J\right) \cdot \sinh \ell, \mathsf{fma}\left(K \cdot K, -0.125, 1\right), U\right)} \]
                7. Step-by-step derivation
                  1. lift-fma.f64N/A

                    \[\leadsto \color{blue}{\left(\left(J + J\right) \cdot \sinh \ell\right) \cdot \mathsf{fma}\left(K \cdot K, \frac{-1}{8}, 1\right) + U} \]
                  2. lift-*.f64N/A

                    \[\leadsto \color{blue}{\left(\left(J + J\right) \cdot \sinh \ell\right)} \cdot \mathsf{fma}\left(K \cdot K, \frac{-1}{8}, 1\right) + U \]
                  3. associate-*l*N/A

                    \[\leadsto \color{blue}{\left(J + J\right) \cdot \left(\sinh \ell \cdot \mathsf{fma}\left(K \cdot K, \frac{-1}{8}, 1\right)\right)} + U \]
                  4. lower-fma.f64N/A

                    \[\leadsto \color{blue}{\mathsf{fma}\left(J + J, \sinh \ell \cdot \mathsf{fma}\left(K \cdot K, \frac{-1}{8}, 1\right), U\right)} \]
                  5. *-commutativeN/A

                    \[\leadsto \mathsf{fma}\left(J + J, \color{blue}{\mathsf{fma}\left(K \cdot K, \frac{-1}{8}, 1\right) \cdot \sinh \ell}, U\right) \]
                  6. lower-*.f64 69.2%

                    \[\leadsto \mathsf{fma}\left(J + J, \color{blue}{\mathsf{fma}\left(K \cdot K, -0.125, 1\right) \cdot \sinh \ell}, U\right) \]
                  7. lift-fma.f64N/A

                    \[\leadsto \mathsf{fma}\left(J + J, \left(\left(K \cdot K\right) \cdot \frac{-1}{8} + \color{blue}{1}\right) \cdot \sinh \ell, U\right) \]
                  8. *-commutativeN/A

                    \[\leadsto \mathsf{fma}\left(J + J, \left(\frac{-1}{8} \cdot \left(K \cdot K\right) + 1\right) \cdot \sinh \ell, U\right) \]
                  9. lower-fma.f64 69.2%

                    \[\leadsto \mathsf{fma}\left(J + J, \mathsf{fma}\left(-0.125, \color{blue}{K \cdot K}, 1\right) \cdot \sinh \ell, U\right) \]
                8. Applied rewrites 69.2%

                  \[\leadsto \color{blue}{\mathsf{fma}\left(J + J, \mathsf{fma}\left(-0.125, K \cdot K, 1\right) \cdot \sinh \ell, U\right)} \]
                9. Taylor expanded in l around 0

                  \[\leadsto \mathsf{fma}\left(J + J, \mathsf{fma}\left(-0.125, K \cdot K, 1\right) \cdot \color{blue}{\ell}, U\right) \]
                10. Step-by-step derivation
                  1. Applied rewrites 48.9%

                    \[\leadsto \mathsf{fma}\left(J + J, \mathsf{fma}\left(-0.125, K \cdot K, 1\right) \cdot \color{blue}{\ell}, U\right) \]
                11. Recombined 3 regimes into one program.
                12. Add Preprocessing

                Alternative 10: 62.4% accurate, 1.2× speedup?

                \[\begin{array}{l} \mathbf{if}\;\cos \left(\frac{K}{2}\right) \leq -0.04:\\ \;\;\;\;\mathsf{fma}\left(J + J, \mathsf{fma}\left(-0.125, K \cdot K, 1\right) \cdot \ell, U\right)\\ \mathbf{else}:\\ \;\;\;\;\left(1 + \frac{\left(\ell \cdot J\right) \cdot 2}{U}\right) \cdot U\\ \end{array} \]
                (FPCore (J l K U)
                 :precision binary64
                 (if (<= (cos (/ K 2.0)) -0.04)
                   (fma (+ J J) (* (fma -0.125 (* K K) 1.0) l) U)
                   (* (+ 1.0 (/ (* (* l J) 2.0) U)) U)))
                double code(double J, double l, double K, double U) {
                	double tmp;
                	if (cos((K / 2.0)) <= -0.04) {
                		tmp = fma((J + J), (fma(-0.125, (K * K), 1.0) * l), U);
                	} else {
                		tmp = (1.0 + (((l * J) * 2.0) / U)) * U;
                	}
                	return tmp;
                }
                
                function code(J, l, K, U)
                	tmp = 0.0
                	if (cos(Float64(K / 2.0)) <= -0.04)
                		tmp = fma(Float64(J + J), Float64(fma(-0.125, Float64(K * K), 1.0) * l), U);
                	else
                		tmp = Float64(Float64(1.0 + Float64(Float64(Float64(l * J) * 2.0) / U)) * U);
                	end
                	return tmp
                end
                
                code[J_, l_, K_, U_] := If[LessEqual[N[Cos[N[(K / 2.0), $MachinePrecision]], $MachinePrecision], -0.04], N[(N[(J + J), $MachinePrecision] * N[(N[(-0.125 * N[(K * K), $MachinePrecision] + 1.0), $MachinePrecision] * l), $MachinePrecision] + U), $MachinePrecision], N[(N[(1.0 + N[(N[(N[(l * J), $MachinePrecision] * 2.0), $MachinePrecision] / U), $MachinePrecision]), $MachinePrecision] * U), $MachinePrecision]]
                
                \begin{array}{l}
                \mathbf{if}\;\cos \left(\frac{K}{2}\right) \leq -0.04:\\
                \;\;\;\;\mathsf{fma}\left(J + J, \mathsf{fma}\left(-0.125, K \cdot K, 1\right) \cdot \ell, U\right)\\
                
                \mathbf{else}:\\
                \;\;\;\;\left(1 + \frac{\left(\ell \cdot J\right) \cdot 2}{U}\right) \cdot U\\
                
                
                \end{array}
                
                Derivation
                1. Split input into 2 regimes
                2. if (cos.f64 (/.f64 K #s(literal 2 binary64))) < -0.0400000000000000008

                  1. Initial program 86.5%

                    \[\left(J \cdot \left(e^{\ell} - e^{-\ell}\right)\right) \cdot \cos \left(\frac{K}{2}\right) + U \]
                  2. Taylor expanded in K around 0

                    \[\leadsto \left(J \cdot \left(e^{\ell} - e^{-\ell}\right)\right) \cdot \color{blue}{\left(1 + \frac{-1}{8} \cdot {K}^{2}\right)} + U \]
                  3. Step-by-step derivation
                    1. lower-+.f64N/A

                      \[\leadsto \left(J \cdot \left(e^{\ell} - e^{-\ell}\right)\right) \cdot \left(1 + \color{blue}{\frac{-1}{8} \cdot {K}^{2}}\right) + U \]
                    2. lower-*.f64N/A

                      \[\leadsto \left(J \cdot \left(e^{\ell} - e^{-\ell}\right)\right) \cdot \left(1 + \frac{-1}{8} \cdot \color{blue}{{K}^{2}}\right) + U \]
                    3. lower-pow.f64 64.8%

                      \[\leadsto \left(J \cdot \left(e^{\ell} - e^{-\ell}\right)\right) \cdot \left(1 + -0.125 \cdot {K}^{\color{blue}{2}}\right) + U \]
                  4. Applied rewrites 64.8%

                    \[\leadsto \left(J \cdot \left(e^{\ell} - e^{-\ell}\right)\right) \cdot \color{blue}{\left(1 + -0.125 \cdot {K}^{2}\right)} + U \]
                  5. Step-by-step derivation
                    1. lift-+.f64N/A

                      \[\leadsto \color{blue}{\left(J \cdot \left(e^{\ell} - e^{-\ell}\right)\right) \cdot \left(1 + \frac{-1}{8} \cdot {K}^{2}\right) + U} \]
                    2. lift-*.f64N/A

                      \[\leadsto \color{blue}{\left(J \cdot \left(e^{\ell} - e^{-\ell}\right)\right) \cdot \left(1 + \frac{-1}{8} \cdot {K}^{2}\right)} + U \]
                    3. lower-fma.f64 64.8%

                      \[\leadsto \color{blue}{\mathsf{fma}\left(J \cdot \left(e^{\ell} - e^{-\ell}\right), 1 + -0.125 \cdot {K}^{2}, U\right)} \]
                  6. Applied rewrites 69.5%

                    \[\leadsto \color{blue}{\mathsf{fma}\left(\left(J + J\right) \cdot \sinh \ell, \mathsf{fma}\left(K \cdot K, -0.125, 1\right), U\right)} \]
                  7. Step-by-step derivation
                    1. lift-fma.f64N/A

                      \[\leadsto \color{blue}{\left(\left(J + J\right) \cdot \sinh \ell\right) \cdot \mathsf{fma}\left(K \cdot K, \frac{-1}{8}, 1\right) + U} \]
                    2. lift-*.f64N/A

                      \[\leadsto \color{blue}{\left(\left(J + J\right) \cdot \sinh \ell\right)} \cdot \mathsf{fma}\left(K \cdot K, \frac{-1}{8}, 1\right) + U \]
                    3. associate-*l*N/A

                      \[\leadsto \color{blue}{\left(J + J\right) \cdot \left(\sinh \ell \cdot \mathsf{fma}\left(K \cdot K, \frac{-1}{8}, 1\right)\right)} + U \]
                    4. lower-fma.f64N/A

                      \[\leadsto \color{blue}{\mathsf{fma}\left(J + J, \sinh \ell \cdot \mathsf{fma}\left(K \cdot K, \frac{-1}{8}, 1\right), U\right)} \]
                    5. *-commutativeN/A

                      \[\leadsto \mathsf{fma}\left(J + J, \color{blue}{\mathsf{fma}\left(K \cdot K, \frac{-1}{8}, 1\right) \cdot \sinh \ell}, U\right) \]
                    6. lower-*.f6469.2%

                      \[\leadsto \mathsf{fma}\left(J + J, \color{blue}{\mathsf{fma}\left(K \cdot K, -0.125, 1\right) \cdot \sinh \ell}, U\right) \]
                    7. lift-fma.f64N/A

                      \[\leadsto \mathsf{fma}\left(J + J, \left(\left(K \cdot K\right) \cdot \frac{-1}{8} + \color{blue}{1}\right) \cdot \sinh \ell, U\right) \]
                    8. *-commutativeN/A

                      \[\leadsto \mathsf{fma}\left(J + J, \left(\frac{-1}{8} \cdot \left(K \cdot K\right) + 1\right) \cdot \sinh \ell, U\right) \]
                    9. lower-fma.f6469.2%

                      \[\leadsto \mathsf{fma}\left(J + J, \mathsf{fma}\left(-0.125, \color{blue}{K \cdot K}, 1\right) \cdot \sinh \ell, U\right) \]
                  8. Applied rewrites — 69.2% accurate

                    \[\leadsto \color{blue}{\mathsf{fma}\left(J + J, \mathsf{fma}\left(-0.125, K \cdot K, 1\right) \cdot \sinh \ell, U\right)} \]
                  9. Taylor expanded in l around 0

                    \[\leadsto \mathsf{fma}\left(J + J, \mathsf{fma}\left(-0.125, K \cdot K, 1\right) \cdot \color{blue}{\ell}, U\right) \]
                  10. Step-by-step derivation
                    1. Applied rewrites48.9%

                      \[\leadsto \mathsf{fma}\left(J + J, \mathsf{fma}\left(-0.125, K \cdot K, 1\right) \cdot \color{blue}{\ell}, U\right) \]

                    if -0.0400000000000000008 < (cos.f64 (/.f64 K #s(literal 2 binary64)))

                    1. Initial program 86.5%

                      \[\left(J \cdot \left(e^{\ell} - e^{-\ell}\right)\right) \cdot \cos \left(\frac{K}{2}\right) + U \]
                    2. Step-by-step derivation
                      1. lift-+.f64N/A

                        \[\leadsto \color{blue}{\left(J \cdot \left(e^{\ell} - e^{-\ell}\right)\right) \cdot \cos \left(\frac{K}{2}\right) + U} \]
                      2. lift-*.f64N/A

                        \[\leadsto \color{blue}{\left(J \cdot \left(e^{\ell} - e^{-\ell}\right)\right) \cdot \cos \left(\frac{K}{2}\right)} + U \]
                      3. *-commutativeN/A

                        \[\leadsto \color{blue}{\cos \left(\frac{K}{2}\right) \cdot \left(J \cdot \left(e^{\ell} - e^{-\ell}\right)\right)} + U \]
                      4. lift-*.f64N/A

                        \[\leadsto \cos \left(\frac{K}{2}\right) \cdot \color{blue}{\left(J \cdot \left(e^{\ell} - e^{-\ell}\right)\right)} + U \]
                      5. *-commutativeN/A

                        \[\leadsto \cos \left(\frac{K}{2}\right) \cdot \color{blue}{\left(\left(e^{\ell} - e^{-\ell}\right) \cdot J\right)} + U \]
                      6. lift--.f64N/A

                        \[\leadsto \cos \left(\frac{K}{2}\right) \cdot \left(\color{blue}{\left(e^{\ell} - e^{-\ell}\right)} \cdot J\right) + U \]
                      7. lift-exp.f64N/A

                        \[\leadsto \cos \left(\frac{K}{2}\right) \cdot \left(\left(\color{blue}{e^{\ell}} - e^{-\ell}\right) \cdot J\right) + U \]
                      8. lift-exp.f64N/A

                        \[\leadsto \cos \left(\frac{K}{2}\right) \cdot \left(\left(e^{\ell} - \color{blue}{e^{-\ell}}\right) \cdot J\right) + U \]
                      9. lift-neg.f64N/A

                        \[\leadsto \cos \left(\frac{K}{2}\right) \cdot \left(\left(e^{\ell} - e^{\color{blue}{\mathsf{neg}\left(\ell\right)}}\right) \cdot J\right) + U \]
                      10. sinh-undefN/A

                        \[\leadsto \cos \left(\frac{K}{2}\right) \cdot \left(\color{blue}{\left(2 \cdot \sinh \ell\right)} \cdot J\right) + U \]
                      11. associate-*l*N/A

                        \[\leadsto \cos \left(\frac{K}{2}\right) \cdot \color{blue}{\left(2 \cdot \left(\sinh \ell \cdot J\right)\right)} + U \]
                      12. associate-*r*N/A

                        \[\leadsto \color{blue}{\left(\cos \left(\frac{K}{2}\right) \cdot 2\right) \cdot \left(\sinh \ell \cdot J\right)} + U \]
                      13. lower-fma.f64N/A

                        \[\leadsto \color{blue}{\mathsf{fma}\left(\cos \left(\frac{K}{2}\right) \cdot 2, \sinh \ell \cdot J, U\right)} \]
                    3. Applied rewrites100.0%

                      \[\leadsto \color{blue}{\mathsf{fma}\left(\cos \left(-0.5 \cdot K\right) \cdot 2, \sinh \ell \cdot J, U\right)} \]
                    4. Taylor expanded in K around 0

                      \[\leadsto \mathsf{fma}\left(\color{blue}{2}, \sinh \ell \cdot J, U\right) \]
                    5. Step-by-step derivation
                      1. Applied rewrites80.2%

                        \[\leadsto \mathsf{fma}\left(\color{blue}{2}, \sinh \ell \cdot J, U\right) \]
                      2. Taylor expanded in l around 0

                        \[\leadsto \mathsf{fma}\left(2, \color{blue}{J \cdot \ell}, U\right) \]
                      3. Step-by-step derivation
                        1. lower-*.f6453.4%

                          \[\leadsto \mathsf{fma}\left(2, J \cdot \color{blue}{\ell}, U\right) \]
                      4. Applied rewrites53.4%

                        \[\leadsto \mathsf{fma}\left(2, \color{blue}{J \cdot \ell}, U\right) \]
                      5. Step-by-step derivation
                        1. lift-fma.f64N/A

                          \[\leadsto \color{blue}{2 \cdot \left(J \cdot \ell\right) + U} \]
                        2. +-commutativeN/A

                          \[\leadsto \color{blue}{U + 2 \cdot \left(J \cdot \ell\right)} \]
                        3. sum-to-multN/A

                          \[\leadsto \color{blue}{\left(1 + \frac{2 \cdot \left(J \cdot \ell\right)}{U}\right) \cdot U} \]
                        4. lower-unsound-*.f64N/A

                          \[\leadsto \color{blue}{\left(1 + \frac{2 \cdot \left(J \cdot \ell\right)}{U}\right) \cdot U} \]
                        5. lower-unsound-+.f64N/A

                          \[\leadsto \color{blue}{\left(1 + \frac{2 \cdot \left(J \cdot \ell\right)}{U}\right)} \cdot U \]
                        6. lower-unsound-/.f64N/A

                          \[\leadsto \left(1 + \color{blue}{\frac{2 \cdot \left(J \cdot \ell\right)}{U}}\right) \cdot U \]
                        7. *-commutativeN/A

                          \[\leadsto \left(1 + \frac{\color{blue}{\left(J \cdot \ell\right) \cdot 2}}{U}\right) \cdot U \]
                        8. lower-*.f6457.2%

                          \[\leadsto \left(1 + \frac{\color{blue}{\left(J \cdot \ell\right) \cdot 2}}{U}\right) \cdot U \]
                        9. lift-*.f64N/A

                          \[\leadsto \left(1 + \frac{\left(J \cdot \color{blue}{\ell}\right) \cdot 2}{U}\right) \cdot U \]
                        10. *-commutativeN/A

                          \[\leadsto \left(1 + \frac{\left(\ell \cdot \color{blue}{J}\right) \cdot 2}{U}\right) \cdot U \]
                        11. lower-*.f6457.2%

                          \[\leadsto \left(1 + \frac{\left(\ell \cdot \color{blue}{J}\right) \cdot 2}{U}\right) \cdot U \]
                      6. Applied rewrites57.2%

                        \[\leadsto \color{blue}{\left(1 + \frac{\left(\ell \cdot J\right) \cdot 2}{U}\right) \cdot U} \]
                    6. Recombined 2 regimes into one program.
                    7. Add Preprocessing

                    Alternative 11: 57.2% accurate, 4.3× speedup?

                    \[\left(1 + \frac{\left(\ell \cdot J\right) \cdot 2}{U}\right) \cdot U \]
                    (FPCore (J l K U) :precision binary64 (* (+ 1.0 (/ (* (* l J) 2.0) U)) U))
                    /* Alternative 11: (1 + ((l*J)*2)/U) * U; parameter K is unused
                       because cos(K/2) was Taylor-expanded around K = 0. */
                    double code(double J, double l, double K, double U) {
                    	return (1.0 + (((l * J) * 2.0) / U)) * U;
                    }
                    
                    module fmin_fmax_functions
                        !! NaN-aware generic fmax/fmin over mixed real(4)/real(8)
                        !! operands, mimicking C's fmax/fmin: when one operand is
                        !! NaN the other operand is returned (x /= x is true
                        !! exactly when x is a NaN).
                        implicit none
                        private
                        public fmax
                        public fmin

                        interface fmax
                            module procedure fmax88
                            module procedure fmax44
                            module procedure fmax84
                            module procedure fmax48
                        end interface
                        interface fmin
                            module procedure fmin88
                            module procedure fmin44
                            module procedure fmin84
                            module procedure fmin48
                        end interface
                    contains
                        ! real(8)/real(8): skip a NaN operand, otherwise plain max.
                        real(8) function fmax88(x, y) result(res)
                            real(8), intent (in) :: x
                            real(8), intent (in) :: y
                            if (x /= x) then
                                res = y
                            else if (y /= y) then
                                res = x
                            else
                                res = max(x, y)
                            end if
                        end function
                        ! real(4)/real(4) variant of fmax88.
                        real(4) function fmax44(x, y) result(res)
                            real(4), intent (in) :: x
                            real(4), intent (in) :: y
                            if (x /= x) then
                                res = y
                            else if (y /= y) then
                                res = x
                            else
                                res = max(x, y)
                            end if
                        end function
                        ! Mixed kinds: the real(4) operand is promoted to real(8).
                        real(8) function fmax84(x, y) result(res)
                            real(8), intent (in) :: x
                            real(4), intent (in) :: y
                            if (x /= x) then
                                res = dble(y)
                            else if (y /= y) then
                                res = x
                            else
                                res = max(x, dble(y))
                            end if
                        end function
                        real(8) function fmax48(x, y) result(res)
                            real(4), intent (in) :: x
                            real(8), intent (in) :: y
                            if (x /= x) then
                                res = y
                            else if (y /= y) then
                                res = dble(x)
                            else
                                res = max(dble(x), y)
                            end if
                        end function
                        ! real(8)/real(8): skip a NaN operand, otherwise plain min.
                        real(8) function fmin88(x, y) result(res)
                            real(8), intent (in) :: x
                            real(8), intent (in) :: y
                            if (x /= x) then
                                res = y
                            else if (y /= y) then
                                res = x
                            else
                                res = min(x, y)
                            end if
                        end function
                        ! real(4)/real(4) variant of fmin88.
                        real(4) function fmin44(x, y) result(res)
                            real(4), intent (in) :: x
                            real(4), intent (in) :: y
                            if (x /= x) then
                                res = y
                            else if (y /= y) then
                                res = x
                            else
                                res = min(x, y)
                            end if
                        end function
                        ! Mixed kinds: the real(4) operand is promoted to real(8).
                        real(8) function fmin84(x, y) result(res)
                            real(8), intent (in) :: x
                            real(4), intent (in) :: y
                            if (x /= x) then
                                res = dble(y)
                            else if (y /= y) then
                                res = x
                            else
                                res = min(x, dble(y))
                            end if
                        end function
                        real(8) function fmin48(x, y) result(res)
                            real(4), intent (in) :: x
                            real(8), intent (in) :: y
                            if (x /= x) then
                                res = y
                            else if (y /= y) then
                                res = dble(x)
                            else
                                res = min(dble(x), y)
                            end if
                        end function
                    end module
                    
                    ! Alternative 11 of the rewrite: evaluates (1 + ((l*j)*2)/u) * u,
                    ! an approximation of j*(exp(l) - exp(-l))*cos(k/2) + u.
                    ! k is unused: the cosine factor was Taylor-expanded around K = 0.
                    ! Fix: added implicit none (generated code omitted it).
                    real(8) function code(j, l, k, u)
                    use fmin_fmax_functions
                        implicit none
                        real(8), intent (in) :: j
                        real(8), intent (in) :: l
                        real(8), intent (in) :: k
                        real(8), intent (in) :: u
                        code = (1.0d0 + (((l * j) * 2.0d0) / u)) * u
                    end function
                    
                    // Alternative 11: (1 + ((l*J)*2)/U) * U; parameter K is unused.
                    public static double code(double J, double l, double K, double U) {
                    	return (1.0 + (((l * J) * 2.0) / U)) * U;
                    }
                    
                    def code(J, l, K, U):
                    	"""Alternative 11: evaluate (1 + ((l*J)*2)/U) * U; K is unused."""
                    	twice_lj = (l * J) * 2.0
                    	return (1.0 + twice_lj / U) * U
                    
                    # Alternative 11: (1 + ((l*J)*2)/U) * U with each step rounded
                    # to Float64; parameter K is unused.
                    function code(J, l, K, U)
                    	return Float64(Float64(1.0 + Float64(Float64(Float64(l * J) * 2.0) / U)) * U)
                    end
                    
                    % Alternative 11: (1 + ((l*J)*2)/U) * U; input K is unused.
                    function tmp = code(J, l, K, U)
                    	tmp = (1.0 + (((l * J) * 2.0) / U)) * U;
                    end
                    
                    (* Alternative 11: (1 + ((l*J)*2)/U) * U rounded at $MachinePrecision; K is unused. *)
                    code[J_, l_, K_, U_] := N[(N[(1.0 + N[(N[(N[(l * J), $MachinePrecision] * 2.0), $MachinePrecision] / U), $MachinePrecision]), $MachinePrecision] * U), $MachinePrecision]
                    
                    \left(1 + \frac{\left(\ell \cdot J\right) \cdot 2}{U}\right) \cdot U
                    
                    Derivation
                    1. Initial program 86.5%

                      \[\left(J \cdot \left(e^{\ell} - e^{-\ell}\right)\right) \cdot \cos \left(\frac{K}{2}\right) + U \]
                    2. Step-by-step derivation
                      1. lift-+.f64N/A

                        \[\leadsto \color{blue}{\left(J \cdot \left(e^{\ell} - e^{-\ell}\right)\right) \cdot \cos \left(\frac{K}{2}\right) + U} \]
                      2. lift-*.f64N/A

                        \[\leadsto \color{blue}{\left(J \cdot \left(e^{\ell} - e^{-\ell}\right)\right) \cdot \cos \left(\frac{K}{2}\right)} + U \]
                      3. *-commutativeN/A

                        \[\leadsto \color{blue}{\cos \left(\frac{K}{2}\right) \cdot \left(J \cdot \left(e^{\ell} - e^{-\ell}\right)\right)} + U \]
                      4. lift-*.f64N/A

                        \[\leadsto \cos \left(\frac{K}{2}\right) \cdot \color{blue}{\left(J \cdot \left(e^{\ell} - e^{-\ell}\right)\right)} + U \]
                      5. *-commutativeN/A

                        \[\leadsto \cos \left(\frac{K}{2}\right) \cdot \color{blue}{\left(\left(e^{\ell} - e^{-\ell}\right) \cdot J\right)} + U \]
                      6. lift--.f64N/A

                        \[\leadsto \cos \left(\frac{K}{2}\right) \cdot \left(\color{blue}{\left(e^{\ell} - e^{-\ell}\right)} \cdot J\right) + U \]
                      7. lift-exp.f64N/A

                        \[\leadsto \cos \left(\frac{K}{2}\right) \cdot \left(\left(\color{blue}{e^{\ell}} - e^{-\ell}\right) \cdot J\right) + U \]
                      8. lift-exp.f64N/A

                        \[\leadsto \cos \left(\frac{K}{2}\right) \cdot \left(\left(e^{\ell} - \color{blue}{e^{-\ell}}\right) \cdot J\right) + U \]
                      9. lift-neg.f64N/A

                        \[\leadsto \cos \left(\frac{K}{2}\right) \cdot \left(\left(e^{\ell} - e^{\color{blue}{\mathsf{neg}\left(\ell\right)}}\right) \cdot J\right) + U \]
                      10. sinh-undefN/A

                        \[\leadsto \cos \left(\frac{K}{2}\right) \cdot \left(\color{blue}{\left(2 \cdot \sinh \ell\right)} \cdot J\right) + U \]
                      11. associate-*l*N/A

                        \[\leadsto \cos \left(\frac{K}{2}\right) \cdot \color{blue}{\left(2 \cdot \left(\sinh \ell \cdot J\right)\right)} + U \]
                      12. associate-*r*N/A

                        \[\leadsto \color{blue}{\left(\cos \left(\frac{K}{2}\right) \cdot 2\right) \cdot \left(\sinh \ell \cdot J\right)} + U \]
                      13. lower-fma.f64N/A

                        \[\leadsto \color{blue}{\mathsf{fma}\left(\cos \left(\frac{K}{2}\right) \cdot 2, \sinh \ell \cdot J, U\right)} \]
                    3. Applied rewrites100.0%

                      \[\leadsto \color{blue}{\mathsf{fma}\left(\cos \left(-0.5 \cdot K\right) \cdot 2, \sinh \ell \cdot J, U\right)} \]
                    4. Taylor expanded in K around 0

                      \[\leadsto \mathsf{fma}\left(\color{blue}{2}, \sinh \ell \cdot J, U\right) \]
                    5. Step-by-step derivation
                      1. Applied rewrites80.2%

                        \[\leadsto \mathsf{fma}\left(\color{blue}{2}, \sinh \ell \cdot J, U\right) \]
                      2. Taylor expanded in l around 0

                        \[\leadsto \mathsf{fma}\left(2, \color{blue}{J \cdot \ell}, U\right) \]
                      3. Step-by-step derivation
                        1. lower-*.f6453.4%

                          \[\leadsto \mathsf{fma}\left(2, J \cdot \color{blue}{\ell}, U\right) \]
                      4. Applied rewrites53.4%

                        \[\leadsto \mathsf{fma}\left(2, \color{blue}{J \cdot \ell}, U\right) \]
                      5. Step-by-step derivation
                        1. lift-fma.f64N/A

                          \[\leadsto \color{blue}{2 \cdot \left(J \cdot \ell\right) + U} \]
                        2. +-commutativeN/A

                          \[\leadsto \color{blue}{U + 2 \cdot \left(J \cdot \ell\right)} \]
                        3. sum-to-multN/A

                          \[\leadsto \color{blue}{\left(1 + \frac{2 \cdot \left(J \cdot \ell\right)}{U}\right) \cdot U} \]
                        4. lower-unsound-*.f64N/A

                          \[\leadsto \color{blue}{\left(1 + \frac{2 \cdot \left(J \cdot \ell\right)}{U}\right) \cdot U} \]
                        5. lower-unsound-+.f64N/A

                          \[\leadsto \color{blue}{\left(1 + \frac{2 \cdot \left(J \cdot \ell\right)}{U}\right)} \cdot U \]
                        6. lower-unsound-/.f64N/A

                          \[\leadsto \left(1 + \color{blue}{\frac{2 \cdot \left(J \cdot \ell\right)}{U}}\right) \cdot U \]
                        7. *-commutativeN/A

                          \[\leadsto \left(1 + \frac{\color{blue}{\left(J \cdot \ell\right) \cdot 2}}{U}\right) \cdot U \]
                        8. lower-*.f6457.2%

                          \[\leadsto \left(1 + \frac{\color{blue}{\left(J \cdot \ell\right) \cdot 2}}{U}\right) \cdot U \]
                        9. lift-*.f64N/A

                          \[\leadsto \left(1 + \frac{\left(J \cdot \color{blue}{\ell}\right) \cdot 2}{U}\right) \cdot U \]
                        10. *-commutativeN/A

                          \[\leadsto \left(1 + \frac{\left(\ell \cdot \color{blue}{J}\right) \cdot 2}{U}\right) \cdot U \]
                        11. lower-*.f6457.2%

                          \[\leadsto \left(1 + \frac{\left(\ell \cdot \color{blue}{J}\right) \cdot 2}{U}\right) \cdot U \]
                      6. Applied rewrites57.2%

                        \[\leadsto \color{blue}{\left(1 + \frac{\left(\ell \cdot J\right) \cdot 2}{U}\right) \cdot U} \]
                      7. Add Preprocessing

                      Alternative 12: 53.4% accurate, 7.7× speedup?

                      \[\mathsf{fma}\left(2, J \cdot \ell, U\right) \]
                      (FPCore (J l K U) :precision binary64 (fma 2.0 (* J l) U))
                      /* Alternative 12: fused multiply-add 2*(J*l) + U; K is unused. */
                      double code(double J, double l, double K, double U) {
                      	return fma(2.0, (J * l), U);
                      }
                      
                      # Alternative 12: fma(2, J*l, U); parameter K is unused.
                      function code(J, l, K, U)
                      	return fma(2.0, Float64(J * l), U)
                      end
                      
                      (* Alternative 12: 2*(J*l) + U at $MachinePrecision; K is unused. *)
                      code[J_, l_, K_, U_] := N[(2.0 * N[(J * l), $MachinePrecision] + U), $MachinePrecision]
                      
                      \mathsf{fma}\left(2, J \cdot \ell, U\right)
                      
                      Derivation
                      1. Initial program 86.5%

                        \[\left(J \cdot \left(e^{\ell} - e^{-\ell}\right)\right) \cdot \cos \left(\frac{K}{2}\right) + U \]
                      2. Step-by-step derivation
                        1. lift-+.f64N/A

                          \[\leadsto \color{blue}{\left(J \cdot \left(e^{\ell} - e^{-\ell}\right)\right) \cdot \cos \left(\frac{K}{2}\right) + U} \]
                        2. lift-*.f64N/A

                          \[\leadsto \color{blue}{\left(J \cdot \left(e^{\ell} - e^{-\ell}\right)\right) \cdot \cos \left(\frac{K}{2}\right)} + U \]
                        3. *-commutativeN/A

                          \[\leadsto \color{blue}{\cos \left(\frac{K}{2}\right) \cdot \left(J \cdot \left(e^{\ell} - e^{-\ell}\right)\right)} + U \]
                        4. lift-*.f64N/A

                          \[\leadsto \cos \left(\frac{K}{2}\right) \cdot \color{blue}{\left(J \cdot \left(e^{\ell} - e^{-\ell}\right)\right)} + U \]
                        5. *-commutativeN/A

                          \[\leadsto \cos \left(\frac{K}{2}\right) \cdot \color{blue}{\left(\left(e^{\ell} - e^{-\ell}\right) \cdot J\right)} + U \]
                        6. lift--.f64N/A

                          \[\leadsto \cos \left(\frac{K}{2}\right) \cdot \left(\color{blue}{\left(e^{\ell} - e^{-\ell}\right)} \cdot J\right) + U \]
                        7. lift-exp.f64N/A

                          \[\leadsto \cos \left(\frac{K}{2}\right) \cdot \left(\left(\color{blue}{e^{\ell}} - e^{-\ell}\right) \cdot J\right) + U \]
                        8. lift-exp.f64N/A

                          \[\leadsto \cos \left(\frac{K}{2}\right) \cdot \left(\left(e^{\ell} - \color{blue}{e^{-\ell}}\right) \cdot J\right) + U \]
                        9. lift-neg.f64N/A

                          \[\leadsto \cos \left(\frac{K}{2}\right) \cdot \left(\left(e^{\ell} - e^{\color{blue}{\mathsf{neg}\left(\ell\right)}}\right) \cdot J\right) + U \]
                        10. sinh-undefN/A

                          \[\leadsto \cos \left(\frac{K}{2}\right) \cdot \left(\color{blue}{\left(2 \cdot \sinh \ell\right)} \cdot J\right) + U \]
                        11. associate-*l*N/A

                          \[\leadsto \cos \left(\frac{K}{2}\right) \cdot \color{blue}{\left(2 \cdot \left(\sinh \ell \cdot J\right)\right)} + U \]
                        12. associate-*r*N/A

                          \[\leadsto \color{blue}{\left(\cos \left(\frac{K}{2}\right) \cdot 2\right) \cdot \left(\sinh \ell \cdot J\right)} + U \]
                        13. lower-fma.f64N/A

                          \[\leadsto \color{blue}{\mathsf{fma}\left(\cos \left(\frac{K}{2}\right) \cdot 2, \sinh \ell \cdot J, U\right)} \]
                      3. Applied rewrites100.0%

                        \[\leadsto \color{blue}{\mathsf{fma}\left(\cos \left(-0.5 \cdot K\right) \cdot 2, \sinh \ell \cdot J, U\right)} \]
                      4. Taylor expanded in K around 0

                        \[\leadsto \mathsf{fma}\left(\color{blue}{2}, \sinh \ell \cdot J, U\right) \]
                      5. Step-by-step derivation
                        1. Applied rewrites80.2%

                          \[\leadsto \mathsf{fma}\left(\color{blue}{2}, \sinh \ell \cdot J, U\right) \]
                        2. Taylor expanded in l around 0

                          \[\leadsto \mathsf{fma}\left(2, \color{blue}{J \cdot \ell}, U\right) \]
                        3. Step-by-step derivation
                          1. lower-*.f6453.4%

                            \[\leadsto \mathsf{fma}\left(2, J \cdot \color{blue}{\ell}, U\right) \]
                        4. Applied rewrites53.4%

                          \[\leadsto \mathsf{fma}\left(2, \color{blue}{J \cdot \ell}, U\right) \]
                        5. Add Preprocessing

                        Alternative 13: 36.3% accurate, 68.7× speedup?

                        \[U \]
                        (FPCore (J l K U) :precision binary64 U)
                        /* Alternative 13: Taylor expansion in J around 0 — only U survives. */
                        double code(double J, double l, double K, double U) {
                        	return U;
                        }
                        
                        module fmin_fmax_functions
                            ! NaN-aware generic fmax/fmin over real(4)/real(8) argument
                            ! combinations, mimicking C's fmax/fmin.  In each body,
                            ! x /= x is true exactly when x is NaN, so the nested merge
                            ! returns the non-NaN operand when one operand is NaN and
                            ! the ordinary max/min otherwise.  Mixed-kind overloads
                            ! promote the real(4) operand to real(8) via dble().
                            implicit none
                            private
                            public fmax
                            public fmin
                        
                            interface fmax
                                module procedure fmax88
                                module procedure fmax44
                                module procedure fmax84
                                module procedure fmax48
                            end interface
                            interface fmin
                                module procedure fmin88
                                module procedure fmin44
                                module procedure fmin84
                                module procedure fmin48
                            end interface
                        contains
                            real(8) function fmax88(x, y) result (res)
                                real(8), intent (in) :: x
                                real(8), intent (in) :: y
                                res = merge(y, merge(x, max(x, y), y /= y), x /= x)
                            end function
                            real(4) function fmax44(x, y) result (res)
                                real(4), intent (in) :: x
                                real(4), intent (in) :: y
                                res = merge(y, merge(x, max(x, y), y /= y), x /= x)
                            end function
                            real(8) function fmax84(x, y) result(res)
                                real(8), intent (in) :: x
                                real(4), intent (in) :: y
                                res = merge(dble(y), merge(x, max(x, dble(y)), y /= y), x /= x)
                            end function
                            real(8) function fmax48(x, y) result(res)
                                real(4), intent (in) :: x
                                real(8), intent (in) :: y
                                res = merge(y, merge(dble(x), max(dble(x), y), y /= y), x /= x)
                            end function
                            real(8) function fmin88(x, y) result (res)
                                real(8), intent (in) :: x
                                real(8), intent (in) :: y
                                res = merge(y, merge(x, min(x, y), y /= y), x /= x)
                            end function
                            real(4) function fmin44(x, y) result (res)
                                real(4), intent (in) :: x
                                real(4), intent (in) :: y
                                res = merge(y, merge(x, min(x, y), y /= y), x /= x)
                            end function
                            real(8) function fmin84(x, y) result(res)
                                real(8), intent (in) :: x
                                real(4), intent (in) :: y
                                res = merge(dble(y), merge(x, min(x, dble(y)), y /= y), x /= x)
                            end function
                            real(8) function fmin48(x, y) result(res)
                                real(4), intent (in) :: x
                                real(8), intent (in) :: y
                                res = merge(y, merge(dble(x), min(dble(x), y), y /= y), x /= x)
                            end function
                        end module
                        
                        ! Alternative 13: zeroth-order Taylor expansion in j around 0
                        ! of j*(exp(l) - exp(-l))*cos(k/2) + u; only u survives, so
                        ! j, l and k are accepted but unused.
                        ! Fix: added implicit none (generated code omitted it).
                        real(8) function code(j, l, k, u)
                        use fmin_fmax_functions
                            implicit none
                            real(8), intent (in) :: j
                            real(8), intent (in) :: l
                            real(8), intent (in) :: k
                            real(8), intent (in) :: u
                            code = u
                        end function
                        
                        // Alternative 13: constant approximation — returns U; J, l, K unused.
                        public static double code(double J, double l, double K, double U) {
                        	return U;
                        }
                        
                        def code(J, l, K, U):
                        	"""Alternative 13: Taylor expansion in J around 0 leaves only U."""
                        	return U
                        
                        # Alternative 13: constant approximation — returns U; J, l, K unused.
                        function code(J, l, K, U)
                        	return U
                        end
                        
                        % Alternative 13: constant approximation — returns U; J, l, K unused.
                        function tmp = code(J, l, K, U)
                        	tmp = U;
                        end
                        
                        (* Alternative 13: constant approximation — returns U unchanged. *)
                        code[J_, l_, K_, U_] := U
                        
                        U
                        
                        Derivation
                        1. Initial program 86.5%

                          \[\left(J \cdot \left(e^{\ell} - e^{-\ell}\right)\right) \cdot \cos \left(\frac{K}{2}\right) + U \]
                        2. Taylor expanded in J around 0

                          \[\leadsto \color{blue}{U} \]
                        3. Step-by-step derivation
                          1. Applied rewrites36.3%

                            \[\leadsto \color{blue}{U} \]
                          2. Add Preprocessing

                          Reproduce

                          ?
                          herbie shell --seed 2025183 
                          (FPCore (J l K U)
                            :name "Maksimov and Kolovsky, Equation (4)"
                            :precision binary64
                            (+ (* (* J (- (exp l) (exp (- l)))) (cos (/ K 2.0))) U))