Result: HairBSDF, sample_f, cosTheta

Average Accuracy: 99.5% → 99.5%
Time: 16.4s
Precision: binary32
Cost: 9952

Specification

\[\left(10^{-5} \leq u \land u \leq 1\right) \land \left(0 \leq v \land v \leq 109.746574\right)\]
\[1 + v \cdot \log \left(u + \left(1 - u\right) \cdot e^{\frac{-2}{v}}\right) \]
\[1 + v \cdot \log \left(\mathsf{fma}\left(1 - u, e^{\frac{-2}{v}}, u\right)\right) \]
; Original expression: 1 + v * log(u + (1 - u) * exp(-2/v)), evaluated in binary32.
(FPCore (u v)
 :precision binary32
 (+ 1.0 (* v (log (+ u (* (- 1.0 u) (exp (/ -2.0 v))))))))
; Rewritten expression: the log argument u + (1 - u) * exp(-2/v) is fused into a
; single fma, removing one intermediate rounding step inside the log.
(FPCore (u v)
 :precision binary32
 (+ 1.0 (* v (log (fma (- 1.0 u) (exp (/ -2.0 v)) u)))))
float code(float u, float v) {
	return 1.0f + (v * logf((u + ((1.0f - u) * expf((-2.0f / v))))));
}
float code(float u, float v) {
	return 1.0f + (v * logf(fmaf((1.0f - u), expf((-2.0f / v)), u)));
}
# Single-precision evaluation of 1 + v * log(u + (1 - u) * exp(-2/v)).
# Named intermediates; the Float32 conversion sequence is identical to the
# original single-expression form, so the rounding behavior is unchanged.
function code(u, v)
	decay = exp(Float32(Float32(-2.0) / v))
	blend = Float32(u + Float32(Float32(Float32(1.0) - u) * decay))
	return Float32(Float32(1.0) + Float32(v * log(blend)))
end
# Single-precision evaluation of 1 + v * log(fma(1 - u, exp(-2/v), u)).
# fma fuses the multiply-add inside the log argument into a single rounding.
# Named intermediates; the operation order matches the original expression.
function code(u, v)
	decay = exp(Float32(Float32(-2.0) / v))
	blend = fma(Float32(Float32(1.0) - u), decay, u)
	return Float32(Float32(1.0) + Float32(v * log(blend)))
end
1 + v \cdot \log \left(u + \left(1 - u\right) \cdot e^{\frac{-2}{v}}\right)
1 + v \cdot \log \left(\mathsf{fma}\left(1 - u, e^{\frac{-2}{v}}, u\right)\right)

Error

Derivation

  1. Initial program 99.5%

    \[1 + v \cdot \log \left(u + \left(1 - u\right) \cdot e^{\frac{-2}{v}}\right) \]
  2. Simplified 99.5%

    \[\leadsto \color{blue}{\mathsf{fma}\left(v, \log \left(\mathsf{fma}\left(1 - u, e^{\frac{-2}{v}}, u\right)\right), 1\right)} \]
    Proof

    [Start] 99.5%

    \[ 1 + v \cdot \log \left(u + \left(1 - u\right) \cdot e^{\frac{-2}{v}}\right) \]

    +-commutative [=>] 99.5%

    \[ \color{blue}{v \cdot \log \left(u + \left(1 - u\right) \cdot e^{\frac{-2}{v}}\right) + 1} \]

    fma-def [=>] 99.5%

    \[ \color{blue}{\mathsf{fma}\left(v, \log \left(u + \left(1 - u\right) \cdot e^{\frac{-2}{v}}\right), 1\right)} \]

    +-commutative [=>] 99.5%

    \[ \mathsf{fma}\left(v, \log \color{blue}{\left(\left(1 - u\right) \cdot e^{\frac{-2}{v}} + u\right)}, 1\right) \]

    fma-def [=>] 99.5%

    \[ \mathsf{fma}\left(v, \log \color{blue}{\left(\mathsf{fma}\left(1 - u, e^{\frac{-2}{v}}, u\right)\right)}, 1\right) \]
  3. Applied egg-rr 99.5%

    \[\leadsto \color{blue}{v \cdot \log \left(\mathsf{fma}\left(1 - u, e^{\frac{-2}{v}}, u\right)\right) + 1} \]
  4. Final simplification 99.5%

    \[\leadsto 1 + v \cdot \log \left(\mathsf{fma}\left(1 - u, e^{\frac{-2}{v}}, u\right)\right) \]

Alternatives

Alternative 1
Accuracy: 99.5%
Cost: 6816
\[1 + v \cdot \log \left(u + \left(1 - u\right) \cdot e^{\frac{-2}{v}}\right) \]
Alternative 2
Accuracy: 91.0%
Cost: 3748
\[\begin{array}{l} \mathbf{if}\;v \leq 0.41999998688697815:\\ \;\;\;\;1 + \left(v \cdot u\right) \cdot \left(\frac{1}{1 + \left(\frac{2}{v \cdot v} - \frac{2}{v}\right)} + -1\right)\\ \mathbf{else}:\\ \;\;\;\;1 + v \cdot \left(u \cdot e^{\frac{2}{v}} + \left(\frac{-2}{v} - u\right)\right)\\ \end{array} \]
Alternative 3
Accuracy: 91.0%
Cost: 3684
\[\begin{array}{l} \mathbf{if}\;v \leq 0.41999998688697815:\\ \;\;\;\;1 + \left(v \cdot u\right) \cdot \left(\frac{1}{1 + \left(\frac{2}{v \cdot v} - \frac{2}{v}\right)} + -1\right)\\ \mathbf{else}:\\ \;\;\;\;1 + v \cdot \left(u \cdot \mathsf{expm1}\left(\frac{2}{v}\right) + \frac{-2}{v}\right)\\ \end{array} \]
Alternative 4
Accuracy: 91.0%
Cost: 3620
\[\begin{array}{l} \mathbf{if}\;v \leq 0.41999998688697815:\\ \;\;\;\;1 + \left(v \cdot u\right) \cdot \left(\frac{1}{1 + \left(\frac{2}{v \cdot v} - \frac{2}{v}\right)} + -1\right)\\ \mathbf{else}:\\ \;\;\;\;1 + \left(-2 + v \cdot \left(u \cdot \mathsf{expm1}\left(\frac{2}{v}\right)\right)\right)\\ \end{array} \]
Alternative 5
Accuracy: 87.0%
Cost: 672
\[1 + \left(v \cdot u\right) \cdot \left(\frac{1}{1 + \left(\frac{2}{v \cdot v} - \frac{2}{v}\right)} + -1\right) \]
Alternative 6
Accuracy: 5.7%
Cost: 32
\[-1 \]
Alternative 7
Accuracy: 87.4%
Cost: 32
\[1 \]

Error

Reproduce

herbie shell --seed 2023129 
; Input program for reproducing this report (fed to `herbie shell`, seed 2023129).
; Precondition bounds u to [1e-5, 1] and v to [0, 109.746574].
(FPCore (u v)
  :name "HairBSDF, sample_f, cosTheta"
  :precision binary32
  :pre (and (and (<= 1e-5 u) (<= u 1.0)) (and (<= 0.0 v) (<= v 109.746574)))
  (+ 1.0 (* v (log (+ u (* (- 1.0 u) (exp (/ -2.0 v))))))))