From 25cbf12237f692fab6cb5fbcd908c2b966b67d9b Mon Sep 17 00:00:00 2001 From: Alex Arslan Date: Tue, 25 Jun 2024 20:23:41 -0700 Subject: [PATCH] Define fallback implementations for mean, var, and entropy We have the internal `expectation` function that uses `quadgk` to compute integrals, as well as a fallback implementation for `kldivergence` that uses `expectation`, so it seems reasonable to similarly define fallbacks for other quantities trivially computable using `expectation`: `mean`, `var`, and `entropy`. We could probably do `skewness` and `kurtosis` as well. NOTE: Skipping CI until I've actually done more with this since we run a lot of CI on this repo [ci skip] --- src/univariates.jl | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/univariates.jl b/src/univariates.jl index b60e5a2949..68bb7e86f3 100644 --- a/src/univariates.jl +++ b/src/univariates.jl @@ -163,14 +163,14 @@ rand(rng::AbstractRNG, d::UnivariateDistribution) = quantile(d, rand(rng)) Compute the expectation. """ -mean(d::UnivariateDistribution) +mean(d::UnivariateDistribution) = expectation(identity, d) """ var(d::UnivariateDistribution) Compute the variance. (A generic std is provided as `std(d) = sqrt(var(d))`) """ -var(d::UnivariateDistribution) +var(d::UnivariateDistribution) = (μ = mean(d); expectation(x -> (x - μ)^2, d)) """ std(d::UnivariateDistribution) @@ -214,7 +214,7 @@ skewness(d::UnivariateDistribution) Compute the entropy value of distribution `d`. """ -entropy(d::UnivariateDistribution) +entropy(d::UnivariateDistribution) = expectation(x -> -logpdf(d, x), d) """ entropy(d::UnivariateDistribution, b::Real)