diff --git a/Project.toml b/Project.toml
index 3fd46e26..5806a4a7 100644
--- a/Project.toml
+++ b/Project.toml
@@ -1,7 +1,7 @@
 name = "ExponentialFamily"
 uuid = "62312e5e-252a-4322-ace9-a5f4bf9b357b"
 authors = ["Ismail Senoz ", "Dmitry Bagaev "]
-version = "1.2.2"
+version = "1.3.0"

 [deps]
 BayesBase = "b4ee3484-f114-42fe-b91c-797d54a0c67e"
@@ -28,7 +28,7 @@ TinyHugeNumbers = "783c9a47-75a3-44ac-a16b-f1ab7b3acf04"

 [compat]
 Aqua = "0.7"
-BayesBase = "1.1"
+BayesBase = "1.2"
 Distributions = "0.25"
 DomainSets = "0.5.2, 0.6, 0.7"
 FastCholesky = "1.0"
diff --git a/src/distributions/gamma_family/gamma_shape_rate.jl b/src/distributions/gamma_family/gamma_shape_rate.jl
index 08e55922..cf8aa8bd 100644
--- a/src/distributions/gamma_family/gamma_shape_rate.jl
+++ b/src/distributions/gamma_family/gamma_shape_rate.jl
@@ -26,13 +26,15 @@ GammaShapeRate() = GammaShapeRate(1.0, 1.0)

 Distributions.@distr_support GammaShapeRate 0 Inf

-BayesBase.support(dist::GammaShapeRate) = Distributions.RealInterval(minimum(dist), maximum(dist))
-BayesBase.shape(dist::GammaShapeRate) = dist.a
-BayesBase.rate(dist::GammaShapeRate) = dist.b
-BayesBase.scale(dist::GammaShapeRate) = inv(dist.b)
-BayesBase.mean(dist::GammaShapeRate) = shape(dist) / rate(dist)
-BayesBase.var(dist::GammaShapeRate) = shape(dist) / abs2(rate(dist))
-BayesBase.params(dist::GammaShapeRate) = (shape(dist), rate(dist))
+BayesBase.support(dist::GammaShapeRate) = Distributions.RealInterval(minimum(dist), maximum(dist))
+BayesBase.shape(dist::GammaShapeRate) = dist.a
+BayesBase.rate(dist::GammaShapeRate) = dist.b
+BayesBase.scale(dist::GammaShapeRate) = inv(dist.b)
+BayesBase.mean(dist::GammaShapeRate) = shape(dist) / rate(dist)
+BayesBase.var(dist::GammaShapeRate) = shape(dist) / abs2(rate(dist))
+BayesBase.params(dist::GammaShapeRate) = (shape(dist), rate(dist))
+BayesBase.kurtosis(dist::GammaShapeRate) = kurtosis(convert(Gamma, dist))
+BayesBase.skewness(dist::GammaShapeRate) = skewness(convert(Gamma, dist))

 BayesBase.mode(d::GammaShapeRate) = shape(d) >= 1 ?
     mode(Gamma(shape(d), scale(d))) : throw(error("Gamma has no mode when shape < 1"))
diff --git a/src/distributions/normal_family/normal_mean_precision.jl b/src/distributions/normal_family/normal_mean_precision.jl
index bda7c297..72e90303 100644
--- a/src/distributions/normal_family/normal_mean_precision.jl
+++ b/src/distributions/normal_family/normal_mean_precision.jl
@@ -27,15 +27,17 @@ BayesBase.support(dist::NormalMeanPrecision) = Distributions.RealInterval(minimu

 BayesBase.weightedmean(dist::NormalMeanPrecision) = precision(dist) * mean(dist)

-BayesBase.mean(dist::NormalMeanPrecision) = dist.μ
-BayesBase.median(dist::NormalMeanPrecision) = mean(dist)
-BayesBase.mode(dist::NormalMeanPrecision) = mean(dist)
-BayesBase.var(dist::NormalMeanPrecision) = inv(dist.w)
-BayesBase.std(dist::NormalMeanPrecision) = sqrt(var(dist))
-BayesBase.cov(dist::NormalMeanPrecision) = var(dist)
-BayesBase.invcov(dist::NormalMeanPrecision) = dist.w
+BayesBase.mean(dist::NormalMeanPrecision) = dist.μ
+BayesBase.median(dist::NormalMeanPrecision) = mean(dist)
+BayesBase.mode(dist::NormalMeanPrecision) = mean(dist)
+BayesBase.var(dist::NormalMeanPrecision) = inv(dist.w)
+BayesBase.std(dist::NormalMeanPrecision) = sqrt(var(dist))
+BayesBase.cov(dist::NormalMeanPrecision) = var(dist)
+BayesBase.invcov(dist::NormalMeanPrecision) = dist.w
 BayesBase.entropy(dist::NormalMeanPrecision) = (1 + log2π - log(precision(dist))) / 2
-BayesBase.params(dist::NormalMeanPrecision) = (mean(dist), precision(dist))
+BayesBase.params(dist::NormalMeanPrecision) = (mean(dist), precision(dist))
+BayesBase.kurtosis(dist::NormalMeanPrecision) = kurtosis(convert(Normal, dist))
+BayesBase.skewness(dist::NormalMeanPrecision) = skewness(convert(Normal, dist))

 BayesBase.pdf(dist::NormalMeanPrecision, x::Real) = (invsqrt2π * exp(-abs2(x - mean(dist)) * precision(dist) / 2)) * sqrt(precision(dist))
 BayesBase.logpdf(dist::NormalMeanPrecision, x::Real) = -(log2π - log(precision(dist)) + abs2(x - mean(dist)) * precision(dist)) / 2
diff --git a/src/distributions/normal_family/normal_mean_variance.jl b/src/distributions/normal_family/normal_mean_variance.jl
index 5e41f58a..1d1becf4 100644
--- a/src/distributions/normal_family/normal_mean_variance.jl
+++ b/src/distributions/normal_family/normal_mean_variance.jl
@@ -32,15 +32,18 @@ function BayesBase.weightedmean_invcov(dist::NormalMeanVariance)
     return (xi, w)
 end

-BayesBase.mean(dist::NormalMeanVariance) = dist.μ
-BayesBase.median(dist::NormalMeanVariance) = mean(dist)
-BayesBase.mode(dist::NormalMeanVariance) = mean(dist)
-BayesBase.var(dist::NormalMeanVariance) = dist.v
-BayesBase.std(dist::NormalMeanVariance) = sqrt(var(dist))
-BayesBase.cov(dist::NormalMeanVariance) = var(dist)
-BayesBase.invcov(dist::NormalMeanVariance) = inv(cov(dist))
-BayesBase.entropy(dist::NormalMeanVariance) = (1 + log2π + log(var(dist))) / 2
-BayesBase.params(dist::NormalMeanVariance) = (dist.μ, dist.v)
+BayesBase.mean(dist::NormalMeanVariance) = dist.μ
+BayesBase.median(dist::NormalMeanVariance) = mean(dist)
+BayesBase.mode(dist::NormalMeanVariance) = mean(dist)
+BayesBase.var(dist::NormalMeanVariance) = dist.v
+BayesBase.std(dist::NormalMeanVariance) = sqrt(var(dist))
+BayesBase.cov(dist::NormalMeanVariance) = var(dist)
+BayesBase.invcov(dist::NormalMeanVariance) = inv(cov(dist))
+BayesBase.entropy(dist::NormalMeanVariance) = (1 + log2π + log(var(dist))) / 2
+BayesBase.params(dist::NormalMeanVariance) = (dist.μ, dist.v)
+BayesBase.kurtosis(dist::NormalMeanVariance) = kurtosis(convert(Normal, dist))
+BayesBase.skewness(dist::NormalMeanVariance) = skewness(convert(Normal, dist))
+

 BayesBase.pdf(dist::NormalMeanVariance, x::Real) = (invsqrt2π * exp(-abs2(x - mean(dist)) / 2cov(dist))) / std(dist)
 BayesBase.logpdf(dist::NormalMeanVariance, x::Real) = -(log2π + log(var(dist)) + abs2(x - mean(dist)) / var(dist)) / 2
diff --git a/src/distributions/normal_family/normal_weighted_mean_precision.jl b/src/distributions/normal_family/normal_weighted_mean_precision.jl
index cc15f868..f49bb1fe 100644
--- a/src/distributions/normal_family/normal_weighted_mean_precision.jl
+++ b/src/distributions/normal_family/normal_weighted_mean_precision.jl
@@ -42,6 +42,8 @@ BayesBase.cov(dist::NormalWeightedMeanPrecision) = var(dist)
 BayesBase.invcov(dist::NormalWeightedMeanPrecision) = dist.w
 BayesBase.entropy(dist::NormalWeightedMeanPrecision) = (1 + log2π - log(precision(dist))) / 2
 BayesBase.params(dist::NormalWeightedMeanPrecision) = (weightedmean(dist), precision(dist))
+BayesBase.kurtosis(dist::NormalWeightedMeanPrecision) = kurtosis(convert(Normal, dist))
+BayesBase.skewness(dist::NormalWeightedMeanPrecision) = skewness(convert(Normal, dist))

 BayesBase.pdf(dist::NormalWeightedMeanPrecision, x::Real) = (invsqrt2π * exp(-abs2(x - mean(dist)) * precision(dist) / 2)) * sqrt(precision(dist))
 BayesBase.logpdf(dist::NormalWeightedMeanPrecision, x::Real) = -(log2π - log(precision(dist)) + abs2(x - mean(dist)) * precision(dist)) / 2
diff --git a/src/exponential_family.jl b/src/exponential_family.jl
index dccad370..8041f3d6 100644
--- a/src/exponential_family.jl
+++ b/src/exponential_family.jl
@@ -830,6 +830,8 @@ BayesBase.mean(ef::ExponentialFamilyDistribution{T}) where {T <: Distribution} =
 BayesBase.var(ef::ExponentialFamilyDistribution{T}) where {T <: Distribution} = var(convert(T, ef))
 BayesBase.std(ef::ExponentialFamilyDistribution{T}) where {T <: Distribution} = std(convert(T, ef))
 BayesBase.cov(ef::ExponentialFamilyDistribution{T}) where {T <: Distribution} = cov(convert(T, ef))
+BayesBase.skewness(ef::ExponentialFamilyDistribution{T}) where {T <: Distribution} = skewness(convert(T, ef))
+BayesBase.kurtosis(ef::ExponentialFamilyDistribution{T}) where {T <: Distribution} = kurtosis(convert(T, ef))

 BayesBase.rand(ef::ExponentialFamilyDistribution, args...) = rand(Random.default_rng(), ef, args...)
 BayesBase.rand!(ef::ExponentialFamilyDistribution, args...) = rand!(Random.default_rng(), ef, args...)
diff --git a/test/distributions/distributions_setuptests.jl b/test/distributions/distributions_setuptests.jl
index f1b544c0..ecfdd135 100644
--- a/test/distributions/distributions_setuptests.jl
+++ b/test/distributions/distributions_setuptests.jl
@@ -57,7 +57,7 @@ function test_exponentialfamily_interface(distribution;
     test_fisherinformation_properties = true,
     test_fisherinformation_against_hessian = true,
     test_fisherinformation_against_jacobian = true,
-    option_assume_no_allocations = false,
+    option_assume_no_allocations = false
 )

     T = ExponentialFamily.exponential_family_typetag(distribution)
@@ -203,6 +203,17 @@ function run_test_basic_functions(distribution; nsamples = 10, test_gradients =
     # ! do not use fixed RNG
     samples = [rand(distribution) for _ in 1:nsamples]

+    # Not all methods are defined for all objects in Distributions.jl.
+    # For these methods we first check whether the method is defined for the distribution
+    # and only then test the method for the exponential family form.
+    potentially_missing_methods = (
+        cov,
+        skewness,
+        kurtosis
+    )
+
+    argument_type = Tuple{typeof(distribution)}
+
     for x in samples
         # We believe in the implementation in the `Distributions.jl`
         @test @inferred(logpdf(ef, x)) ≈ logpdf(distribution, x)
@@ -214,6 +225,12 @@
         @test all(rand(StableRNG(42), ef, 10) .≈ rand(StableRNG(42), distribution, 10))
         @test all(rand!(StableRNG(42), ef, [deepcopy(x) for _ in 1:10]) .≈ rand!(StableRNG(42), distribution, [deepcopy(x) for _ in 1:10]))

+        for method in potentially_missing_methods
+            if hasmethod(method, argument_type)
+                @test @inferred(method(ef)) ≈ method(distribution)
+            end
+        end
+
         @test @inferred(isbasemeasureconstant(ef)) === isbasemeasureconstant(T)
         @test @inferred(basemeasure(ef, x)) == getbasemeasure(T, conditioner)(x)
         @test all(@inferred(sufficientstatistics(ef, x)) .== map(f -> f(x), getsufficientstatistics(T, conditioner)))
diff --git a/test/distributions/gamma_inverse_tests.jl b/test/distributions/gamma_inverse_tests.jl
index 590507cc..7e3a72c8 100644
--- a/test/distributions/gamma_inverse_tests.jl
+++ b/test/distributions/gamma_inverse_tests.jl
@@ -45,7 +45,7 @@ end
 @testitem "GammaInverse: ExponentialFamilyDistribution" begin
     include("distributions_setuptests.jl")

-    for α in 10rand(4), θ in 10rand(4)
+    for α in (10rand(4) .+ 4.0), θ in 10rand(4)
         @testset let d = InverseGamma(α, θ)
             ef = test_exponentialfamily_interface(d; option_assume_no_allocations = true)

diff --git a/test/distributions/mv_normal_wishart_tests.jl b/test/distributions/mv_normal_wishart_tests.jl
index 6a8bf3e6..a3a30242 100644
--- a/test/distributions/mv_normal_wishart_tests.jl
+++ b/test/distributions/mv_normal_wishart_tests.jl
@@ -14,7 +14,7 @@ end
 @testitem "MvNormalWishart: ExponentialFamilyDistribution" begin
     include("distributions_setuptests.jl")

-    for dim in (3), invS in rand(Wishart(10, Array(Eye(dim))), 4)
+    for dim in (3,), invS in rand(Wishart(10, Array(Eye(dim))), 4)
         ν = dim + 2
         @testset let (d = MvNormalWishart(rand(dim), invS, rand(), ν))
             ef = test_exponentialfamily_interface(
@@ -25,7 +25,7 @@ end
                 test_fisherinformation_against_jacobian = false
             )

-            run_test_basic_functions(ef; assume_no_allocations = false, test_samples_logpdf = false)
+            run_test_basic_functions(d; assume_no_allocations = false, test_samples_logpdf = false)
         end
     end
 end
diff --git a/test/distributions/pareto_tests.jl b/test/distributions/pareto_tests.jl
index 2d3660a8..21ddc7a2 100644
--- a/test/distributions/pareto_tests.jl
+++ b/test/distributions/pareto_tests.jl
@@ -5,7 +5,7 @@
 @testitem "Pareto: ExponentialFamilyDistribution" begin
     include("distributions_setuptests.jl")

-    for shape in (1.0, 2.0, 3.0), scale in (0.25, 0.5, 2.0)
+    for shape in (5.0, 6.0, 7.0), scale in (0.25, 0.5, 2.0)
         @testset let d = Pareto(shape, scale)
             ef = test_exponentialfamily_interface(d; option_assume_no_allocations = false)
             η1 = -shape - 1
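# Usage sketch (not part of the patch above). The diff forwards `skewness` and
# `kurtosis` from the package-specific parameterizations and from
# `ExponentialFamilyDistribution` to the underlying Distributions.jl methods.
# A minimal, hedged example of what this enables, assuming `ExponentialFamily`
# and `Distributions` are both available in the environment:

using ExponentialFamily, Distributions

d = GammaShapeRate(2.0, 3.0)               # shape a = 2, rate b = 3
skewness(d) ≈ skewness(Gamma(2.0, 1 / 3))  # forwarded via convert(Gamma, d): 2 / sqrt(a)
kurtosis(d) ≈ kurtosis(Gamma(2.0, 1 / 3))  # excess kurtosis: 6 / a

n = NormalMeanPrecision(0.0, 2.0)
skewness(n) == 0.0                         # a Normal distribution is symmetric
kurtosis(n) == 0.0                         # excess kurtosis of a Normal is zero

# The same statistics work on the exponential-family form, which converts back
# to the base distribution internally (see the src/exponential_family.jl hunk).
ef = convert(ExponentialFamilyDistribution, Gamma(2.0, 1 / 3))
kurtosis(ef) ≈ kurtosis(Gamma(2.0, 1 / 3))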