
Commit

Move LatentGP to AbstractGPs.jl (#25)
* Move latent_gp from LatentGPs.jl

* Manually define gaussian likelihood

* Bump patch version
sharanry authored Jul 6, 2020
1 parent ec5cf12 commit ac4f85f
Showing 5 changed files with 50 additions and 2 deletions.
2 changes: 1 addition & 1 deletion Project.toml
@@ -1,7 +1,7 @@
name = "AbstractGPs"
uuid = "99985d1d-32ba-4be9-9821-2ec096f28918"
authors = ["willtebbutt <[email protected]>"]
version = "0.2.0"
version = "0.2.1"

[deps]
Distributions = "31c24e10-a181-5473-b8eb-7969acd0382f"
5 changes: 4 additions & 1 deletion src/AbstractGPs.jl
@@ -10,7 +10,7 @@ module AbstractGPs

export GP, mean, cov, std, cov_diag, mean_and_cov, marginals, rand,
logpdf, elbo, dtc, posterior, approx_posterior, VFE, DTC, AbstractGP, sampleplot,
update_approx_posterior
update_approx_posterior, LatentGP

# Various bits of utility functionality.
include(joinpath("util", "common_covmat_ops.jl"))
@@ -27,6 +27,9 @@ module AbstractGPs
include(joinpath("posterior_gp", "posterior_gp.jl"))
include(joinpath("posterior_gp", "approx_posterior_gp.jl"))

# LatentGP object to accommodate GPs with non-Gaussian likelihoods.
include(joinpath("latent_gp", "latent_gp.jl"))

# Plotting utilities.
include(joinpath("util", "plotting.jl"))
end # module
30 changes: 30 additions & 0 deletions src/latent_gp/latent_gp.jl
@@ -0,0 +1,30 @@
"""
LatentGP(f<:GP, lik)
- `fx` is a `FiniteGP`.
- `lik` is the log likelihood function which maps sample from f to corresposing
conditional likelihood distributions.
"""
struct LatentGP{T<:AbstractGPs.FiniteGP, S}
fx::T
lik::S
end

function Distributions.rand(rng::AbstractRNG, lgp::LatentGP)
f = rand(rng, lgp.fx)
y = rand(rng, lgp.lik(f))
return (f=f, y=y)
end

"""
logpdf(lgp::LatentGP, y::NamedTuple{(:f, :y)})
```math
log p(y, f; x)
```
Returns the joint log density of the gaussian process output `f` and real output `y`.
"""
function Distributions.logpdf(lgp::LatentGP, y::NamedTuple{(:f, :y)})
return logpdf(lgp.fx, y.f) + logpdf(lgp.lik(y.f), y.y)
end
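
For context, here is a minimal usage sketch of the new `LatentGP` type (not part of this commit). The kernel, input size, and Poisson observation model below are illustrative assumptions, with the likelihood built from Distributions.jl and the kernel from KernelFunctions.jl:

using AbstractGPs, KernelFunctions, Distributions, Random

x = rand(20)                               # illustrative input locations
fx = GP(SqExponentialKernel())(x, 1e-5)    # FiniteGP: latent prior at x, with jitter

# Assumed non-Gaussian observation model: each latent value is passed through exp
# and used as the rate of an independent Poisson.
lik(f) = product_distribution(Poisson.(exp.(f)))

lgp = LatentGP(fx, lik)

sample = rand(MersenneTwister(1), lgp)     # NamedTuple (f = ..., y = ...)
logpdf(lgp, sample)                        # joint log density log p(y, f; x)
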
13 changes: 13 additions & 0 deletions test/latent_gp/latent_gp.jl
@@ -0,0 +1,13 @@
@testset "latent_gp" begin
gp = GP(SqExponentialKernel())
x = rand(10)
y = rand(10)
fx = gp(x, 1e-5)

lgp = LatentGP(fx, x -> MvNormal(x, 0.1))
@test typeof(lgp) <: LatentGP
@test typeof(lgp.fx) <: AbstractGPs.FiniteGP
f = rand(10)
@test typeof(logpdf(lgp, (f=f, y=y))) <: Real
@test typeof(rand(lgp)) <: NamedTuple{(:f, :y)}
end
2 changes: 2 additions & 0 deletions test/runtests.jl
@@ -37,6 +37,8 @@ include("test_util.jl")
include(joinpath("posterior_gp", "approx_posterior_gp.jl"))
end

include(joinpath("latent_gp", "latent_gp.jl"))

include(joinpath("util", "plotting.jl"))

@testset "doctests" begin

2 comments on commit ac4f85f

@sharanry (Contributor, Author)

@JuliaRegistrator register()

@JuliaRegistrator

Registration pull request created: JuliaRegistries/General/17520

After the above pull request is merged, it is recommended that a tag is created on this repository for the registered package version.

This will be done automatically if the Julia TagBot GitHub Action is installed, or can be done manually through the GitHub interface, or via:

git tag -a v0.2.1 -m "<description of version>" ac4f85f881e5a7ecf7b04bb627dd942d811809de
git push origin v0.2.1
