diff --git a/.vscode/settings.json b/.vscode/settings.json
new file mode 100644
index 0000000..b9650c3
--- /dev/null
+++ b/.vscode/settings.json
@@ -0,0 +1,3 @@
+{
+    "julia.environmentPath": "/Users/au568658/Library/CloudStorage/OneDrive-Aarhusuniversitet/Academ/Projects/Software/ADTests.jl"
+}
\ No newline at end of file
diff --git a/Project.toml b/Project.toml
index 001c16d..e91ae5a 100644
--- a/Project.toml
+++ b/Project.toml
@@ -1,6 +1,8 @@
 [deps]
 ADTypes = "47edcb42-4c32-4615-8424-f2b9edc5f35b"
+AbstractGPs = "99985d1d-32ba-4be9-9821-2ec096f28918"
 Chairmarks = "0ca39b1e-fe0b-4e98-acfc-b1656634c4de"
+DelayDiffEq = "bcd4f6db-9728-5f36-b5f7-82caef46ccdb"
 DelimitedFiles = "8bb1440f-4735-579b-a4ab-409b98df4dab"
 DifferentiationInterface = "a0c0ee7d-e4b9-4e03-894e-1c5f64a51d63"
 Distributions = "31c24e10-a181-5473-b8eb-7969acd0382f"
@@ -9,8 +11,11 @@ DynamicPPL = "366bfd00-2699-11ea-058f-f148b4cae6d8"
 Enzyme = "7da242da-08ed-463a-9acd-ee780be4f1d9"
 FiniteDifferences = "26cc04aa-876d-5657-8c51-4c34ba976000"
 ForwardDiff = "f6369f11-7733-5829-9624-2563aa707210"
+Functors = "d9f16b24-f501-4c13-a1f2-28368ffc5196"
 LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
 LogDensityProblems = "6fdf6af0-433a-55f7-b3ed-c6c6e0b8df7c"
+LogExpFunctions = "2ab3a3ac-af41-5b50-aa03-7779005ae688"
+Lux = "b2108857-7c20-44ae-9111-449ecde12c47"
 MLDatasets = "eb30cadb-4394-5ae3-aed4-317e484a6458"
 Mooncake = "da2b9cff-9c12-43a0-ae48-6db2b0edb7d6"
 MultivariateStats = "6f286f6a-111f-5878-ab1e-185364afe411"
diff --git a/main.jl b/main.jl
index d37fb36..0c2b4c6 100644
--- a/main.jl
+++ b/main.jl
@@ -129,7 +129,10 @@ end
 @include_model "Effect of model size" "n500"
 @include_model "PosteriorDB" "pdb_eight_schools_centered"
 @include_model "PosteriorDB" "pdb_eight_schools_noncentered"
-@include_model "Integrations with other packages" "ordinary_diffeq"
+@include_model "External libraries" "delaydiffeq"
+@include_model "External libraries" "ordinarydiffeq"
+@include_model "External libraries" "lux_nn"
+@include_model "External libraries" "abstractgps"
 
 # The entry point to this script itself begins here
 if ARGS == ["--list-model-keys"]
diff --git a/models/abstractgps.jl b/models/abstractgps.jl
new file mode 100644
index 0000000..8109d36
--- /dev/null
+++ b/models/abstractgps.jl
@@ -0,0 +1,25 @@
+#=
+This is an example of using AbstractGPs.jl with Turing to model a latent Gaussian process.
+The model is adapted from the Turing documentation:
+https://turinglang.org/docs/tutorials/gaussian-processes-introduction/
+=#
+
+using AbstractGPs
+using LogExpFunctions
+
+# Observed data: distances, with y successes out of n binomial trials
+distance = [2.0, 3.0, 4.0, 5.0, 6.0]
+n = [1443, 694, 455, 353, 272]
+y = [1346, 577, 337, 208, 149]
+
+# Make Turing model
+@model function abstractgps(d, n, y; jitter=1e-4)
+    v ~ Gamma(2, 1)
+    l ~ Gamma(4, 1)
+    f = GP(v * with_lengthscale(SEKernel(), l))
+    f_latent ~ f(d, jitter)
+    y ~ product_distribution(Binomial.(n, logistic.(f_latent)))
+    return (fx=f(d, jitter), f_latent=f_latent, y=y)
+end
+
+model = abstractgps(distance, n, y)
diff --git a/models/delaydiffeq.jl b/models/delaydiffeq.jl
new file mode 100644
index 0000000..67a7001
--- /dev/null
+++ b/models/delaydiffeq.jl
@@ -0,0 +1,42 @@
+#=
+This is an example of using DelayDiffEq.jl with Turing to model the delayed Lotka–Volterra equations (a predator–prey model).
+The model is adapted from the Turing documentation:
+https://turinglang.org/docs/tutorials/bayesian-differential-equations/
+=#
+using DelayDiffEq: DDEProblem, solve, MethodOfSteps, Tsit5
+
+# SciMLSensitivity is needed for reverse-mode AD on differential equations
+import SciMLSensitivity
+
+function delay_lotka_volterra(du, u, h, p, t)
+    α, β, γ, δ = p
+    x, y = u
+    du[1] = α * h(p, t - 1; idxs=1) - β * x * y
+    du[2] = -γ * y + δ * x * y
+    return nothing
+end
+p = (1.5, 1.0, 3.0, 1.0)
+u0 = [1.0; 1.0]
+tspan = (0.0, 10.0)
+h(p, t; idxs::Int) = 1.0
+prob_dde = DDEProblem(delay_lotka_volterra, u0, h, tspan, p)
+sol_dde = solve(prob_dde; saveat=0.1)
+q = 1.7
+ddedata = rand.(Poisson.(q .* Array(sol_dde)))
+
+@model function delaydiffeq(data, prob)
+    α ~ truncated(Normal(1.5, 0.2); lower=0.5, upper=2.5)
+    β ~ truncated(Normal(1.1, 0.2); lower=0, upper=2)
+    γ ~ truncated(Normal(3.0, 0.2); lower=1, upper=4)
+    δ ~ truncated(Normal(1.0, 0.2); lower=0, upper=2)
+    q ~ truncated(Normal(1.7, 0.2); lower=0, upper=3)
+    p = [α, β, γ, δ]
+    predicted = solve(prob, MethodOfSteps(Tsit5()); p=p, saveat=0.1, abstol=1e-6, reltol=1e-6)
+    ϵ = 1e-5
+    for i in eachindex(predicted)
+        data[:, i] ~ arraydist(Poisson.(q .* predicted[i] .+ ϵ))
+    end
+    return nothing
+end
+
+model = delaydiffeq(ddedata, prob_dde)
diff --git a/models/lux_nn.jl b/models/lux_nn.jl
new file mode 100644
index 0000000..da19a73
--- /dev/null
+++ b/models/lux_nn.jl
@@ -0,0 +1,77 @@
+#=
+This is an example of using Lux.jl with Turing to implement a Bayesian neural network.
+The model is adapted from the Turing documentation:
+https://turinglang.org/docs/tutorials/bayesian-neural-networks/
+=#
+using Lux
+using Random
+using LinearAlgebra
+using Functors
+
+## Simulate data ##
+# Number of points to generate
+N = 80
+M = round(Int, N / 4)
+rng = Random.default_rng()
+Random.seed!(rng, 1234)
+
+# Generate artificial data
+x1s = rand(Float32, M) * 4.5f0
+x2s = rand(Float32, M) * 4.5f0
+xt1s = Array([[x1s[i] + 0.5f0; x2s[i] + 0.5f0] for i in 1:M])
+x1s = rand(Float32, M) * 4.5f0
+x2s = rand(Float32, M) * 4.5f0
+append!(xt1s, Array([[x1s[i] - 5.0f0; x2s[i] - 5.0f0] for i in 1:M]))
+
+x1s = rand(Float32, M) * 4.5f0
+x2s = rand(Float32, M) * 4.5f0
+xt0s = Array([[x1s[i] + 0.5f0; x2s[i] - 5.0f0] for i in 1:M])
+x1s = rand(Float32, M) * 4.5f0
+x2s = rand(Float32, M) * 4.5f0
+append!(xt0s, Array([[x1s[i] - 5.0f0; x2s[i] + 0.5f0] for i in 1:M]))
+
+# Store all the data for later
+xs = [xt1s; xt0s]
+ts = [ones(2 * M); zeros(2 * M)]
+
+## Create neural network ##
+# Construct a neural network using Lux
+nn_initial = Chain(Dense(2 => 3, tanh), Dense(3 => 2, tanh), Dense(2 => 1, σ))
+
+# Initialize the model weights and state
+ps, st = Lux.setup(rng, nn_initial)
+
+# Create a regularization term and a Gaussian prior variance term.
+alpha = 0.09
+sigma = sqrt(1.0 / alpha)
+
+function vector_to_parameters(ps_new::AbstractVector, ps::NamedTuple)
+    @assert length(ps_new) == Lux.parameterlength(ps)
+    i = 1
+    function get_ps(x)
+        z = reshape(view(ps_new, i:(i + length(x) - 1)), size(x))
+        i += length(x)
+        return z
+    end
+    return fmap(get_ps, ps)
+end
+
+const nn = StatefulLuxLayer{true}(nn_initial, nothing, st)
+
+## Create Turing model ##
+# Specify the probabilistic model.
+@model function lux_nn(xs, ts; sigma = sigma, ps = ps, nn = nn)
+    # Sample the parameters
+    nparameters = Lux.parameterlength(nn_initial)
+    parameters ~ MvNormal(zeros(nparameters), Diagonal(abs2.(sigma .* ones(nparameters))))
+
+    # Forward NN to make predictions
+    preds = Lux.apply(nn, xs, f32(vector_to_parameters(parameters, ps)))
+
+    # Observe each prediction.
+    for i in eachindex(ts)
+        ts[i] ~ Bernoulli(preds[i])
+    end
+end
+
+model = lux_nn(reduce(hcat, xs), ts)
diff --git a/models/ordinary_diffeq.jl b/models/ordinarydiffeq.jl
similarity index 93%
rename from models/ordinary_diffeq.jl
rename to models/ordinarydiffeq.jl
index 7ad83c7..264a8f3 100644
--- a/models/ordinary_diffeq.jl
+++ b/models/ordinarydiffeq.jl
@@ -20,7 +20,7 @@ sol = solve(prob, Tsit5(); saveat = 0.1)
 q = 1.7
 odedata = rand.(Poisson.(q * Array(sol)))
 
-@model function ordinary_diffeq(data, prob)
+@model function ordinarydiffeq(data, prob)
     α ~ truncated(Normal(1.5, 0.2); lower = 0.5, upper = 2.5)
     β ~ truncated(Normal(1.1, 0.2); lower = 0, upper = 2)
     γ ~ truncated(Normal(3.0, 0.2); lower = 1, upper = 4)
@@ -34,4 +34,4 @@ odedata = rand.(Poisson.(q * Array(sol)))
     return nothing
 end
 
-model = ordinary_diffeq(odedata, prob)
+model = ordinarydiffeq(odedata, prob)
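
A standalone smoke test for one of the new model files might look like the following. It assumes, as the existing model files suggest, that Turing (and hence Distributions) is supplied by the including scope rather than imported in each file; the sampler settings are illustrative only, not what main.jl actually runs.

    using Turing, AbstractGPs, LogExpFunctions

    # `include` brings in the data, the @model definition, and `model` itself
    include("models/abstractgps.jl")

    # Short NUTS run purely to check that the model evaluates and samples
    chain = sample(model, NUTS(), 100; progress=false)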
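Since the repository benchmarks AD backends, each model is ultimately wrapped as a log-density problem and differentiated. A minimal sketch of that wrapping via DynamicPPL's LogDensityProblems interface, using a hypothetical two-parameter toy model for brevity (the actual harness in main.jl may wire this differently):

    using Turing, DynamicPPL, LogDensityProblems, ForwardDiff

    # Hypothetical stand-in for any of the models above
    @model function toy(y)
        x ~ Normal()
        y ~ Normal(x, 1)
    end

    # Wrap the model so it exposes the LogDensityProblems.jl interface
    ldf = DynamicPPL.LogDensityFunction(toy(0.5))
    θ = randn(LogDensityProblems.dimension(ldf))
    lp = LogDensityProblems.logdensity(ldf, θ)

    # Gradient of the log density at θ, here with ForwardDiff
    grad = ForwardDiff.gradient(z -> LogDensityProblems.logdensity(ldf, z), θ)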
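The one piece of glue code in models/lux_nn.jl worth exercising on its own is vector_to_parameters, which maps the flat vector Turing samples back onto Lux's nested NamedTuple of weights and biases. A round-trip check, where the small Chain and the layer_1 field name are only illustrative Lux defaults:

    using Lux, Random, Functors

    # Same helper as in models/lux_nn.jl: consume the flat vector leaf by leaf
    function vector_to_parameters(ps_new::AbstractVector, ps::NamedTuple)
        @assert length(ps_new) == Lux.parameterlength(ps)
        i = 1
        function get_ps(x)
            z = reshape(view(ps_new, i:(i + length(x) - 1)), size(x))
            i += length(x)
            return z
        end
        return fmap(get_ps, ps)
    end

    ps, _ = Lux.setup(Random.default_rng(), Chain(Dense(2 => 3, tanh), Dense(3 => 1)))
    flat = randn(Float32, Lux.parameterlength(ps))
    restored = vector_to_parameters(flat, ps)

    # Shapes (and leaf order) survive the round trip
    @assert size(restored.layer_1.weight) == size(ps.layer_1.weight)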