From 208c146edcba57ac817a974f8b5f5e85772caaab Mon Sep 17 00:00:00 2001
From: Peter Thestrup Waade
Date: Thu, 2 Oct 2025 11:50:34 +0200
Subject: [PATCH 01/12] Renamed miscellaneous to external packages

---
 main.jl | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/main.jl b/main.jl
index 51219a1..27a6ed2 100644
--- a/main.jl
+++ b/main.jl
@@ -114,7 +114,7 @@ end
 @include_model "Effect of model size" "n500"
 @include_model "PosteriorDB" "pdb_eight_schools_centered"
 @include_model "PosteriorDB" "pdb_eight_schools_noncentered"
-@include_model "Miscellaneous features" "metabayesian_MH"
+@include_model "External libraries" "metabayesian_MH"
 
 # The entry point to this script itself begins here
 if ARGS == ["--list-model-keys"]

From e009f963147b23f1224986e1b2ec888fe88f2868 Mon Sep 17 00:00:00 2001
From: Peter Thestrup Waade
Date: Thu, 2 Oct 2025 11:53:18 +0200
Subject: [PATCH 02/12] renamed metabayesian_Turing_HMC

---
 main.jl                                                     | 2 +-
 ...etabayesian_MH.jl => metabayesian_Turing_AdvancedHMC.jl} | 6 +++---
 2 files changed, 4 insertions(+), 4 deletions(-)
 rename models/{metabayesian_MH.jl => metabayesian_Turing_AdvancedHMC.jl} (90%)

diff --git a/main.jl b/main.jl
index 27a6ed2..c89f4f0 100644
--- a/main.jl
+++ b/main.jl
@@ -114,7 +114,7 @@ end
 @include_model "Effect of model size" "n500"
 @include_model "PosteriorDB" "pdb_eight_schools_centered"
 @include_model "PosteriorDB" "pdb_eight_schools_noncentered"
-@include_model "External libraries" "metabayesian_MH"
+@include_model "External libraries" "metabayesian_Turing_AdvancedHMC"
 
 # The entry point to this script itself begins here
 if ARGS == ["--list-model-keys"]
diff --git a/models/metabayesian_MH.jl b/models/metabayesian_Turing_AdvancedHMC.jl
similarity index 90%
rename from models/metabayesian_MH.jl
rename to models/metabayesian_Turing_AdvancedHMC.jl
index 377bd7f..0a31e04 100644
--- a/models/metabayesian_MH.jl
+++ b/models/metabayesian_Turing_AdvancedHMC.jl
@@ -1,7 +1,7 @@
 #=
 This is a "meta-Bayesian" model, where the generative model includes an inversion of a different generative model.
 These types of models are common in cognitive modelling, where systems of interest (e.g. human subjects) are thought to use Bayesian inference to navigate their environment.
-Here we use a Metropolis-Hastings sampler implemented with Turing as the inversion of the inner "subjective" model.
+Here we use a Metropolis-Hastings sampler implemented with Turing and AdvancedHMC as the inversion of the inner "subjective" model.
 =#
 
 using Random: Xoshiro
@@ -14,7 +14,7 @@ end
 
 # Outer model function
-@model function metabayesian_MH(
+@model function metabayesian_Turing_AdvancedHMC(
     observation,
     action,
     inner_sampler = MH(),
@@ -41,4 +41,4 @@ end
     action ~ Normal(subj_mean_expectationₜ, β)
 end
 
-model = metabayesian_MH(0.0, 1.0)
+model = metabayesian_Turing_AdvancedHMC(0.0, 1.0)

From 1bba1fa67f413842d7a11b9f9804619120fd23e8 Mon Sep 17 00:00:00 2001
From: Peter Thestrup Waade
Date: Thu, 2 Oct 2025 11:55:04 +0200
Subject: [PATCH 03/12] fixed previous renaming

---
 main.jl                                                     | 2 +-
 ..._AdvancedHMC.jl => metabayesian_Turing_AdvancedMH_MH.jl} | 6 +++---
 2 files changed, 4 insertions(+), 4 deletions(-)
 rename models/{metabayesian_Turing_AdvancedHMC.jl => metabayesian_Turing_AdvancedMH_MH.jl} (90%)

diff --git a/main.jl b/main.jl
index c89f4f0..a3d503a 100644
--- a/main.jl
+++ b/main.jl
@@ -114,7 +114,7 @@ end
 @include_model "Effect of model size" "n500"
 @include_model "PosteriorDB" "pdb_eight_schools_centered"
 @include_model "PosteriorDB" "pdb_eight_schools_noncentered"
-@include_model "External libraries" "metabayesian_Turing_AdvancedHMC"
+@include_model "External libraries" "metabayesian_Turing_AdvancedMH_MH"
 
 # The entry point to this script itself begins here
 if ARGS == ["--list-model-keys"]
diff --git a/models/metabayesian_Turing_AdvancedHMC.jl b/models/metabayesian_Turing_AdvancedMH_MH.jl
similarity index 90%
rename from models/metabayesian_Turing_AdvancedHMC.jl
rename to models/metabayesian_Turing_AdvancedMH_MH.jl
index 0a31e04..b075645 100644
--- a/models/metabayesian_Turing_AdvancedHMC.jl
+++ b/models/metabayesian_Turing_AdvancedMH_MH.jl
@@ -1,7 +1,7 @@
 #=
 This is a "meta-Bayesian" model, where the generative model includes an inversion of a different generative model.
 These types of models are common in cognitive modelling, where systems of interest (e.g. human subjects) are thought to use Bayesian inference to navigate their environment.
-Here we use a Metropolis-Hastings sampler implemented with Turing and AdvancedHMC as the inversion of the inner "subjective" model.
+Here we use a Metropolis-Hastings sampler implemented with Turing and AdvancedMH as the inversion of the inner "subjective" model.
 =#
 
 using Random: Xoshiro
@@ -14,7 +14,7 @@ end
 
 # Outer model function
-@model function metabayesian_Turing_AdvancedHMC(
+@model function metabayesian_Turing_AdvancedMH_MH(
     observation,
     action,
     inner_sampler = MH(),
@@ -41,4 +41,4 @@ end
     action ~ Normal(subj_mean_expectationₜ, β)
 end
 
-model = metabayesian_Turing_AdvancedHMC(0.0, 1.0)
+model = metabayesian_Turing_AdvancedMH_MH(0.0, 1.0)

From 9c54e0fd19cdcaeeb5c822a59a8d58e3ff5671c6 Mon Sep 17 00:00:00 2001
From: Peter Thestrup Waade
Date: Thu, 2 Oct 2025 13:28:51 +0200
Subject: [PATCH 04/12] added DifferentialEquations

---
 .vscode/settings.json                  |  3 ++
 Project.toml                           |  1 +
 main.jl                                |  2 +
 models/DifferentialEquations_lv_DDE.jl | 55 +++++++++++++++++++++++++
 models/DifferentialEquations_lv_ODE.jl | 56 ++++++++++++++++++++++++++
 5 files changed, 117 insertions(+)
 create mode 100644 .vscode/settings.json
 create mode 100644 models/DifferentialEquations_lv_DDE.jl
 create mode 100644 models/DifferentialEquations_lv_ODE.jl

diff --git a/.vscode/settings.json b/.vscode/settings.json
new file mode 100644
index 0000000..b9650c3
--- /dev/null
+++ b/.vscode/settings.json
@@ -0,0 +1,3 @@
+{
+    "julia.environmentPath": "/Users/au568658/Library/CloudStorage/OneDrive-Aarhusuniversitet/Academ/Projects/Software/ADTests.jl"
+}
\ No newline at end of file
diff --git a/Project.toml b/Project.toml
index af96777..ccd6c32 100644
--- a/Project.toml
+++ b/Project.toml
@@ -1,6 +1,7 @@
 [deps]
 ADTypes = "47edcb42-4c32-4615-8424-f2b9edc5f35b"
 Chairmarks = "0ca39b1e-fe0b-4e98-acfc-b1656634c4de"
+DifferentialEquations = "0c46a032-eb83-5123-abaf-570d42b7fbaa"
 DifferentiationInterface = "a0c0ee7d-e4b9-4e03-894e-1c5f64a51d63"
 Distributions = "31c24e10-a181-5473-b8eb-7969acd0382f"
 DynamicPPL = "366bfd00-2699-11ea-058f-f148b4cae6d8"
diff --git a/main.jl b/main.jl
index a3d503a..9a885c0 100644
--- a/main.jl
+++ b/main.jl
@@ -115,6 +115,8 @@ end
 @include_model "PosteriorDB" "pdb_eight_schools_centered"
 @include_model "PosteriorDB" "pdb_eight_schools_noncentered"
 @include_model "External libraries" "metabayesian_Turing_AdvancedMH_MH"
+@include_model "External libraries" "DifferentialEquations_lv_ODE"
+@include_model "External libraries" "DifferentialEquations_lv_DDE"
 
 # The entry point to this script itself begins here
 if ARGS == ["--list-model-keys"]
diff --git a/models/DifferentialEquations_lv_DDE.jl b/models/DifferentialEquations_lv_DDE.jl
new file mode 100644
index 0000000..e72b4da
--- /dev/null
+++ b/models/DifferentialEquations_lv_DDE.jl
@@ -0,0 +1,55 @@
+#=
+This is an implementation of using DIfferentialEquations.jl with Turing to model the Lotka–Volterra equations (predator-prey model).
+The model is adapted from the Turing documentation: https://turinglang.org/docs/tutorials/bayesian-differential-equations/
+=#
+using DifferentialEquations
+using Distributions
+
+function delay_lotka_volterra(du, u, h, p, t)
+    # Model parameters.
+    α, β, γ, δ = p
+
+    # Current state.
+    x, y = u
+    # Evaluate differential equations
+    du[1] = α * h(p, t - 1; idxs=1) - β * x * y
+    du[2] = -γ * y + δ * x * y
+
+    return nothing
+end
+
+# Define initial-value problem.
+p = (1.5, 1.0, 3.0, 1.0)
+u0 = [1.0; 1.0]
+tspan = (0.0, 10.0)
+h(p, t; idxs::Int) = 1.0
+prob_dde = DDEProblem(delay_lotka_volterra, u0, h, tspan, p)
+
+# Simulate data
+sol_dde = solve(prob_dde; saveat=0.1)
+ddedata = rand.(Poisson.(q .* Array(sol_dde)))
+
+# Create Turing model
+@model function DifferentialEquations_lv_DDE(data, prob)
+    # Prior distributions.
+    α ~ truncated(Normal(1.5, 0.2); lower=0.5, upper=2.5)
+    β ~ truncated(Normal(1.1, 0.2); lower=0, upper=2)
+    γ ~ truncated(Normal(3.0, 0.2); lower=1, upper=4)
+    δ ~ truncated(Normal(1.0, 0.2); lower=0, upper=2)
+    q ~ truncated(Normal(1.7, 0.2); lower=0, upper=3)
+
+    # Simulate Lotka–Volterra model.
+    p = [α, β, γ, δ]
+    predicted = solve(prob, MethodOfSteps(Tsit5()); p=p, saveat=0.1, abstol=1e-6, reltol=1e-6)
+    ϵ = 1e-5
+
+    # Observations.
+    for i in eachindex(predicted)
+        data[:, i] ~ arraydist(Poisson.(q .* predicted[i] .+ ϵ))
+    end
+
+    return nothing
+end
+
+# Instantiate the model
+model = DifferentialEquations_lv_DDE(ddedata, prob)
\ No newline at end of file
diff --git a/models/DifferentialEquations_lv_ODE.jl b/models/DifferentialEquations_lv_ODE.jl
new file mode 100644
index 0000000..5945779
--- /dev/null
+++ b/models/DifferentialEquations_lv_ODE.jl
@@ -0,0 +1,56 @@
+#=
+This is an implementation of using DIfferentialEquations.jl with Turing to model the Lotka–Volterra equations (predator-prey model).
+The model is adapted from the Turing documentation: https://turinglang.org/docs/tutorials/bayesian-differential-equations/
+=#
+using DifferentialEquations
+using Distributions
+
+# Define Lotka–Volterra model
+function lotka_volterra(du, u, p, t)
+    # Model parameters.
+    α, β, γ, δ = p
+    # Current state.
+    x, y = u
+
+    # Evaluate differential equations.
+    du[1] = (α - β * y) * x # prey
+    du[2] = (δ * x - γ) * y # predator
+
+    return nothing
+end
+
+# Define initial-value problem
+u0 = [1.0, 1.0]
+p = [1.5, 1.0, 3.0, 1.0]
+tspan = (0.0, 10.0)
+prob = ODEProblem(lotka_volterra, u0, tspan, p)
+
+# Simulate data
+sol = solve(prob, Tsit5(); saveat=0.1)
+q = 1.7
+odedata = rand.(Poisson.(q * Array(sol)))
+
+# Create Turing model
+@model function DifferentialEquations_lv_ODE(data, prob)
+    # Prior distributions.
+    α ~ truncated(Normal(1.5, 0.2); lower=0.5, upper=2.5)
+    β ~ truncated(Normal(1.1, 0.2); lower=0, upper=2)
+    γ ~ truncated(Normal(3.0, 0.2); lower=1, upper=4)
+    δ ~ truncated(Normal(1.0, 0.2); lower=0, upper=2)
+    q ~ truncated(Normal(1.7, 0.2); lower=0, upper=3)
+
+    # Simulate Lotka–Volterra model.
+    p = [α, β, γ, δ]
+    predicted = solve(prob, Tsit5(); p=p, saveat=0.1, abstol=1e-6, reltol=1e-6)
+    ϵ = 1e-5
+
+    # Observations.
+    for i in eachindex(predicted)
+        data[:, i] ~ arraydist(Poisson.(q .* predicted[i] .+ ϵ))
+    end
+
+    return nothing
+end
+
+# Instantiate the model
+model = DifferentialEquations_lv_ODE(odedata, prob)
\ No newline at end of file

From 8cf4f7334b08d3ada0638757522f8a47d5d34e61 Mon Sep 17 00:00:00 2001
From: Peter Thestrup Waade
Date: Thu, 2 Oct 2025 13:41:50 +0200
Subject: [PATCH 05/12] added Lux, updated DifferentialEquations

---
 Project.toml                           |  2 ++
 main.jl                                |  1 +
 models/DifferentialEquations_lv_DDE.jl |  2 +-
 models/DifferentialEquations_lv_ODE.jl |  2 +-
 models/Lux_neural_network.jl           | 79 ++++++++++++++++++++++++++
 5 files changed, 84 insertions(+), 2 deletions(-)
 create mode 100644 models/Lux_neural_network.jl

diff --git a/Project.toml b/Project.toml
index ccd6c32..685e022 100644
--- a/Project.toml
+++ b/Project.toml
@@ -8,8 +8,10 @@ DynamicPPL = "366bfd00-2699-11ea-058f-f148b4cae6d8"
 Enzyme = "7da242da-08ed-463a-9acd-ee780be4f1d9"
 FiniteDifferences = "26cc04aa-876d-5657-8c51-4c34ba976000"
 ForwardDiff = "f6369f11-7733-5829-9624-2563aa707210"
+Functors = "d9f16b24-f501-4c13-a1f2-28368ffc5196"
 LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
 LogDensityProblems = "6fdf6af0-433a-55f7-b3ed-c6c6e0b8df7c"
+Lux = "b2108857-7c20-44ae-9111-449ecde12c47"
 Mooncake = "da2b9cff-9c12-43a0-ae48-6db2b0edb7d6"
 Random = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c"
 ReverseDiff = "37e2e3b7-166d-5795-8a7a-e32c996b4267"
diff --git a/main.jl b/main.jl
index 9a885c0..27b0d8b 100644
--- a/main.jl
+++ b/main.jl
@@ -117,6 +117,7 @@ end
 @include_model "External libraries" "metabayesian_Turing_AdvancedMH_MH"
 @include_model "External libraries" "DifferentialEquations_lv_ODE"
 @include_model "External libraries" "DifferentialEquations_lv_DDE"
+@include_model "External libraries" "Lux_neural_network"
 
 # The entry point to this script itself begins here
 if ARGS == ["--list-model-keys"]
diff --git a/models/DifferentialEquations_lv_DDE.jl b/models/DifferentialEquations_lv_DDE.jl
index e72b4da..4e5942e 100644
--- a/models/DifferentialEquations_lv_DDE.jl
+++ b/models/DifferentialEquations_lv_DDE.jl
@@ -1,5 +1,5 @@
 #=
-This is an implementation of using DIfferentialEquations.jl with Turing to model the Lotka–Volterra equations (predator-prey model).
+This is an implementation of using DifferentialEquations.jl with Turing to model a delayed version of the Lotka–Volterra equations (predator-prey model).
 The model is adapted from the Turing documentation: https://turinglang.org/docs/tutorials/bayesian-differential-equations/
 =#
 using DifferentialEquations
 using Distributions
diff --git a/models/DifferentialEquations_lv_ODE.jl b/models/DifferentialEquations_lv_ODE.jl
index 5945779..7f244d8 100644
--- a/models/DifferentialEquations_lv_ODE.jl
+++ b/models/DifferentialEquations_lv_ODE.jl
@@ -1,5 +1,5 @@
 #=
-This is an implementation of using DIfferentialEquations.jl with Turing to model the Lotka–Volterra equations (predator-prey model).
+This is an implementation of using DifferentialEquations.jl with Turing to model the Lotka–Volterra equations (predator-prey model).
 The model is adapted from the Turing documentation: https://turinglang.org/docs/tutorials/bayesian-differential-equations/
 =#
 using DifferentialEquations
 using Distributions
diff --git a/models/Lux_neural_network.jl b/models/Lux_neural_network.jl
new file mode 100644
index 0000000..66f578c
--- /dev/null
+++ b/models/Lux_neural_network.jl
@@ -0,0 +1,79 @@
+#=
+This is an implementation of using Lux.jl with Turing to implement a Bayesian neural network.
+The model is adapted from the Turing documentation: https://turinglang.org/docs/tutorials/bayesian-neural-networks/
+=#
+using Lux
+using Random
+using LinearAlgebra
+using Functors
+
+
+## Simulate data ##
+# Number of points to generate
+N = 80
+M = round(Int, N / 4)
+rng = Random.default_rng()
+Random.seed!(rng, 1234)
+
+# Generate artificial data
+x1s = rand(rng, Float32, M) * 4.5f0;
+x2s = rand(rng, Float32, M) * 4.5f0;
+xt1s = Array([[x1s[i] + 0.5f0; x2s[i] + 0.5f0] for i in 1:M])
+x1s = rand(rng, Float32, M) * 4.5f0;
+x2s = rand(rng, Float32, M) * 4.5f0;
+append!(xt1s, Array([[x1s[i] - 5.0f0; x2s[i] - 5.0f0] for i in 1:M]))
+
+x1s = rand(rng, Float32, M) * 4.5f0;
+x2s = rand(rng, Float32, M) * 4.5f0;
+xt0s = Array([[x1s[i] + 0.5f0; x2s[i] - 5.0f0] for i in 1:M])
+x1s = rand(rng, Float32, M) * 4.5f0;
+x2s = rand(rng, Float32, M) * 4.5f0;
+append!(xt0s, Array([[x1s[i] - 5.0f0; x2s[i] + 0.5f0] for i in 1:M]))
+
+# Store all the data for later
+xs = [xt1s; xt0s]
+ts = [ones(2 * M); zeros(2 * M)]
+
+
+## Create neural network ##
+# Construct a neural network using Lux
+nn_initial = Chain(Dense(2 => 3, tanh), Dense(3 => 2, tanh), Dense(2 => 1, σ))
+
+# Initialize the model weights and state
+ps, st = Lux.setup(rng, nn_initial)
+
+# Create a regularization term and a Gaussian prior variance term.
+alpha = 0.09
+sigma = sqrt(1.0 / alpha)
+
+function vector_to_parameters(ps_new::AbstractVector, ps::NamedTuple)
+    @assert length(ps_new) == Lux.parameterlength(ps)
+    i = 1
+    function get_ps(x)
+        z = reshape(view(ps_new, i:(i + length(x) - 1)), size(x))
+        i += length(x)
+        return z
+    end
+    return fmap(get_ps, ps)
+end
+
+const nn = StatefulLuxLayer{true}(nn_initial, nothing, st)
+
+
+## Create Turing model ##
+# Specify the probabilistic model.
+@model function Lux_neural_network(xs, ts; sigma = sigma, ps = ps, nn = nn)
+    # Sample the parameters
+    nparameters = Lux.parameterlength(nn_initial)
+    parameters ~ MvNormal(zeros(nparameters), Diagonal(abs2.(sigma .* ones(nparameters))))
+
+    # Forward NN to make predictions
+    preds = Lux.apply(nn, xs, f32(vector_to_parameters(parameters, ps)))
+
+    # Observe each prediction.
+    for i in eachindex(ts)
+        ts[i] ~ Bernoulli(preds[i])
+    end
+end
+
+model = Lux_neural_network(reduce(hcat, xs), ts)
\ No newline at end of file

From 4f33fcd2b1795ff3412e1d1e28ac0d0c723a9967 Mon Sep 17 00:00:00 2001
From: Peter Thestrup Waade
Date: Thu, 2 Oct 2025 13:50:53 +0200
Subject: [PATCH 06/12] added AbstractGPs

---
 Project.toml             |  3 +++
 main.jl                  |  1 +
 models/AbstractGPs_GP.jl | 25 +++++++++++++++++++++++++
 3 files changed, 29 insertions(+)
 create mode 100644 models/AbstractGPs_GP.jl

diff --git a/Project.toml b/Project.toml
index 685e022..01e31a7 100644
--- a/Project.toml
+++ b/Project.toml
@@ -1,6 +1,8 @@
 [deps]
 ADTypes = "47edcb42-4c32-4615-8424-f2b9edc5f35b"
+AbstractGPs = "99985d1d-32ba-4be9-9821-2ec096f28918"
 Chairmarks = "0ca39b1e-fe0b-4e98-acfc-b1656634c4de"
+DataFrames = "a93c6f00-e57d-5684-b7b6-d8193f3e46c0"
 DifferentialEquations = "0c46a032-eb83-5123-abaf-570d42b7fbaa"
 DifferentiationInterface = "a0c0ee7d-e4b9-4e03-894e-1c5f64a51d63"
 Distributions = "31c24e10-a181-5473-b8eb-7969acd0382f"
@@ -11,6 +13,7 @@ ForwardDiff = "f6369f11-7733-5829-9624-2563aa707210"
 Functors = "d9f16b24-f501-4c13-a1f2-28368ffc5196"
 LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
 LogDensityProblems = "6fdf6af0-433a-55f7-b3ed-c6c6e0b8df7c"
+LogExpFunctions = "2ab3a3ac-af41-5b50-aa03-7779005ae688"
 Lux = "b2108857-7c20-44ae-9111-449ecde12c47"
 Mooncake = "da2b9cff-9c12-43a0-ae48-6db2b0edb7d6"
 Random = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c"
diff --git a/main.jl b/main.jl
index 27b0d8b..de98d58 100644
--- a/main.jl
+++ b/main.jl
@@ -118,6 +118,7 @@ end
 @include_model "External libraries" "DifferentialEquations_lv_ODE"
 @include_model "External libraries" "DifferentialEquations_lv_DDE"
 @include_model "External libraries" "Lux_neural_network"
+@include_model "External libraries" "AbstractGPs_GP"
 
 # The entry point to this script itself begins here
 if ARGS == ["--list-model-keys"]
diff --git a/models/AbstractGPs_GP.jl b/models/AbstractGPs_GP.jl
new file mode 100644
index 0000000..72cab50
--- /dev/null
+++ b/models/AbstractGPs_GP.jl
@@ -0,0 +1,25 @@
+#=
+This is an implementation of using AbstractGPs.jl with Turing to model a Gaussian Process.
+The model is adapted from the Turing documentation: https://turinglang.org/docs/tutorials/gaussian-processes-introduction/
+=#
+
+using AbstractGPs
+using DataFrames
+using LogExpFunctions
+
+# Load data
+df = DataFrame(
+    distance = [2,3,4,5,6], n = [1443, 694, 455, 353, 272], y = [1346, 577, 337, 208, 149]
+)
+
+# Make Turing model
+@model function AbstractGPs_GP(d, n; jitter=1e-4)
+    v ~ Gamma(2, 1)
+    l ~ Gamma(4, 1)
+    f = GP(v * with_lengthscale(SEKernel(), l))
+    f_latent ~ f(d, jitter)
+    y ~ product_distribution(Binomial.(n, logistic.(f_latent)))
+    return (fx=f(d, jitter), f_latent=f_latent, y=y)
+end
+
+model = AbstractGPs_GP(Float64.(df.distance), df.n)
\ No newline at end of file

From f9b4c4a56712684ecf82d7ad1f3778a92f7f69e0 Mon Sep 17 00:00:00 2001
From: Peter Thestrup Waade
Date: Thu, 2 Oct 2025 13:57:33 +0200
Subject: [PATCH 07/12] deleted previous ordinary_diffeq

---
 models/ordinary_diffeq.jl | 34 ----------------------------------
 1 file changed, 34 deletions(-)
 delete mode 100644 models/ordinary_diffeq.jl

diff --git a/models/ordinary_diffeq.jl b/models/ordinary_diffeq.jl
deleted file mode 100644
index 2be7200..0000000
--- a/models/ordinary_diffeq.jl
+++ /dev/null
@@ -1,34 +0,0 @@
-# See https://turinglang.org/docs/tutorials/bayesian-differential-equations/.
-
-using OrdinaryDiffEq: ODEProblem, solve, Tsit5
-
-function lotka_volterra(du, u, p, t)
-    α, β, γ, δ = p
-    x, y = u
-    du[1] = (α - β * y) * x # prey
-    du[2] = (δ * x - γ) * y # predator
-    return nothing
-end
-u0 = [1.0, 1.0]
-p = [1.5, 1.0, 3.0, 1.0]
-tspan = (0.0, 10.0)
-prob = ODEProblem(lotka_volterra, u0, tspan, p)
-sol = solve(prob, Tsit5(); saveat=0.1)
-q = 1.7
-odedata = rand.(Poisson.(q * Array(sol)))
-
-@model function ordinary_diffeq(data, prob)
-    α ~ truncated(Normal(1.5, 0.2); lower=0.5, upper=2.5)
-    β ~ truncated(Normal(1.1, 0.2); lower=0, upper=2)
-    γ ~ truncated(Normal(3.0, 0.2); lower=1, upper=4)
-    δ ~ truncated(Normal(1.0, 0.2); lower=0, upper=2)
-    q ~ truncated(Normal(1.7, 0.2); lower=0, upper=3)
-    p = [α, β, γ, δ]
-    predicted = solve(prob, Tsit5(); p=p, saveat=0.1, abstol=1e-6, reltol=1e-6)
-    for i in eachindex(predicted)
-        data[:, i] ~ product_distribution(Poisson.(q .* predicted[i] .+ 1e-5))
-    end
-    return nothing
-end
-
-model = ordinary_diffeq(odedata, prob)

From d58a87fc9fd818d2a06259851fe1b3f61e5b1bea Mon Sep 17 00:00:00 2001
From: Peter Thestrup Waade
Date: Fri, 3 Oct 2025 10:32:08 +0200
Subject: [PATCH 08/12] removed DataFrames dependency

---
 Project.toml             |  3 +--
 models/AbstractGPs_GP.jl | 11 +++++------
 2 files changed, 6 insertions(+), 8 deletions(-)

diff --git a/Project.toml b/Project.toml
index 0e21f2d..18aea9b 100644
--- a/Project.toml
+++ b/Project.toml
@@ -2,9 +2,8 @@
 ADTypes = "47edcb42-4c32-4615-8424-f2b9edc5f35b"
 AbstractGPs = "99985d1d-32ba-4be9-9821-2ec096f28918"
 Chairmarks = "0ca39b1e-fe0b-4e98-acfc-b1656634c4de"
-DataFrames = "a93c6f00-e57d-5684-b7b6-d8193f3e46c0"
-DifferentialEquations = "0c46a032-eb83-5123-abaf-570d42b7fbaa"
 DelimitedFiles = "8bb1440f-4735-579b-a4ab-409b98df4dab"
+DifferentialEquations = "0c46a032-eb83-5123-abaf-570d42b7fbaa"
 DifferentiationInterface = "a0c0ee7d-e4b9-4e03-894e-1c5f64a51d63"
 Distributions = "31c24e10-a181-5473-b8eb-7969acd0382f"
 DistributionsAD = "ced4e74d-a319-5a8a-b0ac-84af2272839c"
diff --git a/models/AbstractGPs_GP.jl b/models/AbstractGPs_GP.jl
index 72cab50..e6aac49 100644
--- a/models/AbstractGPs_GP.jl
+++ b/models/AbstractGPs_GP.jl
@@ -4,16 +4,15 @@ The model is adapted from the Turing documentation: https://turinglang.org/docs/
 =#
 
 using AbstractGPs
-using DataFrames
 using LogExpFunctions
 
 # Load data
-df = DataFrame(
-    distance = [2,3,4,5,6], n = [1443, 694, 455, 353, 272], y = [1346, 577, 337, 208, 149]
-)
+distance = [2.,3.,4.,5.,6.]
+n = [1443, 694, 455, 353, 272]
+y = [1346, 577, 337, 208, 149]
 
 # Make Turing model
-@model function AbstractGPs_GP(d, n; jitter=1e-4)
+@model function AbstractGPs_GP(d, n, y; jitter=1e-4)
     v ~ Gamma(2, 1)
     l ~ Gamma(4, 1)
     f = GP(v * with_lengthscale(SEKernel(), l))
@@ -22,4 +21,4 @@ df = DataFrame(
     return (fx=f(d, jitter), f_latent=f_latent, y=y)
 end
 
-model = AbstractGPs_GP(Float64.(df.distance), df.n)
\ No newline at end of file
+model = AbstractGPs_GP(distance, n, y)
\ No newline at end of file

From ef2e7fa11475bcd15b9d2ab1bfa350ac3d45bfee Mon Sep 17 00:00:00 2001
From: Peter Thestrup Waade
Date: Fri, 3 Oct 2025 16:55:44 +0200
Subject: [PATCH 09/12] removed rng from data generation

---
 models/Lux_neural_network.jl | 16 ++++++++--------
 1 file changed, 8 insertions(+), 8 deletions(-)

diff --git a/models/Lux_neural_network.jl b/models/Lux_neural_network.jl
index 66f578c..775ac91 100644
--- a/models/Lux_neural_network.jl
+++ b/models/Lux_neural_network.jl
@@ -16,18 +16,18 @@ rng = Random.default_rng()
 Random.seed!(rng, 1234)
 
 # Generate artificial data
-x1s = rand(rng, Float32, M) * 4.5f0;
-x2s = rand(rng, Float32, M) * 4.5f0;
+x1s = rand(Float32, M) * 4.5f0;
+x2s = rand(Float32, M) * 4.5f0;
 xt1s = Array([[x1s[i] + 0.5f0; x2s[i] + 0.5f0] for i in 1:M])
-x1s = rand(rng, Float32, M) * 4.5f0;
-x2s = rand(rng, Float32, M) * 4.5f0;
+x1s = rand(Float32, M) * 4.5f0;
+x2s = rand(Float32, M) * 4.5f0;
 append!(xt1s, Array([[x1s[i] - 5.0f0; x2s[i] - 5.0f0] for i in 1:M]))
 
-x1s = rand(rng, Float32, M) * 4.5f0;
-x2s = rand(rng, Float32, M) * 4.5f0;
+x1s = rand(Float32, M) * 4.5f0;
+x2s = rand(Float32, M) * 4.5f0;
 xt0s = Array([[x1s[i] + 0.5f0; x2s[i] - 5.0f0] for i in 1:M])
-x1s = rand(rng, Float32, M) * 4.5f0;
-x2s = rand(rng, Float32, M) * 4.5f0;
+x1s = rand(Float32, M) * 4.5f0;
+x2s = rand(Float32, M) * 4.5f0;
 append!(xt0s, Array([[x1s[i] - 5.0f0; x2s[i] + 0.5f0] for i in 1:M]))
 
 # Store all the data for later

From b0313dd3c832eba6307ab23d30d10036d5dc4b04 Mon Sep 17 00:00:00 2001
From: Peter Thestrup Waade
Date: Fri, 3 Oct 2025 17:03:48 +0200
Subject: [PATCH 10/12] reduced DifferentialEquations dependency

---
 Project.toml                           | 3 ++-
 models/DifferentialEquations_lv_DDE.jl | 5 +++--
 models/DifferentialEquations_lv_ODE.jl | 2 +-
 3 files changed, 6 insertions(+), 4 deletions(-)

diff --git a/Project.toml b/Project.toml
index 18aea9b..5095e89 100644
--- a/Project.toml
+++ b/Project.toml
@@ -2,8 +2,8 @@
 ADTypes = "47edcb42-4c32-4615-8424-f2b9edc5f35b"
 AbstractGPs = "99985d1d-32ba-4be9-9821-2ec096f28918"
 Chairmarks = "0ca39b1e-fe0b-4e98-acfc-b1656634c4de"
+DelayDiffEq = "bcd4f6db-9728-5f36-b5f7-82caef46ccdb"
 DelimitedFiles = "8bb1440f-4735-579b-a4ab-409b98df4dab"
-DifferentialEquations = "0c46a032-eb83-5123-abaf-570d42b7fbaa"
 DifferentiationInterface = "a0c0ee7d-e4b9-4e03-894e-1c5f64a51d63"
 Distributions = "31c24e10-a181-5473-b8eb-7969acd0382f"
 DistributionsAD = "ced4e74d-a319-5a8a-b0ac-84af2272839c"
@@ -19,6 +19,7 @@ Lux = "b2108857-7c20-44ae-9111-449ecde12c47"
 MLDatasets = "eb30cadb-4394-5ae3-aed4-317e484a6458"
 Mooncake = "da2b9cff-9c12-43a0-ae48-6db2b0edb7d6"
 MultivariateStats = "6f286f6a-111f-5878-ab1e-185364afe411"
+OrdinaryDiffEq = "1dea7af3-3e70-54e6-95c3-0bf5283fa5ed"
 Random = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c"
 ReverseDiff = "37e2e3b7-166d-5795-8a7a-e32c996b4267"
 Statistics = "10745b16-79ce-11e8-11f9-7d13ad32a3b2"
diff --git a/models/DifferentialEquations_lv_DDE.jl b/models/DifferentialEquations_lv_DDE.jl
index 4e5942e..0718f57 100644
--- a/models/DifferentialEquations_lv_DDE.jl
+++ b/models/DifferentialEquations_lv_DDE.jl
@@ -2,7 +2,7 @@
 This is an implementation of using DifferentialEquations.jl with Turing to model a delayed version of the Lotka–Volterra equations (predator-prey model).
 The model is adapted from the Turing documentation: https://turinglang.org/docs/tutorials/bayesian-differential-equations/
 =#
-using DifferentialEquations
+using DelayDiffEq
 using Distributions
 
 function delay_lotka_volterra(du, u, h, p, t)
@@ -27,6 +27,7 @@ prob_dde = DDEProblem(delay_lotka_volterra, u0, h, tspan, p)
 
 # Simulate data
 sol_dde = solve(prob_dde; saveat=0.1)
+q = 1.7
 ddedata = rand.(Poisson.(q .* Array(sol_dde)))
 
 # Create Turing model
@@ -52,4 +53,4 @@ ddedata = rand.(Poisson.(q .* Array(sol_dde)))
 end
 
 # Instantiate the model
-model = DifferentialEquations_lv_DDE(ddedata, prob)
\ No newline at end of file
+model = DifferentialEquations_lv_DDE(ddedata, prob_dde)
\ No newline at end of file
diff --git a/models/DifferentialEquations_lv_ODE.jl b/models/DifferentialEquations_lv_ODE.jl
index 7f244d8..8ca9cb4 100644
--- a/models/DifferentialEquations_lv_ODE.jl
+++ b/models/DifferentialEquations_lv_ODE.jl
@@ -2,7 +2,7 @@
 This is an implementation of using DifferentialEquations.jl with Turing to model the Lotka–Volterra equations (predator-prey model).
 The model is adapted from the Turing documentation: https://turinglang.org/docs/tutorials/bayesian-differential-equations/
 =#
-using DifferentialEquations
+using OrdinaryDiffEq
 using Distributions
 
 # Define Lotka–Volterra model

From 6d2967665a1840609be594dc9c3d5f92142faa99 Mon Sep 17 00:00:00 2001
From: Peter Thestrup Waade
Date: Fri, 3 Oct 2025 17:09:56 +0200
Subject: [PATCH 11/12] simplified names

---
 main.jl                                                     | 6 +++---
 ...tialEquations_lv_DDE.jl => DifferentialEquations_DDE.jl} | 4 ++--
 ...tialEquations_lv_ODE.jl => DifferentialEquations_ODE.jl} | 4 ++--
 models/{Lux_neural_network.jl => Lux_nn.jl}                 | 4 ++--
 4 files changed, 9 insertions(+), 9 deletions(-)
 rename models/{DifferentialEquations_lv_DDE.jl => DifferentialEquations_DDE.jl} (93%)
 rename models/{DifferentialEquations_lv_ODE.jl => DifferentialEquations_ODE.jl} (93%)
 rename models/{Lux_neural_network.jl => Lux_nn.jl} (94%)

diff --git a/main.jl b/main.jl
index 8203e87..c86f852 100644
--- a/main.jl
+++ b/main.jl
@@ -124,9 +124,9 @@ end
 @include_model "PosteriorDB" "pdb_eight_schools_centered"
 @include_model "PosteriorDB" "pdb_eight_schools_noncentered"
 @include_model "External libraries" "metabayesian_Turing_AdvancedMH_MH"
-@include_model "External libraries" "DifferentialEquations_lv_ODE"
-@include_model "External libraries" "DifferentialEquations_lv_DDE"
-@include_model "External libraries" "Lux_neural_network"
+@include_model "External libraries" "DifferentialEquations_ODE"
+@include_model "External libraries" "DifferentialEquations_DDE"
+@include_model "External libraries" "Lux_nn"
 @include_model "External libraries" "AbstractGPs_GP"
 
 # The entry point to this script itself begins here
diff --git a/models/DifferentialEquations_lv_DDE.jl b/models/DifferentialEquations_DDE.jl
similarity index 93%
rename from models/DifferentialEquations_lv_DDE.jl
rename to models/DifferentialEquations_DDE.jl
index 0718f57..63ad2bf 100644
--- a/models/DifferentialEquations_lv_DDE.jl
+++ b/models/DifferentialEquations_DDE.jl
@@ -31,7 +31,7 @@ q = 1.7
 ddedata = rand.(Poisson.(q .* Array(sol_dde)))
 
 # Create Turing model
-@model function DifferentialEquations_lv_DDE(data, prob)
+@model function DifferentialEquations_DDE(data, prob)
     # Prior distributions.
     α ~ truncated(Normal(1.5, 0.2); lower=0.5, upper=2.5)
     β ~ truncated(Normal(1.1, 0.2); lower=0, upper=2)
@@ -53,4 +53,4 @@ ddedata = rand.(Poisson.(q .* Array(sol_dde)))
 end
 
 # Instantiate the model
-model = DifferentialEquations_lv_DDE(ddedata, prob_dde)
\ No newline at end of file
+model = DifferentialEquations_DDE(ddedata, prob_dde)
\ No newline at end of file
diff --git a/models/DifferentialEquations_lv_ODE.jl b/models/DifferentialEquations_ODE.jl
similarity index 93%
rename from models/DifferentialEquations_lv_ODE.jl
rename to models/DifferentialEquations_ODE.jl
index 8ca9cb4..b21e3c7 100644
--- a/models/DifferentialEquations_lv_ODE.jl
+++ b/models/DifferentialEquations_ODE.jl
@@ -31,7 +31,7 @@ q = 1.7
 odedata = rand.(Poisson.(q * Array(sol)))
 
 # Create Turing model
-@model function DifferentialEquations_lv_ODE(data, prob)
+@model function DifferentialEquations_ODE(data, prob)
     # Prior distributions.
     α ~ truncated(Normal(1.5, 0.2); lower=0.5, upper=2.5)
     β ~ truncated(Normal(1.1, 0.2); lower=0, upper=2)
@@ -53,4 +53,4 @@ odedata = rand.(Poisson.(q * Array(sol)))
 end
 
 # Instantiate the model
-model = DifferentialEquations_lv_ODE(odedata, prob)
\ No newline at end of file
+model = DifferentialEquations_ODE(odedata, prob)
\ No newline at end of file
diff --git a/models/Lux_neural_network.jl b/models/Lux_nn.jl
similarity index 94%
rename from models/Lux_neural_network.jl
rename to models/Lux_nn.jl
index 775ac91..5a43baa 100644
--- a/models/Lux_neural_network.jl
+++ b/models/Lux_nn.jl
@@ -62,7 +62,7 @@ const nn = StatefulLuxLayer{true}(nn_initial, nothing, st)
 
 ## Create Turing model ##
 # Specify the probabilistic model.
-@model function Lux_neural_network(xs, ts; sigma = sigma, ps = ps, nn = nn)
+@model function Lux_nn(xs, ts; sigma = sigma, ps = ps, nn = nn)
     # Sample the parameters
     nparameters = Lux.parameterlength(nn_initial)
     parameters ~ MvNormal(zeros(nparameters), Diagonal(abs2.(sigma .* ones(nparameters))))
@@ -76,4 +76,4 @@ const nn = StatefulLuxLayer{true}(nn_initial, nothing, st)
     end
 end
 
-model = Lux_neural_network(reduce(hcat, xs), ts)
\ No newline at end of file
+model = Lux_nn(reduce(hcat, xs), ts)
\ No newline at end of file

From fbb1809c50109ee9c0be09ce65a59e942d65547e Mon Sep 17 00:00:00 2001
From: Peter Thestrup Waade
Date: Fri, 3 Oct 2025 17:18:42 +0200
Subject: [PATCH 12/12] updated ODE and DDE examples to match Penelope's style

---
 models/DifferentialEquations_DDE.jl | 25 ++++------------
 models/DifferentialEquations_ODE.jl | 43 +++++++++--------------------
 2 files changed, 18 insertions(+), 50 deletions(-)

diff --git a/models/DifferentialEquations_DDE.jl b/models/DifferentialEquations_DDE.jl
index 63ad2bf..0a7fac8 100644
--- a/models/DifferentialEquations_DDE.jl
+++ b/models/DifferentialEquations_DDE.jl
@@ -1,56 +1,41 @@
 #=
-This is an implementation of using DifferentialEquations.jl with Turing to model a delayed version of the Lotka–Volterra equations (predator-prey model).
+This is an example of using DifferentialEquations.jl with Turing to model a delayed version of the Lotka–Volterra equations (predator-prey model).
 The model is adapted from the Turing documentation: https://turinglang.org/docs/tutorials/bayesian-differential-equations/
 =#
-using DelayDiffEq
-using Distributions
+using DelayDiffEq: DDEProblem, solve, MethodOfSteps, Tsit5
+
+# SciMLSensitivity is needed for reverse-mode AD on differential equations
+import SciMLSensitivity
 
 function delay_lotka_volterra(du, u, h, p, t)
-    # Model parameters.
     α, β, γ, δ = p
-
-    # Current state.
     x, y = u
-    # Evaluate differential equations
     du[1] = α * h(p, t - 1; idxs=1) - β * x * y
     du[2] = -γ * y + δ * x * y
-
     return nothing
 end
-
-# Define initial-value problem.
 p = (1.5, 1.0, 3.0, 1.0)
 u0 = [1.0; 1.0]
 tspan = (0.0, 10.0)
 h(p, t; idxs::Int) = 1.0
 prob_dde = DDEProblem(delay_lotka_volterra, u0, h, tspan, p)
-
-# Simulate data
 sol_dde = solve(prob_dde; saveat=0.1)
 q = 1.7
 ddedata = rand.(Poisson.(q .* Array(sol_dde)))
 
-# Create Turing model
 @model function DifferentialEquations_DDE(data, prob)
-    # Prior distributions.
     α ~ truncated(Normal(1.5, 0.2); lower=0.5, upper=2.5)
     β ~ truncated(Normal(1.1, 0.2); lower=0, upper=2)
     γ ~ truncated(Normal(3.0, 0.2); lower=1, upper=4)
     δ ~ truncated(Normal(1.0, 0.2); lower=0, upper=2)
     q ~ truncated(Normal(1.7, 0.2); lower=0, upper=3)
-
-    # Simulate Lotka–Volterra model.
     p = [α, β, γ, δ]
     predicted = solve(prob, MethodOfSteps(Tsit5()); p=p, saveat=0.1, abstol=1e-6, reltol=1e-6)
     ϵ = 1e-5
-
-    # Observations.
     for i in eachindex(predicted)
         data[:, i] ~ arraydist(Poisson.(q .* predicted[i] .+ ϵ))
     end
-
     return nothing
 end
 
-# Instantiate the model
 model = DifferentialEquations_DDE(ddedata, prob_dde)
\ No newline at end of file
diff --git a/models/DifferentialEquations_ODE.jl b/models/DifferentialEquations_ODE.jl
index b21e3c7..2861b1a 100644
--- a/models/DifferentialEquations_ODE.jl
+++ b/models/DifferentialEquations_ODE.jl
@@ -1,56 +1,39 @@
 #=
-This is an implementation of using DifferentialEquations.jl with Turing to model the Lotka–Volterra equations (predator-prey model).
+This is an example of using DifferentialEquations.jl with Turing to model the Lotka–Volterra equations (predator-prey model).
 The model is adapted from the Turing documentation: https://turinglang.org/docs/tutorials/bayesian-differential-equations/
 =#
-using OrdinaryDiffEq
-using Distributions
+using OrdinaryDiffEq: ODEProblem, solve, Tsit5
+
+# SciMLSensitivity is needed for reverse-mode AD on differential equations
+import SciMLSensitivity
 
-# Define Lotka–Volterra model
 function lotka_volterra(du, u, p, t)
-    # Model parameters.
     α, β, γ, δ = p
-    # Current state.
     x, y = u
-
-    # Evaluate differential equations.
     du[1] = (α - β * y) * x # prey
     du[2] = (δ * x - γ) * y # predator
-
     return nothing
 end
-
-# Define initial-value problem
 u0 = [1.0, 1.0]
 p = [1.5, 1.0, 3.0, 1.0]
 tspan = (0.0, 10.0)
 prob = ODEProblem(lotka_volterra, u0, tspan, p)
-
-# Simulate data
-sol = solve(prob, Tsit5(); saveat=0.1)
+sol = solve(prob, Tsit5(); saveat = 0.1)
 q = 1.7
 odedata = rand.(Poisson.(q * Array(sol)))
 
-# Create Turing model
 @model function DifferentialEquations_ODE(data, prob)
-    # Prior distributions.
-    α ~ truncated(Normal(1.5, 0.2); lower=0.5, upper=2.5)
-    β ~ truncated(Normal(1.1, 0.2); lower=0, upper=2)
-    γ ~ truncated(Normal(3.0, 0.2); lower=1, upper=4)
-    δ ~ truncated(Normal(1.0, 0.2); lower=0, upper=2)
-    q ~ truncated(Normal(1.7, 0.2); lower=0, upper=3)
-
-    # Simulate Lotka–Volterra model.
+    α ~ truncated(Normal(1.5, 0.2); lower = 0.5, upper = 2.5)
+    β ~ truncated(Normal(1.1, 0.2); lower = 0, upper = 2)
+    γ ~ truncated(Normal(3.0, 0.2); lower = 1, upper = 4)
+    δ ~ truncated(Normal(1.0, 0.2); lower = 0, upper = 2)
+    q ~ truncated(Normal(1.7, 0.2); lower = 0, upper = 3)
     p = [α, β, γ, δ]
-    predicted = solve(prob, Tsit5(); p=p, saveat=0.1, abstol=1e-6, reltol=1e-6)
-    ϵ = 1e-5
-
-    # Observations.
+    predicted = solve(prob, Tsit5(); p = p, saveat = 0.1, abstol = 1e-6, reltol = 1e-6)
     for i in eachindex(predicted)
-        data[:, i] ~ arraydist(Poisson.(q .* predicted[i] .+ ϵ))
+        data[:, i] ~ product_distribution(Poisson.(q .* predicted[i] .+ 1e-5))
     end
-
     return nothing
 end
 
-# Instantiate the model
 model = DifferentialEquations_ODE(odedata, prob)
\ No newline at end of file
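
Note: the patches above only define `model` objects; how they are benchmarked is decided by the harness in main.jl, which is not shown here. As a quick way to exercise one of the new models by hand, the following is an illustrative sketch only — it assumes this repository's Project.toml is instantiated and uses Turing's standard `NUTS(; adtype = ...)` keyword; none of these lines appear in the patch series itself.

using Turing
using ADTypes: AutoForwardDiff, AutoReverseDiff

# Load the final ODE example from PATCH 12; the file defines `model`.
include("models/DifferentialEquations_ODE.jl")

# Draw a handful of NUTS samples under two AD backends to compare them.
for adtype in (AutoForwardDiff(), AutoReverseDiff())
    chain = sample(model, NUTS(; adtype = adtype), 10; progress = false)
    println(adtype, " => ", size(chain))
end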
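Similarly, the `vector_to_parameters` helper added in PATCH 05 is what lets the Lux network consume a single flat parameter vector drawn from the `MvNormal` prior: `Functors.fmap` walks the nested parameter NamedTuple and reshapes consecutive slices of the flat vector into each leaf. A small self-contained check of that same slicing pattern — the network and variable names here are hypothetical, chosen only for illustration:

using Lux, Functors, Random

net = Chain(Dense(2 => 3, tanh), Dense(3 => 1))
ps, st = Lux.setup(Random.Xoshiro(0), net)
flat = randn(Float32, Lux.parameterlength(ps))

# Same slicing logic as vector_to_parameters in PATCH 05: a counter tracks
# the next unread position in `flat` while fmap visits each parameter array.
rebuilt = let i = 1
    fmap(ps) do x
        z = reshape(view(flat, i:(i + length(x) - 1)), size(x))
        i += length(x)
        z
    end
end

@assert Lux.parameterlength(rebuilt) == length(flat)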