From d72f9d06f81bd202f4ebc895c2fd37c292770825 Mon Sep 17 00:00:00 2001 From: Kai Xu Date: Mon, 5 Jun 2017 14:42:16 +0100 Subject: [PATCH 01/44] Support vec of mv for un-transformed dist --- src/core/varinfo.jl | 2 +- src/samplers/hmc.jl | 24 ++++++++++++++++++++---- src/samplers/sampler.jl | 29 ++++++++++++++++++++++++----- src/samplers/support/helper.jl | 14 ++++++-------- test/sampler.jl/vec_assume_mv.jl | 19 ++++++++++++------- 5 files changed, 63 insertions(+), 25 deletions(-) diff --git a/src/core/varinfo.jl b/src/core/varinfo.jl index 5295fc6785..e5c31b562f 100644 --- a/src/core/varinfo.jl +++ b/src/core/varinfo.jl @@ -143,7 +143,7 @@ end Base.getindex(vi::VarInfo, vns::Vector{VarName}) = begin @assert haskey(vi, vns[1]) "[Turing] attempted to replay unexisting variables in VarInfo" dist = getdist(vi, vns[1]) - rs = reconstruct(dist, getval(vi, vns)) + rs = reconstruct(dist, getval(vi, vns), length(vns)) rs = istrans(vi, vns[1]) ? invlink(dist, rs) : rs end diff --git a/src/samplers/hmc.jl b/src/samplers/hmc.jl index 1717fee97e..5a3875ad73 100644 --- a/src/samplers/hmc.jl +++ b/src/samplers/hmc.jl @@ -152,18 +152,34 @@ assume{T<:Hamiltonian}(spl::Sampler{T}, dist::Distribution, vn::VarName, vi::Var r end -assume{A<:Hamiltonian,D<:Distribution}(spl::Sampler{A}, dists::Vector{D}, vn::VarName, variable::Any, vi::VarInfo) = begin +assume{A<:Hamiltonian,D<:Distribution}(spl::Sampler{A}, dists::Vector{D}, vn::VarName, var::Any, vi::VarInfo) = begin @assert length(dists) == 1 "[observe] Turing only support vectorizing i.i.d distribution" dist = dists[1] - n = size(variable)[end] + n = size(var)[end] vns = map(i -> copybyindex(vn, "[$i]"), 1:n) - rs = vi[vns] + rs = vi[vns] # NOTE: inside Turing the Julia conversion should be sticked to acclogp!(vi, sum(logpdf(dist, rs, istrans(vi, vns[1])))) - rs + if isa(dist, UnivariateDistribution) + var = rs + elseif isa(dist, MultivariateDistribution) + if isa(var, Vector) + for i = 1:n + var[i] = rs[:,i] + end + elseif isa(var, Matrix) + var = rs + else + error("[Turing] unsupported variable container") + end + elseif isa(dist, MatrixDistribution) + # nothing now + end + + var end observe{A<:Hamiltonian}(spl::Sampler{A}, d::Distribution, value::Any, vi::VarInfo) = diff --git a/src/samplers/sampler.jl b/src/samplers/sampler.jl index 0ccd7d0e03..4c43cf1e84 100644 --- a/src/samplers/sampler.jl +++ b/src/samplers/sampler.jl @@ -42,10 +42,10 @@ assume(spl::Void, dist::Distribution, vn::VarName, vi::VarInfo) = begin r end -assume{T<:Distribution}(spl::Void, dists::Vector{T}, vn::VarName, variable::Any, vi::VarInfo) = begin +assume{T<:Distribution}(spl::Void, dists::Vector{T}, vn::VarName, var::Any, vi::VarInfo) = begin @assert length(dists) == 1 "[observe] Turing only support vectorizing i.i.d distribution" dist = dists[1] - n = size(variable)[end] + n = size(var)[end] vns = map(i -> copybyindex(vn, "[$i]"), 1:n) @@ -53,14 +53,33 @@ assume{T<:Distribution}(spl::Void, dists::Vector{T}, vn::VarName, variable::Any, rs = vi[vns] else rs = rand(dist, n) - for i = 1:n - push!(vi, vns[i], rs[i], dist, 0) + + if isa(dist, UnivariateDistribution) + for i = 1:n + push!(vi, vns[i], rs[i], dist, 0) + end + var = rs + elseif isa(dist, MultivariateDistribution) + for i = 1:n + push!(vi, vns[i], rs[:,i], dist, 0) + end + if isa(var, Vector) + for i = 1:n + var[i] = rs[:,i] + end + elseif isa(var, Matrix) + var = rs + else + error("[Turing] unsupported variable container") + end + elseif isa(dist, MatrixDistribution) + # nothing now end end acclogp!(vi, sum(logpdf(dist, 
rs, istrans(vi, vns[1])))) - rs + var end observe(spl::Void, dist::Distribution, value::Any, vi::VarInfo) = diff --git a/src/samplers/support/helper.jl b/src/samplers/support/helper.jl index fc81ffa42b..aeb6703d0d 100644 --- a/src/samplers/support/helper.jl +++ b/src/samplers/support/helper.jl @@ -26,16 +26,14 @@ vectorize{T<:Real}(d::MatrixDistribution, r::Matrix{T}) = Vector{Real}(vec # Note this is not the case for MultivariateDistribution so I guess this might be lack of # support for some types related to matrices (like PDMat). reconstruct(d::Distribution, val::Vector) = reconstruct(d, val, typeof(val[1])) -reconstruct(d::UnivariateDistribution, val::Vector, T::Type) = begin - if length(val) == 1 - T(val[1]) - else - Vector{T}(val) - end -end -reconstruct(d::MultivariateDistribution, val::Vector, T::Type) = Vector{T}(val) +reconstruct(d::UnivariateDistribution, val::Vector, T::Type) = T(val[1]) +reconstruct(d::MultivariateDistribution, val::Vector, T::Type) = Array{T, 1}(val) reconstruct(d::MatrixDistribution, val::Vector, T::Type) = Array{T, 2}(reshape(val, size(d)...)) +reconstruct(d::Distribution, val::Vector, n::Int) = reconstruct(d, val, typeof(val[1]), n) +reconstruct(d::UnivariateDistribution, val::Vector, T::Type, n::Int) = Array{T, 1}(val) +reconstruct(d::MultivariateDistribution, val::Vector, T::Type, n::Int) = Array{T, 2}(reshape(val, size(d)[1], n)) + ########## # Others # ########## diff --git a/test/sampler.jl/vec_assume_mv.jl b/test/sampler.jl/vec_assume_mv.jl index 4cd4893b7f..3bf61de8a5 100644 --- a/test/sampler.jl/vec_assume_mv.jl +++ b/test/sampler.jl/vec_assume_mv.jl @@ -9,26 +9,31 @@ alg = HMC(1000, 0.2, 4) # Test for vectorize UnivariateDistribution @model vdemo() = begin phi = Vector{Vector{Real}}(N) - phi ~ [Dirichlet(beta)] + # phi ~ [Dirichlet(beta)] + phi ~ [MvNormal(zeros(2), ones(2))] end -t_vec = @elapsed res = sample(vdemo(), alg) - +t_vec = @elapsed res_vec = sample(vdemo(), alg) +@model vdemo() = begin + phi = Matrix(2,N) + # phi ~ [Dirichlet(beta)] + phi ~ [MvNormal(zeros(2), ones(2))] +end +t_vec_mat = @elapsed res_vec_mat = sample(vdemo(), alg) @model vdemo() = begin phi = Vector{Vector{Real}}(N) for i = 1:N - phi[i] ~ Dirichlet(beta) + # phi[i] ~ Dirichlet(beta) + phi[i] ~ MvNormal(zeros(2), ones(2)) end end t_loop = @elapsed res = sample(vdemo(), alg) - - - println("Time for") println(" Loop : $t_loop") println(" Vec : $t_vec") +println(" Vec2 : $t_vec_mat") From aea8cf31ff0841e1fb0f515d134360e2255deb4d Mon Sep 17 00:00:00 2001 From: Kai Xu Date: Mon, 5 Jun 2017 15:07:11 +0100 Subject: [PATCH 02/44] Vec invlogit and logit --- src/core/util.jl | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/src/core/util.jl b/src/core/util.jl index c04b17fde4..3250473195 100644 --- a/src/core/util.jl +++ b/src/core/util.jl @@ -25,9 +25,8 @@ macro VarName(ex::Union{Expr, Symbol}) end end -invlogit(x::Real) = one(x) / (one(x) + exp(-x)) - -logit(x::Real) = log(x / (one(x) - x)) +invlogit{T<:Real}(x::Union{T,Vector{T},Matrix{T}}) = one(T) ./ (one(T) + exp(-x)) +logit{T<:Real}(x::Union{T,Vector{T},Matrix{T}}) = log(x ./ (one(T) - x)) # More stable, faster version of rand(Categorical) function randcat(p::Vector{Float64}) From a6b9e5e962e243b0cdff68f4e6beb5e7e54399e4 Mon Sep 17 00:00:00 2001 From: Kai Xu Date: Mon, 5 Jun 2017 15:20:53 +0100 Subject: [PATCH 03/44] Vec of Uni now support vec transformation --- src/samplers/support/transform.jl | 32 +++++++++++++++---------------- test/runtests.jl | 2 +- test/sampler.jl/vec_assume.jl | 9 +++++++++ 3 
files changed, 26 insertions(+), 17 deletions(-) diff --git a/src/samplers/support/transform.jl b/src/samplers/support/transform.jl index cae553b508..e146e5ac99 100644 --- a/src/samplers/support/transform.jl +++ b/src/samplers/support/transform.jl @@ -33,11 +33,11 @@ typealias TransformDistribution{T<:ContinuousUnivariateDistribution} Union{T, Truncated{T}} -link(d::TransformDistribution, x::Real) = begin +link{T<:Real}(d::TransformDistribution, x::Union{T,Vector{T}}) = begin a, b = minimum(d), maximum(d) lowerbounded, upperbounded = isfinite(a), isfinite(b) if lowerbounded && upperbounded - logit((x - a) / (b - a)) + logit((x - a) ./ (b - a)) elseif lowerbounded log(x - a) elseif upperbounded @@ -47,11 +47,11 @@ link(d::TransformDistribution, x::Real) = begin end end -invlink(d::TransformDistribution, x::Real) = begin +invlink{T<:Real}(d::TransformDistribution, x::Union{T,Vector{T}}) = begin a, b = minimum(d), maximum(d) lowerbounded, upperbounded = isfinite(a), isfinite(b) if lowerbounded && upperbounded - (b - a) * invlogit(x) + a + (b - a) .* invlogit(x) + a elseif lowerbounded exp(x) + a elseif upperbounded @@ -61,13 +61,13 @@ invlink(d::TransformDistribution, x::Real) = begin end end -Distributions.logpdf(d::TransformDistribution, x::Real, transform::Bool) = begin +Distributions.logpdf{T<:Real}(d::TransformDistribution, x::Union{T,Vector{T}}, transform::Bool) = begin lp = logpdf(d, x) if transform a, b = minimum(d), maximum(d) lowerbounded, upperbounded = isfinite(a), isfinite(b) if lowerbounded && upperbounded - lp += log((x - a) * (b - x) / (b - a)) + lp += log((x - a) .* (b - x) ./ (b - a)) elseif lowerbounded lp += log(x - a) elseif upperbounded @@ -85,11 +85,11 @@ typealias RealDistribution Union{Cauchy, Gumbel, Laplace, Logistic, NoncentralT, Normal, NormalCanon, TDist} -link(d::RealDistribution, x::Real) = x +link{T<:Real}(d::RealDistribution, x::Union{T,Vector{T}}) = x -invlink(d::RealDistribution, x::Real) = x +invlink{T<:Real}(d::RealDistribution, x::Union{T,Vector{T}}) = x -Distributions.logpdf(d::RealDistribution, x::Real, transform::Bool) = logpdf(d, x) +Distributions.logpdf{T<:Real}(d::RealDistribution, x::Union{T,Vector{T}}, transform::Bool) = logpdf(d, x) ######### @@ -101,11 +101,11 @@ typealias PositiveDistribution Gamma, InverseGamma, InverseGaussian, Kolmogorov, LogNormal, NoncentralChisq, NoncentralF, Rayleigh, Weibull} -link(d::PositiveDistribution, x::Real) = log(x) +link{T<:Real}(d::PositiveDistribution, x::Union{T,Vector{T}}) = log(x) -invlink(d::PositiveDistribution, x::Real) = exp(x) +invlink{T<:Real}(d::PositiveDistribution, x::Union{T,Vector{T}}) = exp(x) -Distributions.logpdf(d::PositiveDistribution, x::Real, transform::Bool) = begin +Distributions.logpdf{T<:Real}(d::PositiveDistribution, x::Union{T,Vector{T}}, transform::Bool) = begin lp = logpdf(d, x) transform ? lp + log(x) : lp end @@ -118,13 +118,13 @@ end typealias UnitDistribution Union{Beta, KSOneSided, NoncentralBeta} -link(d::UnitDistribution, x::Real) = logit(x) +link{T<:Real}(d::UnitDistribution, x::Union{T,Vector{T}}) = logit(x) -invlink(d::UnitDistribution, x::Real) = invlogit(x) +invlink{T<:Real}(d::UnitDistribution, x::Union{T,Vector{T}}) = invlogit(x) -Distributions.logpdf(d::UnitDistribution, x::Real, transform::Bool) = begin +Distributions.logpdf{T<:Real}(d::UnitDistribution, x::Union{T,Vector{T}}, transform::Bool) = begin lp = logpdf(d, x) - transform ? lp + log(x * (one(x) - x)) : lp + transform ? 
lp + log(x .* (one(x) - x)) : lp end ########### diff --git a/test/runtests.jl b/test/runtests.jl index 5b84c4f66b..e6cf4e8962 100644 --- a/test/runtests.jl +++ b/test/runtests.jl @@ -25,7 +25,7 @@ testcases = Dict( # samplers/ # support/ "resample.jl" => ["resample", "particlecontainer",], - "sampler.jl" => ["vectorize_observe", "vec_assume",], + "sampler.jl" => ["vectorize_observe", "vec_assume", "vec_assume_mv",], "gibbs.jl" => ["gibbs", "gibbs2", "gibbs_constructor",], "nuts.jl" => ["nuts_cons", "nuts", # "nuts_geweke", diff --git a/test/sampler.jl/vec_assume.jl b/test/sampler.jl/vec_assume.jl index 3987f94f55..495b3ba46a 100644 --- a/test/sampler.jl/vec_assume.jl +++ b/test/sampler.jl/vec_assume.jl @@ -35,3 +35,12 @@ println("Time for") println(" Loop : $t_loop") println(" Vec : $t_vec") println(" Mv : $t_mv") + + +# Transformed test +@model vdemo() = begin + x = Vector{Real}(N) + x ~ [InverseGamma(2, 3)] +end + +sample(vdemo(), alg) From 263342c2edb95bb964f8571ea40f31eabdd6830a Mon Sep 17 00:00:00 2001 From: Kai Xu Date: Mon, 5 Jun 2017 17:27:56 +0100 Subject: [PATCH 04/44] Vec simplex trans --- src/samplers/support/transform.jl | 48 +++++++++++++++++++++++++++++++ test/sampler.jl/vec_assume_mv.jl | 9 ++---- 2 files changed, 51 insertions(+), 6 deletions(-) diff --git a/src/samplers/support/transform.jl b/src/samplers/support/transform.jl index e146e5ac99..1f91cb28a2 100644 --- a/src/samplers/support/transform.jl +++ b/src/samplers/support/transform.jl @@ -143,6 +143,21 @@ link{T}(d::SimplexDistribution, x::Vector{T}) = begin push!(y, zero(T)) end +link{T<:Real}(d::SimplexDistribution, X::Matrix{T}) = begin + nrow, ncol = size(X) + K = nrow + Z = Matrix{T}(nrow - 1, ncol) + for k = 1:K-1 + Z[k,:] = X[k,:] ./ (one(T) - sum(X[1:k-1,:],1))' + end + Y = zeros(T, nrow, ncol) + for k = 1:K-1 + Y[k,:] = logit(Z[k,:]) - log(one(T) / (K-k)) + end + + Y +end + invlink{T}(d::SimplexDistribution, y::Vector{T}) = begin K = length(y) z = [invlogit(y[k] + log(one(T) / (K - k))) for k in 1:K-1] @@ -154,6 +169,22 @@ invlink{T}(d::SimplexDistribution, y::Vector{T}) = begin x end +invlink{T<:Real}(d::SimplexDistribution, Y::Matrix{T}) = begin + nrow, ncol = size(Y) + K = nrow + Z = Matrix{T}(nrow - 1, ncol) + for k = 1:K-1 + Z[k,:] = invlogit(Y[k,:] + log(one(T) / (K - k))) + end + X = zeros(T, nrow, ncol) + for k = 1:K-1 + X[k,:] = (one(T) - sum(X[1:k-1,:], 1)) .* Z[k,:]' + end + X[K,:] = one(T) - sum(X[1:K-1,:], 1) + + X +end + Distributions.logpdf{T}(d::SimplexDistribution, x::Vector{T}, transform::Bool) = begin lp = logpdf(d, x) if transform @@ -167,6 +198,23 @@ Distributions.logpdf{T}(d::SimplexDistribution, x::Vector{T}, transform::Bool) = lp end +Distributions.logpdf{T}(d::SimplexDistribution, X::Matrix{T}, transform::Bool) = begin + lp = logpdf(d, X) + if transform + nrow, ncol = size(X) + K = nrow + Z = Matrix{T}(nrow - 1, ncol) + for k = 1:K-1 + Z[k,:] = X[k,:] ./ (one(T) - sum(X[1:k-1,:],1))' + end + for k = 1:K-1 + lp += log(Z[k,:]) + log(one(T) - Z[k,:]) + log(one(T) - sum(X[1:k-1,:], 1)) + end + end + lp +end + +# REVIEW: why do we put this piece of code here? Distributions.logpdf(d::Categorical, x::Int) = begin d.p[x] > 0.0 && insupport(d, x) ? 
log(d.p[x]) : eltype(d.p)(-Inf) end diff --git a/test/sampler.jl/vec_assume_mv.jl b/test/sampler.jl/vec_assume_mv.jl index 3bf61de8a5..6ce2f35ba8 100644 --- a/test/sampler.jl/vec_assume_mv.jl +++ b/test/sampler.jl/vec_assume_mv.jl @@ -9,16 +9,14 @@ alg = HMC(1000, 0.2, 4) # Test for vectorize UnivariateDistribution @model vdemo() = begin phi = Vector{Vector{Real}}(N) - # phi ~ [Dirichlet(beta)] - phi ~ [MvNormal(zeros(2), ones(2))] + phi ~ [Dirichlet(beta)] end t_vec = @elapsed res_vec = sample(vdemo(), alg) @model vdemo() = begin phi = Matrix(2,N) - # phi ~ [Dirichlet(beta)] - phi ~ [MvNormal(zeros(2), ones(2))] + phi ~ [Dirichlet(beta)] end t_vec_mat = @elapsed res_vec_mat = sample(vdemo(), alg) @@ -26,8 +24,7 @@ t_vec_mat = @elapsed res_vec_mat = sample(vdemo(), alg) @model vdemo() = begin phi = Vector{Vector{Real}}(N) for i = 1:N - # phi[i] ~ Dirichlet(beta) - phi[i] ~ MvNormal(zeros(2), ones(2)) + phi[i] ~ Dirichlet(beta) end end From 6aa0928fb631f6995c45b97094b48b7e30c4994d Mon Sep 17 00:00:00 2001 From: Kai Xu Date: Mon, 5 Jun 2017 17:38:37 +0100 Subject: [PATCH 05/44] Fix dimension bug --- src/samplers/support/transform.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/samplers/support/transform.jl b/src/samplers/support/transform.jl index 1f91cb28a2..583250a5d5 100644 --- a/src/samplers/support/transform.jl +++ b/src/samplers/support/transform.jl @@ -208,7 +208,7 @@ Distributions.logpdf{T}(d::SimplexDistribution, X::Matrix{T}, transform::Bool) = Z[k,:] = X[k,:] ./ (one(T) - sum(X[1:k-1,:],1))' end for k = 1:K-1 - lp += log(Z[k,:]) + log(one(T) - Z[k,:]) + log(one(T) - sum(X[1:k-1,:], 1)) + lp += log(Z[k,:]) + log(one(T) - Z[k,:]) + log(one(T) - sum(X[1:k-1,:], 1))' end end lp From 901e8c296dd6d926972510b5393fe6f53a8bc7fb Mon Sep 17 00:00:00 2001 From: Kai Xu Date: Mon, 5 Jun 2017 19:03:18 +0100 Subject: [PATCH 06/44] Try LDA new syntax. --- test/quick.jl | 49 ++++++++++++++++++++++--------------------------- 1 file changed, 22 insertions(+), 27 deletions(-) diff --git a/test/quick.jl b/test/quick.jl index 668192a06e..444b0a152c 100644 --- a/test/quick.jl +++ b/test/quick.jl @@ -1,36 +1,31 @@ -using Distributions using Turing -using Stan -include(Pkg.dir("Turing")*"/benchmarks/benchmarkhelper.jl") -include(Pkg.dir("Turing")*"/example-models/stan-models/MoC-stan.data.jl") - -@model nbmodel(K, V, M, N, z, w, doc, alpha, beta) = begin - theta ~ Dirichlet(alpha) - phi = Vector{Vector{Real}}(K) - phi ~ [Dirichlet(beta)] - - log_theta = log(theta) - Turing.acclogp!(vi, sum(log_theta[z[1:M]])) +@model ldamodel(K, V, M, N, w, doc, beta, alpha) = begin + theta = Matrix{Real}(K, M) + for m = 1:M + theta ~ [Dirichlet(alpha)] + end - log_phi = map(x->log(x), phi) + phi = Matrix{Real}(V, K) + for k = 1:K + phi ~ [Dirichlet(beta)] + end + + phi_dot_theta = phi * theta for n = 1:N - # w[n] ~ Categorical(phi[z[doc[n]]]) - Turing.acclogp!(vi, log_phi[z[doc[n]]][w[n]]) + Turing.acclogp!(vi, phi_dot_theta[w[n], doc[n]]) end - - phi end +include(Pkg.dir("Turing")*"/benchmarks/benchmarkhelper.jl") +include(Pkg.dir("Turing")*"/example-models/stan-models/lda-stan.data.jl") -# bench_res = tbenchmark("NUTS(1000, 0.65)", "nbmodel", "data=nbstandata[1]") -# bench_res[4].names = ["phi[1]", "phi[2]", "phi[3]", "phi[4]"] -# logd = build_logd("Naive Bayes", bench_res...) 
-# -# include(Pkg.dir("Turing")*"/benchmarks/"*"MoC-stan.run.jl") -# logd["stan"] = stan_d -# logd["time_stan"] = nb_time -# -# print_log(logd) +setchunksize(60) -samples = sample(nbmodel(data=nbstandata[1]), HMC(1000, 0.1, 4)) +alg = "HMCDA(2000, 0.65, 1.5)" +bench_res = tbenchmark(alg, "ldamodel", "data=ldastandata[1]") +bench_res[4].names = ["phi[1]", "phi[2]"] +logd = build_logd("LDA", bench_res...) +# logd["stan"] = lda_stan_d +# logd["time_stan"] = lda_time +print_log(logd) From 23509fe525b5f12edd89506d00e73c3508dbe37a Mon Sep 17 00:00:00 2001 From: Kai Xu Date: Mon, 5 Jun 2017 22:25:31 +0100 Subject: [PATCH 07/44] Matrix support vec now --- src/samplers/hmc.jl | 4 +-- src/samplers/sampler.jl | 4 +-- src/samplers/support/helper.jl | 8 ++++++ src/samplers/support/transform.jl | 42 ++++++++++++++++++++++++++++--- test/sampler.jl/vec_assume_mat.jl | 26 +++++++++++++++++++ 5 files changed, 75 insertions(+), 9 deletions(-) create mode 100644 test/sampler.jl/vec_assume_mat.jl diff --git a/src/samplers/hmc.jl b/src/samplers/hmc.jl index 5a3875ad73..3dfb58f617 100644 --- a/src/samplers/hmc.jl +++ b/src/samplers/hmc.jl @@ -163,7 +163,7 @@ assume{A<:Hamiltonian,D<:Distribution}(spl::Sampler{A}, dists::Vector{D}, vn::Va acclogp!(vi, sum(logpdf(dist, rs, istrans(vi, vns[1])))) - if isa(dist, UnivariateDistribution) + if isa(dist, UnivariateDistribution) || isa(dist, MatrixDistribution) var = rs elseif isa(dist, MultivariateDistribution) if isa(var, Vector) @@ -175,8 +175,6 @@ assume{A<:Hamiltonian,D<:Distribution}(spl::Sampler{A}, dists::Vector{D}, vn::Va else error("[Turing] unsupported variable container") end - elseif isa(dist, MatrixDistribution) - # nothing now end var diff --git a/src/samplers/sampler.jl b/src/samplers/sampler.jl index 4c43cf1e84..b400a7533d 100644 --- a/src/samplers/sampler.jl +++ b/src/samplers/sampler.jl @@ -54,7 +54,7 @@ assume{T<:Distribution}(spl::Void, dists::Vector{T}, vn::VarName, var::Any, vi:: else rs = rand(dist, n) - if isa(dist, UnivariateDistribution) + if isa(dist, UnivariateDistribution) || isa(dist, MatrixDistribution) for i = 1:n push!(vi, vns[i], rs[i], dist, 0) end @@ -72,8 +72,6 @@ assume{T<:Distribution}(spl::Void, dists::Vector{T}, vn::VarName, var::Any, vi:: else error("[Turing] unsupported variable container") end - elseif isa(dist, MatrixDistribution) - # nothing now end end diff --git a/src/samplers/support/helper.jl b/src/samplers/support/helper.jl index aeb6703d0d..08ad73a378 100644 --- a/src/samplers/support/helper.jl +++ b/src/samplers/support/helper.jl @@ -33,6 +33,14 @@ reconstruct(d::MatrixDistribution, val::Vector, T::Type) = Array{T, 2}(res reconstruct(d::Distribution, val::Vector, n::Int) = reconstruct(d, val, typeof(val[1]), n) reconstruct(d::UnivariateDistribution, val::Vector, T::Type, n::Int) = Array{T, 1}(val) reconstruct(d::MultivariateDistribution, val::Vector, T::Type, n::Int) = Array{T, 2}(reshape(val, size(d)[1], n)) +reconstruct(d::MatrixDistribution, val::Vector, T::Type, n::Int) = begin + orig = Vector{Matrix{T}}(n) + tmp = Array{T, 3}(reshape(val, size(d)[1], size(d)[2], n)) + for i = 1:n + orig[i] = tmp[:,:,i] + end + orig +end ########## # Others # diff --git a/src/samplers/support/transform.jl b/src/samplers/support/transform.jl index 583250a5d5..aa9749cebb 100644 --- a/src/samplers/support/transform.jl +++ b/src/samplers/support/transform.jl @@ -225,7 +225,7 @@ end typealias PDMatDistribution Union{InverseWishart, Wishart} -link{T}(d::PDMatDistribution, x::Array{T,2}) = begin +link{T<:Real}(d::PDMatDistribution, 
x::Array{T,2}) = begin z = chol(x)' dim = size(z) for m in 1:dim[1] @@ -237,7 +237,15 @@ link{T}(d::PDMatDistribution, x::Array{T,2}) = begin Array{T,2}(z) end -invlink{T}(d::PDMatDistribution, z::Array{T,2}) = begin +link{T<:Real}(d::PDMatDistribution, X::Vector{Matrix{T}}) = begin + n = length(X) + for i = 1:n + X[i] = link(d, X[i]) + end + X +end + +invlink{T<:Real}(d::PDMatDistribution, z::Array{T,2}) = begin dim = size(z) for m in 1:dim[1] z[m, m] = exp(z[m, m]) @@ -248,7 +256,17 @@ invlink{T}(d::PDMatDistribution, z::Array{T,2}) = begin Array{T,2}(z * z') end -Distributions.logpdf{T}(d::PDMatDistribution, x::Array{T,2}, transform::Bool) = begin +invlink{T<:Real}(d::PDMatDistribution, Z::Vector{Matrix{T}}) = begin + n = length(Z) + + for i = 1:n + Z[i] = invlink(d, Z[i]) + end + + Z +end + +Distributions.logpdf{T<:Real}(d::PDMatDistribution, x::Array{T,2}, transform::Bool) = begin lp = logpdf(d, x) if transform && isfinite(lp) U = chol(x) @@ -261,6 +279,24 @@ Distributions.logpdf{T}(d::PDMatDistribution, x::Array{T,2}, transform::Bool) = lp end +Distributions.logpdf{T<:Real}(d::PDMatDistribution, X::Vector{Matrix{T}}, transform::Bool) = begin + lp = logpdf(d, X) + if transform && all(isfinite(lp)) + n = length(X) + U = Vector{Matrix{T}}(n) + for i = 1:n + U[i] = chol(X[i])' + end + D = dim(d) + + for j = 1:n, i in 1:D + lp[j] += (D - i + T(2)) * log(U[j][i,i]) + end + lp += D * log(T(2)) + end + lp +end + ############# # Callbacks # ############# diff --git a/test/sampler.jl/vec_assume_mat.jl b/test/sampler.jl/vec_assume_mat.jl new file mode 100644 index 0000000000..8cfb9dedc5 --- /dev/null +++ b/test/sampler.jl/vec_assume_mat.jl @@ -0,0 +1,26 @@ +include("../utility.jl") +using Distributions, Turing, Base.Test + +N = 5 +setchunksize(4*N) +alg = HMC(1000, 0.2, 4) + +@model vdemo() = begin + v = Vector{Matrix{Real}}(N) + v ~ [Wishart(7, [1 0.5; 0.5 1])] +end + +t_vec = @elapsed res_vec = sample(vdemo(), alg) + +@model vdemo() = begin + v = Vector{Matrix{Real}}(N) + for i = 1:N + v[i] ~ Wishart(7, [1 0.5; 0.5 1]) + end +end + +t_loop = @elapsed res = sample(vdemo(), alg) + +println("Time for") +println(" Loop : $t_loop") +println(" Vec : $t_vec") From 13f2887c04bcdd72addbbfd5b74edf5d49adc309 Mon Sep 17 00:00:00 2001 From: Kai Xu Date: Mon, 5 Jun 2017 23:02:15 +0100 Subject: [PATCH 08/44] Increase N to see gap clearer --- test/sampler.jl/vec_assume_mv.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/sampler.jl/vec_assume_mv.jl b/test/sampler.jl/vec_assume_mv.jl index 6ce2f35ba8..68c846960d 100644 --- a/test/sampler.jl/vec_assume_mv.jl +++ b/test/sampler.jl/vec_assume_mv.jl @@ -1,7 +1,7 @@ include("../utility.jl") using Distributions, Turing, Base.Test -N = 5 +N = 20 beta = [0.5, 0.5] setchunksize(N*length(beta)) alg = HMC(1000, 0.2, 4) From 4f375a98e05ea486f998224481004a47aed2dda5 Mon Sep 17 00:00:00 2001 From: Kai Xu Date: Tue, 6 Jun 2017 12:48:08 +0100 Subject: [PATCH 09/44] Fix few bugs for resume MCMC --- src/core/io.jl | 2 +- src/samplers/gibbs.jl | 5 ++++- src/samplers/hmc.jl | 9 +++++++-- src/samplers/hmcda.jl | 16 +++++++++------- src/samplers/nuts.jl | 12 +++++++----- src/samplers/pgibbs.jl | 6 +++++- 6 files changed, 33 insertions(+), 17 deletions(-) diff --git a/src/core/io.jl b/src/core/io.jl index 9645d168d7..f1009486e5 100644 --- a/src/core/io.jl +++ b/src/core/io.jl @@ -192,7 +192,7 @@ end save!(c::Chain, spl::Sampler, model::Function, vi::VarInfo) = begin c.info[:spl] = spl c.info[:model] = model - c.info[:vi] = vi + c.info[:vi] = deepcopy(vi) end 
resume(c::Chain, n_iter::Int) = begin diff --git a/src/samplers/gibbs.jl b/src/samplers/gibbs.jl index 9c36d47f36..ef1263ffcd 100644 --- a/src/samplers/gibbs.jl +++ b/src/samplers/gibbs.jl @@ -76,7 +76,10 @@ sample(model::Function, alg::Gibbs; end # Init parameters - varInfo = model(); n = spl.alg.n_iters; i_thin = 1 + varInfo = resume_from == nothing ? + model() : + resume_from.info[:vi] + n = spl.alg.n_iters; i_thin = 1 # Gibbs steps spl.info[:progress] = ProgressMeter.Progress(n, 1, "[Gibbs] Sampling...", 0) diff --git a/src/samplers/hmc.jl b/src/samplers/hmc.jl index 3dfb58f617..99d3adbba8 100644 --- a/src/samplers/hmc.jl +++ b/src/samplers/hmc.jl @@ -95,7 +95,10 @@ function sample{T<:Hamiltonian}(model::Function, alg::T; for i = 1:n samples[i] = Sample(weight, Dict{Symbol, Any}()) end - vi = model() + + vi = resume_from == nothing ? + model() : + deepcopy(resume_from.info[:vi]) if spl.alg.gid == 0 link!(vi, spl) @@ -107,7 +110,7 @@ function sample{T<:Hamiltonian}(model::Function, alg::T; for i = 1:n dprintln(2, "$alg_str stepping...") - time_elapsed = @elapsed vi = step(model, spl, vi, i==1) + time_elapsed = @elapsed vi = step(model, spl, vi, i == 1) time_total += time_elapsed if spl.info[:accept_his][end] # accepted => store the new predcits @@ -138,6 +141,8 @@ function sample{T<:Hamiltonian}(model::Function, alg::T; end c = Chain(0, samples) # wrap the result by Chain if save_state # save state + # Convert vi back to X if vi is required to be saved + if spl.alg.gid == 0 invlink!(vi, spl) end save!(c, spl, model, vi) end diff --git a/src/samplers/hmcda.jl b/src/samplers/hmcda.jl index 5bfb042c7e..6879b73332 100644 --- a/src/samplers/hmcda.jl +++ b/src/samplers/hmcda.jl @@ -49,17 +49,19 @@ end function step(model, spl::Sampler{HMCDA}, vi::VarInfo, is_first::Bool) if is_first - if spl.alg.gid != 0 link!(vi, spl) end # X -> R + if ~haskey(spl.info, :wum) + if spl.alg.gid != 0 link!(vi, spl) end # X -> R - init_warm_up_params(vi, spl) + init_warm_up_params(vi, spl) - ϵ = spl.alg.delta > 0 ? - find_good_eps(model, vi, spl) : # heuristically find optimal ϵ - spl.info[:pre_set_ϵ] + ϵ = spl.alg.delta > 0 ? 
+ find_good_eps(model, vi, spl) : # heuristically find optimal ϵ + spl.info[:pre_set_ϵ] - if spl.alg.gid != 0 invlink!(vi, spl) end # R -> X + if spl.alg.gid != 0 invlink!(vi, spl) end # R -> X - update_da_params(spl.info[:wum], ϵ) + update_da_params(spl.info[:wum], ϵ) + end push!(spl.info[:accept_his], true) diff --git a/src/samplers/nuts.jl b/src/samplers/nuts.jl index aeff2c3424..195549a124 100644 --- a/src/samplers/nuts.jl +++ b/src/samplers/nuts.jl @@ -44,15 +44,17 @@ end function step(model::Function, spl::Sampler{NUTS}, vi::VarInfo, is_first::Bool) if is_first - if spl.alg.gid != 0 link!(vi, spl) end # X -> R + if ~haskey(spl.info, :wum) + if spl.alg.gid != 0 link!(vi, spl) end # X -> R - init_warm_up_params(vi, spl) + init_warm_up_params(vi, spl) - ϵ = find_good_eps(model, vi, spl) # heuristically find optimal ϵ + ϵ = find_good_eps(model, vi, spl) # heuristically find optimal ϵ - if spl.alg.gid != 0 invlink!(vi, spl) end # R -> X + if spl.alg.gid != 0 invlink!(vi, spl) end # R -> X - update_da_params(spl.info[:wum], ϵ) + update_da_params(spl.info[:wum], ϵ) + end push!(spl.info[:accept_his], true) diff --git a/src/samplers/pgibbs.jl b/src/samplers/pgibbs.jl index 5b7b16e39b..44a99d6424 100644 --- a/src/samplers/pgibbs.jl +++ b/src/samplers/pgibbs.jl @@ -101,7 +101,11 @@ sample(model::Function, alg::PG; ## custom resampling function for pgibbs ## re-inserts reteined particle after each resampling step time_total = zero(Float64) - vi = VarInfo() + + vi = resume_from == nothing ? + model() : + resume_from.info[:vi] + @showprogress 1 "[PG] Sampling..." for i = 1:n time_elapsed = @elapsed vi = step(model, spl, vi) push!(samples, Sample(vi)) From dcecb9b1f8f5f9fd305eca37cf04a8392514e64f Mon Sep 17 00:00:00 2001 From: Kai Xu Date: Tue, 6 Jun 2017 12:50:26 +0100 Subject: [PATCH 10/44] Fix PG typo --- src/samplers/pgibbs.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/samplers/pgibbs.jl b/src/samplers/pgibbs.jl index 44a99d6424..abf6acd43f 100644 --- a/src/samplers/pgibbs.jl +++ b/src/samplers/pgibbs.jl @@ -103,7 +103,7 @@ sample(model::Function, alg::PG; time_total = zero(Float64) vi = resume_from == nothing ? - model() : + VarInfo() : resume_from.info[:vi] @showprogress 1 "[PG] Sampling..." 
for i = 1:n From 41eded2c8748c8d3f4aaeb86885f3c2e9348d775 Mon Sep 17 00:00:00 2001 From: Kai Xu Date: Tue, 6 Jun 2017 13:42:09 +0100 Subject: [PATCH 11/44] Increase eps in online pre-cond cal --- src/samplers/support/adapt.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/samplers/support/adapt.jl b/src/samplers/support/adapt.jl index 3b604057db..edb528b1aa 100644 --- a/src/samplers/support/adapt.jl +++ b/src/samplers/support/adapt.jl @@ -74,7 +74,7 @@ update_pre_cond(wum::WarmUpManager, θ_new) = begin else#if t <= 1000 D = length(θ_new) # D = 2.4^2 - wum[:vars] = (t - 1) / t * wum[:vars] .+ 100 * eps(Float64) + + wum[:vars] = (t - 1) / t * wum[:vars] .+ 1e3 * eps(Float64) + (2.4^2 / D) / t * (t * θ_mean_old .* θ_mean_old - (t + 1) * θ_mean_new .* θ_mean_new + θ_new .* θ_new) end From 99428ce79a70253dd7b0be7da926cb830a51c262 Mon Sep 17 00:00:00 2001 From: Hong Ge Date: Thu, 8 Jun 2017 16:08:15 +0100 Subject: [PATCH 12/44] Fair comparision on LDA (#275) --- benchmarks/lda-stan.run.jl | 7 ++++++- benchmarks/lda.run.jl | 3 ++- 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/benchmarks/lda-stan.run.jl b/benchmarks/lda-stan.run.jl index a1b3418c0c..caa7a54a0b 100644 --- a/benchmarks/lda-stan.run.jl +++ b/benchmarks/lda-stan.run.jl @@ -7,7 +7,12 @@ include(Pkg.dir("Turing")*"/example-models/stan-models/lda-stan.data.jl") include(Pkg.dir("Turing")*"/example-models/stan-models/lda-stan.model.jl") stan_model_name = "LDA" -ldastan = Stanmodel(Sample(save_warmup=true), name=stan_model_name, model=ldastanmodel, nchains=1); +# ldastan = Stanmodel(Sample(save_warmup=true), name=stan_model_name, model=ldastanmodel, nchains=1); +# To understand parameters, use: ?Stan.Static, ?Stan,Hmc +ldastan = Stanmodel(Sample(algorithm=Stan.Hmc(Stan.Static(0.25),Stan.diag_e(),0.025,0.0), + save_warmup=true,adapt=Stan.Adapt(engaged=false)), + num_samples=2000, num_warmup=0, thin=1, + name=stan_model_name, model=ldastanmodel, nchains=1); rc, lda_stan_sim = stan(ldastan, ldastandata, CmdStanDir=CMDSTAN_HOME, summary=false); # lda_stan_sim.names diff --git a/benchmarks/lda.run.jl b/benchmarks/lda.run.jl index 02157b829f..4c9ad96271 100644 --- a/benchmarks/lda.run.jl +++ b/benchmarks/lda.run.jl @@ -10,7 +10,8 @@ include(Pkg.dir("Turing")*"/benchmarks/"*"lda-stan.run.jl") setchunksize(60) -for alg in ["HMC(1000, 0.25, 6)", "HMCDA(1000, 0.65, 1.5)", "NUTS(2000, 1000, 0.65)"] +#for alg in ["HMC(2000, 0.25, 10)", "HMCDA(1000, 0.65, 1.5)", "NUTS(2000, 1000, 0.65)"] +or alg in ["HMC(2000, 0.025, 10)"] bench_res = tbenchmark(alg, "ldamodel", "data=ldastandata[1]") bench_res[4].names = ["phi[1]", "phi[2]"] logd = build_logd("LDA", bench_res...) From c5d3ac4e4604ec09dc67fffd3d4851ae7e575690 Mon Sep 17 00:00:00 2001 From: Kai Xu Date: Sat, 10 Jun 2017 21:32:49 +0100 Subject: [PATCH 13/44] Add assertions to check dimension of rs and var. 
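
A minimal sketch of the shape invariant these assertions enforce when a vectorized
multivariate assume fills a vector-of-vectors container. Illustrative only: the
`MvNormal` distribution and the sizes below are assumed for the example, not taken
from this patch.

    using Distributions
    dist = MvNormal(zeros(2), ones(2))  # assumed i.i.d. component distribution
    n = 5                               # assumed number of vectorized draws
    rs = rand(dist, n)                  # 2×n matrix, one draw per column
    var = [zeros(2) for i = 1:n]        # vector-of-vectors container from the model
    # the added check: container length must match the number of columns in rs
    @assert length(var) == size(rs)[2] "[assume] variable and random number dimension unmatched"
    for i = 1:n
      var[i] = rs[:,i]                  # copy each column into its slot
    end

The Matrix container case is guarded the same way, except the whole shape must
match (`size(var) == size(rs)`).
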
--- src/samplers/hmc.jl | 3 +++ src/samplers/sampler.jl | 5 ++++- 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/src/samplers/hmc.jl b/src/samplers/hmc.jl index 99d3adbba8..edf7e9940d 100644 --- a/src/samplers/hmc.jl +++ b/src/samplers/hmc.jl @@ -169,13 +169,16 @@ assume{A<:Hamiltonian,D<:Distribution}(spl::Sampler{A}, dists::Vector{D}, vn::Va acclogp!(vi, sum(logpdf(dist, rs, istrans(vi, vns[1])))) if isa(dist, UnivariateDistribution) || isa(dist, MatrixDistribution) + @assert size(var) == size(rs) "[assume] variable and random number dimension unmatched" var = rs elseif isa(dist, MultivariateDistribution) if isa(var, Vector) + @assert length(var) == size(rs)[2] "[assume] variable and random number dimension unmatched" for i = 1:n var[i] = rs[:,i] end elseif isa(var, Matrix) + @assert size(var) == size(rs) "[assume] variable and random number dimension unmatched" var = rs else error("[Turing] unsupported variable container") diff --git a/src/samplers/sampler.jl b/src/samplers/sampler.jl index b400a7533d..af02283c53 100644 --- a/src/samplers/sampler.jl +++ b/src/samplers/sampler.jl @@ -43,7 +43,7 @@ assume(spl::Void, dist::Distribution, vn::VarName, vi::VarInfo) = begin end assume{T<:Distribution}(spl::Void, dists::Vector{T}, vn::VarName, var::Any, vi::VarInfo) = begin - @assert length(dists) == 1 "[observe] Turing only support vectorizing i.i.d distribution" + @assert length(dists) == 1 "[assume] Turing only support vectorizing i.i.d distribution" dist = dists[1] n = size(var)[end] @@ -58,16 +58,19 @@ assume{T<:Distribution}(spl::Void, dists::Vector{T}, vn::VarName, var::Any, vi:: for i = 1:n push!(vi, vns[i], rs[i], dist, 0) end + @assert size(var) == size(rs) "[assume] variable and random number dimension unmatched" var = rs elseif isa(dist, MultivariateDistribution) for i = 1:n push!(vi, vns[i], rs[:,i], dist, 0) end if isa(var, Vector) + @assert length(var) == size(rs)[2] "[assume] variable and random number dimension unmatched" for i = 1:n var[i] = rs[:,i] end elseif isa(var, Matrix) + @assert size(var) == size(rs) "[assume] variable and random number dimension unmatched" var = rs else error("[Turing] unsupported variable container") From 920561664a200dd3e9278a7421d6165d95365aff Mon Sep 17 00:00:00 2001 From: Kai Xu Date: Sat, 10 Jun 2017 22:05:28 +0100 Subject: [PATCH 14/44] Forbid vec for PG and SMC by giving errors --- src/samplers/pgibbs.jl | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/src/samplers/pgibbs.jl b/src/samplers/pgibbs.jl index abf6acd43f..6a3821da4e 100644 --- a/src/samplers/pgibbs.jl +++ b/src/samplers/pgibbs.jl @@ -157,5 +157,11 @@ assume{T<:Union{PG,SMC}}(spl::Sampler{T}, dist::Distribution, vn::VarName, _::Va end end +assume{T<:Union{PG,SMC}}(spl::Void, dists::Vector{T}, vn::VarName, var::Any, vi::VarInfo) = + error("[Turing] PG and SMC doesn't support vectorizing assume statement") + observe{T<:Union{PG,SMC}}(spl::Sampler{T}, dist::Distribution, value, vi) = produce(logpdf(dist, value)) + +observe{A<:Union{PG,SMC},D<:Distribution}(spl::Sampler{A}, ds::Vector{D}, value::Any, vi::VarInfo) = + error("[Turing] PG and SMC doesn't support vectorizing observe statement") From 6699c845bcb791d5e8ad67af6a33d2368c0fefcd Mon Sep 17 00:00:00 2001 From: Kai Xu Date: Sat, 10 Jun 2017 22:10:23 +0100 Subject: [PATCH 15/44] Add vec LDA --- benchmarks/lda.run.jl | 7 ++++--- example-models/stan-models/lda.model.jl | 18 ++++++++++++++++++ 2 files changed, 22 insertions(+), 3 deletions(-) diff --git a/benchmarks/lda.run.jl b/benchmarks/lda.run.jl index 
4c9ad96271..aeff916455 100644 --- a/benchmarks/lda.run.jl +++ b/benchmarks/lda.run.jl @@ -11,10 +11,11 @@ include(Pkg.dir("Turing")*"/benchmarks/"*"lda-stan.run.jl") setchunksize(60) #for alg in ["HMC(2000, 0.25, 10)", "HMCDA(1000, 0.65, 1.5)", "NUTS(2000, 1000, 0.65)"] -or alg in ["HMC(2000, 0.025, 10)"] - bench_res = tbenchmark(alg, "ldamodel", "data=ldastandata[1]") + +for (modelc, modeln) in zip(["ldamodel", "ldamodel_vec"], ["LDA", "LDA vec"]) + bench_res = tbenchmark("HMC(2000, 0.025, 10)", modelc, "data=ldastandata[1]") bench_res[4].names = ["phi[1]", "phi[2]"] - logd = build_logd("LDA", bench_res...) + logd = build_logd(modeln, bench_res...) logd["stan"] = lda_stan_d logd["time_stan"] = lda_time print_log(logd) diff --git a/example-models/stan-models/lda.model.jl b/example-models/stan-models/lda.model.jl index d32486f81c..0e837e97a1 100644 --- a/example-models/stan-models/lda.model.jl +++ b/example-models/stan-models/lda.model.jl @@ -51,3 +51,21 @@ end end + + +@model ldamodel_vec(K, V, M, N, w, doc, beta, alpha) = begin + theta = Matrix{Real}(K, M) + for m = 1:M + theta ~ [Dirichlet(alpha)] + end + + phi = Matrix{Real}(V, K) + for k = 1:K + phi ~ [Dirichlet(beta)] + end + + phi_dot_theta = phi * theta + for n = 1:N + Turing.acclogp!(vi, phi_dot_theta[w[n], doc[n]]) + end +end From d81f76823ddcf71f495ceadb9c3dc09084c06967 Mon Sep 17 00:00:00 2001 From: Kai Xu Date: Mon, 12 Jun 2017 12:45:15 +0100 Subject: [PATCH 16/44] Vec version of LDA --- benchmarks/lda.run.jl | 2 +- example-models/stan-models/lda.model.jl | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/benchmarks/lda.run.jl b/benchmarks/lda.run.jl index aeff916455..839595b8fc 100644 --- a/benchmarks/lda.run.jl +++ b/benchmarks/lda.run.jl @@ -12,7 +12,7 @@ setchunksize(60) #for alg in ["HMC(2000, 0.25, 10)", "HMCDA(1000, 0.65, 1.5)", "NUTS(2000, 1000, 0.65)"] -for (modelc, modeln) in zip(["ldamodel", "ldamodel_vec"], ["LDA", "LDA vec"]) +for (modelc, modeln) in zip(["ldamodel_vec", "ldamodel"], ["LDA vec", "LDA"]) bench_res = tbenchmark("HMC(2000, 0.025, 10)", modelc, "data=ldastandata[1]") bench_res[4].names = ["phi[1]", "phi[2]"] logd = build_logd(modeln, bench_res...) 
diff --git a/example-models/stan-models/lda.model.jl b/example-models/stan-models/lda.model.jl index 0e837e97a1..aa4bcece36 100644 --- a/example-models/stan-models/lda.model.jl +++ b/example-models/stan-models/lda.model.jl @@ -64,7 +64,7 @@ end phi ~ [Dirichlet(beta)] end - phi_dot_theta = phi * theta + phi_dot_theta = log(phi * theta) for n = 1:N Turing.acclogp!(vi, phi_dot_theta[w[n], doc[n]]) end From bc64f9f110b0afbfbd0e111054962d9857cf8812 Mon Sep 17 00:00:00 2001 From: Kai Xu Date: Mon, 12 Jun 2017 12:45:37 +0100 Subject: [PATCH 17/44] Disable pre-cond adapt for std HMC --- src/samplers/hmcda.jl | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/src/samplers/hmcda.jl b/src/samplers/hmcda.jl index 6879b73332..93b6802ec4 100644 --- a/src/samplers/hmcda.jl +++ b/src/samplers/hmcda.jl @@ -114,8 +114,10 @@ function step(model, spl::Sampler{HMCDA}, vi::VarInfo, is_first::Bool) setlogp!(vi, old_logp) # reset logp end - # Adapt step-size and pre-cond - adapt(spl.info[:wum], α, realpart(vi[spl])) + + if spl.alg.delta > 0 # only do adaption for HMCDA + adapt(spl.info[:wum], α, realpart(vi[spl])) + end dprintln(3, "R -> X...") if spl.alg.gid != 0 invlink!(vi, spl); cleandual!(vi) end From a64de950e2a233b39acdc2a7ce74f39ad2e7d420 Mon Sep 17 00:00:00 2001 From: Kai Xu Date: Mon, 12 Jun 2017 12:45:54 +0100 Subject: [PATCH 18/44] Disable re-scale for pre-cond --- src/samplers/support/adapt.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/samplers/support/adapt.jl b/src/samplers/support/adapt.jl index edb528b1aa..36257b18e0 100644 --- a/src/samplers/support/adapt.jl +++ b/src/samplers/support/adapt.jl @@ -80,7 +80,7 @@ update_pre_cond(wum::WarmUpManager, θ_new) = begin if t > 100 wum[:stds] = sqrt(wum[:vars]) - wum[:stds] = wum[:stds] / min(wum[:stds]...) + # wum[:stds] = wum[:stds] / min(wum[:stds]...) end end end From f27f52ea08865255da1236cd968e9d0e6b67c933 Mon Sep 17 00:00:00 2001 From: Kai Xu Date: Mon, 12 Jun 2017 13:29:29 +0100 Subject: [PATCH 19/44] Bugfix for vec LDA --- example-models/stan-models/lda.model.jl | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/example-models/stan-models/lda.model.jl b/example-models/stan-models/lda.model.jl index aa4bcece36..514d1aba40 100644 --- a/example-models/stan-models/lda.model.jl +++ b/example-models/stan-models/lda.model.jl @@ -55,14 +55,10 @@ end @model ldamodel_vec(K, V, M, N, w, doc, beta, alpha) = begin theta = Matrix{Real}(K, M) - for m = 1:M - theta ~ [Dirichlet(alpha)] - end + theta ~ [Dirichlet(alpha)] phi = Matrix{Real}(V, K) - for k = 1:K - phi ~ [Dirichlet(beta)] - end + phi ~ [Dirichlet(beta)] phi_dot_theta = log(phi * theta) for n = 1:N From a5a18275a1f6d32f76dbc451320dfb2814230742 Mon Sep 17 00:00:00 2001 From: Kai Xu Date: Mon, 12 Jun 2017 16:16:42 +0100 Subject: [PATCH 20/44] Try another re-sacle. --- src/samplers/support/adapt.jl | 1 + 1 file changed, 1 insertion(+) diff --git a/src/samplers/support/adapt.jl b/src/samplers/support/adapt.jl index 36257b18e0..672b87f072 100644 --- a/src/samplers/support/adapt.jl +++ b/src/samplers/support/adapt.jl @@ -81,6 +81,7 @@ update_pre_cond(wum::WarmUpManager, θ_new) = begin if t > 100 wum[:stds] = sqrt(wum[:vars]) # wum[:stds] = wum[:stds] / min(wum[:stds]...) 
+ wum[:stds] = wum[:stds] / mean([wum[:stds]...]) end end end From fe7790de3efb40b380b3ec3a4a585dffa5a22fef Mon Sep 17 00:00:00 2001 From: Kai Xu Date: Mon, 12 Jun 2017 16:26:43 +0100 Subject: [PATCH 21/44] Comments --- src/samplers/support/adapt.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/samplers/support/adapt.jl b/src/samplers/support/adapt.jl index 672b87f072..ebaf30fddf 100644 --- a/src/samplers/support/adapt.jl +++ b/src/samplers/support/adapt.jl @@ -80,7 +80,7 @@ update_pre_cond(wum::WarmUpManager, θ_new) = begin if t > 100 wum[:stds] = sqrt(wum[:vars]) - # wum[:stds] = wum[:stds] / min(wum[:stds]...) + # wum[:stds] = wum[:stds] / min(wum[:stds]...) # old wum[:stds] = wum[:stds] / mean([wum[:stds]...]) end end From 0a997e84a24a607deed8d9a1b40944cf329006ab Mon Sep 17 00:00:00 2001 From: Kai Xu Date: Mon, 12 Jun 2017 20:46:54 +0100 Subject: [PATCH 22/44] Try using Julia 0.5.2 for Windows --- appveyor.yml | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/appveyor.yml b/appveyor.yml index d1a5ac2680..d7accc7576 100644 --- a/appveyor.yml +++ b/appveyor.yml @@ -1,11 +1,11 @@ environment: matrix: - - JULIA_URL: "https://julialang-s3.julialang.org/bin/winnt/x64/0.5/julia-0.5.0-win64.exe" + - JULIA_URL: "https://julialang-s3.julialang.org/bin/winnt/x64/0.5/julia-0.5.2-win64.exe" MINGW_DIR: mingw64 # MINGW_URL: https://sourceforge.net/projects/mingw-w64/files/Toolchains%20targetting%20Win64/Personal%20Builds/mingw-builds/5.3.0/threads-win32/seh/x86_64-5.3.0-release-win32-seh-rt_v4-rev0.7z/download MINGW_URL: http://mlg.eng.cam.ac.uk/hong/x86_64-5.3.0-release-win32-seh-rt_v4-rev0.7z MINGW_ARCHIVE: x86_64-5.3.0-release-win32-seh-rt_v4-rev0.7z - - JULIA_URL: "https://julialang-s3.julialang.org/bin/winnt/x86/0.5/julia-0.5.0-win32.exe" + - JULIA_URL: "https://julialang-s3.julialang.org/bin/winnt/x86/0.5/julia-0.5.2-win32.exe" MINGW_DIR: mingw32 # MINGW_URL: https://sourceforge.net/projects/mingw-w64/files/Toolchains%20targetting%20Win32/Personal%20Builds/mingw-builds/5.3.0/threads-win32/dwarf/i686-5.3.0-release-win32-dwarf-rt_v4-rev0.7z/download MINGW_URL: http://mlg.eng.cam.ac.uk/hong/i686-5.3.0-release-win32-dwarf-rt_v4-rev0.7z @@ -21,7 +21,7 @@ cache: # Cache large downloads to avoid network unreliability - "%MINGW_ARCHIVE%" - C:\Users\appveyor\.julia\v0.5 - + install: - ps: "[System.Net.ServicePointManager]::SecurityProtocol = [System.Net.SecurityProtocolType]::Tls12" # Download and install MinGW @@ -43,9 +43,7 @@ build_script: - C:\projects\julia\bin\julia -e "versioninfo(); Pkg.update(); rm(Pkg.dir(\"Turing\"), force=true, recursive=true); - Pkg.clone(pwd(), \"Turing\"); - Pkg.pin(\"Gadfly\", v\"0.6.0\"); - using Gadfly; + Pkg.clone(pwd(), \"Turing\"); Pkg.build(\"Turing\")" test_script: From 4d0cfaa747466c1d66404a83db778132eecaad43 Mon Sep 17 00:00:00 2001 From: Kai Xu Date: Mon, 12 Jun 2017 20:51:42 +0100 Subject: [PATCH 23/44] Enable SV model --- benchmarks/sv.run.jl | 10 ++-------- example-models/nips-2017/sv.model.jl | 3 --- 2 files changed, 2 insertions(+), 11 deletions(-) diff --git a/benchmarks/sv.run.jl b/benchmarks/sv.run.jl index 1a4d670f00..085d538bae 100644 --- a/benchmarks/sv.run.jl +++ b/benchmarks/sv.run.jl @@ -5,22 +5,16 @@ TPATH = Pkg.dir("Turing") include(TPATH*"/example-models/nips-2017/sv.model.jl") -PR_for_distributions_is_merged = false - -if PR_for_distributions_is_merged - using HDF5, JLD sv_data = load(TPATH*"/example-models/nips-2017/sv-data.jld")["data"] model_f = sv_model(data=sv_data[1]) -sample_n = 10000 
+sample_n = 5000 setchunksize(550) chain_nuts = sample(model_f, NUTS(sample_n, 0.65)) decribe(chain_nuts) setchunksize(5) -chain_gibbs = sample(model_f, Gibbs(sample_n, PG(50,1,:h),NUTS(1000,0.65,:ϕ,:σ,:μ))) +chain_gibbs = sample(model_f, Gibbs(sample_n, PG(50,1,:h), NUTS(1000,0.65,:ϕ,:σ,:μ))) decribe(chain_gibbs) - -end diff --git a/example-models/nips-2017/sv.model.jl b/example-models/nips-2017/sv.model.jl index 6a54a1a55d..88ad42c561 100644 --- a/example-models/nips-2017/sv.model.jl +++ b/example-models/nips-2017/sv.model.jl @@ -4,9 +4,6 @@ σ ~ Truncated(Cauchy(0,5), 0, +Inf) μ ~ Cauchy(0, 10) h = tzeros(Real, T) - if σ / sqrt(1 - ϕ^2) <= 0 - println(σ, ϕ) - end h[1] ~ Normal(μ, σ / sqrt(1 - ϕ^2)) y[1] ~ Normal(0, exp(h[1] / 2)) for t = 2:T From 6f41fe1cbe826700c13f7d583e806774efbd1369 Mon Sep 17 00:00:00 2001 From: Kai Xu Date: Mon, 12 Jun 2017 20:54:14 +0100 Subject: [PATCH 24/44] Add debug log --- test/utility.jl | 2 ++ 1 file changed, 2 insertions(+) diff --git a/test/utility.jl b/test/utility.jl index 4a683a32f2..c1c863434d 100644 --- a/test/utility.jl +++ b/test/utility.jl @@ -17,6 +17,7 @@ randr(vi::VarInfo, vn::VarName, dist::Distribution, count::Bool) = begin r end end +println("[test/utility.jl] randr() reloaded") randr(vi::VarInfo, vn::VarName, dist::Distribution, spl::Sampler, count::Bool) = begin vi.index = count ? vi.index + 1 : vi.index @@ -35,6 +36,7 @@ randr(vi::VarInfo, vn::VarName, dist::Distribution, spl::Sampler, count::Bool) = vi[vn] end end +println("[test/utility.jl] randr() reloaded") # Helper function for numerical tests From c34767901e431f9c17867392be9159836f84fd22 Mon Sep 17 00:00:00 2001 From: Kai Xu Date: Mon, 12 Jun 2017 21:23:03 +0100 Subject: [PATCH 25/44] Quick profile --- test/quick.jl | 25 +++++++------------------ 1 file changed, 7 insertions(+), 18 deletions(-) diff --git a/test/quick.jl b/test/quick.jl index 444b0a152c..4a13cd0f24 100644 --- a/test/quick.jl +++ b/test/quick.jl @@ -1,31 +1,20 @@ using Turing -@model ldamodel(K, V, M, N, w, doc, beta, alpha) = begin +@model ldamodel_vec(K, V, M, N, w, doc, beta, alpha) = begin theta = Matrix{Real}(K, M) - for m = 1:M - theta ~ [Dirichlet(alpha)] - end + theta ~ [Dirichlet(alpha)] phi = Matrix{Real}(V, K) - for k = 1:K - phi ~ [Dirichlet(beta)] - end - - phi_dot_theta = phi * theta + phi ~ [Dirichlet(beta)] + + phi_dot_theta = log(phi * theta) for n = 1:N Turing.acclogp!(vi, phi_dot_theta[w[n], doc[n]]) end end -include(Pkg.dir("Turing")*"/benchmarks/benchmarkhelper.jl") include(Pkg.dir("Turing")*"/example-models/stan-models/lda-stan.data.jl") -setchunksize(60) +setchunksize(100) -alg = "HMCDA(2000, 0.65, 1.5)" -bench_res = tbenchmark(alg, "ldamodel", "data=ldastandata[1]") -bench_res[4].names = ["phi[1]", "phi[2]"] -logd = build_logd("LDA", bench_res...) 
-# logd["stan"] = lda_stan_d -# logd["time_stan"] = lda_time -print_log(logd) +sample(ldamodel_vec(data=ldastandata[1]), HMC(2000, 0.025, 10)) From f8370bb4e0d5b529176966612088a8226f98ce1b Mon Sep 17 00:00:00 2001 From: Kai Xu Date: Mon, 12 Jun 2017 23:20:19 +0100 Subject: [PATCH 26/44] Automatic benchmark log upload --- benchmarks/benchmarkhelper.jl | 39 +++++++++++++++++++---------------- benchmarks/install_deps.jl | 2 ++ benchmarks/lda.run.jl | 4 ++++ 3 files changed, 27 insertions(+), 18 deletions(-) diff --git a/benchmarks/benchmarkhelper.jl b/benchmarks/benchmarkhelper.jl index 0f99fdb83d..83e5d06661 100644 --- a/benchmarks/benchmarkhelper.jl +++ b/benchmarks/benchmarkhelper.jl @@ -23,26 +23,27 @@ build_logd(name::String, engine::String, time, mem, tchain, _) = begin end # Log function -print_log(logd::Dict, monitor=[]) = begin - println("/=======================================================================") - println("| Benchmark Result for >>> $(logd["name"]) <<<") - println("|-----------------------------------------------------------------------") - println("| Overview") - println("|-----------------------------------------------------------------------") - println("| Inference Engine : $(logd["engine"])") - println("| Time Used (s) : $(logd["time"])") +log2str(logd::Dict, monitor=[]) = begin + str = "" + str *= ("/=======================================================================") * "\n" + str *= ("| Benchmark Result for >>> $(logd["name"]) <<<") * "\n" + str *= ("|-----------------------------------------------------------------------") * "\n" + str *= ("| Overview") * "\n" + str *= ("|-----------------------------------------------------------------------") * "\n" + str *= ("| Inference Engine : $(logd["engine"])") * "\n" + str *= ("| Time Used (s) : $(logd["time"])") * "\n" if haskey(logd, "time_stan") - println("| -> time by Stan : $(logd["time_stan"])") + str *= ("| -> time by Stan : $(logd["time_stan"])") * "\n" end - println("| Mem Alloc (bytes) : $(logd["mem"])") + str *= ("| Mem Alloc (bytes) : $(logd["mem"])") * "\n" if haskey(logd, "turing") - println("|-----------------------------------------------------------------------") - println("| Turing Inference Result") - println("|-----------------------------------------------------------------------") + str *= ("|-----------------------------------------------------------------------") * "\n" + str *= ("| Turing Inference Result") * "\n" + str *= ("|-----------------------------------------------------------------------") * "\n" for (v, m) = logd["turing"] if isempty(monitor) || v in monitor - println("| >> $v <<") - println("| mean = $(round(m, 3))") + str *= ("| >> $v <<") * "\n" + str *= ("| mean = $(round(m, 3))") * "\n" if haskey(logd, "analytic") && haskey(logd["analytic"], v) print("| -> analytic = $(round(logd["analytic"][v], 3)), ") diff = abs(m - logd["analytic"][v]) @@ -50,7 +51,7 @@ print_log(logd::Dict, monitor=[]) = begin if sum(diff) > 0.2 print_with_color(:red, diff_output*"\n") else - println(diff_output) + str *= (diff_output) * "\n" end end if haskey(logd, "stan") && haskey(logd["stan"], v) @@ -60,11 +61,13 @@ print_log(logd::Dict, monitor=[]) = begin if sum(diff) > 0.2 print_with_color(:red, diff_output*"\n") else - println(diff_output) + str *= (diff_output) * "\n" end end end end end - println("\\=======================================================================") + str *= ("\\=======================================================================") * "\n" end + +print_log(logd::Dict, 
monitor=[]) = print(log2str(logd, monitor)) diff --git a/benchmarks/install_deps.jl b/benchmarks/install_deps.jl index 9353d22b42..c29eaeebcc 100644 --- a/benchmarks/install_deps.jl +++ b/benchmarks/install_deps.jl @@ -8,3 +8,5 @@ Pkg.add("UnicodePlots") Pkg.add("HDF5") Pkg.add("JLD") + +Pkg.add("Requests") diff --git a/benchmarks/lda.run.jl b/benchmarks/lda.run.jl index 839595b8fc..118f649b47 100644 --- a/benchmarks/lda.run.jl +++ b/benchmarks/lda.run.jl @@ -2,6 +2,9 @@ using Distributions using Turing using Stan +using Requests +import Requests: get, post, put, delete, options, FileParam + include(Pkg.dir("Turing")*"/benchmarks/benchmarkhelper.jl") include(Pkg.dir("Turing")*"/example-models/stan-models/lda-stan.data.jl") include(Pkg.dir("Turing")*"/example-models/stan-models/lda.model.jl") @@ -19,4 +22,5 @@ for (modelc, modeln) in zip(["ldamodel_vec", "ldamodel"], ["LDA vec", "LDA"]) logd["stan"] = lda_stan_d logd["time_stan"] = lda_time print_log(logd) + post("http://80.85.86.210:1110"; files = [FileParam(log2str(logd), "text","upfile","benchmark-$(Dates.format(now(), "dd-u-yyyy-HH-MM-SS"))-$modelc.txt")]) end From 4ff5c40556b58b386e01517351b4795cd03f151a Mon Sep 17 00:00:00 2001 From: Kai Xu Date: Mon, 12 Jun 2017 23:24:30 +0100 Subject: [PATCH 27/44] Fix log2str --- benchmarks/benchmarkhelper.jl | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/benchmarks/benchmarkhelper.jl b/benchmarks/benchmarkhelper.jl index 83e5d06661..f27ea664f9 100644 --- a/benchmarks/benchmarkhelper.jl +++ b/benchmarks/benchmarkhelper.jl @@ -45,21 +45,25 @@ log2str(logd::Dict, monitor=[]) = begin str *= ("| >> $v <<") * "\n" str *= ("| mean = $(round(m, 3))") * "\n" if haskey(logd, "analytic") && haskey(logd["analytic"], v) - print("| -> analytic = $(round(logd["analytic"][v], 3)), ") + str *= ("| -> analytic = $(round(logd["analytic"][v], 3)), ") diff = abs(m - logd["analytic"][v]) diff_output = "diff = $(round(diff, 3))" if sum(diff) > 0.2 + # TODO: try to fix this print_with_color(:red, diff_output*"\n") + str *= (diff_output) * "\n" else str *= (diff_output) * "\n" end end if haskey(logd, "stan") && haskey(logd["stan"], v) - print("| -> Stan = $(round(logd["stan"][v], 3)), ") + str *= ("| -> Stan = $(round(logd["stan"][v], 3)), ") diff = abs(m - logd["stan"][v]) diff_output = "diff = $(round(diff, 3))" if sum(diff) > 0.2 + # TODO: try to fix this print_with_color(:red, diff_output*"\n") + str *= (diff_output) * "\n" else str *= (diff_output) * "\n" end From e6c6beb5581e65ed1ab38f09020fd84002529e12 Mon Sep 17 00:00:00 2001 From: Kai Xu Date: Mon, 12 Jun 2017 23:41:27 +0100 Subject: [PATCH 28/44] Fix data file name --- benchmarks/sv.run.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/benchmarks/sv.run.jl b/benchmarks/sv.run.jl index 085d538bae..27e03edf3b 100644 --- a/benchmarks/sv.run.jl +++ b/benchmarks/sv.run.jl @@ -6,7 +6,7 @@ TPATH = Pkg.dir("Turing") include(TPATH*"/example-models/nips-2017/sv.model.jl") using HDF5, JLD -sv_data = load(TPATH*"/example-models/nips-2017/sv-data.jld")["data"] +sv_data = load(TPATH*"/example-models/nips-2017/sv-data.jld.data")["data"] model_f = sv_model(data=sv_data[1]) sample_n = 5000 From 6d546f6dfb6a1a259916720e87c8fecaffa857e1 Mon Sep 17 00:00:00 2001 From: Kai Xu Date: Tue, 13 Jun 2017 14:44:38 +0100 Subject: [PATCH 29/44] Add commit to file name --- benchmarks/benchmarkhelper.jl | 10 ++++++++++ benchmarks/lda.run.jl | 4 ++-- 2 files changed, 12 insertions(+), 2 deletions(-) diff --git a/benchmarks/benchmarkhelper.jl 
b/benchmarks/benchmarkhelper.jl index f27ea664f9..437126c449 100644 --- a/benchmarks/benchmarkhelper.jl +++ b/benchmarks/benchmarkhelper.jl @@ -75,3 +75,13 @@ log2str(logd::Dict, monitor=[]) = begin end print_log(logd::Dict, monitor=[]) = print(log2str(logd, monitor)) + +send_log(logd::Dict, monitor=[]) = begin + log_str = log2str(logd, monitor) + dir_old = pwd() + cd(Pkg.dir("Turing")) + commit_str = replace(split(readstring(pipeline(`git show --summary `, `grep "commit"`)), " ")[2], "\n", "") + cd(dir_old) + time_str = "$(Dates.format(now(), "dd-u-yyyy-HH-MM-SS"))" + post("http://80.85.86.210:1110"; files = [FileParam(log_str, "text","upfile","benchmark-$time_str-$commit_str-$(logd["name"]).txt")]) +end diff --git a/benchmarks/lda.run.jl b/benchmarks/lda.run.jl index 118f649b47..4ac1fdaba0 100644 --- a/benchmarks/lda.run.jl +++ b/benchmarks/lda.run.jl @@ -15,12 +15,12 @@ setchunksize(60) #for alg in ["HMC(2000, 0.25, 10)", "HMCDA(1000, 0.65, 1.5)", "NUTS(2000, 1000, 0.65)"] -for (modelc, modeln) in zip(["ldamodel_vec", "ldamodel"], ["LDA vec", "LDA"]) +for (modelc, modeln) in zip(["ldamodel_vec", "ldamodel"], ["LDA-vec", "LDA"]) bench_res = tbenchmark("HMC(2000, 0.025, 10)", modelc, "data=ldastandata[1]") bench_res[4].names = ["phi[1]", "phi[2]"] logd = build_logd(modeln, bench_res...) logd["stan"] = lda_stan_d logd["time_stan"] = lda_time print_log(logd) - post("http://80.85.86.210:1110"; files = [FileParam(log2str(logd), "text","upfile","benchmark-$(Dates.format(now(), "dd-u-yyyy-HH-MM-SS"))-$modelc.txt")]) + send_log(logd) end From 630ee795205488bf186f775fabc94d91f0d37c28 Mon Sep 17 00:00:00 2001 From: Kai Xu Date: Tue, 13 Jun 2017 16:44:03 +0100 Subject: [PATCH 30/44] Try use Julia 5.0 for appveyor --- appveyor.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/appveyor.yml b/appveyor.yml index d7accc7576..1f12d6d2c0 100644 --- a/appveyor.yml +++ b/appveyor.yml @@ -1,11 +1,11 @@ environment: matrix: - - JULIA_URL: "https://julialang-s3.julialang.org/bin/winnt/x64/0.5/julia-0.5.2-win64.exe" + - JULIA_URL: "https://julialang-s3.julialang.org/bin/winnt/x64/0.5/julia-0.5.0-win64.exe" MINGW_DIR: mingw64 # MINGW_URL: https://sourceforge.net/projects/mingw-w64/files/Toolchains%20targetting%20Win64/Personal%20Builds/mingw-builds/5.3.0/threads-win32/seh/x86_64-5.3.0-release-win32-seh-rt_v4-rev0.7z/download MINGW_URL: http://mlg.eng.cam.ac.uk/hong/x86_64-5.3.0-release-win32-seh-rt_v4-rev0.7z MINGW_ARCHIVE: x86_64-5.3.0-release-win32-seh-rt_v4-rev0.7z - - JULIA_URL: "https://julialang-s3.julialang.org/bin/winnt/x86/0.5/julia-0.5.2-win32.exe" + - JULIA_URL: "https://julialang-s3.julialang.org/bin/winnt/x86/0.5/julia-0.5.0-win32.exe" MINGW_DIR: mingw32 # MINGW_URL: https://sourceforge.net/projects/mingw-w64/files/Toolchains%20targetting%20Win32/Personal%20Builds/mingw-builds/5.3.0/threads-win32/dwarf/i686-5.3.0-release-win32-dwarf-rt_v4-rev0.7z/download MINGW_URL: http://mlg.eng.cam.ac.uk/hong/i686-5.3.0-release-win32-dwarf-rt_v4-rev0.7z From 8dc4956572693f1ea3b918d9df1ac584fcddc121 Mon Sep 17 00:00:00 2001 From: Kai Xu Date: Tue, 13 Jun 2017 20:53:59 +0100 Subject: [PATCH 31/44] Try fixing appveyor --- test/utility.jl | 1 + 1 file changed, 1 insertion(+) diff --git a/test/utility.jl b/test/utility.jl index c1c863434d..61bf7743ca 100644 --- a/test/utility.jl +++ b/test/utility.jl @@ -3,6 +3,7 @@ using Turing using Turing: checkindex, setval!, updategid!, acclogp!, vectorize, CACHERESET, VarInfo, VarName, Sampler +println("[test/utility.jl] packages loaded") 
randr(vi::VarInfo, vn::VarName, dist::Distribution, count::Bool) = begin vi.index = count ? vi.index + 1 : vi.index From 19faedc019a826801fc9fa2d864766fbcd9a8e27 Mon Sep 17 00:00:00 2001 From: Kai Xu Date: Tue, 13 Jun 2017 21:38:10 +0100 Subject: [PATCH 32/44] Try fixing appveyor --- test/utility.jl | 1 + 1 file changed, 1 insertion(+) diff --git a/test/utility.jl b/test/utility.jl index 61bf7743ca..09c49c5537 100644 --- a/test/utility.jl +++ b/test/utility.jl @@ -1,6 +1,7 @@ # Helper functions used by tarce.jl and varinfo.jl # TODO: can we somehow update the tests so that we can remove these two functions below? +println("[test/utility.jl] in utility.jl now") using Turing using Turing: checkindex, setval!, updategid!, acclogp!, vectorize, CACHERESET, VarInfo, VarName, Sampler println("[test/utility.jl] packages loaded") From 78d2657adb5c1b88c2427a21986158214af4fc7e Mon Sep 17 00:00:00 2001 From: Kai Xu Date: Tue, 13 Jun 2017 21:38:34 +0100 Subject: [PATCH 33/44] Smaller N for SV --- benchmarks/sv.run.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/benchmarks/sv.run.jl b/benchmarks/sv.run.jl index 27e03edf3b..5c9c67475e 100644 --- a/benchmarks/sv.run.jl +++ b/benchmarks/sv.run.jl @@ -9,7 +9,7 @@ using HDF5, JLD sv_data = load(TPATH*"/example-models/nips-2017/sv-data.jld.data")["data"] model_f = sv_model(data=sv_data[1]) -sample_n = 5000 +sample_n = 500 setchunksize(550) chain_nuts = sample(model_f, NUTS(sample_n, 0.65)) From ca8303ecabb035902c93087e59fd4264ce4d6a04 Mon Sep 17 00:00:00 2001 From: Hong Ge Date: Wed, 14 Jun 2017 10:01:35 +0200 Subject: [PATCH 34/44] Update appveyor.yml --- appveyor.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/appveyor.yml b/appveyor.yml index 1f12d6d2c0..1a9db2af11 100644 --- a/appveyor.yml +++ b/appveyor.yml @@ -20,7 +20,7 @@ notifications: cache: # Cache large downloads to avoid network unreliability - "%MINGW_ARCHIVE%" - - C:\Users\appveyor\.julia\v0.5 + #- C:\Users\appveyor\.julia\v0.5 install: - ps: "[System.Net.ServicePointManager]::SecurityProtocol = [System.Net.SecurityProtocolType]::Tls12" From 205b407ec9c032af80eac674a0c326675e81bba5 Mon Sep 17 00:00:00 2001 From: Hong Ge Date: Wed, 14 Jun 2017 10:15:16 +0200 Subject: [PATCH 35/44] Faster LDA (#275) --- example-models/stan-models/lda.model.jl | 20 ++++++++++++-------- 1 file changed, 12 insertions(+), 8 deletions(-) diff --git a/example-models/stan-models/lda.model.jl b/example-models/stan-models/lda.model.jl index 514d1aba40..c3ab9e98e6 100644 --- a/example-models/stan-models/lda.model.jl +++ b/example-models/stan-models/lda.model.jl @@ -44,11 +44,13 @@ # end phi_dot_theta = [log([dot(map(p -> p[i], phi), theta[m]) for i = 1:V]) for m=1:M] - for n = 1:N - # phi_dot_theta = [dot(map(p -> p[i], phi), theta[doc[n]]) for i = 1:V] - # w[n] ~ Categorical(phi_dot_theta) - Turing.acclogp!(vi, phi_dot_theta[doc[n]][w[n]]) - end + #for n = 1:N + # # phi_dot_theta = [dot(map(p -> p[i], phi), theta[doc[n]]) for i = 1:V] + # # w[n] ~ Categorical(phi_dot_theta) + # Turing.acclogp!(vi, phi_dot_theta[doc[n]][w[n]]) + #end + lp = mapreduce(n->phi_dot_theta[w[n], doc[n]], +, 1:N) + Turing.acclogp!(vi, lp) end @@ -61,7 +63,9 @@ end phi ~ [Dirichlet(beta)] phi_dot_theta = log(phi * theta) - for n = 1:N - Turing.acclogp!(vi, phi_dot_theta[w[n], doc[n]]) - end + #for n = 1:N + # Turing.acclogp!(vi, phi_dot_theta[w[n], doc[n]]) + #end + lp = mapreduce(n->phi_dot_theta[w[n], doc[n]], +, 1:N) + Turing.acclogp!(vi, lp) end From 3c9c6eae80713492b14465be48c52d9ab6cf6ce9 Mon Sep 17 
00:00:00 2001 From: Hong Ge Date: Wed, 14 Jun 2017 10:17:09 +0200 Subject: [PATCH 36/44] Minor bugfix in LDA (#275) --- example-models/stan-models/lda.model.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/example-models/stan-models/lda.model.jl b/example-models/stan-models/lda.model.jl index c3ab9e98e6..e9c51ae8b5 100644 --- a/example-models/stan-models/lda.model.jl +++ b/example-models/stan-models/lda.model.jl @@ -49,7 +49,7 @@ # # w[n] ~ Categorical(phi_dot_theta) # Turing.acclogp!(vi, phi_dot_theta[doc[n]][w[n]]) #end - lp = mapreduce(n->phi_dot_theta[w[n], doc[n]], +, 1:N) + lp = mapreduce(n->phi_dot_theta[doc[n]][w[n]], +, 1:N) Turing.acclogp!(vi, lp) end From a0bbfd161f6ab2fcc8149cc4434b700d2d0fcf6d Mon Sep 17 00:00:00 2001 From: Kai Xu Date: Wed, 14 Jun 2017 10:56:49 +0100 Subject: [PATCH 37/44] Fix typo in sv.run.jl --- benchmarks/sv.run.jl | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/benchmarks/sv.run.jl b/benchmarks/sv.run.jl index 5c9c67475e..fae062de3a 100644 --- a/benchmarks/sv.run.jl +++ b/benchmarks/sv.run.jl @@ -13,8 +13,8 @@ sample_n = 500 setchunksize(550) chain_nuts = sample(model_f, NUTS(sample_n, 0.65)) -decribe(chain_nuts) +describe(chain_nuts) setchunksize(5) chain_gibbs = sample(model_f, Gibbs(sample_n, PG(50,1,:h), NUTS(1000,0.65,:ϕ,:σ,:μ))) -decribe(chain_gibbs) +describe(chain_gibbs) From 0179ef0910d5ee04ac30e74a5a7dd9daf461c0b6 Mon Sep 17 00:00:00 2001 From: Kai Xu Date: Wed, 14 Jun 2017 10:57:14 +0100 Subject: [PATCH 38/44] Try higher Julia version --- appveyor.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/appveyor.yml b/appveyor.yml index 1a9db2af11..2bd9ecbcdb 100644 --- a/appveyor.yml +++ b/appveyor.yml @@ -1,11 +1,11 @@ environment: matrix: - - JULIA_URL: "https://julialang-s3.julialang.org/bin/winnt/x64/0.5/julia-0.5.0-win64.exe" + - JULIA_URL: "https://julialang-s3.julialang.org/bin/winnt/x64/0.5/julia-0.5.2-win64.exe" MINGW_DIR: mingw64 # MINGW_URL: https://sourceforge.net/projects/mingw-w64/files/Toolchains%20targetting%20Win64/Personal%20Builds/mingw-builds/5.3.0/threads-win32/seh/x86_64-5.3.0-release-win32-seh-rt_v4-rev0.7z/download MINGW_URL: http://mlg.eng.cam.ac.uk/hong/x86_64-5.3.0-release-win32-seh-rt_v4-rev0.7z MINGW_ARCHIVE: x86_64-5.3.0-release-win32-seh-rt_v4-rev0.7z - - JULIA_URL: "https://julialang-s3.julialang.org/bin/winnt/x86/0.5/julia-0.5.0-win32.exe" + - JULIA_URL: "https://julialang-s3.julialang.org/bin/winnt/x86/0.5/julia-0.5.2-win32.exe" MINGW_DIR: mingw32 # MINGW_URL: https://sourceforge.net/projects/mingw-w64/files/Toolchains%20targetting%20Win32/Personal%20Builds/mingw-builds/5.3.0/threads-win32/dwarf/i686-5.3.0-release-win32-dwarf-rt_v4-rev0.7z/download MINGW_URL: http://mlg.eng.cam.ac.uk/hong/i686-5.3.0-release-win32-dwarf-rt_v4-rev0.7z From 820bd5c838fc04e3b6084549a8fa2d1cef30d378 Mon Sep 17 00:00:00 2001 From: Kai Xu Date: Wed, 14 Jun 2017 15:20:08 +0100 Subject: [PATCH 39/44] Update MoC benchmark --- benchmarks/MoC.run.jl | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/benchmarks/MoC.run.jl b/benchmarks/MoC.run.jl index 2f786fa143..48b7cca18c 100644 --- a/benchmarks/MoC.run.jl +++ b/benchmarks/MoC.run.jl @@ -8,10 +8,11 @@ include(Pkg.dir("Turing")*"/example-models/stan-models/MoC.model.jl") bench_res = tbenchmark("HMCDA(1000, 0.65, 0.3)", "nbmodel", "data=nbstandata[1]") bench_res[4].names = ["phi[1]", "phi[2]", "phi[3]", "phi[4]"] -logd = build_logd("Naive Bayes", bench_res...) 
+logd = build_logd("Mixture-of-Categorical", bench_res...) include(Pkg.dir("Turing")*"/benchmarks/"*"MoC-stan.run.jl") logd["stan"] = stan_d logd["time_stan"] = nb_time print_log(logd) +send_log(logd) From 775f299431d225ab9c1a24d01b126dc5493e716a Mon Sep 17 00:00:00 2001 From: Kai Xu Date: Wed, 14 Jun 2017 15:24:40 +0100 Subject: [PATCH 40/44] Remove using Turing from utility.jl --- test/utility.jl | 1 - 1 file changed, 1 deletion(-) diff --git a/test/utility.jl b/test/utility.jl index 09c49c5537..a7ebc67b45 100644 --- a/test/utility.jl +++ b/test/utility.jl @@ -2,7 +2,6 @@ # TODO: can we somehow update the tests so that we can remove these two functions below? println("[test/utility.jl] in utility.jl now") -using Turing using Turing: checkindex, setval!, updategid!, acclogp!, vectorize, CACHERESET, VarInfo, VarName, Sampler println("[test/utility.jl] packages loaded") From 32d06f9be5f43cfaed7a2d163cf80351f2da2ff7 Mon Sep 17 00:00:00 2001 From: Kai Xu Date: Wed, 14 Jun 2017 17:04:54 +0100 Subject: [PATCH 41/44] Using missing lib --- benchmarks/MoC.run.jl | 3 +++ 1 file changed, 3 insertions(+) diff --git a/benchmarks/MoC.run.jl b/benchmarks/MoC.run.jl index 48b7cca18c..4d25fad53c 100644 --- a/benchmarks/MoC.run.jl +++ b/benchmarks/MoC.run.jl @@ -15,4 +15,7 @@ logd["stan"] = stan_d logd["time_stan"] = nb_time print_log(logd) + +using Requests +import Requests: get, post, put, delete, options, FileParam send_log(logd) From fcb0642a2c08df9942ee9eee43b96fa7667f26f2 Mon Sep 17 00:00:00 2001 From: Kai Xu Date: Wed, 14 Jun 2017 22:28:31 +0100 Subject: [PATCH 42/44] Remove randr from utility.jl --- src/samplers/pgibbs.jl | 2 +- test/resample.jl/particlecontainer.jl | 13 +++++---- test/trace.jl/trace.jl | 10 ++++--- test/utility.jl | 42 ++------------------------- test/varinfo.jl/varinfo.jl | 20 ++++++++++++- 5 files changed, 37 insertions(+), 50 deletions(-) diff --git a/src/samplers/pgibbs.jl b/src/samplers/pgibbs.jl index 6a3821da4e..535ac97f00 100644 --- a/src/samplers/pgibbs.jl +++ b/src/samplers/pgibbs.jl @@ -140,7 +140,7 @@ assume{T<:Union{PG,SMC}}(spl::Sampler{T}, dist::Distribution, vn::VarName, _::Va if ~haskey(vi, vn) r = rand(dist) push!(vi, vn, r, dist, spl.alg.gid) - spl.info[:cache_updated] = 0b00 # sanity flag mask for getidcs and getranges + spl.info[:cache_updated] = CACHERESET # sanity flag mask for getidcs and getranges r elseif isnan(vi, vn) r = rand(dist) diff --git a/test/resample.jl/particlecontainer.jl b/test/resample.jl/particlecontainer.jl index feb6c4cf87..271159422c 100644 --- a/test/resample.jl/particlecontainer.jl +++ b/test/resample.jl/particlecontainer.jl @@ -3,22 +3,25 @@ using Turing using Distributions -import Turing: ParticleContainer, weights, resample!, effectiveSampleSize, TraceC, TraceR, Trace, current_trace +import Turing: ParticleContainer, weights, resample!, effectiveSampleSize, TraceC, TraceR, Trace, current_trace, VarName, Sampler global n = 0 +alg = PG(5, 1) +spl = Turing.Sampler(alg) +dist = Normal(0, 1) + function f() global n t = TArray(Float64, 1); t[1] = 0; while true + ct = current_trace() vn = VarName(gensym(), :x, "[$n]", 1) - randr(current_trace().vi, vn, Normal(0,1), true) - n += 1 + Turing.assume(spl, dist, vn, ct.vi); n += 1; produce(0) vn = VarName(gensym(), :x, "[$n]", 1) - randr(current_trace().vi, vn, Normal(0,1), true) - n += 1 + Turing.assume(spl, dist, vn, ct.vi); n += 1; t[1] = 1 + t[1] end end diff --git a/test/trace.jl/trace.jl b/test/trace.jl/trace.jl index 54a5e57096..4085b0bb9a 100644 --- a/test/trace.jl/trace.jl +++ 
b/test/trace.jl/trace.jl @@ -3,11 +3,13 @@ using Turing using Distributions -import Turing: Trace, TraceR, TraceC, current_trace, fork, fork2, VarName +import Turing: Trace, TraceR, TraceC, current_trace, fork, fork2, VarName, Sampler global n = 0 - +alg = PG(5, 1) +spl = Turing.Sampler(alg) +dist = Normal(0, 1) function f2() global n @@ -16,10 +18,10 @@ function f2() while true ct = current_trace() vn = VarName(gensym(), :x, "[$n]", 1) - randr(ct.vi, vn, Normal(0,1), true); n += 1; + Turing.assume(spl, dist, vn, ct.vi); n += 1; produce(t[1]); vn = VarName(gensym(), :x, "[$n]", 1) - randr(ct.vi, vn, Normal(0,1), true); n += 1; + Turing.assume(spl, dist, vn, ct.vi); n += 1; t[1] = 1 + t[1] end end diff --git a/test/utility.jl b/test/utility.jl index a7ebc67b45..96d6beff03 100644 --- a/test/utility.jl +++ b/test/utility.jl @@ -1,46 +1,8 @@ # Helper functions used by tarce.jl and varinfo.jl -# TODO: can we somehow update the tests so that we can remove these two functions below? -println("[test/utility.jl] in utility.jl now") -using Turing: checkindex, setval!, updategid!, acclogp!, vectorize, CACHERESET, VarInfo, VarName, Sampler -println("[test/utility.jl] packages loaded") - -randr(vi::VarInfo, vn::VarName, dist::Distribution, count::Bool) = begin - vi.index = count ? vi.index + 1 : vi.index - if ~haskey(vi, vn) - r = rand(dist) - push!(vi, vn, r, dist, 0) - r - else - if count checkindex(vn, vi) end - r = vi[vn] - acclogp!(vi, logpdf(dist, r, istrans(vi, vn))) - r - end -end -println("[test/utility.jl] randr() reloaded") - -randr(vi::VarInfo, vn::VarName, dist::Distribution, spl::Sampler, count::Bool) = begin - vi.index = count ? vi.index + 1 : vi.index - if ~haskey(vi, vn) - r = rand(dist) - push!(vi, vn, r, dist, spl.alg.gid) - spl.info[:cache_updated] = CACHERESET # sanity flag mask for getidcs and getranges - r - elseif isnan(vi, vn) - r = rand(dist) - setval!(vi, vectorize(dist, r), vn) - r - else - if count checkindex(vn, vi, spl) end - updategid!(vi, vn, spl) - vi[vn] - end -end -println("[test/utility.jl] randr() reloaded") +using Turing # Helper function for numerical tests - check_numerical(chain::Turing.Chain, symbols::Vector{Symbol}, exact_vals::Vector; eps=0.2) = begin for (sym, val) in zip(symbols, exact_vals) @@ -56,3 +18,5 @@ check_numerical(chain::Turing.Chain, symbols::Vector{Symbol}, exact_vals::Vector end end end + +using Turing: Sampler diff --git a/test/varinfo.jl/varinfo.jl b/test/varinfo.jl/varinfo.jl index cd8226010a..df53c19044 100644 --- a/test/varinfo.jl/varinfo.jl +++ b/test/varinfo.jl/varinfo.jl @@ -1,6 +1,24 @@ using Turing, Base.Test using Turing: uid, cuid, reconstruct, invlink, getvals, step, getidcs, getretain, NULL -include("../utility.jl") +using Turing: VarInfo, VarName, Sampler + +randr(vi::VarInfo, vn::VarName, dist::Distribution, spl::Sampler, count::Bool) = begin + vi.index = count ? vi.index + 1 : vi.index + if ~haskey(vi, vn) + r = rand(dist) + Turing.push!(vi, vn, r, dist, spl.alg.gid) + r + elseif isnan(vi, vn) + r = rand(dist) + Turing.setval!(vi, Turing.vectorize(dist, r), vn) + r + else + if count Turing.checkindex(vn, vi, spl) end + Turing.updategid!(vi, vn, spl) + vi[vn] + end +end + # Test for uid() (= string()) csym = gensym() vn1 = VarName(csym, :x, "[1]", 1) From 6d91745d935502ab60cddb613836595caaa95f7a Mon Sep 17 00:00:00 2001 From: Kai Xu Date: Wed, 14 Jun 2017 23:11:53 +0100 Subject: [PATCH 43/44] Try to fix appveyor. 
--- test/utility.jl | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/test/utility.jl b/test/utility.jl index 96d6beff03..266aaece1e 100644 --- a/test/utility.jl +++ b/test/utility.jl @@ -1,16 +1,18 @@ # Helper functions used by tarce.jl and varinfo.jl +println("[Turing] in utility.jl") using Turing +println("[Turing] used Turing") # Helper function for numerical tests check_numerical(chain::Turing.Chain, symbols::Vector{Symbol}, exact_vals::Vector; eps=0.2) = begin for (sym, val) in zip(symbols, exact_vals) E = mean(chain[sym]) - print(" $sym = $E ≈ $val (ϵ = $eps) ?") + print(" $sym = $E ≈ $val (eps = $eps) ?") cmp = abs(sum(mean(chain[sym]) - val)) <= eps if cmp - print_with_color(:green, " ✓\n") + print_with_color(:green, "./\n") print_with_color(:green, " $sym = $E, diff = $(abs(E - val))\n") else print_with_color(:red, " X\n") @@ -18,5 +20,7 @@ check_numerical(chain::Turing.Chain, symbols::Vector{Symbol}, exact_vals::Vector end end end +println("[Turing] defined check_numerical") using Turing: Sampler +println("[Turing] used Turing.Sampler") From 95e76e0abd673c90b4ce6b78a91b8902fca32de6 Mon Sep 17 00:00:00 2001 From: Kai Xu Date: Thu, 15 Jun 2017 12:57:00 +0100 Subject: [PATCH 44/44] Remove using Turing from utility.jl --- test/gibbs.jl/gibbs_constructor.jl | 2 +- test/hmcda.jl/hmcda_cons.jl | 1 + test/nuts.jl/nuts_cons.jl | 1 + test/utility.jl | 7 +------ test/varinfo.jl/varinfo.jl | 6 +++--- 5 files changed, 7 insertions(+), 10 deletions(-) diff --git a/test/gibbs.jl/gibbs_constructor.jl b/test/gibbs.jl/gibbs_constructor.jl index ab259a94b9..d8d2c8090b 100644 --- a/test/gibbs.jl/gibbs_constructor.jl +++ b/test/gibbs.jl/gibbs_constructor.jl @@ -29,7 +29,7 @@ end @test length(c4[:s]) == N * (3 + 2) # Test gid of each samplers -g = Sampler(s3) +g = Turing.Sampler(s3) @test g.info[:samplers][1].alg.gid == 1 @test g.info[:samplers][2].alg.gid == 2 diff --git a/test/hmcda.jl/hmcda_cons.jl b/test/hmcda.jl/hmcda_cons.jl index 5c70363c62..e661832a66 100644 --- a/test/hmcda.jl/hmcda_cons.jl +++ b/test/hmcda.jl/hmcda_cons.jl @@ -1,4 +1,5 @@ using Turing +using Turing: Sampler alg = HMCDA(1000, 0.65, 0.75) println(alg) diff --git a/test/nuts.jl/nuts_cons.jl b/test/nuts.jl/nuts_cons.jl index ab353b9950..7492e10e15 100644 --- a/test/nuts.jl/nuts_cons.jl +++ b/test/nuts.jl/nuts_cons.jl @@ -1,4 +1,5 @@ using Turing +using Turing: Sampler alg = NUTS(1000, 200, 0.65) println(alg) diff --git a/test/utility.jl b/test/utility.jl index 266aaece1e..716b39998f 100644 --- a/test/utility.jl +++ b/test/utility.jl @@ -1,11 +1,9 @@ # Helper functions used by tarce.jl and varinfo.jl println("[Turing] in utility.jl") -using Turing -println("[Turing] used Turing") # Helper function for numerical tests -check_numerical(chain::Turing.Chain, symbols::Vector{Symbol}, exact_vals::Vector; +check_numerical(chain, symbols::Vector{Symbol}, exact_vals::Vector; eps=0.2) = begin for (sym, val) in zip(symbols, exact_vals) E = mean(chain[sym]) @@ -21,6 +19,3 @@ check_numerical(chain::Turing.Chain, symbols::Vector{Symbol}, exact_vals::Vector end end println("[Turing] defined check_numerical") - -using Turing: Sampler -println("[Turing] used Turing.Sampler") diff --git a/test/varinfo.jl/varinfo.jl b/test/varinfo.jl/varinfo.jl index df53c19044..f91b838930 100644 --- a/test/varinfo.jl/varinfo.jl +++ b/test/varinfo.jl/varinfo.jl @@ -1,8 +1,8 @@ using Turing, Base.Test using Turing: uid, cuid, reconstruct, invlink, getvals, step, getidcs, getretain, NULL -using Turing: VarInfo, VarName, Sampler +using Turing: 
VarInfo, VarName -randr(vi::VarInfo, vn::VarName, dist::Distribution, spl::Sampler, count::Bool) = begin +randr(vi::VarInfo, vn::VarName, dist::Distribution, spl::Turing.Sampler, count::Bool) = begin vi.index = count ? vi.index + 1 : vi.index if ~haskey(vi, vn) r = rand(dist) @@ -88,7 +88,7 @@ end # Test the update of group IDs g_demo_f = gdemo() -g = Sampler(Gibbs(1000, PG(10, 2, :x, :y, :z), HMC(1, 0.4, 8, :w, :u))) +g = Turing.Sampler(Gibbs(1000, PG(10, 2, :x, :y, :z), HMC(1, 0.4, 8, :w, :u))) pg, hmc = g.info[:samplers]