Skip to content

Instantly share code, notes, and snippets.

@kescobo
Last active October 23, 2020 18:12
Show Gist options
  • Save kescobo/9deb667221b3268d6b476d7216851041 to your computer and use it in GitHub Desktop.
Save kescobo/9deb667221b3268d6b476d7216851041 to your computer and use it in GitHub Desktop.
Illegal argument: line must be non-negative
using DataFrames
using Turing
using Distributions
using StatsPlots
using Distances
using LinearAlgebra
using StatsModels
using AbstractGPs, KernelFunctions
df = DataFrame(
subject = repeat(1:5, inner=3),
obese = repeat(rand(Bool, 5), inner=3),
timepoint = [1,2,3,1,3,4,1,2,5,1,4,5,1,3,5],
bug = rand(Beta(0.9, 5), 15),
nutrient = rand(Beta(0.9,5), 15)
)
sqexpkernel(alpha::Real, rho::Real) =
alpha^2 * AbstractGPs.transform(SEKernel(), 1/(rho*sqrt(2)))
# Define model.
@model GPRegression(y, X, jitter=1e-6) = begin
# Priors.
alpha ~ LogNormal(0.0, 0.1)
rho ~ LogNormal(0.0, 1.0)
sigma ~ LogNormal(0.0, 1.0)
# Covariance function.
kernel = sqexpkernel(alpha, rho)
kernel += jitter * WhiteKernel()
# GP (implicit zero-mean).
gp = GP(kernel)
# Sampling Distribution
y ~ gp(X, sigma^2) # add jitter for numerical stability.
end;
gp = GPRegression(df.bug,
Matrix(df[!,[:subject, :obese, :nutrient]]),
Matrix{Float64}(df[!,[:timepoint]]))
@time chain = sample(gp, HMC(0.01, 100), 200)
Status `~/repos/gptool/Project.toml`
[99985d1d] AbstractGPs v0.2.13
[c7e460c6] ArgParse v1.1.0
[336ed68f] CSV v0.7.7 `https://github.com/JuliaData/CSV.jl.git#master`
[324d7699] CategoricalArrays v0.8.3
[a93c6f00] DataFrames v0.21.8
[b4f34e82] Distances v0.10.0
[31c24e10] Distributions v0.23.12
[ec8451be] KernelFunctions v0.8.7
[91a5bcdd] Plots v1.7.3
[3eaba693] StatsModels v0.6.15
[f3b207a7] StatsPlots v0.14.15
[fce5fe82] Turing v0.14.10
[37e2e46d] LinearAlgebra
[56ddb016] Logging
[de0858da] Printf
[10745b16] Statistics
julia> @time chain = sample(gp, HMC(0.01, 100), 200)
ERROR: DimensionMismatch("Inconsistent array dimensions.")
Stacktrace:
[1] loglikelihood
@ ~/.julia/packages/Distributions/HjzA0/src/multivariates.jl:261 [inlined]
[2] observe
@ ~/.julia/packages/DynamicPPL/jOFDR/src/context_implementations.jl:152 [inlined]
[3] _tilde
@ ~/.julia/packages/DynamicPPL/jOFDR/src/context_implementations.jl:109 [inlined]
[4] tilde
@ ~/.julia/packages/DynamicPPL/jOFDR/src/context_implementations.jl:67 [inlined]
[5] tilde_observe(ctx::DynamicPPL.DefaultContext, sampler::DynamicPPL.SampleFromPrior, right::AbstractGPs.FiniteGP{GP{AbstractGPs.ZeroMean{Float64}, ScaledKernel{TransformedKernel{SqExponentialKernel, ScaleTransform{Float64}}, Float64}}, ColVecs{Float64, Matrix{Float64}, SubArray{Float64, 1, Matrix{Float64}, Tuple{Base.Slice{Base.OneTo{Int64}}, Int64}, true}}, Matrix{Float64}}, left::Vector{Float64}, vname::DynamicPPL.VarName{:y, Tuple{}}, vinds::Tuple{}, vi::DynamicPPL.UntypedVarInfo{DynamicPPL.Metadata{Dict{DynamicPPL.VarName, Int64}, Vector{Distribution}, Vector{DynamicPPL.VarName}, Vector{Real}, Vector{Set{DynamicPPL.Selector}}}, Float64})
@ DynamicPPL ~/.julia/packages/DynamicPPL/jOFDR/src/context_implementations.jl:89
[6] #15
@ ~/repos/gptool/scratch.jl:39 [inlined]
[7] (::var"#15#16")(_rng::Random._GLOBAL_RNG, _model::DynamicPPL.Model{var"#15#16", (:y, :X, :jitter), (:jitter,), , Tuple{Vector{Float64}, Matrix{Float64}, Matrix{Float64}}, Tuple{Float64}}, _varinfo::DynamicPPL.UntypedVarInfo{DynamicPPL.Metadata{Dict{DynamicPPL.VarName, Int64}, Vector{Distribution}, Vector{DynamicPPL.VarName}, Vector{Real}, Vector{Set{DynamicPPL.Selector}}}, Float64}, _sampler::DynamicPPL.SampleFromPrior, _context::DynamicPPL.DefaultContext, y::Vector{Float64}, X::Matrix{Float64}, jitter::Matrix{Float64})
@ Main ./none:0
[8] macro expansion
@ ~/.julia/packages/DynamicPPL/jOFDR/src/model.jl:0 [inlined]
[9] _evaluate
@ ~/.julia/packages/DynamicPPL/jOFDR/src/model.jl:160 [inlined]
[10] evaluate_threadunsafe
@ ~/.julia/packages/DynamicPPL/jOFDR/src/model.jl:130 [inlined]
[11] Model
@ ~/.julia/packages/DynamicPPL/jOFDR/src/model.jl:92 [inlined]
[12] Model
@ ~/.julia/packages/DynamicPPL/jOFDR/src/model.jl:98 [inlined]
[13] VarInfo
@ ~/.julia/packages/DynamicPPL/jOFDR/src/varinfo.jl:110 [inlined]
[14] VarInfo
@ ~/.julia/packages/DynamicPPL/jOFDR/src/varinfo.jl:109 [inlined]
[15] DynamicPPL.Sampler(alg::HMC{Turing.Core.ForwardDiffAD{40}, , AdvancedHMC.UnitEuclideanMetric}, model::DynamicPPL.Model{var"#15#16", (:y, :X, :jitter), (:jitter,), , Tuple{Vector{Float64}, Matrix{Float64}, Matrix{Float64}}, Tuple{Float64}}, s::DynamicPPL.Selector)
@ Turing.Inference ~/.julia/packages/Turing/UsQlw/src/inference/hmc.jl:378
[16] Sampler
@ ~/.julia/packages/Turing/UsQlw/src/inference/hmc.jl:376 [inlined]
[17] sample(rng::Random._GLOBAL_RNG, model::DynamicPPL.Model{var"#15#16", (:y, :X, :jitter), (:jitter,), , Tuple{Vector{Float64}, Matrix{Float64}, Matrix{Float64}}, Tuple{Float64}}, alg::HMC{Turing.Core.ForwardDiffAD{40}, , AdvancedHMC.UnitEuclideanMetric}, N::Int64; kwargs::Base.Iterators.Pairs{Union{}, Union{}, Tuple{}, NamedTuple{, Tuple{}}})
@ Turing.Inference ~/.julia/packages/Turing/UsQlw/src/inference/Inference.jl:164
[18] sample
@ ~/.julia/packages/Turing/UsQlw/src/inference/Inference.jl:164 [inlined]
[19] #sample#1
@ ~/.julia/packages/Turing/UsQlw/src/inference/Inference.jl:154 [inlined]
[20] sample(model::DynamicPPL.Model{var"#15#16", (:y, :X, :jitter), (:jitter,), , Tuple{Vector{Float64}, Matrix{Float64}, Matrix{Float64}}, Tuple{Float64}}, alg::HMC{Turing.Core.ForwardDiffAD{40}, , AdvancedHMC.UnitEuclideanMetric}, N::Int64)
@ Turing.Inference ~/.julia/packages/Turing/UsQlw/src/inference/Inference.jl:154
[21] top-level scope
@ ./timing.jl:192 [inlined]
[22] top-level scope
@ ./REPL[4]:0
[23] eval
@ ./boot.jl:360 [inlined]
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment