@SteveBronder
Created October 28, 2020 03:20
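C++ emitted by stanc3 (commit 3ce704a2) for examples/bernoulli/bernoulli.stan, a linear regression written with normal_id_glm_lpdf; the Stan source is reproduced after the generated code.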
// Code generated by stanc 3ce704a2
#include <stan/model/model_header.hpp>
namespace bernoulli_model_namespace {
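// Dimension validators emitted for simplex and unit_vector declarations;
// neither type appears in this model, so they go unused here.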
inline void validate_positive_index(const char* var_name, const char* expr,
int val) {
if (val < 1) {
std::stringstream msg;
msg << "Found dimension size less than one in simplex declaration"
<< "; variable=" << var_name << "; dimension size expression=" << expr
<< "; expression value=" << val;
std::string msg_str(msg.str());
throw std::invalid_argument(msg_str.c_str());
}
}
inline void validate_unit_vector_index(const char* var_name, const char* expr,
int val) {
if (val <= 1) {
std::stringstream msg;
if (val == 1) {
msg << "Found dimension size one in unit vector declaration."
<< " One-dimensional unit vector is discrete"
<< " but the target distribution must be continuous."
<< " variable=" << var_name << "; dimension size expression=" << expr;
} else {
msg << "Found dimension size less than one in unit vector declaration"
<< "; variable=" << var_name << "; dimension size expression=" << expr
<< "; expression value=" << val;
}
std::string msg_str(msg.str());
throw std::invalid_argument(msg_str.c_str());
}
}
using stan::io::dump;
using stan::math::lgamma;
using stan::model::cons_list;
using stan::model::index_max;
using stan::model::index_min;
using stan::model::index_min_max;
using stan::model::index_multi;
using stan::model::index_omni;
using stan::model::index_uni;
using stan::model::model_base_crtp;
using stan::model::nil_index_list;
using stan::model::rvalue;
using std::istream;
using std::pow;
using std::string;
using std::stringstream;
using std::vector;
using namespace stan::math;
using stan::math::pow;
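// current_statement__ records the index of the most recently executed Stan
// statement; on an exception, rethrow_located uses it to look up the original
// source location in locations_array__ below.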
static int current_statement__ = 0;
static const std::vector<string> locations_array__ = {
" (found before start of program)",
" (in 'examples/bernoulli/bernoulli.stan', line 8, column 2 to column 18)",
" (in 'examples/bernoulli/bernoulli.stan', line 9, column 2 to column 13)",
" (in 'examples/bernoulli/bernoulli.stan', line 10, column 2 to column 22)",
" (in 'examples/bernoulli/bernoulli.stan', line 13, column 2 to column 59)",
" (in 'examples/bernoulli/bernoulli.stan', line 2, column 2 to column 8)",
" (in 'examples/bernoulli/bernoulli.stan', line 3, column 2 to column 8)",
" (in 'examples/bernoulli/bernoulli.stan', line 4, column 9 to column 10)",
" (in 'examples/bernoulli/bernoulli.stan', line 4, column 2 to column 14)",
" (in 'examples/bernoulli/bernoulli.stan', line 5, column 9 to column 10)",
" (in 'examples/bernoulli/bernoulli.stan', line 5, column 12 to column 13)",
" (in 'examples/bernoulli/bernoulli.stan', line 5, column 2 to column 17)",
" (in 'examples/bernoulli/bernoulli.stan', line 8, column 9 to column 10)"};
class bernoulli_model final : public model_base_crtp<bernoulli_model> {
private:
int N;
int M;
Eigen::Matrix<double, -1, 1> y;
Eigen::Matrix<double, -1, -1> x;
public:
~bernoulli_model() final {}
std::string model_name() const final { return "bernoulli_model"; }
std::vector<std::string> model_compile_info() const {
std::vector<std::string> stanc_info;
stanc_info.push_back("stanc_version = stanc3 3ce704a2");
stanc_info.push_back("stancflags = ");
return stanc_info;
}
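// Constructor: reads and validates the data block (N, M, y, x) from the
// var_context, then counts the unconstrained parameters (M betas + alpha + sigma).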
bernoulli_model(stan::io::var_context& context__,
unsigned int random_seed__ = 0,
std::ostream* pstream__ = nullptr)
: model_base_crtp(0) {
using local_scalar_t__ = double;
boost::ecuyer1988 base_rng__
= stan::services::util::create_rng(random_seed__, 0);
(void)base_rng__; // suppress unused var warning
static const char* function__
= "bernoulli_model_namespace::bernoulli_model";
(void)function__; // suppress unused var warning
local_scalar_t__ DUMMY_VAR__(std::numeric_limits<double>::quiet_NaN());
(void)DUMMY_VAR__; // suppress unused var warning
try {
int pos__;
pos__ = std::numeric_limits<int>::min();
pos__ = 1;
current_statement__ = 5;
context__.validate_dims("data initialization", "N", "int",
context__.to_vec());
N = std::numeric_limits<int>::min();
current_statement__ = 5;
N = context__.vals_i("N")[(1 - 1)];
current_statement__ = 6;
context__.validate_dims("data initialization", "M", "int",
context__.to_vec());
M = std::numeric_limits<int>::min();
current_statement__ = 6;
M = context__.vals_i("M")[(1 - 1)];
current_statement__ = 7;
validate_non_negative_index("y", "N", N);
current_statement__ = 8;
context__.validate_dims("data initialization", "y", "double",
context__.to_vec(N));
y = Eigen::Matrix<double, -1, 1>(N);
stan::math::fill(y, std::numeric_limits<double>::quiet_NaN());
{
std::vector<local_scalar_t__> y_flat__;
current_statement__ = 8;
assign(y_flat__, nil_index_list(), context__.vals_r("y"),
"assigning variable y_flat__");
current_statement__ = 8;
pos__ = 1;
current_statement__ = 8;
for (int sym1__ = 1; sym1__ <= N; ++sym1__) {
current_statement__ = 8;
assign(y, cons_list(index_uni(sym1__), nil_index_list()),
y_flat__[(pos__ - 1)], "assigning variable y");
current_statement__ = 8;
pos__ = (pos__ + 1);
}
}
current_statement__ = 9;
validate_non_negative_index("x", "N", N);
current_statement__ = 10;
validate_non_negative_index("x", "M", M);
current_statement__ = 11;
context__.validate_dims("data initialization", "x", "double",
context__.to_vec(N, M));
x = Eigen::Matrix<double, -1, -1>(N, M);
stan::math::fill(x, std::numeric_limits<double>::quiet_NaN());
{
std::vector<local_scalar_t__> x_flat__;
current_statement__ = 11;
assign(x_flat__, nil_index_list(), context__.vals_r("x"),
"assigning variable x_flat__");
current_statement__ = 11;
pos__ = 1;
current_statement__ = 11;
for (int sym1__ = 1; sym1__ <= M; ++sym1__) {
current_statement__ = 11;
for (int sym2__ = 1; sym2__ <= N; ++sym2__) {
current_statement__ = 11;
assign(x,
cons_list(index_uni(sym2__),
cons_list(index_uni(sym1__), nil_index_list())),
x_flat__[(pos__ - 1)], "assigning variable x");
current_statement__ = 11;
pos__ = (pos__ + 1);
}
}
}
current_statement__ = 12;
validate_non_negative_index("betas", "M", M);
} catch (const std::exception& e) {
stan::lang::rethrow_located(e, locations_array__[current_statement__]);
// Next line prevents compiler griping about no return
throw std::runtime_error("*** IF YOU SEE THIS, PLEASE REPORT A BUG ***");
}
num_params_r__ = 0U;
try {
num_params_r__ += M;
num_params_r__ += 1;
num_params_r__ += 1;
} catch (const std::exception& e) {
stan::lang::rethrow_located(e, locations_array__[current_statement__]);
// Next line prevents compiler griping about no return
throw std::runtime_error("*** IF YOU SEE THIS, PLEASE REPORT A BUG ***");
}
}
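// log_prob: reads betas, alpha, and sigma from the unconstrained parameter
// vector, applies the lower-bound transform to sigma (adding the log-Jacobian
// when jacobian__ is true), and accumulates the normal_id_glm log density.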
template <bool propto__, bool jacobian__, typename T__>
inline T__ log_prob(std::vector<T__>& params_r__,
std::vector<int>& params_i__,
std::ostream* pstream__ = nullptr) const {
using local_scalar_t__ = T__;
T__ lp__(0.0);
stan::math::accumulator<T__> lp_accum__;
static const char* function__ = "bernoulli_model_namespace::log_prob";
(void)function__; // suppress unused var warning
stan::io::reader<local_scalar_t__> in__(params_r__, params_i__);
local_scalar_t__ DUMMY_VAR__(std::numeric_limits<double>::quiet_NaN());
(void)DUMMY_VAR__; // suppress unused var warning
try {
Eigen::Matrix<local_scalar_t__, -1, 1> betas;
betas = Eigen::Matrix<local_scalar_t__, -1, 1>(M);
stan::math::fill(betas, DUMMY_VAR__);
current_statement__ = 1;
betas = in__.vector(M);
local_scalar_t__ alpha;
alpha = DUMMY_VAR__;
current_statement__ = 2;
alpha = in__.scalar();
local_scalar_t__ sigma;
sigma = DUMMY_VAR__;
current_statement__ = 3;
sigma = in__.scalar();
current_statement__ = 3;
if (jacobian__) {
current_statement__ = 3;
sigma = stan::math::lb_constrain(sigma, 0, lp__);
} else {
current_statement__ = 3;
sigma = stan::math::lb_constrain(sigma, 0);
}
{
current_statement__ = 4;
lp_accum__.add(normal_id_glm_lpdf<false>(y, x, alpha, betas, sigma));
}
} catch (const std::exception& e) {
stan::lang::rethrow_located(e, locations_array__[current_statement__]);
// Next line prevents compiler griping about no return
throw std::runtime_error("*** IF YOU SEE THIS, PLEASE REPORT A BUG ***");
}
lp_accum__.add(lp__);
return lp_accum__.sum();
} // log_prob()
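// write_array: transforms one unconstrained draw back to the constrained scale
// and writes betas, alpha, and sigma to vars__; this model has no transformed
// parameters or generated quantities, so the emit_* flags add nothing further.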
template <typename RNG>
inline void write_array(RNG& base_rng__, std::vector<double>& params_r__,
std::vector<int>& params_i__,
std::vector<double>& vars__,
bool emit_transformed_parameters__ = true,
bool emit_generated_quantities__ = true,
std::ostream* pstream__ = nullptr) const {
using local_scalar_t__ = double;
vars__.resize(0);
stan::io::reader<local_scalar_t__> in__(params_r__, params_i__);
static const char* function__ = "bernoulli_model_namespace::write_array";
(void)function__; // suppress unused var warning
double lp__ = 0.0;
(void)lp__; // dummy to suppress unused var warning
stan::math::accumulator<double> lp_accum__;
local_scalar_t__ DUMMY_VAR__(std::numeric_limits<double>::quiet_NaN());
(void)DUMMY_VAR__; // suppress unused var warning
try {
Eigen::Matrix<double, -1, 1> betas;
betas = Eigen::Matrix<double, -1, 1>(M);
stan::math::fill(betas, std::numeric_limits<double>::quiet_NaN());
current_statement__ = 1;
betas = in__.vector(M);
double alpha;
alpha = std::numeric_limits<double>::quiet_NaN();
current_statement__ = 2;
alpha = in__.scalar();
double sigma;
sigma = std::numeric_limits<double>::quiet_NaN();
current_statement__ = 3;
sigma = in__.scalar();
current_statement__ = 3;
sigma = stan::math::lb_constrain(sigma, 0);
for (int sym1__ = 1; sym1__ <= M; ++sym1__) {
vars__.emplace_back(betas[(sym1__ - 1)]);
}
vars__.emplace_back(alpha);
vars__.emplace_back(sigma);
if (logical_negation((primitive_value(emit_transformed_parameters__)
|| primitive_value(emit_generated_quantities__)))) {
return;
}
if (logical_negation(emit_generated_quantities__)) {
return;
}
} catch (const std::exception& e) {
stan::lang::rethrow_located(e, locations_array__[current_statement__]);
// Next line prevents compiler griping about no return
throw std::runtime_error("*** IF YOU SEE THIS, PLEASE REPORT A BUG ***");
}
} // write_array()
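// transform_inits: maps user-supplied constrained initial values onto the
// unconstrained scale (lb_free inverts the lower bound on sigma).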
inline void transform_inits(const stan::io::var_context& context__,
std::vector<int>& params_i__,
std::vector<double>& vars__,
std::ostream* pstream__) const final {
using local_scalar_t__ = double;
vars__.clear();
vars__.reserve(num_params_r__);
try {
int pos__;
pos__ = std::numeric_limits<int>::min();
pos__ = 1;
Eigen::Matrix<double, -1, 1> betas;
betas = Eigen::Matrix<double, -1, 1>(M);
stan::math::fill(betas, std::numeric_limits<double>::quiet_NaN());
{
std::vector<local_scalar_t__> betas_flat__;
current_statement__ = 1;
assign(betas_flat__, nil_index_list(), context__.vals_r("betas"),
"assigning variable betas_flat__");
current_statement__ = 1;
pos__ = 1;
current_statement__ = 1;
for (int sym1__ = 1; sym1__ <= M; ++sym1__) {
current_statement__ = 1;
assign(betas, cons_list(index_uni(sym1__), nil_index_list()),
betas_flat__[(pos__ - 1)], "assigning variable betas");
current_statement__ = 1;
pos__ = (pos__ + 1);
}
}
double alpha;
alpha = std::numeric_limits<double>::quiet_NaN();
current_statement__ = 2;
alpha = context__.vals_r("alpha")[(1 - 1)];
double sigma;
sigma = std::numeric_limits<double>::quiet_NaN();
current_statement__ = 3;
sigma = context__.vals_r("sigma")[(1 - 1)];
double sigma_free__;
sigma_free__ = std::numeric_limits<double>::quiet_NaN();
current_statement__ = 3;
sigma_free__ = stan::math::lb_free(sigma, 0);
for (int sym1__ = 1; sym1__ <= M; ++sym1__) {
vars__.emplace_back(betas[(sym1__ - 1)]);
}
vars__.emplace_back(alpha);
vars__.emplace_back(sigma_free__);
} catch (const std::exception& e) {
stan::lang::rethrow_located(e, locations_array__[current_statement__]);
// Next line prevents compiler griping about no return
throw std::runtime_error("*** IF YOU SEE THIS, PLEASE REPORT A BUG ***");
}
} // transform_inits()
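// Introspection: parameter names, dimensions, and JSON descriptions of the
// constrained and unconstrained parameter types, consumed by the interfaces.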
inline void get_param_names(std::vector<std::string>& names__) const {
names__.clear();
names__.emplace_back("betas");
names__.emplace_back("alpha");
names__.emplace_back("sigma");
} // get_param_names()
inline void get_dims(std::vector<std::vector<size_t>>& dimss__) const final {
dimss__.clear();
dimss__.emplace_back(std::vector<size_t>{static_cast<size_t>(M)});
dimss__.emplace_back(std::vector<size_t>{});
dimss__.emplace_back(std::vector<size_t>{});
} // get_dims()
inline void constrained_param_names(std::vector<std::string>& param_names__,
bool emit_transformed_parameters__ = true,
bool emit_generated_quantities__
= true) const final {
for (int sym1__ = 1; sym1__ <= M; ++sym1__) {
{
param_names__.emplace_back(std::string() + "betas" + '.'
+ std::to_string(sym1__));
}
}
param_names__.emplace_back(std::string() + "alpha");
param_names__.emplace_back(std::string() + "sigma");
if (emit_transformed_parameters__) {
}
if (emit_generated_quantities__) {
}
} // constrained_param_names()
inline void unconstrained_param_names(std::vector<std::string>& param_names__,
bool emit_transformed_parameters__
= true,
bool emit_generated_quantities__
= true) const final {
for (int sym1__ = 1; sym1__ <= M; ++sym1__) {
{
param_names__.emplace_back(std::string() + "betas" + '.'
+ std::to_string(sym1__));
}
}
param_names__.emplace_back(std::string() + "alpha");
param_names__.emplace_back(std::string() + "sigma");
if (emit_transformed_parameters__) {
}
if (emit_generated_quantities__) {
}
} // unconstrained_param_names()
inline std::string get_constrained_sizedtypes() const {
stringstream s__;
s__ << "[{\"name\":\"betas\",\"type\":{\"name\":\"vector\",\"length\":" << M
<< "},\"block\":\"parameters\"},{\"name\":\"alpha\",\"type\":{\"name\":"
"\"real\"},\"block\":\"parameters\"},{\"name\":\"sigma\",\"type\":{"
"\"name\":\"real\"},\"block\":\"parameters\"}]";
return s__.str();
} // get_constrained_sizedtypes()
inline std::string get_unconstrained_sizedtypes() const {
stringstream s__;
s__ << "[{\"name\":\"betas\",\"type\":{\"name\":\"vector\",\"length\":" << M
<< "},\"block\":\"parameters\"},{\"name\":\"alpha\",\"type\":{\"name\":"
"\"real\"},\"block\":\"parameters\"},{\"name\":\"sigma\",\"type\":{"
"\"name\":\"real\"},\"block\":\"parameters\"}]";
return s__.str();
} // get_unconstrained_sizedtypes()
// Begin method overload boilerplate
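// These overloads adapt Eigen vector arguments to the std::vector-based
// implementations above.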
template <typename RNG>
inline void write_array(RNG& base_rng__,
Eigen::Matrix<double, Eigen::Dynamic, 1>& params_r,
Eigen::Matrix<double, Eigen::Dynamic, 1>& vars,
bool emit_transformed_parameters__ = true,
bool emit_generated_quantities__ = true,
std::ostream* pstream = nullptr) const {
std::vector<double> params_r_vec(params_r.size());
for (int i = 0; i < params_r.size(); ++i)
params_r_vec[i] = params_r(i);
std::vector<double> vars_vec;
std::vector<int> params_i_vec;
write_array(base_rng__, params_r_vec, params_i_vec, vars_vec,
emit_transformed_parameters__, emit_generated_quantities__,
pstream);
vars.resize(vars_vec.size());
for (int i = 0; i < vars.size(); ++i)
vars(i) = vars_vec[i];
}
template <bool propto__, bool jacobian__, typename T_>
inline T_ log_prob(Eigen::Matrix<T_, Eigen::Dynamic, 1>& params_r,
std::ostream* pstream = nullptr) const {
std::vector<T_> vec_params_r;
vec_params_r.reserve(params_r.size());
for (int i = 0; i < params_r.size(); ++i)
vec_params_r.push_back(params_r(i));
std::vector<int> vec_params_i;
return log_prob<propto__, jacobian__, T_>(vec_params_r, vec_params_i,
pstream);
}
inline void transform_inits(
const stan::io::var_context& context,
Eigen::Matrix<double, Eigen::Dynamic, 1>& params_r,
std::ostream* pstream__ = nullptr) const {
std::vector<double> params_r_vec;
std::vector<int> params_i_vec;
transform_inits(context, params_i_vec, params_r_vec, pstream__);
params_r.resize(params_r_vec.size());
for (int i = 0; i < params_r.size(); ++i)
params_r(i) = params_r_vec[i];
}
};
} // namespace bernoulli_model_namespace
using stan_model = bernoulli_model_namespace::bernoulli_model;
#ifndef USING_R
// Boilerplate
stan::model::model_base& new_model(stan::io::var_context& data_context,
unsigned int seed,
std::ostream* msg_stream) {
stan_model* m = new stan_model(data_context, seed, msg_stream);
return *m;
}
#endif
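// examples/bernoulli/bernoulli.stan -- the Stan program the C++ above was generated from: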
data {
  int N;
  int M;
  vector[N] y;
  matrix[N, M] x;
}
parameters {
  vector[M] betas;
  real alpha;
  real<lower=0> sigma;
}
model {
  target += normal_id_glm_lpdf(y | x, alpha, betas, sigma);
}
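For reference, normal_id_glm_lpdf(y | x, alpha, betas, sigma) evaluates the same likelihood as a plain normal density on the linear predictor alpha + x * betas; the GLM form just carries analytic gradients. A minimal, equivalent (if slower) model block would look like the sketch below:

model {
  target += normal_lpdf(y | alpha + x * betas, sigma);
}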