Skip to content

Instantly share code, notes, and snippets.

@hans
Last active June 22, 2018 02:21
Show Gist options
  • Save hans/b447b374e70752eee4bf4e2a23964a40 to your computer and use it in GitHub Desktop.
// Candidate priors over a card's win probability (ps) and payoff value (vs).
// NOTE(review): no `ps` weights are passed to Categorical, so this presumably
// relies on Categorical defaulting to uniform weights over `vs` — confirm for
// the webppl version in use.
var ps = Categorical({vs: [0.2, 0.4, 0.6, 0.8]})
// Same idea, but with mass concentrated at the low end ("skewed left").
var ps_skew_left = Categorical({vs: [0.1, 0.2, 0.3, 0.8]})
var vs = Categorical({vs: [2, 4, 6, 8]})
var vs_skew_left = Categorical({vs: [1, 2, 3, 10]})
// Tag a likelihood function `f` with a human-readable name and hand it back.
// Function#name is non-writable by default, hence defineProperty rather than
// plain assignment; defineProperty returns the object it modified.
var Model = function(name, f) {
  return Object.defineProperty(f, 'name', {value: name});
};
// Log-probability of value `x` under distribution `erp`.
// Bug fix: the original body evaluated `erp.score(x)` but never returned it,
// so every caller received undefined (and Math.exp(undefined) is NaN, which
// silently corrupted expectation()).
var score = function(erp, x) {
  // for backwards compatibility with webppl < 0.7
  //return Math.max(erp.score(null, x), erp.score(x));
  // NB: ^ isn't bullet proof
  return erp.score(x);
}
// Expected value of distribution `erp`. When `prop` is supplied, the
// expectation is taken over that property of each support element rather
// than the element itself.
var expectation = function(erp, prop) {
  var contribution = function(state) {
    var value = (!!prop) ? state[prop] : state;
    return Math.exp(score(erp, state)) * value;
  };
  return sum(map(contribution, erp.support()));
};
// KL divergence D_KL(P || Q), summed over the support of P.
// When `pProp` is supplied, each P-state is projected onto that property
// before being scored under Q (P and Q then live on related supports).
var KL = function(P, Q, pProp) {
  var statesP = P.support();
  var statesQ = Q.support();
  // TODO: assert that states1 = states2
  var term = function(state) {
    var logP = P.score(state);
    var massP = Math.exp(logP);
    // Take lim_{x -> 0} x log(x) = 0; otherwise 0 * -Infinity = NaN.
    if (massP === 0) {
      return 0;
    }
    // P(i) * log[ P(i) / Q(i) ] = P(i) * (log P(i) - log Q(i))
    var logQ = Q.score((!!pProp) ? state[pProp] : state);
    return massP * (logP - logQ);
  };
  return sum(map(term, statesP));
}
/**
 * Expected information gain of each candidate experiment.
 *
 * models: potential hypotheses (likelihood functions tagged with .name)
 * X: Experiment sampling function
 * Y: Response sampling function
 *
 * Returns a distribution over {x, EIG} records.
 *
 * Bug fix: the original built the outer Enumerate but never returned it, so
 * EIG(...) evaluated to undefined and viz.table had nothing to render. The
 * inner `var EIG` also shadowed the function's own name; renamed to `eig`.
 */
var EIG = function(models, X, Y) {
  // Uniform prior over the candidate models.
  var mPrior = Enumerate(function() {
    var m = uniformDraw(models)
    return {name: m.name, func: m}
  });
  return Enumerate(function() {
    var x = X();
    // wrt the above distribution on responses, what is the posterior distribution on models?
    var KLDist = Enumerate(function() {
      var y = Y(x);
      var mPosterior = Enumerate(function() {
        var _m = sample(mPrior);
        var mFunc = _m.func;
        var ll = mFunc(x, y);
        factor(ll);
        return {m: _m, ll: ll};
      });
      var kl = KL(mPosterior, mPrior, 'm');
      return {y: y, kl: kl};
    });
    var eig = expectation(KLDist, 'kl');
    //factor(eig);
    //return (returnKL) ? {x: x, EIG: eig, KLDist: KLDist} : {x: x, EIG: eig}
    return {x: x, EIG: eig}
  })
}
// Experiment sampler: which of a's cards to reveal, probability ("p") or value ("v").
var fX = function() { return uniformDraw(["p", "v"]) }
// Build a response sampler: given the revealed card type, draw a card value
// from the matching prior.
// Bug fix: the original inner function evaluated the ternary but never
// returned it, so every response y was undefined.
var make_fY = function(d_ps, d_vs) {
  return function(cardType) {
    return cardType == "p" ? sample(d_ps) : sample(d_vs)
  }
}
// Alternative model-prior construction (unused by the driver below): a
// distribution over hypotheses about whether card a beats card b in expected
// utility, paired with a likelihood that substitutes the observed value y
// for the revealed component of a's expected utility.
var make_mPrior = function(d_ps, d_vs) {
return Enumerate(function() {
var p_a = sample(d_ps)
var v_a = sample(d_vs)
var p_b = sample(d_ps)
var v_b = sample(d_vs)
var eu_a = p_a * v_a
var eu_b = p_b * v_b
// Hypothesis under consideration: a's expected utility beats b's.
var query = eu_a > eu_b
return {m: query, func: function(x, y) {
// TODO not sure this "func" behavior is what EIG wants?
// NOTE(review): this returns a boolean, but EIG applies factor() to it as a
// log-likelihood — presumably why the author left the TODO; confirm intent.
var eu_a = (x == "p" ? y : p_a) * (x == "v" ? y : v_a)
var eu_b = p_b * v_b
return eu_a > eu_b
}}
})
}
// Model "a wins": enumerate card configurations in which player a's expected
// utility beats b's, then expose the log-likelihood of observing one of a's
// cards (its probability or its value) under that hypothesis.
// Bug fix: the original likelihood function evaluated dist.score(...) but
// never returned it, so factor(ll) in EIG always received undefined.
var make_aWins = function(d_ps, d_vs) {
  var dist = Enumerate(function() {
    var p_a = sample(d_ps)
    var p_b = sample(d_ps)
    var v_a = sample(d_vs)
    var v_b = sample(d_vs)
    var eu_a = p_a * v_a
    var eu_b = p_b * v_b
    condition(eu_a > eu_b)
    // Reveal one of a's two cards, chosen uniformly.
    var key = uniformDraw(["p", "v"])
    return {key: key, val: key == "p" ? p_a : v_a}
  })
  return Model("aWins", function(x, y) {
    return dist.score({key: x, val: y})
  })
}
// Model "b wins": mirror of make_aWins with the condition flipped.
// NOTE(review): like aWins, the observation is a's card (p_a / v_a), not
// b's — presumably intentional (both models describe observations of player
// a under different win hypotheses), but confirm.
// Bug fix: the original likelihood function evaluated dist.score(...) but
// never returned it, so factor(ll) in EIG always received undefined.
var make_bWins = function(d_ps, d_vs) {
  var dist = Enumerate(function() {
    var p_a = sample(d_ps)
    var p_b = sample(d_ps)
    var v_a = sample(d_vs)
    var v_b = sample(d_vs)
    var eu_a = p_a * v_a
    var eu_b = p_b * v_b
    condition(eu_b > eu_a)
    // Reveal one of a's two cards, chosen uniformly.
    var key = uniformDraw(["p", "v"])
    return {key: key, val: key == "p" ? p_a : v_a}
  })
  return Model("bWins", function(x, y) {
    return dist.score({key: x, val: y})
  })
}
// Run the full analysis for one pair of priors and render the expected
// information gain of each experiment choice as a table.
// NOTE(review): `eval` shadows JavaScript's global eval — harmless inside
// webppl, but a confusing choice of name.
var eval = function(d_ps, d_vs) {
viz.table(EIG([make_aWins(d_ps, d_vs), make_bWins(d_ps, d_vs)],
fX, make_fY(d_ps, d_vs)))
}
print("no skew")
eval(ps, vs)
print("ps skewed left")
eval(ps_skew_left, vs)
print("vs skewed left")
eval(ps, vs_skew_left)
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment