Comments on chapter 2 of Bayesian Computation with R
####################################
# Section 2.3 Using a Discrete Prior
####################################
install.packages('LearnBayes')  # only needed once
library(LearnBayes)
p = seq(0.05, 0.95, by = 0.1)  # vector of proportion values
prior = c(1, 5.2, 8, 7.2, 4.6, 2.1, 0.7, 0.1, 0, 0)
prior = prior/sum(prior)  # vector of prior probabilities
plot(p, prior, type = "h", ylab="Prior Probability")
data = c(11, 16)  # 11 of 27 students sleep a sufficient number of hours, 16 do not
post = pdisc(p, prior, data)  # posterior distribution for a proportion with a discrete prior
round(cbind(p, prior, post), 2)
library(lattice)
PRIOR=data.frame("prior",p,prior)
POST=data.frame("posterior",p,post)
names(PRIOR)=c("Type","P","Probability")
names(POST)=c("Type","P","Probability")
data=rbind(PRIOR,POST)
xyplot(Probability~P|Type,data=data,layout=c(1,2),type="h",lwd=3,col="black")
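
# Sanity check (my addition, not in the book's code): the same posterior can be
# computed by hand. With s = 11 successes and f = 16 failures the likelihood at
# each p is proportional to p^s * (1 - p)^f, and Bayes' rule multiplies by the
# prior and renormalizes. The result should match pdisc() above.
s = 11; f = 16
like = p^s * (1 - p)^f
post.by.hand = prior * like / sum(prior * like)
round(cbind(p, post, post.by.hand), 4)  # the two posterior columns should agree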
################################
# Section 2.4 Using a Beta Prior
################################
library(LearnBayes)
(quantile2=list(p=.9,x=.5))  # each quantile is a list with components p (the probability) and x (the quantile value); here the prior 90th percentile is 0.5
quantile1 = list(p=.5,x=.3)  # the prior median is 0.3
# ?beta.select
(ab = beta.select(quantile1,quantile2))  # selects the beta prior that matches these two quantiles
(a = ab[1])
b = ab[2]
s = 11  # 11 successes: students who sleep a sufficient number of hours
f = 16  # 16 failures: students who do not sleep a sufficient number of hours
curve(dbeta(x,a+s,b+f), from=0, to=1, xlab="p",ylab="Density",lty=1,lwd=4)  # posterior
curve(dbeta(x,s+1,f+1),add=TRUE,lty=2,lwd=4)  # likelihood
curve(dbeta(x,a,b),add=TRUE,lty=3,lwd=4)      # prior
legend(.7,4,c("Prior","Likelihood","Posterior"),
       lty=c(3,2,1),lwd=c(3,3,3))
1 - pbeta(0.5, a + s, b + f)        # exact posterior probability that p >= 0.5
qbeta(c(0.05, 0.95), a + s, b + f)  # exact 90% posterior interval
ps = rbeta(1000, a + s, b + f)      # simulate 1000 draws from the posterior
windows()  # opens a new graphics device on Windows; use dev.new() on other platforms
hist(ps,xlab="p")
sum(ps >= 0.5)/1000                 # simulation estimate of P(p >= 0.5)
quantile(ps, c(0.05, 0.95))         # simulation estimate of the 90% interval
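
# Quick check (my addition, not in the text): the beta(a, b) prior returned by
# beta.select() should roughly reproduce the two elicited quantiles, i.e. its
# median should be near 0.3 and its 90th percentile near 0.5.
qbeta(c(0.5, 0.9), a, b)
# The exact posterior probability P(p >= 0.5) computed with pbeta() should be
# close to the simulation-based estimate from the draws in ps.
c(exact = 1 - pbeta(0.5, a + s, b + f), simulated = mean(ps >= 0.5))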
#####################################
# Section 2.5 Using a Histogram Prior
#####################################
library(LearnBayes)
midpt = seq(0.05, 0.95, by = 0.1)  # midpoints of the histogram bins
prior = c(1, 5.2, 8, 7.2, 4.6, 2.1, 0.7, 0.1, 0, 0)
prior = prior/sum(prior)
curve(histprior(x,midpt,prior), from=0, to=1,
      ylab="Prior density",ylim=c(0,.3))
# the likelihood for a binomial sample is proportional to a beta(s+1, f+1) density,
# where s = number of successes and f = number of failures
s = 11
f = 16
curve(histprior(x,midpt,prior) * dbeta(x,s+1,f+1),
      from=0, to=1, ylab="Posterior density")
# evaluate the posterior on a fine grid, normalize it, and sample from it
p = seq(0, 1, length=500)
post = histprior(p, midpt, prior) * dbeta(p, s+1, f+1)
post = post/sum(post)
ps = sample(p, replace = TRUE, prob = post)  # 500 draws (sample() defaults to size = length(p))
hist(ps, xlab="p", main="")
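
# A small addition (a sketch, not from the book): summaries of the simulated
# draws from the histogram-prior posterior, for comparison with the beta-prior
# results in Section 2.4.
mean(ps)                     # posterior mean of the proportion
quantile(ps, c(0.05, 0.95))  # 90% interval from the simulated sample
sum(ps >= 0.5)/length(ps)    # estimate of P(p >= 0.5)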
########################
# Section 2.6 Prediction
########################
library(LearnBayes)
p = seq(0.05, 0.95, by=.1)
prior = c(1, 5.2, 8, 7.2, 4.6, 2.1, 0.7, 0.1, 0, 0)
prior = prior/sum(prior)
m = 20    # size of future binomial sample
ys = 0:20 # vector of numbers of successes for the future binomial experiment
pred = pdiscp(p, prior, m, ys)  # predictive distribution for a binomial sample with a discrete prior
cbind(0:20,pred)
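
# Optional check (my addition): the predictive probabilities from pdiscp() can be
# reproduced directly by averaging binomial probabilities over the discrete prior,
# P(y) = sum_i prior_i * dbinom(y, m, p_i).
pred.by.hand = sapply(ys, function(y) sum(prior * dbinom(y, m, p)))
round(cbind(ys, pred, pred.by.hand), 4)  # the two predictive columns should agree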
ab=c(3.26, 7.19)  # beta prior parameters from Section 2.4
m=20; ys=0:20
pred=pbetap(ab, m, ys)  # exact predictive distribution for a binomial sample with a beta prior
# simulation-based approximation to the same predictive distribution
p = rbeta(1000,3.26, 7.19)  # draw 1000 proportions from the beta prior
y = rbinom(1000, 20, p)     # for each drawn proportion, simulate a future binomial count
table(y)
freq=table(y)
ys=as.integer(names(freq))
predprob=freq/sum(freq)
plot(ys,predprob,type="h",xlab="y",
     ylab="Predictive Probability")
dist=cbind(ys,predprob)
covprob=.9  # desired coverage probability
discint(dist,covprob)  # highest-probability interval for a discrete distribution
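
# Closing check (a sketch, not in the book): with a beta(a, b) prior the exact
# predictive is the beta-binomial, P(y) = choose(m, y) * B(a + y, b + m - y) / B(a, b),
# which should reproduce the pbetap() values computed above.
ys.all = 0:m
pred.exact = choose(m, ys.all) * beta(ab[1] + ys.all, ab[2] + m - ys.all) / beta(ab[1], ab[2])
round(cbind(y = ys.all, pbetap = pred, betabinom = pred.exact), 4)  # columns should match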