Data from Adam Feldman's Empirical SCOTUS blog: https://empiricalscotus.com/2016/05/26/interruptions/ https://empiricalscotus.com/2016/05/01/oral-arguments-2015/
as analysed at StatsChat: http://www.statschat.org.nz/2016/05/29/ima-let-you-finish/
library(FNN)
## 50,000 points, uniform in 1, 2, 10, and 100 dimensions
x1<-runif(5e4)
x2<-matrix(runif(5e4*2),ncol=2)
x10<-matrix(runif(5e4*10),ncol=10)
x100<-matrix(runif(5e4*100),ncol=100)
## time the nearest-neighbour distance computation as the dimension increases
system.time(d1<-knn.dist(x1,k=1))
system.time(d2<-knn.dist(x2,k=1))
system.time(d10<-knn.dist(x10,k=1))
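x100 is created above but never used; presumably the comparison continues into 100 dimensions. The timing line and the median comparison below are my additions, not part of the original snippet:
system.time(d100<-knn.dist(x100,k=1))
## the typical nearest-neighbour distance grows sharply with dimension
sapply(list(d1=d1, d2=d2, d10=d10, d100=d100), median)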
library(hextri)
library(survey)
data(api)
## all hexes same orientation: looks as if middle schools went up more
with(apipop,hextri(api99,api00,stype,c("orange","forestgreen","purple"),nbins=20,diffuse=FALSE,
  xlab="1999 Academic Performance Index",ylab="2000 Academic Performance Index",style="size"))
legend("topleft",fill=c("orange","purple","forestgreen"),legend=c("Elementary","Middle","High"),bty="n")
## random orientation: less pretty, but you can see middle schools went up less
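The code for the random-orientation version isn't in this snippet; a sketch of what it presumably looks like, assuming hextri's sorted= argument is what fixes or randomises the triangle positions within each hexagon:
with(apipop,hextri(api99,api00,stype,c("orange","forestgreen","purple"),nbins=20,
  diffuse=FALSE,sorted=FALSE,
  xlab="1999 Academic Performance Index",ylab="2000 Academic Performance Index",style="size"))
legend("topleft",fill=c("orange","purple","forestgreen"),legend=c("Elementary","Middle","High"),bty="n")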
## playlist data: columns are a date-time stamp, the song, and the group
rock<-read.csv("~/Downloads/output.csv", as.is=TRUE, header=FALSE, col.names=c("datetime","song","group"))
## pull out the "HH:MM" time of day and convert it to an angle on the 24-hour clock
rock$stime<-substr(rock$datetime,5,9)
rock$timeangle<-with(rock, as.numeric(substr(stime,1,2))*2*pi/24+as.numeric(substr(stime,4,5))*2*pi/24/60)
## sine and cosine components, and their means, for circular summaries of time of day
rock$ty<-cos(rock$timeangle)
rock$tx<-sin(rock$timeangle)
rock$txmean<-mean(rock$tx)
rock$tymean<-mean(rock$ty)
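A follow-up sketch (my addition, not part of the original): the mean sine and cosine components convert back to a mean time of day on the 24-hour clock.
meanangle<-atan2(mean(rock$tx), mean(rock$ty))   ## tx is the sine, ty the cosine
meanhour<-(meanangle %% (2*pi)) * 24/(2*pi)      ## circular mean time, in hours
meanhour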
"+.bentime"<-function(e1,e2){ | |
  e<-list(hour=e1$hour+e2$hour,min=e1$min+e2$min,sec=e1$sec+e2$sec)
  ## carry seconds into minutes, then minutes into hours
  e$min<-e$min + e$sec %/% 60L
  e$sec<-e$sec %% 60L
  e$hour<-e$hour + e$min %/% 60L
  e$min<-e$min %% 60L
  class(e)<-"bentime"
  e
}
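A quick check of the "+" method above, using a hypothetical bentime() constructor (the real constructor isn't shown here, so its exact form is an assumption):
bentime<-function(hour,min,sec) structure(list(hour=hour,min=min,sec=sec), class="bentime")
a<-bentime(1L,45L,50L)
b<-bentime(0L,20L,30L)
unclass(a+b)    ## carries correctly: 2 hours, 6 minutes, 20 seconds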
## vertex offsets for a pointy-top hexagon of circumradius 1: hex_x and hex_y are used
## below but not defined in this snippet, so this definition is an assumption
hex_x<-cos(pi/2+(0:5)*pi/3)
hex_y<-sin(pi/2+(0:5)*pi/3)

hexmap<-function(x,y,id,...){
  ## hexagon centres: rows are 1.5 units apart, odd rows shifted by half a hexagon width
  xcent<-x*sqrt(3) - (y%%2)*sqrt(3)/2
  ycent<-y*1.5
  plot(x,y,type="n",ylim=range(ycent)+c(-2,2),xlim=range(xcent)+c(-2,2))
  for(i in 1:length(x)){
    polygon(hex_x+xcent[i],hex_y+ycent[i])
    text(xcent[i],ycent[i],id[i],cex=0.4)
  }
}
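An illustrative call with made-up cell coordinates and labels:
cells<-expand.grid(x=1:3, y=1:3)    ## a small 3 x 3 block of hexagonal cells
hexmap(cells$x, cells$y, id=1:9)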
Data from the New Zealand Department of Corrections, provided to Mark Hanna under the Official Information Act
Original documents available at https://fyi.org.nz/request/3926-strip-searches-in-new-zealand-prisons#incoming-13386
## surface gravity, in m/s^2 (unit exponents m=1, kg=0, s=-2), for the planets plus Pluto
gravity<-mks(c(3.7,8.9,9.8,3.7,23.1,9.0,8.7,11.0,0.6), m=1, kg=0, s=-2)
## mean density, in kg/m^3
density<-mks(c(5427,5243,5515,3933,1326,687,1270,1638,2390), m=-3,kg=1,s=0)
## the speed of light in furlong-firkin-fortnight units: furlongs per fortnight
speed.of.light<-fff(1.8026175e12,fur=1,fir=0,ftn=-1)
## a mean method that keeps track of the units of its argument
setMethod("mean","mks",function(x,...){
  u<-getUnits(x)
  mks(mean(getValues(x)),u[1],u[2],u[3])
})
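The mks class, its constructor, and the getValues/getUnits accessors aren't defined in this snippet; a minimal S4 sketch consistent with the calls above (an assumption about their shape, and it would need to run before the code above):
## a numeric vector of values plus metre/kilogram/second exponents
setClass("mks", representation(values="numeric", units="numeric"))
mks<-function(values, m, kg, s) new("mks", values=values, units=c(m, kg, s))
setGeneric("getValues", function(x) standardGeneric("getValues"))
setGeneric("getUnits", function(x) standardGeneric("getUnits"))
setMethod("getValues", "mks", function(x) x@values)
setMethod("getUnits", "mks", function(x) x@units)
## with these definitions, mean(gravity) is again an mks object with units m^1 kg^0 s^-2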
## pal: build colour strings from a matrix with red, green, blue columns on a 0-255 scale
pal<-function(mat) rgb(mat[,1],mat[,2],mat[,3],maxColorValue=255)
## showpal: display a palette as a row of colour swatches
showpal<-function(pal) {
  n<-length(pal)
  image(1:n, 1, as.matrix(1:n), col = pal,
        xlab = deparse(substitute(pal)), ylab = "", xaxt = "n", yaxt = "n",
        bty = "n")
}
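A usage example with made-up 0-255 RGB rows (three colours from the Okabe-Ito palette):
m<-rbind(c(230,159,0), c(86,180,233), c(0,158,115))
showpal(pal(m))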
This is a model for NZ road deaths per billion km travelled, using data compiled by Sam Warburton (@economissive on Twitter).
The model has an underlying smoothed trend which is a random walk with Gaussian increments, and a year-specific deviation that is also Gaussian. That is, it's a state space model with a random walk in the latent variable and a measurement model that's a Poisson-logNormal mixture to get overdispersed counts.
The variances of the random-walk steps and of the year-specific deviations have diffuse hyperpriors, and the means are set up so that the mean of the exponential of each increment is 1 (for a Gaussian increment with variance sigma^2, that means giving it mean -sigma^2/2), i.e., so that mu is genuinely the trend in means.
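To make that structure concrete, here is a simulation sketch of the data-generating process described above, with made-up parameter values and exposures; it illustrates the model's form, not the fitted model or the real data.
## random-walk trend on the log scale, year-specific lognormal deviations, Poisson counts
set.seed(2017)
nyears<-20
sigma_step<-0.05    ## sd of the random-walk increments (made up)
sigma_year<-0.10    ## sd of the year-specific deviations (made up)
## increments get mean -sigma^2/2 so that exp(increment) has mean 1 and mu tracks the mean rate
mu<-log(8) + cumsum(rnorm(nyears, mean=-sigma_step^2/2, sd=sigma_step))
z<-rnorm(nyears, mean=-sigma_year^2/2, sd=sigma_year)
km<-runif(nyears, 35, 45)                   ## billions of km travelled each year (made up)
deaths<-rpois(nyears, lambda=km*exp(mu+z))
rate<-deaths/km                             ## observed deaths per billion km
plot(rate, type="b", xlab="year", ylab="deaths per billion km")
lines(exp(mu), col="red")                   ## the underlying smooth trend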