# Load required R-libraries
library(OpenMx)
library(psych)
library(polycor)
library(mvtnorm) # provides pmvnorm()
# SAS's PROBBNRM in R
# ===================
probbnrm <- function(x, y, r) {
  crm <- matrix(c(1, r, r, 1), ncol=2)
  pmvnorm(lower=c(-Inf, -Inf), upper=c(x, y), mean=c(0, 0), corr=crm)[1]
}
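# Illustrative check (not part of the original macro): for x = y = 0 the
# standard bivariate normal CDF has the closed form 1/4 + asin(r)/(2*pi),
# so probbnrm(0, 0, 0.5) should return approximately 1/3.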
# Green & Yang (2009) macro
# =========================
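# Argument shapes, as inferred from the code below (this is the calling
# convention used by calc.rho.nlin further down):
#   THRESH: nitem x nthresh matrix of estimated thresholds
#   LOAD:   nitem x nfact matrix of factor loadings
#   FACCOR: nfact x nfact factor correlation matrix
#   POLY:   nitem x nitem model-implied polychoric correlation matrix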
est.rho.nl <- function(THRESH, LOAD, FACCOR, POLY) {
  nthresh <- ncol(THRESH)
  ncat    <- nthresh + 1
  nitem   <- nrow(LOAD)
  nfact   <- ncol(LOAD)
  POLYR <- LOAD %*% FACCOR %*% t(LOAD)
  diag(POLYR) <- 1 # factor variances are set to 1
  DIFFPOLY <- POLY - POLYR
  sumnum <- 0
  addden <- 0
  for (j in 1:nitem) {
    for (jp in 1:nitem) {
      sumprobn2 <- 0
      addprobn2 <- 0
      for (c in 1:nthresh) {
        for (cp in 1:nthresh) {
          sumrvstar <- 0
          for (k in 1:nfact) {
            for (kp in 1:nfact) {
              # scalar product of the two loadings and the factor correlation
              sumrvstar <- sumrvstar + LOAD[j,k] * LOAD[jp,kp] * FACCOR[k,kp]
            }
          }
          sumprobn2 <- sumprobn2 + probbnrm(THRESH[j,c], THRESH[jp,cp], sumrvstar)
          addprobn2 <- addprobn2 + probbnrm(THRESH[j,c], THRESH[jp,cp], POLY[j,jp])
        }
      }
      sumprobn1  <- 0
      sumprobn1p <- 0
      for (cc in 1:nthresh) {
        sumprobn1  <- sumprobn1  + pnorm(THRESH[j,cc])
        sumprobn1p <- sumprobn1p + pnorm(THRESH[jp,cc])
      }
      sumnum <- sumnum + (sumprobn2 - sumprobn1 * sumprobn1p)
      addden <- addden + (addprobn2 - sumprobn1 * sumprobn1p)
    }
  }
  reliab <- sumnum/addden
  reliab
}
# Determine number of thresholds
# ==============================
nr.thresholds <- function(X) {
  # sapply(X, table) is a matrix only if all items have the same
  # number of categories; otherwise nrow() returns NULL
  nr.thresh <- nrow(sapply(X, table))
  if (is.null(nr.thresh)) return(NULL)
  nr.thresh - 1 # thresholds = categories - 1
}
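# Example (illustration): for five items all coded 1..4, sapply(X, table) is
# a 4 x 5 matrix, so nr.thresholds(X) returns 3 thresholds per item.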
# Calculate the linear Cronbach's alpha
# =====================================
calc.alpha.lin <- function(X) {
  # calculate the basic alpha, using the implementation
  # in the psych-package
  alpha(X)$total
}
# Calculate the "ordinal" cronbachs alpha
# =======================================
calc.alpha.nlin <- function(X) {
nthresh <- nr.thresholds(X) # number of thresholds
X.ordinal <- mxFactor(X,levels=1:(nthresh+1)) # make all ordered factors
# calculate the polychoric correlation matrix
R <- hetcor(X.ordinal)$correlations
# calculate alpha on the basis of the polychoric corrl. matrix
alpha(R)$total
}
# CALCULATE LINEAR SEM RELIABILITY
# ================================
calc.rho.lin <- function(X) {
  n  <- nrow(X)        # number of observations
  nv <- ncol(X)        # number of items
  manifest <- names(X) # vector of item labels
  X.continuous <- X    # items treated as continuous
  m.lin <- mxModel(name="Linear SEM reliability model",
    # A matrix for asymmetric/regression paths (factor loadings)
    mxMatrix("Full", nv, 1, values=0.2, free=T, name="A"),
    # L: latent (factor) variance matrix, fixed to 1
    mxMatrix("Symm", 1, 1, values=1, free=F, name="L"),
    # U matrix of residual variances
    mxMatrix("Diag", nv, nv, values=1, free=T, name="U"),
    # model-implied covariance matrix R = ALA' + U
    mxAlgebra(A %*% L %*% t(A) + U, name="R"),
    # Raykov's rho = (sum of loadings)^2 / ((sum of loadings)^2 + sum of error variances)
    mxAlgebra(sum(A)^2/(sum(A)^2 + sum(diag2vec(U))), name="rho"),
    # maximum likelihood objective is the covariance matrix R
    mxMLObjective("R", dimnames = manifest),
    # optionally request CI around rho
    #mxCI('rho'),
    mxData(
      # input is the covariance matrix, not the raw data [ML, not FIML]
      cov(X.continuous, use="pairwise.complete.obs"),
      type="cov", numObs=n))
  m.lin.run <- mxRun(m.lin) # run model
  # optionally request CI
  #m.lin.run <- mxRun(m.lin, intervals=TRUE)
  # extract rho from solution
  rho.lin <- mxEval(rho, m.lin.run)[1]
  rho.lin
}
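# Worked check of the formula above (illustrative numbers, not estimates):
# with loadings A = (.6, .6, .6) and residual variances diag(U) = (.64, .64, .64),
# rho = 1.8^2 / (1.8^2 + 1.92) = 3.24 / 5.16 ~ 0.63.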
# CALCULATE NONLINEAR SEM RELIABILITY
# ===================================
calc.rho.nlin <- function(X) {
  n  <- nrow(X)        # number of observations
  nv <- ncol(X)        # number of items
  manifest <- names(X) # vector of item labels
  nfac <- 1            # 1 latent factor (unidimensional)
  nthresh <- nr.thresholds(X) # number of thresholds
  if (is.null(nthresh)) {
    return('Error: items with dissimilar number of categories')
  }
  # make all items ordered factors -> non-continuous
  X.ordinal <- mxFactor(X, levels=1:(nthresh+1))
  # construct threshold matrix with starting values, fully free
  T.values <- matrix(rep(seq(0, 2, length.out=nthresh), nv), ncol=nv)
  T.free <- is.na(matrix(ncol=nv, nrow=nthresh))
  m.nlin <- mxModel("Nonlinear SEM reliability model",
    # loadings matrix L, all freely estimated
    mxMatrix("Full", nv, nfac, values=0.2, free=T, name="L"),
    # unit vector for matrix algebra
    mxMatrix("Unit", nv, 1, name="vectorofOnes"),
    # means/intercepts matrix M, fixed to zero
    mxMatrix("Zero", 1, nv, name="M"),
    # residual variances vector E = 1 - diag(LL')
    mxAlgebra(vectorofOnes - (diag2vec(L %*% t(L))), name="E"),
    # model-implied covariance matrix sigma = LL' + diag(E)
    mxAlgebra(L %*% t(L) + vec2diag(E), name="impliedCovs"),
    # threshold matrix, all freely estimated
    mxMatrix("Full",
      name="thresh",
      values = T.values,
      free = T.free,
      dimnames = list(c(), manifest)),
    # full information ML-estimation
    mxFIMLObjective(
      covariance="impliedCovs",
      means="M",
      dimnames = manifest,
      thresholds="thresh"),
    # raw data as input (ordinal, missing values possible [FIML])
    mxData(observed=X.ordinal, type='raw')
  )
  m.nlin.run <- mxRun(m.nlin)
  # extract model-implied covariance matrix
  m.POLY <- mxEval(impliedCovs, m.nlin.run)
  # extract loadings matrix
  m.LOAD <- mxEval(L, m.nlin.run)
  # extract thresholds matrix, transposed to items x thresholds
  m.THRESH <- t(mxEval(thresh, m.nlin.run))
  # covariance matrix between latent variables: 1x1 identity matrix
  m.FACCOR <- matrix(c(1), nrow=1, byrow=T) # 1 factor/unidimensional
  rho.nlin <- unlist(est.rho.nl(m.THRESH, m.LOAD, m.FACCOR, m.POLY))
  rho.nlin
}
# Wrapper: calculate the requested reliability coefficients
# ==========================================================
reliability <- function(X, coefs='') {
  results <- list()
  if ('rho.lin' %in% coefs) {
    results[['rho.lin']] <- calc.rho.lin(X)
  }
  if ('rho.nlin' %in% coefs) {
    results[['rho.nlin']] <- calc.rho.nlin(X)
  }
  if ('a.lin' %in% coefs) {
    results[['a.lin']] <- calc.alpha.lin(X)
  }
  if ('a.nlin' %in% coefs) {
    results[['a.nlin']] <- calc.alpha.nlin(X)
  }
  results
}
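# Example usage
# =============
# An illustrative run on simulated data (not part of the original functions):
# five 4-category items generated from a single latent factor; sample size,
# cutpoints and item names are arbitrary choices for the demonstration.
set.seed(42)
f <- rnorm(200)
X.sim <- data.frame(sapply(1:5, function(i) {
  findInterval(0.7 * f + rnorm(200), c(-0.8, 0.2, 1.0)) + 1
}))
names(X.sim) <- paste('item', 1:5, sep='')
reliability(X.sim, coefs=c('a.lin', 'a.nlin', 'rho.lin', 'rho.nlin'))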