Skip to content

Instantly share code, notes, and snippets.

View Laurae2's full-sized avatar

Laurae Laurae2

View GitHub Profile
@Laurae2
Laurae2 / much_packages.R
Last active July 1, 2017 15:48
Much R packages
packages <- c("abind", "acepack", "actuar", "ActuDistns", "ada",
"adabag", "addinplots", "ade4",
"ade4TkGUI", "adegraphics", "adehabitatLT", "adehabitatMA", "ADGofTest",
"AER", "AGD", "agricolae", "AICcmodavg", "akima", "alabama",
"AlgDesign", "alphahull", "alr3", "alr4", "amap", "Amelia", "anchors",
"animation", "aod", "aods3", "ape", "aplpack",
"argparse", "arm", "arules", "arulesViz", "ascii",
"assertthat", "AUC", "BaBooN", "backports", "barcode", "bartMachine",
"bartMachineJARs", "base64", "base64enc", "BatchJobs",
"BayesFactor", "bayesplot", "BayesX", "BayesXsrc", "BB", "BBmisc",
@Laurae2
Laurae2 / plotting.R
Created April 26, 2017 09:33
Code used for Medium blog post "Destroying the Myth of number of threads = number of physical cores": https://medium.com/data-design/destroying-the-myth-of-number-of-threads-number-of-physical-cores-762ad3919880
library(plotly)
library(ggplot2)
plot1 <- function(data, title) {
  # Bar chart of raw Cinebench R15 scores per threading mode.
  # Each bar carries a white bold label with its score, lifted above the
  # bar top by 5% of the mean score so labels do not sit on the bar edge.
  ggplot(data = data, aes(x = Threading, y = Score, fill = Score)) +
    geom_bar(stat = "identity", color = "black") +
    geom_label(
      aes(label = Score, y = mean(Score) * 0.05 + Score, fill = Score),
      position = "identity",
      color = "white",
      fontface = "bold"
    ) +
    scale_fill_gradientn(colours = c("#ff9999", "#33cc33")) +
    labs(
      title = title,
      x = "Threading Mode",
      y = "Cinebench R15 Score",
      fill = "Score"
    ) +
    theme_bw()
}
plot2 <- function(data, title) {
  # Companion to plot1, but plots the frequency-scaled score (ScoreScale,
  # in GHz) and rounds the bar labels to two decimal places.
  ggplot(data = data, aes(x = Threading, y = ScoreScale, fill = ScoreScale)) +
    geom_bar(stat = "identity", color = "black") +
    geom_label(
      aes(
        label = round(ScoreScale, digits = 2),
        y = mean(ScoreScale) * 0.05 + ScoreScale,
        fill = ScoreScale
      ),
      position = "identity",
      color = "white",
      fontface = "bold"
    ) +
    scale_fill_gradientn(colours = c("#ff9999", "#33cc33")) +
    labs(
      title = title,
      x = "Threading Mode",
      y = "Scaled Cinebench R15 Score (GHz)",
      fill = "Scale"
    ) +
    theme_bw()
}
@Laurae2
Laurae2 / ssd_svm.R
Created March 1, 2017 11:33
Stochastic Subgradient Descent + Linear SVM example in R
# Simulate a 100-row design matrix with three Gaussian columns.
set.seed(11111)
x <- data.frame(
  a = 5 * rnorm(100),
  b = 3 * rnorm(100) + 1,
  c = 2 * rnorm(100) + 2
)
# Imperfect (non-linear) relationship thresholded into +1 / -1 classes:
# the latent score includes a squared term and an interaction.
y <- 2 * as.integer(with(x, 2 + 2 * a + 3 * b + 4 * c + c^2 + a * b) > 20) - 1
# Setting up polynomial features
@Laurae2
Laurae2 / logistic_regression_EN.R
Created February 27, 2017 23:24
Logistic Regression + Elastic Net Regularization example in R
# Simulate a 15-row design matrix with three Gaussian columns.
set.seed(11111)
x <- data.frame(
  a = 5 * rnorm(15),
  b = 3 * rnorm(15) + 1,
  c = 2 * rnorm(15) + 2
)
# Imperfect (non-linear) relationship thresholded into 0/1 labels:
# the latent score includes a squared term and an interaction.
y <- as.integer(with(x, 2 + 2 * a + 3 * b + 4 * c + c^2 + a * b) > 20)
# Setting up polynomial features
@Laurae2
Laurae2 / Linear_regression_simple_GD.R
Created February 26, 2017 12:58
Linear Regression simple gradient descent (brute forced) in R
# Simulate a 15-row design matrix with three Gaussian columns.
set.seed(11111)
x <- data.frame(
  a = 5 * rnorm(15),
  b = 3 * rnorm(15) + 1,
  c = 2 * rnorm(15) + 2
)
# Exact (noise-free) target: linear terms plus a squared term and an
# interaction, so the response is a deterministic function of x.
y <- with(x, 2 + 2 * a + 3 * b + 4 * c + c^2 + a * b)
# Setting up polynomial features
@Laurae2
Laurae2 / elastic_net.R
Last active February 25, 2017 20:19
Elastic Net Regularization example in R
# Simulate a 15-row design matrix with three Gaussian columns.
set.seed(11111)
x <- data.frame(
  a = 5 * rnorm(15),
  b = 3 * rnorm(15) + 1,
  c = 2 * rnorm(15) + 2
)
# Exact (noise-free) target: linear terms plus a squared term and an
# interaction, so the response is a deterministic function of x.
y <- with(x, 2 + 2 * a + 3 * b + 4 * c + c^2 + a * b)
# Setting up polynomial features
@Laurae2
Laurae2 / L2_regularization_regression.R
Last active February 25, 2017 20:20
L2 Regularization Regression example in R
# Simulate a 15-row design matrix with three Gaussian columns.
set.seed(11111)
x <- data.frame(
  a = 5 * rnorm(15),
  b = 3 * rnorm(15) + 1,
  c = 2 * rnorm(15) + 2
)
# Exact (noise-free) target: linear terms plus a squared term and an
# interaction, so the response is a deterministic function of x.
y <- with(x, 2 + 2 * a + 3 * b + 4 * c + c^2 + a * b)
# Setting up polynomial features
@Laurae2
Laurae2 / L1_regularization_regression.R
Last active February 25, 2017 20:21
L1 Regularization Regression example in R
# Simulate a 15-row design matrix with three Gaussian columns.
set.seed(11111)
x <- data.frame(
  a = 5 * rnorm(15),
  b = 3 * rnorm(15) + 1,
  c = 2 * rnorm(15) + 2
)
# Exact (noise-free) target: linear terms plus a squared term and an
# interaction, so the response is a deterministic function of x.
y <- with(x, 2 + 2 * a + 3 * b + 4 * c + c^2 + a * b)
# Setting up polynomial features
@Laurae2
Laurae2 / stepwise_regression_forward.R
Created February 24, 2017 20:55
Forward Stepwise Regression in R without step/lm
# Simulate a 15-row design matrix with three Gaussian columns.
set.seed(11111)
data <- data.frame(
  a = 5 * rnorm(15),
  b = 3 * rnorm(15) + 1,
  c = 2 * rnorm(15) + 2
)
# Exact (noise-free) target: linear terms plus a squared term and an
# interaction, so the response is a deterministic function of the columns.
preds <- with(data, 2 + 2 * a + 3 * b + 4 * c + c^2 + a * b)
# Setting up polynomial features
@Laurae2
Laurae2 / aic_bic.R
Created February 24, 2017 19:55
Akaike Information Criterion and Bayesian Information Criterion
# Fit an OLS model of preds on every column of data.
# NOTE(review): `preds` and `data` are defined in the companion
# stepwise-regression snippet, not here — this block assumes they exist.
lm_model <- lm(preds ~ ., data = data)
# AIC via the built-in stats::AIC (default penalty k = 2).
my_AIC <- AIC(lm_model)
# Hand-computed AIC for a Gaussian linear model:
#   n * (log(2*pi) + 1 + log(RSS / n)) + 2 * k
# where k = number of coefficients + 1 (the +1 counts the error variance).
# Intentionally overwrites the value above to show the two agree.
my_AIC <- nrow(data) * (log(2 * pi) + 1 + log((sum(lm_model$residuals ^ 2) / nrow(data)))) + ((length(lm_model$coefficients) + 1) * 2)
# BIC via AIC() with the BIC penalty k = log(n).
my_BIC <- AIC(lm_model, k = log(nrow(data)))
# Hand-computed BIC: same likelihood term, but each parameter costs log(n)
# instead of 2. Again overwrites to demonstrate agreement with the built-in.
my_BIC <- nrow(data) * (log(2 * pi) + 1 + log((sum(lm_model$residuals ^ 2) / nrow(data)))) + ((length(lm_model$coefficients) + 1) * log(nrow(data)))