@Laurae2
Last active February 25, 2017 12:26
Polynomial Regression with Exploding Gradient in R
# Setting up a random data frame (15 observations, 3 features)
set.seed(11111)
x <- data.frame(a = rnorm(n = 15) * 5,
                b = rnorm(n = 15) * 3 + 1,
                c = rnorm(n = 15) * 2 + 2)
# Setting up the (perfect) target: noiseless, exactly linear in the
# polynomial features (includes a c^2 term and an a*b interaction)
y <- 2 + (x[, 1] * 2) + (x[, 2] * 3) + (x[, 3] * 4) + (x[, 3] ^ 2) + (x[, 1] * x[, 2])
# Setting up polynomial features: squares and pairwise interactions.
# The inner loop starts at j = i, so the square term x_i * x_i is covered here too.
columns <- ncol(x)
for (i in 1:columns) {
  for (j in i:columns) {
    x[, paste0(colnames(x)[i], "X", colnames(x)[j])] <- x[, i] * x[, j]
  }
}
x <- as.matrix(cbind(Intercept = 1, x))
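# Optional sanity check (a sketch, not part of the descent itself): since y is
# an exact linear combination of the columns of x, a least-squares solve should
# recover the generating coefficients (Intercept 2, a 2, b 3, c 4, cXc 1,
# aXb 1, rest 0) up to floating-point error.
exact <- qr.solve(x, y)
print(round(exact, 4))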
# Initialize all parameters at zero (10 = ncol(x), one per feature plus intercept)
param <- rep(0, 10)
# Mean squared error cost
cost <- function(x, y, param) {
  mean(((x %*% param) - y) ^ 2)
}
# Analytic gradient of the mean squared error
grad <- function(x, y, param) {
  gradient <- rep(0, length(param))
  pre_sum <- (x %*% param) - y
  for (i in 1:length(param)) {
    # Squared error: ((x %*% param) - y) ^ 2
    # Its derivative w.r.t. param[i]: 2 * ((x %*% param) - y) * x[, i]
    gradient[i] <- 2 * mean(pre_sum * x[, i])
  }
  return(gradient)
}
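# A quick finite-difference check of the analytic gradient (illustrative
# helper, assuming a central difference with step h; not from the original
# script). The two gradients should agree to within numerical error:
num_grad <- function(x, y, param, h = 1e-6) {
  sapply(seq_along(param), function(i) {
    up <- param; up[i] <- up[i] + h
    dn <- param; dn[i] <- dn[i] - h
    (cost(x, y, up) - cost(x, y, dn)) / (2 * h)
  })
}
max(abs(num_grad(x, y, rep(0.1, ncol(x))) - grad(x, y, rep(0.1, ncol(x)))))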
# Set learning rate (eta); 0.20 is large enough here to make the descent diverge
eta <- 0.20
# Set number of gradient descent iterations
iters <- 10
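# Why the descent below explodes: the MSE cost is quadratic with Hessian
# (2 / n) * t(x) %*% x, and plain gradient descent converges only when
# eta < 2 / lambda_max of that Hessian. With these unscaled polynomial
# features the threshold should come out well below the eta used here:
H <- (2 / nrow(x)) * crossprod(x)
cat("Stability threshold for eta: ",
    2 / max(eigen(H, symmetric = TRUE, only.values = TRUE)$values), "\n", sep = "")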
# Perform gradient descent with real-time cost printing
for (i in 1:iters) {
  cat("[", sprintf("%03d", i), "] Cost: ", sprintf("%10.07f", cost(x, y, param)), "\n", sep = "")
  param <- param - eta * grad(x, y, param)
}
cat("Final cost: ", sprintf("%10.07f", cost(x, y, param)), "\n", sep = "")
print(param)
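# For contrast, a minimal stabilized sketch (illustrative choices, not from
# the original script): standardize every column except the intercept and use
# a learning rate below the stability threshold computed above. The cost
# should now fall at every step instead of exploding.
x_std <- x
x_std[, -1] <- scale(x_std[, -1])
param_std <- rep(0, ncol(x_std))
for (i in 1:iters) {
  cat("[", sprintf("%03d", i), "] Cost: ", sprintf("%10.07f", cost(x_std, y, param_std)), "\n", sep = "")
  param_std <- param_std - 0.01 * grad(x_std, y, param_std)
}
cat("Stabilized final cost: ", sprintf("%10.07f", cost(x_std, y, param_std)), "\n", sep = "")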