## GRADIENT DESCENT ALGORITHM FOR ONE-VARIABLE REGRESSION ANALYSIS
# Data are from an online course taught by Andrew Ng of Stanford University,
# provided by Coursera
# https://github.com/ahawker/machine-learning-coursera
############## Reading the data set from an online source ##############
URL_subs <- "https://goo.gl/zEVbcU"
data <- read.table(URL_subs, header=FALSE, sep=",")
x.test <- data[,1] # independent variable
y.test <- data[,2] # dependent variable
m <- length(y.test) # number of data points in the training set
############## Drawing a scatter plot of the data ##############
library(ggplot2) # one of the best R packages for plotting
data <- data.frame(x=x.test, y=y.test)
g <- ggplot(data, aes(x=x, y=y)) +
  geom_point(alpha=1/3, size=4) +
  geom_smooth(method="lm", se=F, col="steelblue") +
  labs(title = "Linear Regression - Example Data")
g
# Add a column of 1s to the input variable x (so that the intercept term
# theta_{0} can be estimated)
x.test <- matrix(c(rep(1, length(x.test)), x.test), ncol=2)
head(x.test)
############## Gradient Descent method ##############
theta <- c(0, 0) # initial values for the theta parameters
iterations <- 1500 # number of iterations
alpha <- 0.01 # learning rate
# Define the cost function J(theta) = (1/(2m)) * sum((X %*% theta - Y)^2)
CostFunction <- function(X, Y, theta){
  m <- length(Y) # number of observations in the model
  J <- 1/(2*m) * sum(((X %*% theta) - Y)^2)
  return(J)
}
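# Quick sanity check (illustrative addition, not part of the original gist):
# evaluate the cost at the initial theta = c(0, 0) to get a baseline value
# before any gradient steps. For the Coursera ex1 data set used here, this
# should come out to roughly 32.07.
CostFunction(x.test, y.test, c(0, 0))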
# Define the Gradient Descent method for one-variable regression
GradientDescent <- function(X, Y, theta, alpha, num_iters){
  m <- length(Y)
  J_history <- rep(0, num_iters) # cost recorded at every iteration
  for (i in 1:num_iters){
    predictions <- X %*% theta                   # current fitted values
    updates <- t(X) %*% (predictions - Y)        # gradient of the cost (times m)
    theta <- theta - alpha * (1/m) * updates     # simultaneous parameter update
    J_history[i] <- CostFunction(X, Y, theta)
  }
  list("theta" = theta, "J_history" = J_history)
}
# Results
result.test <- GradientDescent(x.test, y.test, theta, alpha, iterations)
theta <- result.test$theta
theta
# The theta values obtained with this method are as follows.
#          [,1]
# [1,] -3.630291   # theta_{0}
# [2,]  1.166362   # theta_{1}
# Prediction equation: y = -3.630291 + 1.166362 x
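# Cross-check (illustrative addition, not in the original gist): the same
# coefficients can be obtained in closed form from the normal equation
# theta = (X'X)^{-1} X'y, or with R's built-in lm(). Gradient descent with
# 1500 iterations should land very close to these values.
theta_exact <- solve(t(x.test) %*% x.test, t(x.test) %*% y.test)
theta_exact
coef(lm(y.test ~ x.test[, 2]))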
# Compute the fitted y values using the obtained prediction equation and
# plot them
data <- data.frame(x = x.test[,2], y = y.test, y.hat = as.vector(x.test %*% theta))
ggplot(data, aes(x=x, y=y)) +
  geom_point(alpha=1/3, size=4) +
  geom_line(aes(y=y.hat, color="Gradient Descent"), size=1) +
  geom_smooth(method="lm", se=F, aes(color="Training Set")) +
  scale_colour_manual(name="Method",
                      values=c("Gradient Descent"="red", "Training Set"="steelblue")) +
  theme(legend.position = "bottom") +
  labs(title = "Gradient Descent - Results")
# Plot the cost function value obtained at each iteration
data <- data.frame(x=seq(1, length(result.test$J_history)),
                   y=result.test$J_history)
ggplot(data, aes(x=x, y=y)) +
  geom_line() +
  labs(title="Gradient Descent iterations",
       x="Iterations", y="Cost J")