Implementation of Logistic Regression using MATLAB
% MyLogisticRegression
%
% by Faisal Wirakusuma
% Implementation of Logistic Regression, inspired by lecture materials from Andrew Ng
% http://openclassroom.stanford.edu/MainFolder/DocumentPage.php?course=MachineLearning&doc=exercises/ex4/ex4.html
%
classdef MyLogisticRegression < handle
    properties
        theta;
        sizem;
        sizen;
        % sigmoid function g(z), stored as an anonymous function handle
        g = @(z) 1.0 ./ (1.0 + exp(-z));
        % default number of iterations
        iterations = 5;
        % default learning rate
        learningrate = 0.1;
        % per-iteration cost, recorded as a convergence indicator
        J;
        % error rate on test data
        err = 0;
        % accuracy on test data
        acc = 0;
        trainingtime;
    end
    methods
        function setlearningrate(this, lrate)
            this.learningrate = lrate;
        end
        function setiterations(this, iter)
            this.iterations = iter;
        end
        function this = train(this, data, labels)
            tic
            % per-iteration cost, recorded as a convergence indicator
            this.J = zeros(this.iterations, 1);
            [this.sizem, this.sizen] = size(data);
            % prepend a column of ones for the intercept term
            temp_data = [ones(this.sizem, 1), data];
            % initialize theta (one weight per feature, plus the intercept)
            this.theta = zeros(this.sizen + 1, 1);
            for i = 1:this.iterations
                % hypothesis h = g(X * theta)
                h = this.g(temp_data * this.theta);
                % gradient of the cost: (1/m) * X' * (h - y)
                grdnt = (1/this.sizem) .* temp_data' * (h - labels);
                % Hessian: (1/m) * X' * diag(h .* (1 - h)) * X
                hessian = (1/this.sizem) .* temp_data' * diag(h .* (1 - h)) * temp_data;
                % cross-entropy cost J, recorded to check convergence
                this.J(i) = (1/this.sizem) * sum(-labels .* log(h) - (1 - labels) .* log(1 - h));
                % Update theta.
                % Newton's method: this.theta = this.theta - this.learningrate * (hessian \ grdnt);
                % Gradient descent: step along the negative gradient
                this.theta = this.theta - this.learningrate * grdnt;
            end
            this.trainingtime = toc;
        end % train
        function guess = test(this, test_data, test_labels)
            % Predict labels for the test data
            [test_m, ~] = size(test_data);
            test_data = [ones(test_m, 1), test_data];
            % sigmoid outputs in (0, 1)
            preds = this.g(test_data * this.theta);
            % threshold at 0.5 to obtain 0/1 predicted labels
            predictedLabels = double(preds > 0.5);
            % accuracy = fraction of predictions that match the true labels
            this.acc = sum(test_labels == predictedLabels) / length(test_labels);
            this.err = 1 - this.acc;
            % return the predictions
            guess = predictedLabels;
        end % test
    end % methods
end % classdef
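
A minimal usage sketch, assuming a small made-up dataset; the variables `data`, `labels`, `clf`, and `preds` below are illustrative and not part of the gist:

% Usage sketch with toy data (illustrative values only):
data   = [1; 2; 3; 6; 7; 8];      % m-by-n feature matrix (here n = 1)
labels = [0; 0; 0; 1; 1; 1];      % 0/1 column vector of length m
clf = MyLogisticRegression();     % handle object, so methods mutate it in place
clf.setiterations(200);
clf.setlearningrate(0.1);
clf.train(data, labels);
preds = clf.test(data, labels);   % predicted 0/1 labels
fprintf('accuracy: %.2f, error: %.2f, training time: %.4fs\n', ...
    clf.acc, clf.err, clf.trainingtime);
plot(1:clf.iterations, clf.J);    % recorded cost J should fall as training converges
xlabel('iteration'); ylabel('cost J');

Because the class derives from handle, the trained theta, accuracy, and cost history stay on the object after train and test return, which is why the sketch reads them off `clf` directly.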