Skip to content

Instantly share code, notes, and snippets.

@cshjin
Last active September 10, 2017 01:49
Show Gist options
  • Save cshjin/887c6e9bde5751c118921303c04e5d63 to your computer and use it in GitHub Desktop.
Gradient descent example
%% Demonstrate the linear convergence rate of gradient descent
% Minimize the quadratic f(x) = 1/2*(x1^2 + 10*x2^2) with a fixed-step
% gradient method from a random start point.  Each iteration logs the
% decrease |f(x_k) - f(x_{k+1})|; the loop stops once successive objective
% values agree to within 1e-15, and the decrease history is plotted on a
% log scale (a straight line there indicates a linear rate).
alpha = 0.05;                           % fixed step size
f = @(x) 1/2*(x(1)^2 + 10 * x(2)^2);    % objective
first_grad = @(x) [x(1); 10*x(2)];      % gradient of the objective
x = rand(2, 1);                         % random starting point
iter = 0;
f_diff = 1;                             % loop sentinel: last objective decrease
f_diff_hist = [];                       % per-iteration decreases, for plotting
fprintf('ITER \t F_VAL \t F_VAL_U \t F_DIFF \n');
% fprintf('ITER \t\t F_VAL \t F_VAL_U \t F_DIFF \t F_GRAD \t F_GRAD_U \n');
fprintf('------------------------------------------------------------\n');
while f_diff > 10^-15
    iter = iter + 1;
    x_u = x - alpha * first_grad(x);    % one gradient step
    f_val = f(x);
    f_val_u = f(x_u);
    f_grad = first_grad(x);             % only used by the commented report above
    f_grad_u = first_grad(x_u);         % only used by the commented report above
    f_diff = abs(f_val - f_val_u);
    f_diff_hist(end + 1) = f_diff;      % append this iteration's decrease
    fprintf('%d \t %f \t %f \t %f \n', iter, f_val, f_val_u, f_diff);
    x = x_u;                            % accept the step
end
disp(x), disp(f_diff);
semilogy(f_diff_hist, '.')
%% Second demo: 1-D quadratic with a large step size
% Minimize f(x) = x^2 - 1 from a random scalar start.  With step 0.55 the
% update is x_u = x - 0.55*2*x = -0.1*x, so the iterates flip sign each
% step while shrinking toward 0.  The gradient before and after each step
% is reported alongside the objective decrease.
stepsize = 0.55;
x = rand();                  % random scalar starting point
f = @(x) (x^2 - 1);          % objective
first_grad = @(x) (2*x);     % gradient of the objective
iter = 0;
tol = 1;                     % loop sentinel: last objective decrease
fprintf('ITER \t F_VAL \t F_VAL_U \t F_DIFF \t F_GRAD \t F_GRAD_U \n');
fprintf('------------------------------------------------------------\n');
while tol > 10^-15
    iter = iter + 1;
    x_u = x - stepsize * first_grad(x);   % one gradient step
    f_val = f(x);
    f_val_u = f(x_u);
    f_grad = first_grad(x);               % gradient at the current point
    f_grad_u = first_grad(x_u);           % gradient at the updated point
    f_diff = abs(f_val - f_val_u);        % f_diff is printed below, keep it named
    tol = f_diff;
    fprintf('%d \t %f \t %f \t %f \t %f \t %f\n', iter, f_val, f_val_u, f_diff, f_grad, f_grad_u);
    x = x_u;                              % accept the step
end
disp(x), disp(tol);
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment