-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathTestGradientAlgorithms.m
executable file
·62 lines (47 loc) · 1.72 KB
/
TestGradientAlgorithms.m
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
clc
% FIX: was 'clear All' — MATLAB is case-sensitive, so that only cleared a
% variable named "All"; 'clear all' actually resets the workspace.
clear all
format long
%//////////////////////////////////////////////////
%// Variables for the Gradient based Algorithms: //
%// ============================================ //
%//////////////////////////////////////////////////
% Step size
h = 0.0001;
% Tolerance
tol = 0.0001;
% Absolute maximum number of iterations for the algorithm (termination criterion)
maxiter = 1e5;
% Starting point x0
x0 = [-6; -3];
% Step size damping
d = 0.0001;
% Experience-Parameter for the variable step size
sigma = 0.5;
% Experience-Parameter for the variable step size
q = 0.6;
% Experience-Parameter
delta = 0.5;
% Experience-Parameter
ro = 0.9;
%////////////////////////////////////////////
%// Functions to test the Algorithms with: //
%// ====================================== //
%////////////////////////////////////////////
fun1 = TestFunctions.rosenbrock(x0);
fun2 = TestFunctions.himmelblau(x0);
fun4 = TestFunctions.normalMin(x0);
fun5 = TestFunctions.saddle(x0);
fun6 = TestFunctions.valley(x0);
fun7 = TestFunctions.towel(x0);
fun8 = TestFunctions.eggcrate(x0);
fun9 = TestFunctions.ackley(x0);
%////////////////////////////////////////
%// Gradient based Algorithms to Test: //
%// ================================== //
%////////////////////////////////////////
% NOTE(review): 'range' is never defined in this script — presumably a plot
% axis range consumed by the algorithms when the plotting flag is true.
% Define it before this call (e.g. range = 10;) or the call will error.
SGD(fun4, x0, h, d, tol, maxiter, true, range)
% NOTE(review): 'fun3' below is never assigned (the fun list skips 3) —
% replace with an existing funN before uncommenting.
% GradientDescentVariable(fun3, x0, h, sigma, q, tol, maxiter, true, range)
% NewtonProcedure(fun7, x0, h, tol, maxiter, true, range)
% NewtonProcedureSubdued(fun7, x0, h, sigma, q, tol, maxiter, true, range)
% NewtonProcedureSimplified(fun7, x0, h, tol, maxiter, true, range)
% NewtonProcedureGlobal(fun4, x0, h, delta, ro, sigma, q, tol, maxiter, true, range)