GreyWolfOptimizer.m
% [2014] - "Grey Wolf Optimizer" (Mirjalili, Mirjalili & Lewis,
%          Advances in Engineering Software 69:46-61)
% (9/12/2020)
function GWO = GreyWolfOptimizer(fun, classifierList, opts)
% GreyWolfOptimizer  Grey Wolf Optimizer (GWO) for classifier selection.
%   fun            - fitness handle, called as fun(x, thres); lower is better
%   classifierList - cell array of candidate classifiers (defines dimension)
%   opts           - struct with fields N (population size), T (maximum
%                    number of iterations) and optional thres (selection
%                    threshold, default 0.5)

% Parameters
lb    = 0;
ub    = 1;
thres = 0.5;
if isfield(opts,'N'),     N        = opts.N;     end
if isfield(opts,'T'),     max_Iter = opts.T;     end
if isfield(opts,'thres'), thres    = opts.thres; end

% Number of candidate classifiers (problem dimension)
num = length(classifierList);

% Initial population: N wolves, uniformly random in [lb, ub]^num
X = zeros(N,num);
for i = 1:N
  for d = 1:num
    X(i,d) = lb + (ub - lb) * rand();
  end
end

% Initial fitness
fit = zeros(1,N);
for i = 1:N
  fit(i) = fun(X(i,:), thres);
end
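
% Each wolf X(i,:) is a row of continuous weights in [lb, ub], one entry per
% classifier. The fitness handle receives the raw weights together with
% `thres`; at the end of the search the alpha wolf's weights are thresholded
% at `thres` to pick the final classifier subset.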
% Sort fitness and take the three best wolves as alpha, beta & delta
[~, idx] = sort(fit,'ascend');
Xalpha = X(idx(1),:);
Xbeta  = X(idx(2),:);
Xdelta = X(idx(3),:);
Falpha = fit(idx(1));
Fbeta  = fit(idx(2));
Fdelta = fit(idx(3));
% Pre-allocate convergence curve
curve    = zeros(1,max_Iter);
curve(1) = Falpha;
t = 2;
% Iterations
while t <= max_Iter
  % Coefficient a decreases linearly from 2 to 0 over the iterations
  a = 2 - t * (2 / max_Iter);
  for i = 1:N
    for d = 1:num
      % Parameter C (3.4)
      C1 = 2 * rand();
      C2 = 2 * rand();
      C3 = 2 * rand();
      % Compute Dalpha, Dbeta & Ddelta (3.5)
      Dalpha = abs(C1 * Xalpha(d) - X(i,d));
      Dbeta  = abs(C2 * Xbeta(d)  - X(i,d));
      Ddelta = abs(C3 * Xdelta(d) - X(i,d));
      % Parameter A (3.3)
      A1 = 2 * a * rand() - a;
      A2 = 2 * a * rand() - a;
      A3 = 2 * a * rand() - a;
      % Compute X1, X2 & X3 (3.6)
      X1 = Xalpha(d) - A1 * Dalpha;
      X2 = Xbeta(d)  - A2 * Dbeta;
      X3 = Xdelta(d) - A3 * Ddelta;
      % Update wolf position as the average of X1, X2 & X3 (3.7)
      X(i,d) = (X1 + X2 + X3) / 3;
    end
    % Clip the position back into the [lb, ub] boundary
    XB = X(i,:); XB(XB > ub) = ub; XB(XB < lb) = lb;
    X(i,:) = XB;
  end
  % Evaluate fitness and update alpha, beta & delta
  for i = 1:N
    fit(i) = fun(X(i,:), thres);
    if fit(i) < Falpha
      Falpha = fit(i);
      Xalpha = X(i,:);
    end
    if fit(i) < Fbeta && fit(i) > Falpha
      Fbeta = fit(i);
      Xbeta = X(i,:);
    end
    if fit(i) < Fdelta && fit(i) > Falpha && fit(i) > Fbeta
      Fdelta = fit(i);
      Xdelta = X(i,:);
    end
  end
  % Record best-so-far fitness for the convergence curve
  curve(t) = Falpha;
  % fprintf('\nIteration %d Best (GWO)= %f',t,curve(t))
  t = t + 1;
end
% Select the classifiers whose alpha-wolf weight exceeds the threshold
Pos = 1:num;
Sc  = Pos(Xalpha > thres);
fprintf('\nSelected classifiers: %s | Best fitness: %f\n', num2str(Sc), Falpha);
% Store results
GWO.sc = Sc;          % indices of selected classifiers
GWO.nf = length(Sc);  % number of selected classifiers
GWO.c  = curve;       % convergence curve (best fitness per iteration)
end
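
% -------------------------------------------------------------------------
% Usage sketch: a minimal, hypothetical example of driving GreyWolfOptimizer.
% It is written as a local function, so it is only callable from within this
% file; the classifier names, option values and the toy fitness handle are
% placeholders, not part of the original toolbox.
function demo_GreyWolfOptimizer()
  classifierList = {'knn', 'svm', 'tree'};  % hypothetical classifier names
  opts.N     = 10;                          % population size (number of wolves)
  opts.T     = 50;                          % maximum number of iterations
  opts.thres = 0.5;                         % selection threshold
  % Toy fitness to minimize (placeholder): prefer subsets of exactly two classifiers
  fun = @(x, thres) abs(sum(x > thres) - 2);
  GWO = GreyWolfOptimizer(fun, classifierList, opts);
  fprintf('Selected indices: %s (nf = %d)\n', num2str(GWO.sc), GWO.nf);
end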