% Regularized Extreme Learning Machine (RELM). Takes a cross-validation
% structure CVSets, the number of hidden neurons, the name of the
% hidden-layer activation function, and the regularization parameter C.
% Returns the summed confusion matrix arr, the labelled confusion-matrix
% table T2, and t, the mean training time per fold.
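%
% A minimal usage sketch (assumes a cvSets structure built elsewhere with
% the fields read below: ClassesNames, NValidations, LearningSet, TestingSet):
%   [arr, T2, t] = RELM(cvSets, 100, 'sigmoid', 2^5);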
function [arr,T2,t] = RELM(CVSets, nNeurons, activation, C)
% initial variables
rowName = CVSets.ClassesNames;
nValidations = CVSets.NValidations;
allResults = cell(nValidations, 1);
t = zeros(nValidations, 1); % preallocate per-fold training times
% activation function selection
switch activation
    case 'sigmoid'
        f = @(x)(1 ./ (1 + exp(-x)));
    case 'tanh'
        f = @(x)(tanh(x));
    case 'relu'
        f = @(x)(max(0,x));
    case 'rbf'
        f = @(x)(exp(-x.^2)); % Gaussian RBF, same as radbas but without the toolbox dependency
    case 'linear'
        f = @(x)(x);
    case 'swish'
        f = @(x)(x ./ (1 + exp(-x)));
    case 'ELiSH'
        f = @(x)(ELiSH(x));
    case 'HardTanH'
        f = @(x)(HardTanH(x));
    case 'TanhRe'
        f = @(x)(TanhRe(x));
    case 'ELUs'
        f = @(x)(ELUs(x));
    case 'Softplus'
        f = @(x)(log(1+exp(x)));
    case 'LReLU'
        f = @(x)(LReLU(x));
    case 'BinaryStep'
        f = @(x)(BinaryStep(x));
    otherwise
        error('RELM:unknownActivation', 'Unknown activation ''%s''.', activation);
end
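% Each handle maps its input elementwise; a quick sanity check such as
% f(0) (0.5 for 'sigmoid', 0 for 'tanh') confirms the selected function.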
% loop through all cross-validation folds
for i = 1:nValidations
    % confusion-matrix counts for this fold
    results = zeros(length(rowName), length(rowName));
    % get learning and testing set
    learningSet = CVSets.LearningSet{i};
    testingSet = CVSets.TestingSet{i};
    % start measuring time
    tic
    % train the ELM network on the values, labels, number of neurons,
    % activation function f and regularization parameter C; the result
    % is the random input-to-hidden weights w, the random biases b, and
    % the output weights beta computed by regularized least squares
    [w,b,beta] = EMLNetLearn(learningSet.Values, learningSet.Names, nNeurons, f, C);
    % stop measuring time and save the result
    t(i) = toc;
    % test the network by iterating through the testing set, store results
    for j = 1:size(testingSet.Values,1)
        % classify one testing vector using the learnt ELM parameters
        % (weights, biases, beta, number of neurons, activation function);
        % classification is the index of the predicted class
        classification = EMLNetTest(testingSet.Values(j,:)', w, b, beta, nNeurons, f);
        % accumulate counts for the confusion matrix
        row = find(rowName == testingSet.Names(j));
        results(row, classification) = results(row, classification) + 1;
    end
    % build the labelled confusion matrix for this fold
    rowNameI = strcat(rowName, "_" + i);
    allResults{i} = array2table(results,'RowNames',rowNameI,'VariableNames',rowName);
end
% sum all per-fold results into one table
arr = makeASummary(allResults);
T2 = array2table(arr,'RowNames',rowName,'VariableNames',rowName);
t = mean(t); % mean training time per fold
end
% train the ELM network: given values, labels, number of neurons,
% activation function f and regularization parameter C, return the
% weights, biases and beta that parameterize the network
function [w, b, beta] = EMLNetLearn(data, names, nNeurons, f, C)
data = data'; % samples as columns
N = size(data,2); % number of training samples
d = size(data,1); % length of a sample
L = nNeurons; % number of hidden units
% create the target output matrix
uNames = unique(names); % class names
[~,~,idxMap] = unique(names); % map each sample to its class index
T = zeros(N,length(uNames)); % one-hot target matrix
for i = 1:N
    T(i,idxMap(i)) = 1; % mark the true class of sample i
end
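% Equivalent one-liner (a sketch; needs R2016b+ implicit expansion):
%   T = double(idxMap(:) == 1:length(uNames));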
% random input-to-hidden weights and one random bias per hidden neuron
w = unifrnd(-1,1,[d,L]);
b = unifrnd(-1,1,[L,1]); % one bias per hidden neuron, reused at test time
H = zeros(N,L);
% hidden-layer output: activation f of the dot product of each input
% vector with the random weights, shifted by the neuron's bias
for i=1:N
    for j=1:L
        H(i,j) = f(dot(w(:,j),data(:,i)) + b(j));
    end
end
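% Equivalent vectorized form (a sketch; R2016b+ implicit expansion):
%   H = f(data'*w + b');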
% minimize the regularized least-squares cost ||H*beta - T||^2 + ||beta||^2/C;
% the closed-form solution uses a backslash solve instead of an explicit inverse
beta = H' * ((eye(N)/C + H*H') \ T);
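% The same minimizer can be obtained from the L-by-L system, which is
% cheaper when L < N (a sketch, not used above):
%   beta = (eye(L)/C + H'*H) \ (H'*T);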
end
% test the ELM network: takes a vector of values from the testing set and
% the learnt ELM parameters (weights, biases, beta, number of neurons,
% activation function) and returns classification, the index of the class
% the network assigns to the vector
function classification = EMLNetTest(data, w, b, beta, nNeurons, f)
N = size(data,2); % number of samples to classify (one column vector here)
d = size(data,1); % length of a sample
L = nNeurons; % number of hidden units
H = zeros(N,L);
% hidden-layer output: activation f of the dot product of the input
% vector with the learnt weights, shifted by the neuron's bias
for i=1:N
    for j=1:L
        H(i,j) = f(dot(w(:,j),data(:,i)) + b(j));
    end
end
% output layer (a monotonically increasing f leaves the argmax unchanged)
z = H*beta;
z = f(z);
% predicted class is the index of the largest output
[~,classification] = max(z);
end
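% A batched variant of the test step (a sketch; assumes R2016b+ implicit
% expansion) would classify a d-by-M matrix X of samples in one call:
%   Z = f(f(X'*w + b') * beta);
%   [~, classes] = max(Z, [], 2);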
%% activation functions
function y = ELiSH(x)
% x/(1+exp(-x)) for x >= 0, (exp(x)-1)/(1+exp(-x)) for x < 0
s = 1 ./ (1 + exp(-x));
y = s .* (x .* (x >= 0) + (exp(x) - 1) .* (x < 0));
end
function y = HardTanH(x)
% clip to [-1, 1]
y = max(-1, min(1, x));
end
function y = TanhRe(x)
% identity for x > 0, tanh for x <= 0
y = x .* (x > 0) + tanh(x) .* (x <= 0);
end
function y = ELUs(x)
% identity for x > 0, exp(x)-1 for x <= 0
y = x .* (x > 0) + (exp(x) - 1) .* (x <= 0);
end
function y = LReLU(x)
% leaky ReLU with slope 0.01 on the negative side
y = max(x, 0.01 * x);
end
function y = BinaryStep(x)
% 1 for x >= 0, 0 otherwise
y = double(x >= 0);
end