%tic
%clear;
%clc;
% shuffle (randomize) the data and save the results
%import data
opts = detectImportOptions('shuflenoise.csv','NumHeaderLines',1);
ctg_smoted_noise = readtable('shuflenoise.csv',opts);
ctg_smoted_noise.Properties.VariableNames(1:27) = {'A','LB','AC','FM','UC','DL','DS','DP','ASTV','MSTV','ALTV','MLTV','Width','Min','Max','Nmax','Nzeros','Mode','Mean','Median','Variance','Tendency','0','1','2','3','status'};
%summary of dataset
summary(ctg_smoted_noise);
% table dimensions (the dataset has 2644 rows)
[r, c] = size(ctg_smoted_noise);
% separation of dependent and independent variables
Xsmt = table2array(ctg_smoted_noise(:,2:26));
size_Xsmt = size(Xsmt,1)
Ysmt = table2array(ctg_smoted_noise(:,27));
%[Xsmt, mu, stddev] = normalize(Xsmt);
% store all variable (column) names
dep_variables = ctg_smoted_noise.Properties.VariableNames;
Xsmt_variables = ctg_smoted_noise(:,2:26);
% class distribution of the target
count_y0_y1 = tabulate(Ysmt)
% separate the data into training and test sets with a manual shuffled split (a cvpartition-based alternative is sketched just after this split)
split = round(size_Xsmt*0.70); %70/30 train/test
rng(123);
% random permutation of the row indices
seq = randperm(size_Xsmt);
% split X and y with the same shuffled indices so both keep the same train/test proportion
Xsmt_Train = Xsmt(seq(1:split),:);
ysmt_Train = Ysmt(seq(1:split));
Xsmt_Test = Xsmt(seq(split+1:end),:);
ysmt_Test = Ysmt(seq(split+1:end),:);
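% --- Alternative: the same 70/30 holdout via cvpartition (illustrative sketch) ---
% A minimal sketch only, assuming the intent was a cvpartition-based holdout as
% hinted at in the comment above; the *_cv variables are hypothetical and are
% not used anywhere else in this script.
cv_holdout    = cvpartition(size_Xsmt, 'Holdout', 0.30);  % 30% of rows go to the test fold
Xsmt_Train_cv = Xsmt(training(cv_holdout), :);
ysmt_Train_cv = Ysmt(training(cv_holdout));
Xsmt_Test_cv  = Xsmt(test(cv_holdout), :);
ysmt_Test_cv  = Ysmt(test(cv_holdout));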
% %% MultiLayer Perceptron training (superseded by the Bayesian optimization below)
%
% trainFcn = 'trainscg';
%
% net = patternnet([5 5], trainFcn);
% net.layers{1}.transferFcn = 'logsig';  % sigmoid, hidden layer 1
% net.layers{2}.transferFcn = 'logsig';  % sigmoid, hidden layer 2
% net.layers{3}.transferFcn = 'softmax'; % output layer (softmax over a single 0/1 target row is degenerate; use 'logsig' or one-hot targets)
% net.divideMode = 'none';               % train on all of Xsmt_Train (no internal validation split)
% net.performFcn = 'crossentropy';
% % traingdx combines an adaptive learning rate with momentum (overrides the trainscg set above)
% net.trainFcn = 'traingdx';
% net.trainParam.lr = 0.5;               % learning rate
% net.trainParam.epochs = 500;
%
% %% Training phase (the toolbox expects samples as columns, hence the transposes)
% [net, tr] = train(net, Xsmt_Train', ysmt_Train');
% view(net)
% save('randomNeural', 'net');           % save the model
%
% %% Evaluate model predictions and get confusion matrix / ROC
% Y_predict = net(Xsmt_Test');
% predicted = round(Y_predict);                  % round network outputs to hard labels
% err = gsubtract(ysmt_Test', predicted);        % error = target - predicted
% Performance = crossentropy(net, ysmt_Test', Y_predict)
% targetind = vec2ind(ysmt_Test');               % only meaningful for one-hot targets
% predictind = vec2ind(Y_predict);
% view(net)
% figure, plotconfusion(ysmt_Test', predicted)
% figure, plotroc(ysmt_Test', Y_predict)
%% Bayesian optimization of the MLP hyperparameters
XX = Xsmt_Train;
YY = ysmt_Train;
% hold out 1/3 of the training data for validation inside the optimization objective
cvmlp = cvpartition(size(YY,1), 'Holdout', 1/3);
%
% hyperparameter search space for bayesopt
optVars = [optimizableVariable('networkDepth', [1, 3], 'Type', 'integer');
    optimizableVariable('numHiddenNeurons', [1, 20], 'Type', 'integer');
    optimizableVariable('lr', [0.001, 1], 'Transform', 'log');
    optimizableVariable('momentum', [0.6, 0.95]);
    optimizableVariable('trainFcn', {'traingda', 'traingdm', 'traingdx', 'trainscg', 'trainoss'}, 'Type', 'categorical');
    optimizableVariable('transferFcn', {'logsig', 'poslin', 'tansig', 'purelin'}, 'Type', 'categorical')];
%% Optimization using crossloss (a user-defined objective function that trains the network and returns the loss to minimize)
mnfn = @(X)crossloss(XX', YY', cvmlp, X.networkDepth, X.numHiddenNeurons,...
X.lr, X.momentum, X.trainFcn, X.transferFcn);
res = bayesopt(mnfn, optVars, 'IsObjectiveDeterministic', false, ...
    'AcquisitionFunctionName', 'expected-improvement-plus', ...
    'MaxObjectiveEvaluations', 200);
X = bestPoint(res);
disp('Optimised')
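% --- Sketch of a possible crossloss objective (assumption, for illustration only) ---
% crossloss is defined in a separate file that is not shown here. The local
% function below is only a guess at what such an objective might look like,
% inferred from the arguments passed to it above: build a patternnet with
% networkDepth hidden layers of numHiddenNeurons units each, train it on the
% cvpartition's training fold, and return the cross-entropy on the holdout fold.
% It is named crossloss_sketch so it does not shadow the real crossloss.m, it
% assumes integer class labels starting at 0, and it needs R2016b or later
% (local functions in scripts).
function loss = crossloss_sketch(X, Y, cv, networkDepth, numHiddenNeurons, ...
                                 lr, momentum, trainFcn, transferFcn)
    trainFcn    = char(trainFcn);                        % bayesopt passes categoricals
    transferFcn = char(transferFcn);
    hidden = repmat(numHiddenNeurons, 1, networkDepth);  % e.g. depth 2, 10 units -> [10 10]
    net = patternnet(hidden, trainFcn);
    for k = 1:networkDepth
        net.layers{k}.transferFcn = transferFcn;         % same activation on every hidden layer
    end
    net.divideMode = 'none';                             % no extra internal split during training
    net.performFcn = 'crossentropy';
    net.trainParam.showWindow = false;
    if any(strcmp(trainFcn, {'traingda', 'traingdm', 'traingdx'}))
        net.trainParam.lr = lr;                          % learning rate (gradient-descent variants only)
    end
    if any(strcmp(trainFcn, {'traingdm', 'traingdx'}))
        net.trainParam.mc = momentum;                    % momentum (traingdm/traingdx only)
    end
    T = full(ind2vec(Y + 1));                            % 0,1,... labels -> one-hot target matrix
    trIdx = training(cv);  teIdx = test(cv);
    net  = train(net, X(:, trIdx), T(:, trIdx));
    loss = crossentropy(net, T(:, teIdx), net(X(:, teIdx)));
end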