Commit
redid tuning for incode, hist and asci
antoineBarbez committed May 31, 2019
1 parent 2d8d12d commit 5e599dc
Showing 93 changed files with 5,083 additions and 142 deletions.
Binary file modified .DS_Store
Binary file not shown.
3 changes: 2 additions & 1 deletion experiments/study_results/context.py
@@ -14,4 +14,5 @@
import neural_networks.hist.detect_feature_envy as hist_fe
import neural_networks.jdeodorant.detect_feature_envy as jdeodorant_fe

-import neural_networks.vote.detect as vote
+import neural_networks.vote.detect as vote
+import neural_networks.asci.predict as asci
11 changes: 10 additions & 1 deletion experiments/study_results/perfs_god_class.py
@@ -1,4 +1,4 @@
-from context import nnUtils, decor, hist_gc, jdeodorant_gc, vote
+from context import nnUtils, decor, hist_gc, jdeodorant_gc, vote, asci

import numpy as np

@@ -17,6 +17,7 @@
overall_prediction_hist = np.empty(shape=[0, 1])
overall_prediction_jd = np.empty(shape=[0, 1])
overall_prediction_vote = np.empty(shape=[0, 1])
+overall_prediction_asci = np.empty(shape=[0, 1])

overall_labels = np.empty(shape=[0, 1])
for system in systems:
@@ -40,6 +41,10 @@
prediction_vote = nnUtils.predictFromDetect('god_class', system, vote.detect('god_class', system))
overall_prediction_vote = np.concatenate((overall_prediction_vote, prediction_vote), axis=0)

+# Compute performances for ASCI
+prediction_asci = asci.predict('god_class', system)
+overall_prediction_asci = np.concatenate((overall_prediction_asci, prediction_asci), axis=0)

# Print performances for the considered system
print(system)
print(' |precision |recall |f_measure')
@@ -52,6 +57,8 @@
print('-------------------------------------------')
print('Vote |' + "{0:.3f}".format(nnUtils.precision(prediction_vote, labels)) + ' |' + "{0:.3f}".format(nnUtils.recall(prediction_vote, labels)) + ' |' + "{0:.3f}".format(nnUtils.f_measure(prediction_vote, labels)))
print('-------------------------------------------')
+print('ASCI |' + "{0:.3f}".format(nnUtils.precision(prediction_asci, labels)) + ' |' + "{0:.3f}".format(nnUtils.recall(prediction_asci, labels)) + ' |' + "{0:.3f}".format(nnUtils.f_measure(prediction_asci, labels)))
+print('-------------------------------------------')

print('\n')

@@ -67,3 +74,5 @@
print('-------------------------------------------')
print('Vote |' + "{0:.3f}".format(nnUtils.precision(overall_prediction_vote, overall_labels)) + ' |' + "{0:.3f}".format(nnUtils.recall(overall_prediction_vote, overall_labels)) + ' |' + "{0:.3f}".format(nnUtils.f_measure(overall_prediction_vote, overall_labels)))
print('-------------------------------------------')
+print('ASCI |' + "{0:.3f}".format(nnUtils.precision(overall_prediction_asci, overall_labels)) + ' |' + "{0:.3f}".format(nnUtils.recall(overall_prediction_asci, overall_labels)) + ' |' + "{0:.3f}".format(nnUtils.f_measure(overall_prediction_asci, overall_labels)))
+print('-------------------------------------------')
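
Note: the nnUtils.precision, nnUtils.recall and nnUtils.f_measure helpers called above live in utils/nnUtils.py, which is not shown in this diff. For reference, a minimal sketch of equivalent metrics, assuming pred and labels are numpy arrays of 0/1 values with the same shape (an illustration, not the repository's implementation):

import numpy as np

def precision(pred, labels):
    # Fraction of entities flagged by the detector that are true positives.
    flagged = pred.sum()
    return float((pred * labels).sum()) / flagged if flagged > 0 else 0.0

def recall(pred, labels):
    # Fraction of true positives that the detector flagged.
    positive = labels.sum()
    return float((pred * labels).sum()) / positive if positive > 0 else 0.0

def f_measure(pred, labels):
    # Harmonic mean of precision and recall.
    p, r = precision(pred, labels), recall(pred, labels)
    return 2 * p * r / (p + r) if (p + r) > 0 else 0.0
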
14 changes: 7 additions & 7 deletions experiments/training/train_asci.py
@@ -22,10 +22,10 @@ def parse_args():
parser.add_argument("antipattern", help="Either 'god_class' or 'feature_envy'.")
parser.add_argument("test_system", help="The name of the system to be used for testing.\n Hence, the training will be performed using all the systems except this one.")
parser.add_argument("-n_tree", type=int, default=10, help="The number of distinct trees to be trained and saved.")
-parser.add_argument("-min_samples_split", type=int, default=5)
-parser.add_argument("-max_features", default='log2')
+parser.add_argument("-min_samples_split", type=float, default=0.01)
+parser.add_argument("-max_features", default=None)
parser.add_argument("-max_depth", type=int, default=None)
-parser.add_argument("-min_samples_leaf", type=int, default=2)
+parser.add_argument("-min_samples_leaf", type=int, default=1)
return parser.parse_args()

# Build the dataset for asci, i.e., the labels are the indexes of the best tool for each input instance.
@@ -35,7 +35,7 @@ def parse_args():
# idx = 2: JDeodorant
def build_asci_dataset(antipattern, systems):
# Get real instances and labels
-instances, labels = nnUtils.build_dataset(antipattern, systems)
+instances, labels = nnUtils.build_dataset(antipattern, systems, True)

# Compute the performances of each tool in order to sort them accordingly
nb_tools = 3
@@ -46,7 +46,7 @@ def build_asci_dataset(antipattern, systems):
toolsOverallPredictions[i] = np.concatenate((toolsOverallPredictions[i], toolsPredictions[i]), axis=0)

toolsPerformances = [nnUtils.f_measure(pred, labels) for pred in toolsOverallPredictions]

# Indexes of the tools, sorted according to their performances on the training set
toolsSortedIndexes = np.argsort(np.array(toolsPerformances))

@@ -68,9 +68,9 @@ def build_asci_dataset(antipattern, systems):
# Remove the test system from the training set and build dataset
training_systems.remove(args.test_system)
x_train, y_train = build_asci_dataset(args.antipattern, training_systems)

# Test dataset, note that here y_test contains the real labels while y_train contains tools' indexes
-x_test, y_test = nnUtils.build_dataset(args.antipattern, [args.test_system])
+x_test, y_test = nnUtils.build_dataset(args.antipattern, [args.test_system], True)
toolsPredictions = asci.getToolsPredictions(args.antipattern, args.test_system)

# Train and compute ensemble prediction on test set
(remaining changes in this file not shown)
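
The rest of train_asci.py is collapsed above, so only the retuned argument defaults and the extra True flag passed to nnUtils.build_dataset are visible here. As a rough, hypothetical sketch of the ASCI scheme these changes feed into — a decision tree learns, for each instance, the index of the detection tool to trust, and the ensemble then answers with that tool's prediction — assuming scikit-learn trees, 1-D 0/1 label arrays, and the retuned defaults (an illustration, not the repository's asci module):

import numpy as np
from sklearn.tree import DecisionTreeClassifier

def build_asci_labels(tools_predictions, labels, tools_sorted_indexes):
    # tools_sorted_indexes: tool indexes sorted from weakest to strongest
    # overall F-measure (cf. np.argsort in build_asci_dataset above).
    # The ASCI label is the index of a tool that classifies the instance
    # correctly; iterating from weakest to strongest is one plausible
    # reading of the sorting step, the collapsed code may break ties
    # differently.
    asci_labels = []
    for i, label in enumerate(labels):
        chosen = tools_sorted_indexes[-1]  # fall back to the best overall tool
        for idx in tools_sorted_indexes:
            if tools_predictions[idx][i] == label:
                chosen = idx
                break
        asci_labels.append(chosen)
    return np.array(asci_labels)

def train_asci_tree(x_train, asci_labels):
    # Retuned defaults from parse_args(): min_samples_split=0.01 is now a
    # fraction of the training instances instead of an absolute count.
    tree = DecisionTreeClassifier(max_features=None, max_depth=None,
                                  min_samples_leaf=1, min_samples_split=0.01)
    return tree.fit(x_train, asci_labels)

def asci_predict(tree, x_test, tools_predictions):
    # The ensemble answers with the output of the tool selected per instance.
    selected = tree.predict(x_test)
    return np.array([tools_predictions[int(t)][i]
                     for i, t in enumerate(selected)])
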
9 changes: 6 additions & 3 deletions experiments/tuning/context.py
@@ -6,6 +6,9 @@

import utils.nnUtils as nnUtils

-import neural_networks.asci.predict as asci
-import neural_networks.smad.model as md
-import neural_networks.vote.detect as vote
+import neural_networks.asci.predict as asci
+import neural_networks.smad.model as md
+import neural_networks.vote.detect as vote
+import neural_networks.incode.detect as incode
+import neural_networks.hist.detect_god_class as hist_gc
+import neural_networks.hist.detect_feature_envy as hist_fe
@@ -0,0 +1,201 @@
Max features;Max depth;Min samples leaf;Min samples split;Accuracy
None;20;3;0.00392337327543;0.9942577426339075
None;70;3;0.00415137948766;0.9942308468148626
None;100;1;0.00121996639857;0.9941905030862953
None;10;2;0.00477694847714;0.9941905030862953
None;60;3;0.00115969306478;0.9941770551767727
log2;100;2;0.0012388910772;0.9941770551767727
None;40;5;0.00122544347873;0.9941367114482054
None;100;4;0.00244088298633;0.9941098156291605
None;90;2;0.00591068226943;0.9940829198101154
None;60;1;0.00354835352802;0.9940560239910706
None;20;2;0.00485624370972;0.9940425760815481
None;80;4;0.00180820420956;0.9940291281720257
None;90;2;0.0037083991576;0.9940156802625033
None;20;5;0.00420221098441;0.9939887844434584
None;70;4;0.00487983533242;0.9939887844434582
None;40;4;0.00520644531457;0.9939887844434582
sqrt;50;3;0.00108515208413;0.9939887844434582
None;60;1;0.00300603843205;0.9939753365339358
None;100;4;0.00182410236694;0.9939753365339358
log2;80;5;0.00231224049206;0.9939618886244134
None;40;1;0.00187001480839;0.9939484407148909
None;70;3;0.00501662753613;0.9939349928053685
log2;70;2;0.00322479945644;0.993921544895846
sqrt;100;4;0.00131865079804;0.9939080969863235
log2;10;2;0.00109909713983;0.9939080969863235
None;50;3;0.0019673450169;0.9939080969863235
sqrt;20;3;0.00227413510204;0.9938677532577561
log2;None;2;0.00103308758505;0.993867753257756
log2;10;4;0.00102524668605;0.9938543053482336
None;30;1;0.00333983372034;0.9938274095291887
sqrt;50;5;0.00380724243318;0.9938274095291887
None;60;4;0.00525217173773;0.9938274095291887
sqrt;70;5;0.00238724362088;0.9938139616196663
sqrt;80;4;0.00175817429386;0.9938139616196663
sqrt;None;3;0.00170158001533;0.9938005137101437
log2;60;4;0.00130314857902;0.9938005137101437
None;80;3;0.00277220328056;0.9937870658006213
sqrt;60;3;0.0028170391585;0.9937601699815763
log2;50;1;0.00165346087831;0.9937332741625315
None;70;2;0.00235949515851;0.993719826253009
sqrt;80;1;0.00188297763754;0.993719826253009
sqrt;50;5;0.0010494052027;0.993719826253009
sqrt;40;3;0.00445419193036;0.9937063783434866
None;80;2;0.00216938299016;0.9937063783434864
sqrt;70;5;0.00116227878996;0.9937063783434864
log2;None;1;0.00273957938143;0.993692930433964
log2;90;5;0.00325838163011;0.993692930433964
None;70;3;0.00273292319368;0.9936929304339639
None;20;5;0.00224412943196;0.9936794825244416
None;100;1;0.0018572096612;0.9936660346149191
None;40;1;0.00224580575892;0.9936660346149191
sqrt;80;3;0.00380498226201;0.9936660346149191
log2;80;5;0.00446703978821;0.9936660346149191
log2;100;4;0.00146585722624;0.9936660346149191
sqrt;20;1;0.00267220424968;0.9936391387958742
sqrt;80;1;0.0048573653306;0.9936122429768294
log2;30;2;0.00190429792201;0.9936122429768292
log2;50;3;0.00113849973717;0.9935987950673068
sqrt;90;4;0.00363814052683;0.9935987950673068
sqrt;100;2;0.00657126655655;0.9935853471577843
sqrt;None;1;0.00458924174952;0.9935853471577843
sqrt;20;2;0.00184253796299;0.9935853471577842
sqrt;30;2;0.0554985951232;0.9935718992482618
log2;30;4;0.0801409856068;0.9935718992482618
sqrt;60;5;0.00279975978895;0.9935718992482618
sqrt;10;3;0.0136143793389;0.9935718992482618
log2;70;5;0.0096853902825;0.9935718992482618
log2;30;4;0.00171546069702;0.9935450034292169
sqrt;90;5;0.00426829241967;0.9935315555196945
log2;50;4;0.0182329091487;0.9935315555196945
log2;10;2;0.018739430622;0.9935315555196945
None;50;2;0.0063098261406;0.9935181076101719
log2;60;3;0.0616597463597;0.9935181076101719
sqrt;90;2;0.0176560390633;0.9935181076101719
sqrt;60;1;0.0869667350808;0.9935181076101719
log2;70;1;0.0858161337767;0.9935181076101719
None;30;1;0.0590171156449;0.9935046597006495
None;20;1;0.0274806138515;0.9935046597006495
None;80;2;0.0147259657302;0.9935046597006495
None;10;3;0.0205178447634;0.9935046597006495
None;None;1;0.048128428843;0.9935046597006495
None;90;4;0.0211157443475;0.9935046597006495
log2;30;3;0.0338011015856;0.9935046597006495
None;40;4;0.0276606290669;0.9935046597006495
sqrt;40;5;0.0905025582849;0.9935046597006495
None;80;1;0.0562151318527;0.9935046597006495
sqrt;10;3;0.0127083139509;0.9935046597006495
None;10;1;0.0541854553682;0.9935046597006495
None;100;5;0.0497609679425;0.9935046597006495
log2;70;5;0.0587009257043;0.9935046597006495
None;10;2;0.0562636064637;0.9935046597006495
None;80;1;0.0475001552817;0.9935046597006495
None;50;1;0.0386064580234;0.9935046597006495
sqrt;10;2;0.011895533503;0.9935046597006495
None;40;3;0.0215167225039;0.9935046597006495
None;80;1;0.0769090442149;0.9935046597006495
None;40;3;0.0275906181228;0.9935046597006495
None;10;4;0.0326538210915;0.9935046597006495
log2;40;5;0.0123217029708;0.9935046597006495
None;30;4;0.0298679402774;0.9935046597006495
log2;40;5;0.0194243925516;0.9935046597006495
log2;30;1;0.0328617916308;0.9935046597006495
None;70;4;0.0202526870031;0.9935046597006495
None;50;2;0.0152487874884;0.9935046597006495
sqrt;50;2;0.028919030422;0.9935046597006495
sqrt;100;5;0.0292550389498;0.9935046597006495
None;40;1;0.0654562387198;0.9935046597006495
None;100;5;0.0455729696834;0.9935046597006495
None;10;3;0.066194791414;0.9935046597006495
sqrt;100;4;0.0857734453956;0.9935046597006495
None;10;1;0.0646732080748;0.9935046597006495
None;20;3;0.0162291939874;0.9935046597006495
None;40;2;0.0552152219992;0.9935046597006495
None;20;1;0.0150476044085;0.9935046597006495
log2;20;2;0.0338528425332;0.9935046597006495
None;80;4;0.0987782832848;0.9935046597006495
None;None;2;0.0641185189127;0.9935046597006495
None;40;4;0.0237050172347;0.9935046597006495
None;100;4;0.0951565292562;0.9935046597006495
sqrt;70;2;0.0714487279671;0.9935046597006495
None;90;5;0.0226839264625;0.9935046597006495
None;40;4;0.0328997041525;0.9935046597006495
None;60;4;0.0193038966679;0.9935046597006495
log2;70;2;0.00697356191135;0.9935046597006494
log2;90;3;0.0209289135673;0.9934912117911271
log2;None;3;0.0675838603511;0.9934912117911271
sqrt;None;4;0.0694617081091;0.993491211791127
log2;20;5;0.00930638684086;0.993491211791127
sqrt;20;4;0.0277515008483;0.993491211791127
sqrt;30;1;0.0886286537902;0.993491211791127
sqrt;80;4;0.041077389034;0.993491211791127
log2;50;1;0.0234319941768;0.993491211791127
log2;None;2;0.0728107741898;0.9934777638816046
sqrt;100;5;0.0108069495934;0.9934777638816046
log2;40;3;0.0920319822527;0.9934777638816046
log2;30;2;0.0195269312029;0.9934777638816046
sqrt;30;4;0.0351954307351;0.9934643159720822
sqrt;30;2;0.0133629917614;0.9934643159720822
sqrt;60;3;0.0110457434137;0.9934643159720822
log2;20;3;0.0186372031541;0.9934643159720821
sqrt;100;2;0.0663927094853;0.9934643159720821
log2;50;4;0.0364023282734;0.9934643159720821
sqrt;100;4;0.0982723195595;0.9934643159720821
None;100;5;0.00647076989088;0.9934643159720821
log2;70;3;0.00155253018635;0.9934508680625598
log2;30;4;0.0712718225633;0.9934508680625597
log2;70;4;0.0602578890734;0.9934508680625597
log2;90;2;0.0156604560842;0.9934508680625597
sqrt;90;2;0.00379147259782;0.9934508680625597
sqrt;90;2;0.0260698999586;0.9934374201530373
sqrt;30;1;0.0438059440669;0.9934239722435149
log2;None;2;0.00158742251053;0.9934239722435149
sqrt;None;4;0.00290008048808;0.9934239722435146
sqrt;20;3;0.0163738996902;0.9934239722435146
None;10;1;0.001693928926;0.9934105243339922
log2;70;2;0.00898369994606;0.9934105243339922
sqrt;20;3;0.0116280634477;0.9934105243339922
sqrt;80;4;0.0239458837957;0.9934105243339922
sqrt;40;3;0.0246829233848;0.9934105243339922
log2;60;2;0.0410252529913;0.9933970764244698
log2;10;2;0.00643159487821;0.9933970764244697
sqrt;10;1;0.00849028251372;0.9933970764244697
sqrt;40;5;0.0214515977709;0.9933970764244697
None;60;3;0.0117067791382;0.9933970764244697
log2;50;4;0.0292546284391;0.9933836285149473
log2;70;3;0.0387162090028;0.9933836285149473
sqrt;40;3;0.00817649721061;0.9933701806054249
log2;80;2;0.0237875606089;0.9933701806054249
log2;60;2;0.00575822867305;0.9933701806054249
sqrt;10;2;0.00803523427005;0.9933701806054249
log2;40;5;0.0468930669902;0.9933701806054248
sqrt;30;3;0.0184261518972;0.9933567326959025
log2;80;1;0.00198280621575;0.9933567326959025
sqrt;50;3;0.045855598287;0.9933567326959025
sqrt;50;2;0.0435606998212;0.9933567326959024
log2;80;4;0.0442424876901;0.9933567326959024
sqrt;60;5;0.0102270946447;0.9933567326959024
sqrt;60;3;0.00583216371344;0.99334328478638
None;20;5;0.0117417984026;0.99334328478638
log2;10;1;0.0249246683838;0.99334328478638
sqrt;10;3;0.00784336639625;0.99334328478638
None;40;5;0.00657930798762;0.99334328478638
sqrt;50;3;0.00759143188358;0.9933298368768576
None;30;4;0.0110211567576;0.993316388967335
sqrt;90;5;0.0284826415299;0.993316388967335
log2;30;3;0.00618932493489;0.993316388967335
log2;10;3;0.0115153795226;0.9932894931482901
log2;80;2;0.0111608395665;0.9932894931482901
sqrt;10;1;0.0863462621226;0.9932894931482901
sqrt;30;5;0.0178166631992;0.9932491494197228
log2;20;1;0.0018579256717;0.9932088056911553
log2;40;4;0.00834515198867;0.9931415661435429
log2;50;3;0.00422514819828;0.9931146703244981
None;50;1;0.008194724872;0.9930474307768856
sqrt;90;3;0.00224589406774;0.9930339828673632
None;10;1;0.00857113712732;0.9930205349578408
None;20;5;0.0076453020145;0.9930070870483183
None;70;3;0.0098167002041;0.9929263995911836
sqrt;70;2;0.00214203849601;0.9928995037721386
None;60;2;0.0105285074682;0.9927919204959589
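
The 200 rows above rank sampled hyper-parameter combinations by accuracy, best first, in a semicolon-separated layout. The script that produced them is not among the files loaded above; a minimal sketch of how such a file could be generated with a random search over the same value ranges, assuming scikit-learn decision trees and cross-validated accuracy (the helper names here are hypothetical):

import csv
import random

from sklearn.model_selection import cross_val_score
from sklearn.tree import DecisionTreeClassifier

def sample_parameters():
    # Sample one combination, mirroring the value ranges seen in the rows above.
    return {
        'max_features': random.choice([None, 'sqrt', 'log2']),
        'max_depth': random.choice([None] + list(range(10, 101, 10))),
        'min_samples_leaf': random.randint(1, 5),
        'min_samples_split': 10 ** random.uniform(-3, -1),
    }

def tune(x, y, n_iter=200, output_file='tuning_results.csv'):
    rows = []
    for _ in range(n_iter):
        params = sample_parameters()
        tree = DecisionTreeClassifier(**params)
        accuracy = cross_val_score(tree, x, y, cv=5, scoring='accuracy').mean()
        rows.append([str(params['max_features']), str(params['max_depth']),
                     params['min_samples_leaf'], params['min_samples_split'],
                     accuracy])

    # Best combinations first, written in the same semicolon-separated layout.
    rows.sort(key=lambda r: r[-1], reverse=True)
    with open(output_file, 'w') as output:
        writer = csv.writer(output, delimiter=';')
        writer.writerow(['Max features', 'Max depth', 'Min samples leaf',
                         'Min samples split', 'Accuracy'])
        writer.writerows(rows)
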
(remaining changed files not shown)
