Project: Roey Dvir - moac_ud_bet_2019
Kernel: Python 3 (Anaconda 5)
In [186]:
In [187]:
In [188]:
Out[188]:
Cancer data set dimensions : (569, 31)
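The loading code of this cell is not preserved in the export. A minimal sketch that would reproduce the reported shape, assuming the Wisconsin Diagnostic Breast Cancer data in a CSV file (the file name and column layout are assumptions):

    import pandas as pd

    # Hypothetical file name; the original data path is not shown in this export.
    df = pd.read_csv("data.csv")

    # 569 samples: one diagnosis column ('M'/'B') plus 30 numeric features.
    print("Cancer data set dimensions :", df.shape)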
In [189]:
In [190]:
Out[190]:
['M' 'B']
[1 0]
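The two printed arrays show the diagnosis labels and the numeric codes they were mapped to (malignant 'M' -> 1, benign 'B' -> 0). A sketch of that encoding step, assuming scikit-learn's LabelEncoder and a column named "diagnosis" (the column name is an assumption):

    from sklearn.preprocessing import LabelEncoder

    le = LabelEncoder()
    y = le.fit_transform(df["diagnosis"])          # 'B' -> 0, 'M' -> 1

    # Unique labels in order of appearance, and the numeric codes they map to.
    print(df["diagnosis"].unique())                # ['M' 'B']
    print(le.transform(df["diagnosis"].unique()))  # [1 0]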
In [191]:
Out[191]:
[-9.46200126e-16 7.23039719e-16 3.19119382e-16 -1.56212285e-16
-2.00844366e-17 5.13268936e-17 -5.13268936e-17 0.00000000e+00
1.16266572e-15 1.78528326e-16 -4.68636855e-17 -7.14113302e-17
2.25392011e-16 4.46320814e-17 -3.12424570e-16 9.37273709e-17
3.57056651e-17 -1.78528326e-16 -3.43667027e-16 -2.23160407e-18
1.85223138e-16 -5.75753850e-16 2.18697199e-16 1.22738224e-16
2.38781635e-16 1.33896244e-17 -7.81061424e-17 -1.76296721e-16
-2.14233991e-16 -1.08009637e-15]
[ 0.09042209 -0.09017492 0.08870921 0.12261548 -0.03214772 0.00590519
-0.01806404 -0.01493757 0.04392402 -0.03657289 0.15955937 -0.08822509
0.14820771 0.23762433 -0.12130755 -0.00697142 -0.05599851 -0.13639054
0.0279137 0.04923451 0.08896116 -0.13465217 0.08081482 0.11978937
-0.12920354 0.02132954 -0.00780344 -0.05244179 0.02996088 0.0527163 ]
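The two printed vectors are consistent with per-feature means after standardization: essentially zero (floating-point noise) on the split the scaler was fitted on, and small nonzero values on a held-out split transformed with the same scaler. A minimal sketch of that step; the split ratio and random seed are assumptions, though the Keras log further down reports samples: 398, i.e. roughly a 70/30 split:

    from sklearn.model_selection import train_test_split
    from sklearn.preprocessing import StandardScaler

    X = df.drop(columns=["diagnosis"]).values
    X_train, X_val, y_train, y_val = train_test_split(X, y, test_size=0.3, random_state=0)

    scaler = StandardScaler()
    X_train_s = scaler.fit_transform(X_train)   # fit on the training split only
    X_val_s = scaler.transform(X_val)           # reuse the training mean/std

    # Training-set means collapse to ~0; validation-set means stay small but nonzero.
    print(X_train_s.mean(axis=0))
    print(X_val_s.mean(axis=0))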
In [192]:
In [0]:
In [193]:
In [194]:
In [195]:
Out[195]:
Accuracy = 0.987 Sensitivity = 0.974 Specificity = 0.996 Precision = 0.993
Accuracy = 0.988 Sensitivity = 0.983 Specificity = 0.991 Precision = 0.983
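The classifiers behind these summaries are not shown in the export, but the repeated one-line format suggests a shared evaluation helper. A sketch of such a helper (the name report is hypothetical), assuming the malignant class is encoded as the positive label 1:

    from sklearn.metrics import confusion_matrix

    def report(y_true, y_pred, digits=3):
        # Confusion-matrix counts for a binary problem with labels {0, 1}.
        tn, fp, fn, tp = confusion_matrix(y_true, y_pred).ravel()
        acc = (tp + tn) / (tp + tn + fp + fn)
        sens = tp / (tp + fn)    # sensitivity / recall: true positive rate
        spec = tn / (tn + fp)    # specificity: true negative rate
        prec = tp / (tp + fp)    # precision: positive predictive value
        print("Accuracy =", round(acc, digits), "Sensitivity =", round(sens, digits),
              "Specificity =", round(spec, digits), "Precision =", round(prec, digits))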
In [196]:
In [197]:
In [198]:
In [199]:
Out[199]:
Accuracy = 0.977 Sensitivity = 0.966 Specificity = 0.982 Precision = 0.966
In [200]:
In [201]:
In [202]:
Out[202]:
Accuracy = 0.977 Sensitivity = 0.966 Specificity = 0.982 Precision = 0.966
In [203]:
In [204]:
In [205]:
Out[205]:
In [206]:
Out[206]:
SVC using kernel linear
Accuracy = 0.977 Sensitivity = 0.949 Specificity = 0.991 Precision = 0.982
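The cell producing this result is empty in the export. A sketch of a linear-kernel SVM on the standardized features, reusing the hypothetical split and report helper from the sketches above (hyperparameters are left at their defaults as an assumption):

    from sklearn.svm import SVC

    svc = SVC(kernel="linear")
    svc.fit(X_train_s, y_train)

    print("SVC using kernel linear")
    report(y_val, svc.predict(X_val_s))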
In [207]:
In [208]:
In [209]:
In [210]:
Out[210]:
max score is 0.9590643274853801 - at 5 features
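The search that produced this best score is not preserved; the fitted-parameter dump below (max_features: 5, random_state: 0) suggests a sweep over max_features of a DecisionTreeClassifier. A sketch of such a sweep, scored on the validation split (an assumption; the original may have scored differently):

    from sklearn.tree import DecisionTreeClassifier

    scores = {}
    for k in range(1, X_train_s.shape[1] + 1):
        tree = DecisionTreeClassifier(max_features=k, random_state=0)
        tree.fit(X_train_s, y_train)
        scores[k] = tree.score(X_val_s, y_val)

    best_k = max(scores, key=scores.get)
    print("max score is", scores[best_k], "- at", best_k, "features")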
In [211]:
Out[211]:
Accuracy = 1.0 Sensitivity = 1.0 Specificity = 1.0 Precision = 1.0
Accuracy = 0.959 Sensitivity = 0.966 Specificity = 0.955 Precision = 0.919
{'criterion': 'gini',
'splitter': 'best',
'max_depth': None,
'min_samples_split': 2,
'min_samples_leaf': 1,
'min_weight_fraction_leaf': 0.0,
'max_features': 5,
'random_state': 0,
'max_leaf_nodes': None,
'min_impurity_decrease': 0.0,
'min_impurity_split': None,
'class_weight': None,
'presort': False,
'n_features_': 30,
'n_outputs_': 1,
'classes_': array([0, 1]),
'n_classes_': 2,
'max_features_': 5,
'tree_': <sklearn.tree._tree.Tree at 0x7f1637239578>}
In [212]:
In [213]:
Out[213]:
Index(['concave points.mean', 'concave points.std', 'texture.w', 'perimeter.w',
'area.w'],
dtype='object') 5
In [214]:
Out[214]:
Index(['perimeter.mean', 'area.mean', 'concave points.mean', 'area.std',
'radius.w', 'perimeter.w', 'area.w', 'concavity.w', 'concave points.w'],
dtype='object') 9
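The two feature-name lists above come from cells whose code is not preserved. One plausible way to obtain such lists (an assumption, not necessarily the method used here) is to rank the columns by the fitted tree's feature_importances_ and keep those above a threshold, reusing the hypothetical df and tree from the earlier sketches:

    import pandas as pd

    feature_names = df.drop(columns=["diagnosis"]).columns
    importances = pd.Series(tree.feature_importances_, index=feature_names)

    # Keep the columns that contribute to the tree's splits (the cutoff is illustrative).
    selected = importances[importances > 0.01].index
    print(selected, len(selected))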
In [215]:
In [216]:
Out[216]:
Accuracy = 0.971 Sensitivity = 0.966 Specificity = 0.973 Precision = 0.95
In [217]:
Out[217]:
_________________________________________________________________
Layer (type) Output Shape Param #
=================================================================
hidden_1 (Dense) (None, 32) 992
_________________________________________________________________
hidden_2 (Dense) (None, 16) 528
_________________________________________________________________
output (Dense) (None, 1) 17
=================================================================
Total params: 1,537
Trainable params: 1,537
Non-trainable params: 0
_________________________________________________________________
TRAINING
Accuracy = 0.975 Sensitivity = 0.941 Specificity = 0.996 Precision = 0.993
VALIDATION
Accuracy = 0.982 Sensitivity = 0.966 Specificity = 0.991 Precision = 0.983
Train Loss: 0.096 || Train Accuracy: 0.975
Validation Loss: 0.095 || Validation Accuracy: 0.982
{'batch_size': 32, 'epochs': 48, 'steps': None, 'samples': 398, 'verbose': 0, 'do_validation': True, 'metrics': ['loss', 'acc', 'val_loss', 'val_acc']}
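The model definition is not included in the export, but the summary above pins down the architecture exactly: 30 inputs -> Dense(32) -> Dense(16) -> Dense(1), matching the parameter counts 992, 528 and 17, and the training parameters report batch_size 32 and 48 epochs. A sketch of that model; activations, optimizer and loss are assumptions:

    from keras.models import Sequential
    from keras.layers import Dense

    model = Sequential([
        Dense(32, activation="relu", input_dim=30, name="hidden_1"),
        Dense(16, activation="relu", name="hidden_2"),
        Dense(1, activation="sigmoid", name="output"),
    ])
    model.compile(optimizer="adam", loss="binary_crossentropy", metrics=["acc"])
    model.summary()

    history = model.fit(X_train_s, y_train,
                        validation_data=(X_val_s, y_val),
                        epochs=48, batch_size=32, verbose=0)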
In [0]:
Our Implementation of Neural Network
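The implementation itself is missing from this export; the log below prints an iteration index and loss values every three steps, and the final metrics (sensitivity 1.0, specificity 0.0) show the network collapsing to predicting every sample as positive. A minimal sketch of the kind of from-scratch network this heading refers to (architecture, names and training loop are all assumptions, not the original code):

    import numpy as np

    def sigmoid(z):
        return 1.0 / (1.0 + np.exp(-z))

    class TinyNet:
        # One hidden layer, sigmoid activations, binary cross-entropy, plain gradient descent.
        def __init__(self, n_in, n_hidden, lr=0.1, seed=0):
            rng = np.random.default_rng(seed)
            self.W1 = rng.normal(scale=0.1, size=(n_in, n_hidden))
            self.b1 = np.zeros(n_hidden)
            self.W2 = rng.normal(scale=0.1, size=(n_hidden, 1))
            self.b2 = np.zeros(1)
            self.lr = lr

        def forward(self, X):
            self.h = sigmoid(X @ self.W1 + self.b1)
            return sigmoid(self.h @ self.W2 + self.b2).ravel()

        def step(self, X, y):
            p = self.forward(X)
            n = len(y)
            d_out = (p - y)[:, None] / n              # dL/dz for the output unit
            dW2 = self.h.T @ d_out
            db2 = d_out.sum(axis=0)
            d_h = (d_out @ self.W2.T) * self.h * (1 - self.h)
            dW1 = X.T @ d_h
            db1 = d_h.sum(axis=0)
            for param, grad in [(self.W1, dW1), (self.b1, db1), (self.W2, dW2), (self.b2, db2)]:
                param -= self.lr * grad
            eps = 1e-12                               # clip to avoid log(0)
            return -np.mean(y * np.log(p + eps) + (1 - y) * np.log(1 - p + eps))

    # Illustrative training loop, printing the loss every three iterations.
    net = TinyNet(n_in=30, n_hidden=16)
    for i in range(300):
        loss = net.step(X_train_s, y_train)
        if i % 3 == 0:
            print(i, loss)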
In [218]:
In [220]:
Out[220]:
0 9.210340415134102 10.556881396878854
3 12.66421803330939 10.556881396878854
6 9.210340415134104 10.556881396878854
9 11.512925493917626 10.556881396878854
12 6.90775533635058 10.556881396878854
15 12.66421803330939 10.556881396878854
18 9.210340415134102 10.556881396878854
21 10.361632954525863 10.556881396878854
24 11.512925493917626 10.556881396878854
27 8.059047875742342 10.556881396878854
30 9.210340415134104 10.556881396878854
33 11.51292549391763 10.556881396878854
36 9.210340415134104 10.556881396878854
39 6.90775533635058 10.556881396878854
42 8.059047875742342 10.556881396878854
45 10.361632954525865 10.556881396878854
48 13.815510572701154 10.556881396878854
51 8.059047875742342 10.556881396878854
54 8.059047875742342 10.556881396878854
57 9.210340415134104 10.556881396878854
60 9.210340415134102 10.556881396878854
63 9.210340415134104 10.556881396878854
66 9.210340415134104 10.556881396878854
69 9.210340415134102 10.556881396878854
72 10.361632954525863 10.556881396878854
75 9.210340415134104 10.556881396878854
78 12.664218033309387 10.556881396878854
81 8.059047875742342 10.556881396878854
84 6.90775533635058 10.556881396878854
87 11.512925493917626 10.556881396878854
90 11.512925493917626 10.556881396878854
93 10.361632954525865 10.556881396878854
96 9.210340415134104 10.556881396878854
99 11.51292549391763 10.556881396878854
102 12.66421803330939 10.556881396878854
105 10.361632954525863 10.556881396878854
108 8.059047875742342 10.556881396878854
111 8.059047875742342 10.556881396878854
114 8.059047875742342 10.556881396878854
117 11.512925493917626 10.556881396878854
120 11.512925493917626 10.556881396878854
123 12.66421803330939 10.556881396878854
126 10.361632954525865 10.556881396878854
129 10.361632954525865 10.556881396878854
132 10.361632954525865 10.556881396878854
135 5.756462796958817 10.556881396878854
138 11.512925493917624 10.556881396878854
141 6.90775533635058 10.556881396878854
144 11.512925493917624 10.556881396878854
147 13.815510572701152 10.556881396878854
150 8.059047875742342 10.556881396878854
153 11.512925493917626 10.556881396878854
156 4.605170257567055 10.556881396878854
159 8.059047875742342 10.556881396878854
162 10.361632954525865 10.556881396878854
165 9.210340415134104 10.556881396878854
168 11.512925493917626 10.556881396878854
171 11.512925493917626 10.556881396878854
174 6.90775533635058 10.556881396878854
177 6.90775533635058 10.556881396878854
180 6.90775533635058 10.556881396878854
183 11.51292549391763 10.556881396878854
186 5.756462796958817 10.556881396878854
189 16.118095651484676 10.556881396878854
192 10.361632954525865 10.556881396878854
195 9.210340415134102 10.556881396878854
198 10.361632954525865 10.556881396878854
201 9.210340415134102 10.556881396878854
204 9.210340415134102 10.556881396878854
207 9.210340415134102 10.556881396878854
210 9.210340415134104 10.556881396878854
213 9.210340415134104 10.556881396878854
216 11.512925493917626 10.556881396878854
219 11.51292549391763 10.556881396878854
222 10.361632954525865 10.556881396878854
225 14.966803112092915 10.556881396878854
228 9.210340415134102 10.556881396878854
231 8.059047875742342 10.556881396878854
234 10.361632954525865 10.556881396878854
237 13.815510572701154 10.556881396878854
240 10.361632954525865 10.556881396878854
243 11.512925493917626 10.556881396878854
246 12.66421803330939 10.556881396878854
249 11.51292549391763 10.556881396878854
252 10.361632954525865 10.556881396878854
255 14.966803112092915 10.556881396878854
258 10.361632954525865 10.556881396878854
261 5.756462796958817 10.556881396878854
264 11.512925493917626 10.556881396878854
267 9.210340415134104 10.556881396878854
270 9.210340415134102 10.556881396878854
273 11.512925493917624 10.556881396878854
276 10.361632954525863 10.556881396878854
279 11.512925493917626 10.556881396878854
282 10.361632954525865 10.556881396878854
285 3.453877718175293 10.556881396878854
288 10.361632954525865 10.556881396878854
291 10.361632954525863 10.556881396878854
294 10.361632954525865 10.556881396878854
297 8.059047875742342 10.556881396878854
Accuracy = 0.345 Sensitivity = 1.0 Specificity = 0.0 Precision = 0.345