In [1]:
from keras.models import Sequential
from keras.layers import Dense
import pandas as pd
import numpy as np
import pickle
In [2]:
data = pd.read_csv("mlh1.csv", header=None)
print(data.shape)
small = data.dropna()            # drop any rows with missing values
print(small.shape)
X = small.iloc[:, 0:4].values    # first four columns are the features
y = small.iloc[:, 4].values      # last column is the binary label
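Before fitting, it can help to confirm how many rows dropna() actually removed and how the 0/1 label in column 4 is distributed; a minimal sketch using the same data/small frames defined above:

# how many rows contained missing values, and how balanced are the classes?
print("rows dropped:", len(data) - len(small))
print(small[4].value_counts())   # column 4 holds the 0/1 label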
In [3]:
print(data)
          0       1       2       3  4
0    0.9908 -0.6535  0.6548 -2.2655  0
1   -0.3444  0.5817  1.8871 -1.2221  0
2    0.1206  0.9814  1.1101 -1.1757  0
3    0.9666 -0.1179  0.5870  0.2025  0
4   -0.3356  1.3352  1.5351 -0.6308  0
..      ...     ...     ...     ... ..
218  1.7897 -0.1683 -0.2239  0.0128  0
219  0.8280  1.7848  0.4983 -1.0822  0
220 -1.0404  0.0381 -1.8959 -0.4171  0
221 -2.0500 -0.7193  0.3134 -2.6699  0
222  1.4275 -0.1427  0.1080 -2.5215  0

[223 rows x 5 columns]
In [4]:
model = Sequential()
model.add(Dense(12, input_dim=4, activation='relu'))   # hidden layer, 4 input features
model.add(Dense(8, activation='relu'))                 # second hidden layer
model.add(Dense(1, activation='sigmoid'))              # output: probability of class 1
In [5]:
model.compile(loss='binary_crossentropy', optimizer='adam', metrics=['accuracy'])
In [6]:
history = model.fit(X, y, epochs=150, batch_size=10)
Epoch 1/150
23/23 [==============================] - 1s 2ms/step - loss: 0.6689 - accuracy: 0.6054
Epoch 2/150
23/23 [==============================] - 0s 2ms/step - loss: 0.5538 - accuracy: 0.8655
Epoch 3/150
23/23 [==============================] - 0s 1ms/step - loss: 0.4537 - accuracy: 0.9013
Epoch 4/150
23/23 [==============================] - 0s 1ms/step - loss: 0.3808 - accuracy: 0.9013
Epoch 5/150
23/23 [==============================] - 0s 1ms/step - loss: 0.3270 - accuracy: 0.8969
... (epochs 6-149 omitted: loss decreases steadily to about 0.10, accuracy plateaus around 0.95-0.96)
Epoch 150/150
23/23 [==============================] - 0s 1ms/step - loss: 0.0998 - accuracy: 0.9552
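The history captured above contains only training metrics, because no validation data was passed to fit(). If validation curves are wanted for the plots below, a minimal sketch of an alternative call (the 0.2 split fraction is an illustrative choice, not taken from the original run):

history = model.fit(X, y, epochs=150, batch_size=10, validation_split=0.2)
# history.history then also contains 'val_loss' and 'val_accuracy'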
In [9]:
import matplotlib.pyplot as plt

plt.figure(figsize=(8, 6), dpi=300, facecolor='w', edgecolor='k')
plt.plot(history.history['accuracy'])
plt.title('CRC mlh1 training accuracy')
plt.ylabel('accuracy')
plt.xlabel('epoch')
plt.legend(['accuracy'], loc='lower right')
plt.show()
In [11]:
import matplotlib.pyplot as plt

plt.figure(figsize=(8, 6), dpi=300, facecolor='w', edgecolor='k')
plt.plot(history.history['loss'])
plt.title('CRC mlh1 training loss')
plt.ylabel('loss')
plt.xlabel('epoch')
plt.legend(['loss'], loc='upper right')
plt.show()
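The two figures above can also be drawn side by side from the same history object; a small sketch using matplotlib subplots:

import matplotlib.pyplot as plt

fig, (ax1, ax2) = plt.subplots(1, 2, figsize=(12, 5), dpi=150)
ax1.plot(history.history['accuracy'])
ax1.set(title='CRC mlh1 training accuracy', xlabel='epoch', ylabel='accuracy')
ax2.plot(history.history['loss'])
ax2.set(title='CRC mlh1 training loss', xlabel='epoch', ylabel='loss')
plt.tight_layout()
plt.show()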
In [12]:
model.fit(X, y, epochs=150, batch_size=10)   # continues training from the weights already learned above
Epoch 1/150
23/23 [==============================] - 0s 2ms/step - loss: 0.0993 - accuracy: 0.9552
Epoch 2/150
23/23 [==============================] - 0s 2ms/step - loss: 0.0992 - accuracy: 0.9552
... (epochs 3-149 omitted: loss drifts down slowly from about 0.099 to about 0.079)
Epoch 150/150
23/23 [==============================] - 0s 1ms/step - loss: 0.0784 - accuracy: 0.9596
Out[12]:
<keras.callbacks.History at 0x21c0c864d08>
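pickle is imported at the top but never used in this section; if the intent is to persist the trained network, Keras models are usually saved with the built-in save/load calls rather than pickle. A sketch (the filename is illustrative):

model.save("mlh1_model.h5")              # saves architecture + weights
# from keras.models import load_model
# restored = load_model("mlh1_model.h5")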
In [13]:
_, accuracy = model.evaluate(X, y)           # evaluated on the same data used for training
print('Accuracy: %.2f' % (accuracy * 100))
7/7 [==============================] - 0s 2ms/step - loss: 0.0773 - accuracy: 0.9641
Accuracy: 96.41
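The 96.41% figure is measured on the same 223 samples the network was trained on, so it reflects training fit rather than generalization. A minimal sketch of a held-out evaluation with a freshly initialized model, assuming scikit-learn is available (the 80/20 split and random_state are illustrative):

from sklearn.model_selection import train_test_split
from keras.models import Sequential
from keras.layers import Dense

X_train, X_test, y_train, y_test = train_test_split(
    X, y, test_size=0.2, stratify=y, random_state=42)

fresh = Sequential()
fresh.add(Dense(12, input_dim=4, activation='relu'))
fresh.add(Dense(8, activation='relu'))
fresh.add(Dense(1, activation='sigmoid'))
fresh.compile(loss='binary_crossentropy', optimizer='adam', metrics=['accuracy'])

fresh.fit(X_train, y_train, epochs=150, batch_size=10, verbose=0)
_, test_acc = fresh.evaluate(X_test, y_test, verbose=0)
print('Held-out accuracy: %.2f' % (test_acc * 100))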
In [14]:
model.summary()
Model: "sequential"
_________________________________________________________________
 Layer (type)                Output Shape              Param #   
=================================================================
 dense (Dense)               (None, 12)                60        
                                                                 
 dense_1 (Dense)             (None, 8)                 104       
                                                                 
 dense_2 (Dense)             (None, 1)                 9         
                                                                 
=================================================================
Total params: 173
Trainable params: 173
Non-trainable params: 0
_________________________________________________________________
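The parameter counts follow directly from the layer sizes: a Dense layer with n inputs and m units has n*m weights plus m biases. A quick check:

print(4 * 12 + 12)   # dense:   60
print(12 * 8 + 8)    # dense_1: 104
print(8 * 1 + 1)     # dense_2: 9  (total 173)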
In [15]:
yhat_probs = model.predict(X)                      # predicted probabilities for class 1
In [16]:
yhat_classes = (yhat_probs > 0.5).astype("int32")  # threshold at 0.5 to get class labels
yhat_classes
Out[16]:
array([[0],
       [0],
       [0],
       ...,
       [0],
       [0],
       [0]])
(223 rows, of which 23 are predicted as class 1; full printout omitted)
In [17]:
yhat_probs
Out[17]:
array([[6.6917082e-08],
       [3.1836925e-10],
       [5.4667616e-06],
       ...,
       [2.6465039e-12],
       [7.2048937e-18],
       [6.4730434e-09]], dtype=float32)
(223 predicted probabilities; full printout omitted)
In [18]:
yhat_classes
Out[18]:
array([[0],
       [0],
       [0],
       ...,
       [0],
       [0],
       [0]])
(identical to the Out[16] display above; full printout omitted)
In [19]:
from sklearn.metrics import (confusion_matrix, precision_score, recall_score, f1_score,
                             cohen_kappa_score, accuracy_score, roc_auc_score)
In [20]:
precision = precision_score(y, yhat_classes)
print('Precision: %f' % precision)
Precision: 0.869565
In [21]:
accuracy = accuracy_score(y, yhat_classes)
print('Accuracy: %f' % accuracy)
Accuracy: 0.964126
In [22]:
recall = recall_score(y, yhat_classes)
print('Recall: %f' % recall)
Recall: 0.800000
In [23]:
f1 = f1_score(y, yhat_classes)
print('F1 score: %f' % f1)
F1 score: 0.833333
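The F1 score is the harmonic mean of the precision and recall reported above, which can be verified directly:

p, r = 0.869565, 0.800000
print(2 * p * r / (p + r))   # ~0.8333, matching f1_score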
In [24]:
kappa = cohen_kappa_score(y, yhat_classes)
print("Cohen's kappa: %f" % kappa)
Cohen's kappa: 0.813272
In [25]:
auc = roc_auc_score(y, yhat_probs)
print('ROC AUC: %f' % auc)
ROC AUC: 0.992323
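The ROC AUC of 0.992 summarizes the whole ROC curve; the curve itself can be drawn from the same predicted probabilities with scikit-learn (a sketch):

from sklearn.metrics import roc_curve
import matplotlib.pyplot as plt

fpr, tpr, _ = roc_curve(y, yhat_probs.ravel())
plt.plot(fpr, tpr, label='ROC (AUC = %.3f)' % auc)
plt.plot([0, 1], [0, 1], linestyle='--')   # chance line
plt.xlabel('false positive rate')
plt.ylabel('true positive rate')
plt.legend(loc='lower right')
plt.show()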
In [26]:
matrix = confusion_matrix(y, yhat_classes)
print(matrix)
[[195   3]
 [  5  20]]
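With scikit-learn's convention (rows = true class, columns = predicted class, 0 = negative, 1 = positive), the matrix reads as 195 true negatives, 3 false positives, 5 false negatives and 20 true positives; ravel() makes the unpacking explicit:

tn, fp, fn, tp = confusion_matrix(y, yhat_classes).ravel()
print('TN=%d FP=%d FN=%d TP=%d' % (tn, fp, fn, tp))   # TN=195 FP=3 FN=5 TP=20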