from keras.models import Sequential
from keras.layers import Dense
import pandas as pd
import numpy as np
import pickle
# Load the methylation dataset (no header row): columns 0-3 are numeric
# features, column 4 is the binary class label.
data = pd.read_csv("methyl.csv", header=None)
data.shape  # notebook-style inspection; has no effect when run as a script
# Discard any rows with missing values before modelling.
small = data.dropna()
small.shape
# First four columns -> feature matrix X, fifth column -> target vector y.
X = small.iloc[:, :4].to_numpy()
y = small.iloc[:, 4].to_numpy()
print(data)
0 1 2 3 4 0 0.9908 -0.6535 0.6548 -2.2655 0 1 -0.3444 0.5817 1.8871 -1.2221 0 2 0.1206 0.9814 1.1101 -1.1757 0 3 0.9666 -0.1179 0.5870 0.2025 0 4 -0.3356 1.3352 1.5351 -0.6308 0 .. ... ... ... ... .. 218 1.7897 -0.1683 -0.2239 0.0128 1 219 0.8280 1.7848 0.4983 -1.0822 1 220 -1.0404 0.0381 -1.8959 -0.4171 1 221 -2.0500 -0.7193 0.3134 -2.6699 1 222 1.4275 -0.1427 0.1080 -2.5215 1 [223 rows x 5 columns]
small.shape  # inspect shape after dropna(); bare expression, no effect outside a notebook
(223, 5)
# Binary classifier for the 4-feature methylation data:
# a 4 -> 12 -> 8 -> 1 fully-connected network with a sigmoid output unit.
model = Sequential([
    Dense(12, input_dim=4, activation='relu'),
    Dense(8, activation='relu'),
    Dense(1, activation='sigmoid'),
])
# Binary cross-entropy pairs with the single sigmoid output.
model.compile(loss='binary_crossentropy', optimizer='adam', metrics=['accuracy'])
# NOTE(review): fits on the full dataset -- there is no held-out validation
# split, so later metrics are training-set metrics.
history = model.fit(X, y, epochs=150, batch_size=10)
Epoch 1/150 23/23 [==============================] - 1s 1ms/step - loss: 0.4167 - accuracy: 0.7982 Epoch 2/150 23/23 [==============================] - 0s 1ms/step - loss: 0.4137 - accuracy: 0.8117 Epoch 3/150 23/23 [==============================] - 0s 1ms/step - loss: 0.4133 - accuracy: 0.8161 Epoch 4/150 23/23 [==============================] - 0s 2ms/step - loss: 0.4126 - accuracy: 0.8117 Epoch 5/150 23/23 [==============================] - 0s 1ms/step - loss: 0.4127 - accuracy: 0.8161 Epoch 6/150 23/23 [==============================] - 0s 1ms/step - loss: 0.4120 - accuracy: 0.8161 Epoch 7/150 23/23 [==============================] - 0s 2ms/step - loss: 0.4129 - accuracy: 0.8072 Epoch 8/150 23/23 [==============================] - 0s 1ms/step - loss: 0.4113 - accuracy: 0.8072 Epoch 9/150 23/23 [==============================] - 0s 1ms/step - loss: 0.4115 - accuracy: 0.8117 Epoch 10/150 23/23 [==============================] - 0s 1ms/step - loss: 0.4112 - accuracy: 0.8117 Epoch 11/150 23/23 [==============================] - 0s 1ms/step - loss: 0.4121 - accuracy: 0.8161 Epoch 12/150 23/23 [==============================] - 0s 1ms/step - loss: 0.4116 - accuracy: 0.8117 Epoch 13/150 23/23 [==============================] - 0s 1ms/step - loss: 0.4113 - accuracy: 0.8117 Epoch 14/150 23/23 [==============================] - 0s 1ms/step - loss: 0.4108 - accuracy: 0.8161 Epoch 15/150 23/23 [==============================] - 0s 2ms/step - loss: 0.4104 - accuracy: 0.8072 Epoch 16/150 23/23 [==============================] - 0s 1ms/step - loss: 0.4092 - accuracy: 0.8117 Epoch 17/150 23/23 [==============================] - 0s 1ms/step - loss: 0.4098 - accuracy: 0.8117 Epoch 18/150 23/23 [==============================] - 0s 2ms/step - loss: 0.4084 - accuracy: 0.8117 Epoch 19/150 23/23 [==============================] - 0s 2ms/step - loss: 0.4088 - accuracy: 0.8117 Epoch 20/150 23/23 [==============================] - 0s 1ms/step - loss: 0.4085 - accuracy: 0.8161 Epoch 
21/150 23/23 [==============================] - 0s 1ms/step - loss: 0.4076 - accuracy: 0.8072 Epoch 22/150 23/23 [==============================] - 0s 1ms/step - loss: 0.4073 - accuracy: 0.8117 Epoch 23/150 23/23 [==============================] - 0s 1ms/step - loss: 0.4066 - accuracy: 0.8117 Epoch 24/150 23/23 [==============================] - 0s 1ms/step - loss: 0.4067 - accuracy: 0.8117 Epoch 25/150 23/23 [==============================] - 0s 1ms/step - loss: 0.4073 - accuracy: 0.8117 Epoch 26/150 23/23 [==============================] - 0s 911us/step - loss: 0.4059 - accuracy: 0.8117 Epoch 27/150 23/23 [==============================] - 0s 1ms/step - loss: 0.4054 - accuracy: 0.8161 Epoch 28/150 23/23 [==============================] - 0s 1ms/step - loss: 0.4048 - accuracy: 0.8117 Epoch 29/150 23/23 [==============================] - 0s 2ms/step - loss: 0.4053 - accuracy: 0.8117 Epoch 30/150 23/23 [==============================] - 0s 1ms/step - loss: 0.4051 - accuracy: 0.8072 Epoch 31/150 23/23 [==============================] - 0s 1ms/step - loss: 0.4065 - accuracy: 0.8206 Epoch 32/150 23/23 [==============================] - 0s 2ms/step - loss: 0.4061 - accuracy: 0.8072 Epoch 33/150 23/23 [==============================] - 0s 1ms/step - loss: 0.4039 - accuracy: 0.8117 Epoch 34/150 23/23 [==============================] - 0s 1ms/step - loss: 0.4038 - accuracy: 0.8117 Epoch 35/150 23/23 [==============================] - 0s 1ms/step - loss: 0.4043 - accuracy: 0.8117 Epoch 36/150 23/23 [==============================] - 0s 1ms/step - loss: 0.4047 - accuracy: 0.8072 Epoch 37/150 23/23 [==============================] - 0s 2ms/step - loss: 0.4048 - accuracy: 0.8072 Epoch 38/150 23/23 [==============================] - 0s 1ms/step - loss: 0.4050 - accuracy: 0.8072 Epoch 39/150 23/23 [==============================] - 0s 1ms/step - loss: 0.4035 - accuracy: 0.8161 Epoch 40/150 23/23 [==============================] - 0s 2ms/step - loss: 0.4036 - accuracy: 0.8161 
Epoch 41/150 23/23 [==============================] - 0s 1ms/step - loss: 0.4029 - accuracy: 0.8072 Epoch 42/150 23/23 [==============================] - 0s 1ms/step - loss: 0.4030 - accuracy: 0.8117 Epoch 43/150 23/23 [==============================] - 0s 1ms/step - loss: 0.4020 - accuracy: 0.8117 Epoch 44/150 23/23 [==============================] - 0s 1ms/step - loss: 0.4020 - accuracy: 0.8206 Epoch 45/150 23/23 [==============================] - 0s 1ms/step - loss: 0.4022 - accuracy: 0.8117 Epoch 46/150 23/23 [==============================] - 0s 2ms/step - loss: 0.4014 - accuracy: 0.8117 Epoch 47/150 23/23 [==============================] - 0s 1ms/step - loss: 0.4010 - accuracy: 0.8072 Epoch 48/150 23/23 [==============================] - 0s 1ms/step - loss: 0.4012 - accuracy: 0.8117 Epoch 49/150 23/23 [==============================] - 0s 2ms/step - loss: 0.4009 - accuracy: 0.8161 Epoch 50/150 23/23 [==============================] - 0s 1ms/step - loss: 0.4008 - accuracy: 0.8161 Epoch 51/150 23/23 [==============================] - 0s 1ms/step - loss: 0.4020 - accuracy: 0.8117 Epoch 52/150 23/23 [==============================] - 0s 1ms/step - loss: 0.4002 - accuracy: 0.8206 Epoch 53/150 23/23 [==============================] - 0s 1ms/step - loss: 0.4005 - accuracy: 0.8161 Epoch 54/150 23/23 [==============================] - 0s 1ms/step - loss: 0.4000 - accuracy: 0.8117 Epoch 55/150 23/23 [==============================] - 0s 1ms/step - loss: 0.4009 - accuracy: 0.8117 Epoch 56/150 23/23 [==============================] - 0s 1ms/step - loss: 0.3999 - accuracy: 0.8072 Epoch 57/150 23/23 [==============================] - 0s 2ms/step - loss: 0.3989 - accuracy: 0.8161 Epoch 58/150 23/23 [==============================] - 0s 1ms/step - loss: 0.3990 - accuracy: 0.8161 Epoch 59/150 23/23 [==============================] - 0s 1ms/step - loss: 0.3987 - accuracy: 0.8206 Epoch 60/150 23/23 [==============================] - 0s 2ms/step - loss: 0.3981 - accuracy: 0.8206 
Epoch 61/150 23/23 [==============================] - 0s 1ms/step - loss: 0.3988 - accuracy: 0.8161 Epoch 62/150 23/23 [==============================] - 0s 1ms/step - loss: 0.3987 - accuracy: 0.8161 Epoch 63/150 23/23 [==============================] - 0s 1ms/step - loss: 0.3986 - accuracy: 0.8161 Epoch 64/150 23/23 [==============================] - 0s 1ms/step - loss: 0.3974 - accuracy: 0.8251 Epoch 65/150 23/23 [==============================] - 0s 1ms/step - loss: 0.3975 - accuracy: 0.8206 Epoch 66/150 23/23 [==============================] - 0s 2ms/step - loss: 0.3986 - accuracy: 0.8206 Epoch 67/150 23/23 [==============================] - 0s 1ms/step - loss: 0.3971 - accuracy: 0.8072 Epoch 68/150 23/23 [==============================] - 0s 1ms/step - loss: 0.3967 - accuracy: 0.8161 Epoch 69/150 23/23 [==============================] - 0s 2ms/step - loss: 0.3977 - accuracy: 0.8206 Epoch 70/150 23/23 [==============================] - 0s 1ms/step - loss: 0.3973 - accuracy: 0.8251 Epoch 71/150 23/23 [==============================] - 0s 1ms/step - loss: 0.3981 - accuracy: 0.8161 Epoch 72/150 23/23 [==============================] - 0s 1ms/step - loss: 0.3981 - accuracy: 0.8117 Epoch 73/150 23/23 [==============================] - 0s 1ms/step - loss: 0.3957 - accuracy: 0.8206 Epoch 74/150 23/23 [==============================] - 0s 2ms/step - loss: 0.3951 - accuracy: 0.8161 Epoch 75/150 23/23 [==============================] - 0s 1ms/step - loss: 0.3951 - accuracy: 0.8161 Epoch 76/150 23/23 [==============================] - 0s 1ms/step - loss: 0.3947 - accuracy: 0.8161 Epoch 77/150 23/23 [==============================] - 0s 2ms/step - loss: 0.3952 - accuracy: 0.8161 Epoch 78/150 23/23 [==============================] - 0s 1ms/step - loss: 0.3955 - accuracy: 0.8161 Epoch 79/150 23/23 [==============================] - 0s 1ms/step - loss: 0.3952 - accuracy: 0.8117 Epoch 80/150 23/23 [==============================] - 0s 1ms/step - loss: 0.3968 - accuracy: 0.8117 
Epoch 81/150 23/23 [==============================] - 0s 1ms/step - loss: 0.3935 - accuracy: 0.8161 Epoch 82/150 23/23 [==============================] - 0s 1ms/step - loss: 0.3928 - accuracy: 0.8161 Epoch 83/150 23/23 [==============================] - 0s 2ms/step - loss: 0.3933 - accuracy: 0.8117 Epoch 84/150 23/23 [==============================] - 0s 1ms/step - loss: 0.3928 - accuracy: 0.8206 Epoch 85/150 23/23 [==============================] - 0s 1ms/step - loss: 0.3934 - accuracy: 0.8206 Epoch 86/150 23/23 [==============================] - 0s 1ms/step - loss: 0.3933 - accuracy: 0.8161 Epoch 87/150 23/23 [==============================] - 0s 1ms/step - loss: 0.3930 - accuracy: 0.8161 Epoch 88/150 23/23 [==============================] - 0s 710us/step - loss: 0.3923 - accuracy: 0.8206 Epoch 89/150 23/23 [==============================] - 0s 1ms/step - loss: 0.3909 - accuracy: 0.8206 Epoch 90/150 23/23 [==============================] - 0s 1ms/step - loss: 0.3916 - accuracy: 0.8161 Epoch 91/150 23/23 [==============================] - 0s 2ms/step - loss: 0.3912 - accuracy: 0.8251 Epoch 92/150 23/23 [==============================] - 0s 1ms/step - loss: 0.3902 - accuracy: 0.8161 Epoch 93/150 23/23 [==============================] - 0s 1ms/step - loss: 0.3905 - accuracy: 0.8206 Epoch 94/150 23/23 [==============================] - 0s 2ms/step - loss: 0.3914 - accuracy: 0.8251 Epoch 95/150 23/23 [==============================] - 0s 1ms/step - loss: 0.3899 - accuracy: 0.8206 Epoch 96/150 23/23 [==============================] - 0s 1ms/step - loss: 0.3894 - accuracy: 0.8251 Epoch 97/150 23/23 [==============================] - 0s 1ms/step - loss: 0.3890 - accuracy: 0.8251 Epoch 98/150 23/23 [==============================] - 0s 1ms/step - loss: 0.3889 - accuracy: 0.8251 Epoch 99/150 23/23 [==============================] - 0s 1ms/step - loss: 0.3887 - accuracy: 0.8251 Epoch 100/150 23/23 [==============================] - 0s 2ms/step - loss: 0.3882 - accuracy: 
0.8161 Epoch 101/150 23/23 [==============================] - 0s 1ms/step - loss: 0.3880 - accuracy: 0.8206 Epoch 102/150 23/23 [==============================] - 0s 1ms/step - loss: 0.3885 - accuracy: 0.8161 Epoch 103/150 23/23 [==============================] - 0s 2ms/step - loss: 0.3877 - accuracy: 0.8161 Epoch 104/150 23/23 [==============================] - 0s 1ms/step - loss: 0.3878 - accuracy: 0.8251 Epoch 105/150 23/23 [==============================] - 0s 1ms/step - loss: 0.3865 - accuracy: 0.8251 Epoch 106/150 23/23 [==============================] - 0s 1ms/step - loss: 0.3862 - accuracy: 0.8251 Epoch 107/150 23/23 [==============================] - 0s 1ms/step - loss: 0.3866 - accuracy: 0.8251 Epoch 108/150 23/23 [==============================] - 0s 1ms/step - loss: 0.3850 - accuracy: 0.8251 Epoch 109/150 23/23 [==============================] - 0s 1ms/step - loss: 0.3866 - accuracy: 0.8206 Epoch 110/150 23/23 [==============================] - 0s 1ms/step - loss: 0.3843 - accuracy: 0.8251 Epoch 111/150 23/23 [==============================] - 0s 2ms/step - loss: 0.3845 - accuracy: 0.8251 Epoch 112/150 23/23 [==============================] - 0s 1ms/step - loss: 0.3835 - accuracy: 0.8251 Epoch 113/150 23/23 [==============================] - 0s 1ms/step - loss: 0.3834 - accuracy: 0.8296 Epoch 114/150 23/23 [==============================] - 0s 1ms/step - loss: 0.3834 - accuracy: 0.8251 Epoch 115/150 23/23 [==============================] - 0s 1ms/step - loss: 0.3843 - accuracy: 0.8161 Epoch 116/150 23/23 [==============================] - 0s 1ms/step - loss: 0.3835 - accuracy: 0.8251 Epoch 117/150 23/23 [==============================] - 0s 1ms/step - loss: 0.3836 - accuracy: 0.8206 Epoch 118/150 23/23 [==============================] - 0s 1ms/step - loss: 0.3823 - accuracy: 0.8251 Epoch 119/150 23/23 [==============================] - 0s 1ms/step - loss: 0.3817 - accuracy: 0.8296 Epoch 120/150 23/23 [==============================] - 0s 2ms/step - 
loss: 0.3833 - accuracy: 0.8296 Epoch 121/150 23/23 [==============================] - 0s 1ms/step - loss: 0.3810 - accuracy: 0.8206 Epoch 122/150 23/23 [==============================] - 0s 1ms/step - loss: 0.3817 - accuracy: 0.8251 Epoch 123/150 23/23 [==============================] - 0s 2ms/step - loss: 0.3811 - accuracy: 0.8251 Epoch 124/150 23/23 [==============================] - 0s 1ms/step - loss: 0.3849 - accuracy: 0.8206 Epoch 125/150 23/23 [==============================] - 0s 1ms/step - loss: 0.3806 - accuracy: 0.8296 Epoch 126/150 23/23 [==============================] - 0s 711us/step - loss: 0.3828 - accuracy: 0.8206 Epoch 127/150 23/23 [==============================] - 0s 1ms/step - loss: 0.3797 - accuracy: 0.8251 Epoch 128/150 23/23 [==============================] - 0s 2ms/step - loss: 0.3801 - accuracy: 0.8341 Epoch 129/150 23/23 [==============================] - 0s 1ms/step - loss: 0.3802 - accuracy: 0.8341 Epoch 130/150 23/23 [==============================] - 0s 1ms/step - loss: 0.3800 - accuracy: 0.8296 Epoch 131/150 23/23 [==============================] - 0s 2ms/step - loss: 0.3796 - accuracy: 0.8251 Epoch 132/150 23/23 [==============================] - 0s 1ms/step - loss: 0.3790 - accuracy: 0.8341 Epoch 133/150 23/23 [==============================] - 0s 1ms/step - loss: 0.3782 - accuracy: 0.8296 Epoch 134/150 23/23 [==============================] - 0s 1ms/step - loss: 0.3774 - accuracy: 0.8296 Epoch 135/150 23/23 [==============================] - 0s 1ms/step - loss: 0.3785 - accuracy: 0.8251 Epoch 136/150 23/23 [==============================] - ETA: 0s - loss: 0.3809 - accuracy: 0.90 - 0s 710us/step - loss: 0.3766 - accuracy: 0.8341 Epoch 137/150 23/23 [==============================] - 0s 1ms/step - loss: 0.3767 - accuracy: 0.8296 Epoch 138/150 23/23 [==============================] - 0s 1ms/step - loss: 0.3757 - accuracy: 0.8341 Epoch 139/150 23/23 [==============================] - 0s 1ms/step - loss: 0.3755 - accuracy: 0.8341 
Epoch 140/150 23/23 [==============================] - 0s 2ms/step - loss: 0.3760 - accuracy: 0.8341 Epoch 141/150 23/23 [==============================] - 0s 1ms/step - loss: 0.3749 - accuracy: 0.8341 Epoch 142/150 23/23 [==============================] - 0s 1ms/step - loss: 0.3764 - accuracy: 0.8251 Epoch 143/150 23/23 [==============================] - 0s 1ms/step - loss: 0.3793 - accuracy: 0.8206 Epoch 144/150 23/23 [==============================] - 0s 1ms/step - loss: 0.3749 - accuracy: 0.8341 Epoch 145/150 23/23 [==============================] - 0s 1ms/step - loss: 0.3749 - accuracy: 0.8296 Epoch 146/150 23/23 [==============================] - 0s 1ms/step - loss: 0.3754 - accuracy: 0.8341 Epoch 147/150 23/23 [==============================] - 0s 1ms/step - loss: 0.3737 - accuracy: 0.8341 Epoch 148/150 23/23 [==============================] - 0s 1ms/step - loss: 0.3732 - accuracy: 0.8341 Epoch 149/150 23/23 [==============================] - 0s 1ms/step - loss: 0.3733 - accuracy: 0.8341 Epoch 150/150 23/23 [==============================] - 0s 1ms/step - loss: 0.3751 - accuracy: 0.8296
# Plot the per-epoch training accuracy recorded by model.fit().
from matplotlib.pyplot import figure
import matplotlib.pyplot as plt

figure(num=None, figsize=(8, 6), dpi=300, facecolor='w', edgecolor='k')
plt.plot(history.history['accuracy'])
plt.title('CRC methylation validation accuracy')
plt.ylabel('accuracy')
plt.xlabel('epoch')
plt.legend(['accuracy'], loc='lower right')
plt.show()
# Plot the per-epoch training loss recorded by model.fit().
from matplotlib.pyplot import figure
import matplotlib.pyplot as plt

figure(num=None, figsize=(8, 6), dpi=300, facecolor='w', edgecolor='k')
plt.plot(history.history['loss'])
# Fixed typo in the displayed title: 'mthylation' -> 'methylation'.
plt.title('CRC methylation validation loss')
plt.ylabel('loss')
plt.xlabel('epoch')
plt.legend(['loss'], loc='upper right')
plt.show()
# Re-score the model on the same data it was trained on (loss, accuracy).
loss_value, accuracy = model.evaluate(X, y)
print(f'Accuracy: {accuracy * 100:.2f}')
7/7 [==============================] - 0s 2ms/step - loss: 0.3709 - accuracy: 0.8296 Accuracy: 82.96
model.summary()  # print the layer-by-layer architecture and parameter counts
Model: "sequential_1" _________________________________________________________________ Layer (type) Output Shape Param # ================================================================= dense_3 (Dense) (None, 12) 60 dense_4 (Dense) (None, 8) 104 dense_5 (Dense) (None, 1) 9 ================================================================= Total params: 173 Trainable params: 173 Non-trainable params: 0 _________________________________________________________________
# Predicted probabilities (sigmoid outputs in [0, 1]) for every row of X.
yhat_probs = model.predict(X)
yhat_probs  # notebook-style echo; no effect when run as a script
array([[3.36530864e-01], [3.84685576e-01], [4.39630479e-01], [8.21486115e-01], [4.42385882e-01], [3.28345776e-01], [6.71813667e-01], [3.68588418e-01], [5.35172522e-02], [4.95761633e-04], [1.56987011e-01], [1.10928595e-01], [6.16813898e-02], [3.65926296e-01], [3.50777686e-01], [7.57617652e-02], [1.18149459e-01], [2.86580116e-01], [1.98050320e-01], [3.76422405e-02], [1.24247909e-01], [2.90816426e-01], [6.28701985e-01], [5.31508744e-01], [2.09717810e-01], [1.25238508e-01], [8.39780271e-02], [1.84101880e-01], [5.27961254e-02], [4.76523042e-02], [5.44093966e-01], [9.73634422e-02], [1.35138571e-01], [8.62088799e-03], [2.58380830e-01], [7.50419736e-01], [4.61597741e-02], [2.19231993e-01], [1.57490820e-01], [2.10020542e-02], [1.10791415e-01], [9.18094516e-02], [4.50861454e-03], [4.47680795e-05], [6.26516342e-03], [5.01829743e-01], [1.35581642e-01], [1.54152840e-01], [6.14935160e-02], [1.18885010e-01], [1.29692823e-01], [6.04062676e-01], [3.75607610e-03], [3.99449170e-01], [2.81702369e-01], [1.87468082e-01], [1.36267304e-01], [4.21882570e-02], [3.31789255e-04], [7.44262636e-02], [9.61222947e-02], [1.61942154e-01], [3.76229584e-02], [3.04886401e-02], [1.97699398e-01], [2.02128589e-02], [4.62520629e-01], [2.84688115e-01], [9.69774127e-02], [6.25965118e-01], [4.52603132e-01], [4.05480981e-01], [3.43434095e-01], [3.16509604e-02], [1.76272482e-01], [3.24064493e-02], [2.46832371e-02], [5.04689515e-02], [5.32992780e-02], [4.86024588e-01], [2.58386910e-01], [1.58861101e-01], [5.29424727e-01], [5.52512705e-02], [3.22747231e-02], [3.29896212e-02], [6.84781909e-01], [8.92976224e-02], [7.04138279e-02], [1.59124970e-01], [1.02646500e-01], [7.65734613e-02], [2.75682509e-02], [5.40964603e-02], [7.41538405e-03], [3.59541714e-01], [3.31485271e-03], [7.09279180e-02], [9.66963172e-02], [4.15921539e-01], [1.22958481e-01], [1.09969676e-02], [1.24608666e-01], [3.17272127e-01], [4.41006720e-02], [2.91064382e-01], [4.62096632e-02], [1.04879737e-02], [2.00243562e-01], [5.21794081e-01], 
[6.93114579e-01], [5.88613749e-03], [4.81005967e-01], [7.65150547e-01], [5.09533286e-03], [1.59664243e-01], [1.23550266e-01], [2.86775440e-01], [5.56461215e-02], [8.53777230e-02], [2.77194262e-01], [6.35329485e-02], [6.90956712e-01], [2.44409978e-01], [9.04897749e-02], [6.25168681e-02], [1.31995022e-01], [9.83517766e-02], [1.38708949e-02], [1.37583911e-02], [2.74332851e-01], [2.09793657e-01], [3.10898423e-02], [3.66430581e-02], [9.74312425e-03], [6.13564253e-01], [1.26340717e-01], [3.88605118e-01], [7.32149184e-02], [1.60018295e-01], [2.08185375e-01], [1.08176500e-01], [1.04485005e-01], [9.60093975e-01], [8.70822310e-01], [6.30607486e-01], [9.54826295e-01], [8.95919204e-01], [9.06272113e-01], [7.85126090e-01], [9.02044535e-01], [9.63776350e-01], [7.73964763e-01], [9.79753017e-01], [8.90860915e-01], [8.05143356e-01], [7.83462524e-01], [1.85226858e-01], [8.83749664e-01], [2.10658967e-01], [8.01264763e-01], [9.59755778e-01], [6.11195266e-01], [6.47381961e-01], [9.57109571e-01], [5.38529396e-01], [9.59068060e-01], [3.25831592e-01], [9.23216879e-01], [9.34427023e-01], [6.88303590e-01], [8.99788857e-01], [1.24248385e-01], [6.27010107e-01], [8.19576681e-01], [9.36269462e-01], [6.21980727e-02], [4.78437424e-01], [5.50632536e-01], [9.28165913e-01], [8.25904667e-01], [9.24060822e-01], [7.36541748e-01], [8.05680573e-01], [3.77717704e-01], [7.69075036e-01], [4.11606908e-01], [5.13089895e-01], [7.25270867e-01], [8.21778297e-01], [7.23274291e-01], [4.88011450e-01], [2.92951077e-01], [8.90875638e-01], [7.13969827e-01], [6.13960564e-01], [5.52114606e-01], [8.48590374e-01], [5.63460708e-01], [1.02901399e-01], [4.80137795e-01], [8.99400353e-01], [3.31138611e-01], [1.91182554e-01], [9.91883695e-01], [6.65320575e-01], [4.67653006e-01], [7.25050449e-01], [4.17899579e-01], [5.75657487e-01], [5.89693546e-01], [1.64077640e-01], [4.88948613e-01], [4.71977204e-01], [9.63621616e-01], [5.76952755e-01], [3.67517650e-01], [6.12274349e-01], [8.82048070e-01], [5.69039047e-01], [7.51966119e-01], 
[8.23014379e-02], [3.49387467e-01]], dtype=float32)
# Threshold the already-computed probabilities at 0.5 instead of calling
# model.predict(X) a second time: avoids a redundant inference pass and
# guarantees yhat_classes is consistent with yhat_probs.
yhat_classes = (yhat_probs > 0.5).astype("int32")
yhat_classes  # notebook-style echo; no effect when run as a script
array([[0], [0], [0], [1], [0], [0], [1], [0], [0], [0], [0], [0], [0], [0], [0], [0], [0], [0], [0], [0], [0], [0], [1], [1], [0], [0], [0], [0], [0], [0], [1], [0], [0], [0], [0], [1], [0], [0], [0], [0], [0], [0], [0], [0], [0], [1], [0], [0], [0], [0], [0], [1], [0], [0], [0], [0], [0], [0], [0], [0], [0], [0], [0], [0], [0], [0], [0], [0], [0], [1], [0], [0], [0], [0], [0], [0], [0], [0], [0], [0], [0], [0], [1], [0], [0], [0], [1], [0], [0], [0], [0], [0], [0], [0], [0], [0], [0], [0], [0], [0], [0], [0], [0], [0], [0], [0], [0], [0], [0], [1], [1], [0], [0], [1], [0], [0], [0], [0], [0], [0], [0], [0], [1], [0], [0], [0], [0], [0], [0], [0], [0], [0], [0], [0], [0], [1], [0], [0], [0], [0], [0], [0], [0], [1], [1], [1], [1], [1], [1], [1], [1], [1], [1], [1], [1], [1], [1], [0], [1], [0], [1], [1], [1], [1], [1], [1], [1], [0], [1], [1], [1], [1], [0], [1], [1], [1], [0], [0], [1], [1], [1], [1], [1], [1], [0], [1], [0], [1], [1], [1], [1], [0], [0], [1], [1], [1], [1], [1], [1], [0], [0], [1], [0], [0], [1], [1], [0], [1], [0], [1], [1], [0], [0], [0], [1], [1], [0], [1], [1], [1], [1], [0], [0]])
# Metrics comparing predicted classes/probabilities against the true labels.
from sklearn.metrics import confusion_matrix, precision_score, recall_score, f1_score, cohen_kappa_score, accuracy_score, roc_auc_score

# Precision: fraction of predicted positives that are truly positive.
precision = precision_score(y, yhat_classes)
print(f'Precision: {precision:f}')
Precision: 0.783784
# Accuracy: overall fraction of correct predictions.
accuracy = accuracy_score(y, yhat_classes)
print(f'Accuracy: {accuracy:f}')
Accuracy: 0.829596
# Recall: fraction of true positives that were correctly identified.
recall = recall_score(y, yhat_classes)
print(f'Recall: {recall:f}')
Recall: 0.725000
# F1: harmonic mean of precision and recall.
f1 = f1_score(y, yhat_classes)
print(f'F1 score: {f1:f}')
F1 score: 0.753247
# Cohen's kappa: agreement between predictions and labels, corrected for chance.
kappa = cohen_kappa_score(y, yhat_classes)
print(f'Cohens kappa: {kappa:f}')
Cohens kappa: 0.623411
# ROC AUC uses the raw probabilities (not the thresholded classes).
auc = roc_auc_score(y, yhat_probs)
print(f'ROC AUC: {auc:f}')
ROC AUC: 0.905245
# 2x2 confusion matrix: rows = true class, columns = predicted class.
matrix = confusion_matrix(y, yhat_classes)
print(matrix)
[[127 16] [ 22 58]]