# Load the spambase dataset: 57 numeric features plus a binary "spam" target.
import pandas
from sklearn import metrics

spam_all = pandas.read_table("spambase.txt", sep=",", header=0, decimal=".")
# Quick overview of columns, dtypes and memory footprint.
spam_all.info()
<class 'pandas.core.frame.DataFrame'>
RangeIndex: 4601 entries, 0 to 4600
Data columns (total 58 columns):
# Column Non-Null Count Dtype
--- ------ -------------- -----
0 word_freq_make 4601 non-null float64
1 word_freq_address 4601 non-null float64
2 word_freq_all 4601 non-null float64
3 word_freq_3d 4601 non-null float64
4 word_freq_our 4601 non-null float64
5 word_freq_over 4601 non-null float64
6 word_freq_remove 4601 non-null float64
7 word_freq_internet 4601 non-null float64
8 word_freq_order 4601 non-null float64
9 word_freq_mail 4601 non-null float64
10 word_freq_receive 4601 non-null float64
11 word_freq_will 4601 non-null float64
12 word_freq_people 4601 non-null float64
13 word_freq_report 4601 non-null float64
14 word_freq_addresses 4601 non-null float64
15 word_freq_free 4601 non-null float64
16 word_freq_business 4601 non-null float64
17 word_freq_email 4601 non-null float64
18 word_freq_you 4601 non-null float64
19 word_freq_credit 4601 non-null float64
20 word_freq_your 4601 non-null float64
21 word_freq_font 4601 non-null float64
22 word_freq_000 4601 non-null float64
23 word_freq_money 4601 non-null float64
24 word_freq_hp 4601 non-null float64
25 word_freq_hpl 4601 non-null float64
26 word_freq_george 4601 non-null float64
27 word_freq_650 4601 non-null float64
28 word_freq_lab 4601 non-null float64
29 word_freq_labs 4601 non-null float64
30 word_freq_telnet 4601 non-null float64
31 word_freq_857 4601 non-null float64
32 word_freq_data 4601 non-null float64
33 word_freq_415 4601 non-null float64
34 word_freq_85 4601 non-null float64
35 word_freq_technology 4601 non-null float64
36 word_freq_1999 4601 non-null float64
37 word_freq_parts 4601 non-null float64
38 word_freq_pm 4601 non-null float64
39 word_freq_direct 4601 non-null float64
40 word_freq_cs 4601 non-null float64
41 word_freq_meeting 4601 non-null float64
42 word_freq_original 4601 non-null float64
43 word_freq_project 4601 non-null float64
44 word_freq_re 4601 non-null float64
45 word_freq_edu 4601 non-null float64
46 word_freq_table 4601 non-null float64
47 word_freq_conference 4601 non-null float64
48 char_freq_; 4601 non-null float64
49 char_freq_( 4601 non-null float64
50 char_freq_[ 4601 non-null float64
51 char_freq_! 4601 non-null float64
52 char_freq_$ 4601 non-null float64
53 char_freq_# 4601 non-null float64
54 capital_run_length_average 4601 non-null float64
55 capital_run_length_longest 4601 non-null int64
56 capital_run_length_total 4601 non-null int64
57 spam 4601 non-null int64
dtypes: float64(55), int64(3)
memory usage: 2.0 MB
from sklearn.model_selection import train_test_split

# Hold out ~21.75% of the 4601 rows for testing (gives 1001 test samples).
# random_state pins the split so the run is reproducible (the original call
# produced a different split on every run).
X_train, X_test, Y_train, Y_test = train_test_split(
    spam_all.iloc[:, 0:57],      # the 57 predictor columns
    spam_all.loc[:, "spam"],     # binary target (0 = ham, 1 = spam)
    test_size=0.2175,
    random_state=0,
)

from sklearn.preprocessing import StandardScaler

# Standardize features; fit on the training set only to avoid test-set leakage.
scaler = StandardScaler(with_mean=True, with_std=True)
scaler.fit(X_train)
Z_train = scaler.transform(X_train)
# Sanity check: standardized training means should be ~0 (up to float error).
print(Z_train.mean(axis=0))
[ 7.10542736e-17 -1.57898386e-17 -4.90965293e-17 -7.40148683e-18
-6.71068139e-17 7.69754630e-17 3.99680289e-17 -6.01987596e-17
-2.81256500e-17 2.46716228e-18 -4.04614613e-17 3.35534070e-17
-2.22044605e-17 2.96059473e-18 3.65140017e-17 0.00000000e+00
4.73695157e-17 1.97372982e-17 -2.31913254e-17 -1.48029737e-17
1.06581410e-16 3.45402719e-17 -1.38161088e-17 4.44089210e-17
1.97372982e-18 6.90805438e-18 -1.18423789e-17 1.77635684e-17
3.25665421e-17 -2.96059473e-18 5.92118946e-18 -9.37521665e-18
-2.91125149e-17 -9.86864911e-18 -1.97372982e-18 -8.38835174e-18
2.17110280e-17 4.93432455e-18 9.37521665e-18 4.44089210e-17
1.18423789e-17 6.90805438e-18 -3.84877315e-17 -9.86864911e-18
-6.80936788e-17 -4.24351912e-17 -2.07241631e-17 -2.17110280e-17
-9.86864911e-18 1.57898386e-17 -1.87504333e-17 3.84877315e-17
-2.56584877e-17 -5.92118946e-18 1.97372982e-18 9.86864911e-18
-2.86190824e-17]
# Apply the training-set scaler to the test set.  The test-set means are only
# near zero (not exactly zero) because the scaler was fit on the training data.
Z_test = scaler.transform(X_test)
test_means = Z_test.mean(axis=0)
print(test_means)
[-0.0378188 0.00358165 0.06467463 -0.03443819 0.00012075 0.01759126
-0.00263605 0.02212304 -0.00288727 -0.02173335 -0.01619425 -0.02070032
0.00399926 -0.01111788 0.00740293 -0.03752886 -0.04083832 -0.01174155
0.00854839 0.00209437 -0.05678322 0.01281356 0.06288376 -0.04528819
-0.00078119 -0.00640172 0.00746968 -0.04533094 0.045163 -0.00939345
0.0515831 0.04010162 0.03448559 0.03836718 0.0050869 -0.00329967
-0.05297989 0.02497115 -0.05907397 0.05117547 -0.00475573 0.05420181
0.0312495 0.06407833 0.01715653 0.05280285 -0.0275301 -0.0271555
-0.0098211 0.00911014 0.01566219 0.02671501 0.02646856 -0.04073214
0.02068994 0.02206846 0.04057584]
def eval_model(y_true, y_pred, poslabel="yes"):
    """Print confusion matrix, error rate and F1-score of a classification.

    Parameters
    ----------
    y_true, y_pred : array-like
        True and predicted class labels.
    poslabel : kept for backward compatibility; unused, because the labels in
        this script are already encoded as 0/1 which matches the default
        pos_label of sklearn's f1_score.

    Returns
    -------
    (confusion_matrix, error_rate, f1_score)
    """
    # Confusion matrix (rows = true classes, columns = predicted classes).
    mc = metrics.confusion_matrix(y_true, y_pred)
    print("Confusion matrix :")
    print(mc)
    # Error rate = 1 - accuracy.
    err = 1 - metrics.accuracy_score(y_true, y_pred)
    print("Err-rate = ", err)
    # F1-score: harmonic mean of precision and recall for the positive class.
    f1 = metrics.f1_score(y_true, y_pred)
    print("F1-Score = ", f1)
    # Return the metrics so callers can also use them programmatically.
    return mc, err, f1
# Simple perceptron (linear classifier) trained on the standardized features.
from sklearn.linear_model import Perceptron

ps_sklearn = Perceptron(random_state=100, max_iter=1500, tol=None)
ps_sklearn.fit(Z_train, Y_train)

# Evaluate on the held-out test set.
preds_sklearn = ps_sklearn.predict(Z_test)
eval_model(Y_test, preds_sklearn)

# Learned parameters: bias first, then the 57 feature weights.
print(ps_sklearn.intercept_)
coefs = ps_sklearn.coef_[0, :]
print(coefs)
Confusion matrix :
[[536 47]
[ 80 338]]
Err-rate = 0.1268731268731269
F1-Score = 0.8418430884184308
[-127.]
[-1.13796959e+01 -4.25097686e+00 6.59798995e+00 3.81698345e+01
-7.05297569e-01 -2.08216101e+00 1.38055033e+01 2.24235261e+00
5.25409317e+00 -1.66195405e-03 -6.67012069e+00 -4.34682826e-01
-1.65223151e+00 7.56442062e-01 2.48602899e+00 9.99889858e+00
9.99682046e+00 -4.49292163e+00 -1.10954474e+00 1.03085781e+01
-3.28016234e+00 3.45401716e+00 8.09630980e+00 1.44315768e+00
-2.94452445e+01 -7.51547805e+00 -4.10397932e+02 3.73780322e+00
-1.35463792e+01 -5.82628853e+00 -1.01578215e+01 1.19890646e+01
7.86598548e-01 -2.31751454e+00 -1.28862027e+01 5.92507341e+00
8.42491898e+00 -1.84117464e+00 -4.16355123e+00 -1.95568343e+00
-1.65036794e+02 -1.47217462e+01 -1.34100152e+00 -3.20492105e+00
-7.64457453e+00 -7.25034234e+00 -4.79431435e+00 -1.26801157e+01
-9.54395451e+00 -9.91854324e+00 -1.54084209e+00 5.17460391e+00
1.29701951e+01 9.43505542e+00 -1.37494604e-02 1.91517326e+01
2.72050143e+00]
from sklearn.neural_network import MLPClassifier

# Multi-layer perceptron with a single hidden layer of 2 logistic units.
# NOTE: the original passed hidden_layer_sizes=(2), which is the int 2 rather
# than a tuple; (2,) states the intent explicitly (sklearn accepts both forms).
pmc_sklearn = MLPClassifier(hidden_layer_sizes=(2,), activation='logistic',
                            max_iter=1500)
pmc_sklearn.fit(Z_train, Y_train)
preds_sklearn = pmc_sklearn.predict(Z_test)
eval_model(Y_test, preds_sklearn)
# Weight matrices: input->hidden (57x2) and hidden->output (2x1).
print(pmc_sklearn.coefs_)
Confusion matrix :
[[552 31]
[ 43 375]]
Err-rate = 0.07392607392607398
F1-Score = 0.9101941747572816
[array([[-0.06243585, -0.06972204],
[ 0.13250031, 0.87436484],
[ 0.21657114, 0.10636347],
[-1.36903989, -1.07360363],
[-1.19238625, 0.0240723 ],
[-0.3361549 , 0.02593657],
[-1.62835831, -1.67187932],
[-0.65107769, 0.04250276],
[-0.09706641, -0.10477043],
[ 0.07185751, -0.35673139],
[ 0.2916899 , -1.4292945 ],
[ 0.25042177, 0.44882076],
[ 0.32627815, 0.15686032],
[-0.03986608, 0.38748051],
[ 0.04443746, -1.55455247],
[-1.13080742, -0.74412041],
[-0.87798125, -0.09927781],
[-0.0551511 , 0.16728481],
[ 0.48342836, -0.34262385],
[-0.77003348, -1.17587602],
[ 0.13943419, -1.05115761],
[-0.32932729, -0.12943647],
[-1.21986323, -1.97027382],
[-1.08311562, -0.6709295 ],
[ 2.84898644, 3.22994563],
[ 1.07815561, 1.04307161],
[ 2.79581038, 3.53266315],
[-0.9735323 , 0.47792815],
[ 0.62755065, 0.89005886],
[-0.2101394 , 0.68451678],
[ 1.68139129, 0.81357101],
[ 0.54515367, 0.73424091],
[ 0.75527872, -0.06756066],
[ 0.41776336, 1.06760761],
[ 1.33988984, 1.18188638],
[-0.40609229, -0.59174301],
[ 2.09832975, -0.31277529],
[ 0.16210801, 0.36809088],
[ 0.26527651, 0.19144769],
[ 0.18367503, 0.20975213],
[ 1.81743679, 1.60414177],
[ 1.07415694, 1.60767341],
[-0.12100138, 0.71066534],
[ 0.88311085, 1.65076174],
[ 0.87540491, 0.92428849],
[ 0.12507529, 3.58442148],
[ 0.36745558, 0.44790636],
[ 1.09135617, 0.39265451],
[ 0.39152106, 0.17010817],
[ 0.04839576, 0.08776854],
[ 0.25712437, 0.57072881],
[-0.84029151, -2.47133243],
[-1.65533065, -2.84921178],
[-0.33349119, -0.19547485],
[-1.28334094, -3.38925692],
[-1.86988566, -1.06203154],
[-1.27045417, 0.01986002]]), array([[-4.15219865],
[-4.07585643]])]
# Bias vectors: hidden layer (2 values) and output layer (1 value).
print(pmc_sklearn.intercepts_)
[array([1.07143955, 1.17587178]), array([3.26533536])]
import numpy

# Hidden-layer activations of the sklearn MLP on the test set, computed by
# hand: affine transform followed by the logistic sigmoid.  The original
# per-row Python loops are replaced by equivalent vectorized matrix products.
hidden_w = pmc_sklearn.coefs_[0]        # shape (57, 2): input -> hidden weights
hidden_b = pmc_sklearn.intercepts_[0]   # shape (2,):   hidden-layer biases
Z1 = Z_test @ hidden_w[:, 0] + hidden_b[0]
Z2 = Z_test @ hidden_w[:, 1] + hidden_b[1]
# Logistic sigmoid activation.
Z1 = 1.0 / (1.0 + numpy.exp(-Z1))
Z2 = 1.0 / (1.0 + numpy.exp(-Z2))
import matplotlib.pyplot as plt

# Test points projected into the 2-unit hidden-layer space, colored by true
# class (red = ham/0, blue = spam/1).  The original called plt.scatter twice;
# the first, uncolored call was redundant and is removed.
plt.scatter(Z1, Z2, c=pandas.Series(['red', 'blue'])[Y_test], s=5)

# Decision boundary of the output neuron in the hidden space:
# w1*z1 + w2*z2 + b = 0  =>  z2 = -(w1/w2)*z1 - b/w2.
horiz = numpy.linspace(0, 1)
vertic = (-pmc_sklearn.coefs_[1][0] / pmc_sklearn.coefs_[1][1] * horiz
          - pmc_sklearn.intercepts_[1] / pmc_sklearn.coefs_[1][1])
plt.plot(horiz, vertic, c="black", linewidth=3, linestyle="--")
plt.show()
# Report the keras version (transcript below was produced with 2.4.3).
import keras
print(keras.__version__)
2.4.3
from keras.models import Sequential
from keras.layers import Dense

# Same architecture as the sklearn MLP: inputs -> 2 sigmoid hidden units
# -> 1 sigmoid output unit.
pmc_keras = Sequential()
# Input to hidden layer.  The input dimension is taken from the data instead
# of being hard-coded to 57, so the script survives a change in feature count.
pmc_keras.add(Dense(units=2, input_dim=Z_train.shape[1], activation="sigmoid"))
# Hidden layer to output.
pmc_keras.add(Dense(units=1, activation="sigmoid"))
# Binary cross-entropy loss, Adam optimizer, track accuracy during training.
pmc_keras.compile(loss="binary_crossentropy", optimizer="adam",
                  metrics=["accuracy"])
pmc_keras.fit(Z_train, Y_train.astype("float"), epochs=150, batch_size=15)
Epoch 1/150
240/240 [==============================] - 0s 850us/step - loss: 0.7042 - accuracy: 0.5243
Epoch 2/150
240/240 [==============================] - 0s 1ms/step - loss: 0.5477 - accuracy: 0.7971
Epoch 3/150
240/240 [==============================] - 0s 1ms/step - loss: 0.4713 - accuracy: 0.8570
Epoch 4/150
240/240 [==============================] - 0s 889us/step - loss: 0.4112 - accuracy: 0.8963
Epoch 5/150
240/240 [==============================] - 0s 856us/step - loss: 0.3729 - accuracy: 0.9010
Epoch 6/150
240/240 [==============================] - 0s 1ms/step - loss: 0.3415 - accuracy: 0.9101
Epoch 7/150
240/240 [==============================] - 0s 931us/step - loss: 0.3118 - accuracy: 0.9159
Epoch 8/150
240/240 [==============================] - 0s 1ms/step - loss: 0.3051 - accuracy: 0.9155
Epoch 9/150
240/240 [==============================] - 0s 892us/step - loss: 0.2894 - accuracy: 0.9127
Epoch 10/150
240/240 [==============================] - 0s 867us/step - loss: 0.2744 - accuracy: 0.9209
Epoch 11/150
240/240 [==============================] - 0s 954us/step - loss: 0.2497 - accuracy: 0.9310
Epoch 12/150
240/240 [==============================] - 0s 849us/step - loss: 0.2360 - accuracy: 0.9323
Epoch 13/150
240/240 [==============================] - 0s 1ms/step - loss: 0.2374 - accuracy: 0.9297
Epoch 14/150
240/240 [==============================] - 0s 885us/step - loss: 0.2251 - accuracy: 0.9326
Epoch 15/150
240/240 [==============================] - 0s 929us/step - loss: 0.2320 - accuracy: 0.9232
Epoch 16/150
240/240 [==============================] - 0s 880us/step - loss: 0.2288 - accuracy: 0.9265
Epoch 17/150
240/240 [==============================] - 0s 878us/step - loss: 0.2213 - accuracy: 0.9326
Epoch 18/150
240/240 [==============================] - 0s 1ms/step - loss: 0.2128 - accuracy: 0.9331
Epoch 19/150
240/240 [==============================] - 0s 934us/step - loss: 0.1965 - accuracy: 0.9401
Epoch 20/150
240/240 [==============================] - 0s 872us/step - loss: 0.2020 - accuracy: 0.9367
Epoch 21/150
240/240 [==============================] - 0s 858us/step - loss: 0.2085 - accuracy: 0.9345
Epoch 22/150
240/240 [==============================] - 0s 878us/step - loss: 0.2093 - accuracy: 0.9318
Epoch 23/150
240/240 [==============================] - 0s 1ms/step - loss: 0.1906 - accuracy: 0.9404
Epoch 24/150
240/240 [==============================] - 0s 896us/step - loss: 0.2030 - accuracy: 0.9368
Epoch 25/150
240/240 [==============================] - 0s 899us/step - loss: 0.2034 - accuracy: 0.9309
Epoch 26/150
240/240 [==============================] - 0s 883us/step - loss: 0.2018 - accuracy: 0.9330
Epoch 27/150
240/240 [==============================] - 0s 1ms/step - loss: 0.1964 - accuracy: 0.9343
Epoch 28/150
240/240 [==============================] - 0s 1ms/step - loss: 0.1950 - accuracy: 0.9359
Epoch 29/150
240/240 [==============================] - 0s 966us/step - loss: 0.1982 - accuracy: 0.9335
Epoch 30/150
240/240 [==============================] - 0s 891us/step - loss: 0.2025 - accuracy: 0.9326
Epoch 31/150
240/240 [==============================] - 0s 869us/step - loss: 0.1915 - accuracy: 0.9376
Epoch 32/150
240/240 [==============================] - 0s 1ms/step - loss: 0.1939 - accuracy: 0.9373
Epoch 33/150
240/240 [==============================] - 0s 963us/step - loss: 0.2066 - accuracy: 0.9323
Epoch 34/150
240/240 [==============================] - 0s 876us/step - loss: 0.1971 - accuracy: 0.9340
Epoch 35/150
240/240 [==============================] - 0s 1ms/step - loss: 0.1862 - accuracy: 0.9383
Epoch 36/150
240/240 [==============================] - 0s 895us/step - loss: 0.1943 - accuracy: 0.9335
Epoch 37/150
240/240 [==============================] - 0s 1ms/step - loss: 0.1979 - accuracy: 0.9342
Epoch 38/150
240/240 [==============================] - 0s 869us/step - loss: 0.1933 - accuracy: 0.9345
Epoch 39/150
240/240 [==============================] - 0s 896us/step - loss: 0.1906 - accuracy: 0.9377
Epoch 40/150
240/240 [==============================] - 0s 1ms/step - loss: 0.1953 - accuracy: 0.9333
Epoch 41/150
240/240 [==============================] - 0s 878us/step - loss: 0.1896 - accuracy: 0.9336
Epoch 42/150
240/240 [==============================] - 0s 1ms/step - loss: 0.1845 - accuracy: 0.9372
Epoch 43/150
240/240 [==============================] - 0s 1ms/step - loss: 0.1820 - accuracy: 0.9349
Epoch 44/150
240/240 [==============================] - 0s 1ms/step - loss: 0.1789 - accuracy: 0.9408
Epoch 45/150
240/240 [==============================] - 0s 881us/step - loss: 0.1760 - accuracy: 0.9446
Epoch 46/150
240/240 [==============================] - 0s 1ms/step - loss: 0.1799 - accuracy: 0.9389
Epoch 47/150
240/240 [==============================] - 0s 920us/step - loss: 0.1860 - accuracy: 0.9387
Epoch 48/150
240/240 [==============================] - 0s 937us/step - loss: 0.1752 - accuracy: 0.9407
Epoch 49/150
240/240 [==============================] - 0s 857us/step - loss: 0.1661 - accuracy: 0.9443
Epoch 50/150
240/240 [==============================] - 0s 882us/step - loss: 0.1619 - accuracy: 0.9472
Epoch 51/150
240/240 [==============================] - 0s 1ms/step - loss: 0.1790 - accuracy: 0.9409
Epoch 52/150
240/240 [==============================] - 0s 887us/step - loss: 0.1832 - accuracy: 0.9358
Epoch 53/150
240/240 [==============================] - 0s 1ms/step - loss: 0.1711 - accuracy: 0.9413
Epoch 54/150
240/240 [==============================] - 0s 912us/step - loss: 0.1720 - accuracy: 0.9405
Epoch 55/150
240/240 [==============================] - 0s 937us/step - loss: 0.1783 - accuracy: 0.9407
Epoch 56/150
240/240 [==============================] - 0s 1ms/step - loss: 0.1732 - accuracy: 0.9423
Epoch 57/150
240/240 [==============================] - 0s 920us/step - loss: 0.1815 - accuracy: 0.9406
Epoch 58/150
240/240 [==============================] - 0s 887us/step - loss: 0.1692 - accuracy: 0.9459
Epoch 59/150
240/240 [==============================] - 0s 878us/step - loss: 0.1654 - accuracy: 0.9466
Epoch 60/150
240/240 [==============================] - 0s 904us/step - loss: 0.1819 - accuracy: 0.9407
Epoch 61/150
240/240 [==============================] - 0s 1ms/step - loss: 0.1901 - accuracy: 0.9362
Epoch 62/150
240/240 [==============================] - 0s 884us/step - loss: 0.1881 - accuracy: 0.9329
Epoch 63/150
240/240 [==============================] - 0s 874us/step - loss: 0.1823 - accuracy: 0.9358
Epoch 64/150
240/240 [==============================] - 0s 922us/step - loss: 0.1843 - accuracy: 0.9344
Epoch 65/150
240/240 [==============================] - 0s 907us/step - loss: 0.1579 - accuracy: 0.9447
Epoch 66/150
240/240 [==============================] - 0s 1ms/step - loss: 0.1631 - accuracy: 0.9454
Epoch 67/150
240/240 [==============================] - 0s 904us/step - loss: 0.1635 - accuracy: 0.9463
Epoch 68/150
240/240 [==============================] - 0s 889us/step - loss: 0.1778 - accuracy: 0.9400
Epoch 69/150
240/240 [==============================] - 0s 1ms/step - loss: 0.1643 - accuracy: 0.9459
Epoch 70/150
240/240 [==============================] - 0s 1ms/step - loss: 0.1791 - accuracy: 0.9367
Epoch 71/150
240/240 [==============================] - 0s 928us/step - loss: 0.1584 - accuracy: 0.9460
Epoch 72/150
240/240 [==============================] - 0s 904us/step - loss: 0.1624 - accuracy: 0.9470
Epoch 73/150
240/240 [==============================] - 0s 1ms/step - loss: 0.1723 - accuracy: 0.9444
Epoch 74/150
240/240 [==============================] - 0s 951us/step - loss: 0.1832 - accuracy: 0.9392
Epoch 75/150
240/240 [==============================] - 0s 1ms/step - loss: 0.1644 - accuracy: 0.9477
Epoch 76/150
240/240 [==============================] - 0s 886us/step - loss: 0.1730 - accuracy: 0.9416
Epoch 77/150
240/240 [==============================] - 0s 892us/step - loss: 0.1831 - accuracy: 0.9394
Epoch 78/150
240/240 [==============================] - 0s 950us/step - loss: 0.1796 - accuracy: 0.9404
Epoch 79/150
240/240 [==============================] - 0s 1ms/step - loss: 0.1686 - accuracy: 0.9447
Epoch 80/150
240/240 [==============================] - 0s 2ms/step - loss: 0.1744 - accuracy: 0.9428
Epoch 81/150
240/240 [==============================] - 0s 867us/step - loss: 0.1702 - accuracy: 0.9401
Epoch 82/150
240/240 [==============================] - 0s 937us/step - loss: 0.1618 - accuracy: 0.9456
Epoch 83/150
240/240 [==============================] - 0s 1ms/step - loss: 0.1523 - accuracy: 0.9482
Epoch 84/150
240/240 [==============================] - 0s 2ms/step - loss: 0.1596 - accuracy: 0.9502
Epoch 85/150
240/240 [==============================] - 0s 1ms/step - loss: 0.1398 - accuracy: 0.9547
Epoch 86/150
240/240 [==============================] - 0s 910us/step - loss: 0.1803 - accuracy: 0.9383
Epoch 87/150
240/240 [==============================] - 0s 879us/step - loss: 0.1738 - accuracy: 0.9432
Epoch 88/150
240/240 [==============================] - 0s 1ms/step - loss: 0.1784 - accuracy: 0.9405
Epoch 89/150
240/240 [==============================] - 0s 964us/step - loss: 0.1651 - accuracy: 0.9449
Epoch 90/150
240/240 [==============================] - 0s 880us/step - loss: 0.1813 - accuracy: 0.9389
Epoch 91/150
240/240 [==============================] - 0s 873us/step - loss: 0.1581 - accuracy: 0.9512
Epoch 92/150
240/240 [==============================] - 0s 883us/step - loss: 0.1510 - accuracy: 0.9503
Epoch 93/150
240/240 [==============================] - 0s 1ms/step - loss: 0.1733 - accuracy: 0.9403
Epoch 94/150
240/240 [==============================] - 0s 919us/step - loss: 0.1701 - accuracy: 0.9416
Epoch 95/150
240/240 [==============================] - 0s 1ms/step - loss: 0.1494 - accuracy: 0.9513
Epoch 96/150
240/240 [==============================] - 0s 980us/step - loss: 0.1553 - accuracy: 0.9489
Epoch 97/150
240/240 [==============================] - 0s 901us/step - loss: 0.1665 - accuracy: 0.9408
Epoch 98/150
240/240 [==============================] - 0s 1ms/step - loss: 0.1628 - accuracy: 0.9435
Epoch 99/150
240/240 [==============================] - 0s 1ms/step - loss: 0.1711 - accuracy: 0.9428
Epoch 100/150
240/240 [==============================] - 0s 886us/step - loss: 0.1646 - accuracy: 0.9477
Epoch 101/150
240/240 [==============================] - 0s 880us/step - loss: 0.1584 - accuracy: 0.9482
Epoch 102/150
240/240 [==============================] - 0s 1ms/step - loss: 0.1625 - accuracy: 0.9455
Epoch 103/150
240/240 [==============================] - 0s 1ms/step - loss: 0.1548 - accuracy: 0.9479
Epoch 104/150
240/240 [==============================] - 0s 963us/step - loss: 0.1639 - accuracy: 0.9412
Epoch 105/150
240/240 [==============================] - 0s 933us/step - loss: 0.1448 - accuracy: 0.9536
Epoch 106/150
240/240 [==============================] - 0s 932us/step - loss: 0.1624 - accuracy: 0.9469
Epoch 107/150
240/240 [==============================] - 0s 1ms/step - loss: 0.1482 - accuracy: 0.9488
Epoch 108/150
240/240 [==============================] - 0s 884us/step - loss: 0.1582 - accuracy: 0.9467
Epoch 109/150
240/240 [==============================] - 0s 878us/step - loss: 0.1650 - accuracy: 0.9476
Epoch 110/150
240/240 [==============================] - 0s 987us/step - loss: 0.1530 - accuracy: 0.9510
Epoch 111/150
240/240 [==============================] - 0s 930us/step - loss: 0.1627 - accuracy: 0.9416
Epoch 112/150
240/240 [==============================] - 0s 1ms/step - loss: 0.1590 - accuracy: 0.9478
Epoch 113/150
240/240 [==============================] - 0s 955us/step - loss: 0.1499 - accuracy: 0.9484
Epoch 114/150
240/240 [==============================] - 0s 924us/step - loss: 0.1687 - accuracy: 0.9421
Epoch 115/150
240/240 [==============================] - 0s 980us/step - loss: 0.1625 - accuracy: 0.9435
Epoch 116/150
240/240 [==============================] - 0s 896us/step - loss: 0.1589 - accuracy: 0.9434
Epoch 117/150
240/240 [==============================] - 0s 1ms/step - loss: 0.1519 - accuracy: 0.9487
Epoch 118/150
240/240 [==============================] - 0s 857us/step - loss: 0.1460 - accuracy: 0.9503
Epoch 119/150
240/240 [==============================] - 0s 938us/step - loss: 0.1438 - accuracy: 0.9533
Epoch 120/150
240/240 [==============================] - 0s 901us/step - loss: 0.1466 - accuracy: 0.9509
Epoch 121/150
240/240 [==============================] - 0s 962us/step - loss: 0.1636 - accuracy: 0.9452
Epoch 122/150
240/240 [==============================] - 0s 1ms/step - loss: 0.1479 - accuracy: 0.9542
Epoch 123/150
240/240 [==============================] - 0s 895us/step - loss: 0.1607 - accuracy: 0.9464
Epoch 124/150
240/240 [==============================] - 0s 882us/step - loss: 0.1587 - accuracy: 0.9448
Epoch 125/150
240/240 [==============================] - 0s 1ms/step - loss: 0.1461 - accuracy: 0.9532
Epoch 126/150
240/240 [==============================] - 0s 1ms/step - loss: 0.1596 - accuracy: 0.9482
Epoch 127/150
240/240 [==============================] - 0s 910us/step - loss: 0.1526 - accuracy: 0.9505
Epoch 128/150
240/240 [==============================] - 0s 935us/step - loss: 0.1470 - accuracy: 0.9515
Epoch 129/150
240/240 [==============================] - 0s 990us/step - loss: 0.1539 - accuracy: 0.9512
Epoch 130/150
240/240 [==============================] - 0s 936us/step - loss: 0.1437 - accuracy: 0.9541
Epoch 131/150
240/240 [==============================] - 0s 1ms/step - loss: 0.1611 - accuracy: 0.9472
Epoch 132/150
240/240 [==============================] - 0s 876us/step - loss: 0.1657 - accuracy: 0.9460
Epoch 133/150
240/240 [==============================] - 0s 871us/step - loss: 0.1533 - accuracy: 0.9491
Epoch 134/150
240/240 [==============================] - 0s 880us/step - loss: 0.1637 - accuracy: 0.9439
Epoch 135/150
240/240 [==============================] - 0s 917us/step - loss: 0.1500 - accuracy: 0.9498
Epoch 136/150
240/240 [==============================] - 0s 1ms/step - loss: 0.1578 - accuracy: 0.9464
Epoch 137/150
240/240 [==============================] - 0s 899us/step - loss: 0.1505 - accuracy: 0.9531
Epoch 138/150
240/240 [==============================] - 0s 921us/step - loss: 0.1580 - accuracy: 0.9486
Epoch 139/150
240/240 [==============================] - 0s 885us/step - loss: 0.1636 - accuracy: 0.9452
Epoch 140/150
240/240 [==============================] - 0s 922us/step - loss: 0.1508 - accuracy: 0.9497
Epoch 141/150
240/240 [==============================] - 0s 1ms/step - loss: 0.1530 - accuracy: 0.9508
Epoch 142/150
240/240 [==============================] - 0s 887us/step - loss: 0.1566 - accuracy: 0.9505
Epoch 143/150
240/240 [==============================] - 0s 873us/step - loss: 0.1547 - accuracy: 0.9506
Epoch 144/150
240/240 [==============================] - 0s 923us/step - loss: 0.1568 - accuracy: 0.9527
Epoch 145/150
240/240 [==============================] - 0s 891us/step - loss: 0.1627 - accuracy: 0.9458
Epoch 146/150
240/240 [==============================] - 0s 1ms/step - loss: 0.1559 - accuracy: 0.9487
Epoch 147/150
240/240 [==============================] - 0s 868us/step - loss: 0.1498 - accuracy: 0.9490
Epoch 148/150
240/240 [==============================] - 0s 913us/step - loss: 0.1537 - accuracy: 0.9494
Epoch 149/150
240/240 [==============================] - 0s 906us/step - loss: 0.1645 - accuracy: 0.9461
Epoch 150/150
240/240 [==============================] - 0s 985us/step - loss: 0.1597 - accuracy: 0.9468
# Predicted spam probabilities from the keras network, shape (n_test, 1).
# (The original comment claimed (1000, 1); the actual shape is (1001, 1).)
proba_predm_keras = pmc_keras.predict(Z_test)
print(proba_predm_keras.shape)
# First 10 predicted probabilities.
print(proba_predm_keras[:10, :])

# Threshold at 0.5 to obtain hard 0/1 class predictions; a vectorized
# comparison replaces the original repeat-then-mask idiom (same result).
predm_keras = (proba_predm_keras[:, 0] >= 0.5).astype(int)
eval_model(Y_test, predm_keras)
(1001, 1)
[[0.771462 ]
[0.00215307]
[0.9225127 ]
[0.00215563]
[0.70727444]
[0.00215653]
[0.9694468 ]
[0.23150545]
[0.00215301]
[0.01367447]]
Confusion matrix :
[[556 27]
[ 41 377]]
Err-rate = 0.06793206793206796
F1-Score = 0.9172749391727495
# All learned keras parameters, in layer order:
# [W_hidden (57x2), b_hidden (2,), W_output (2x1), b_output (1,)].
print(pmc_keras.get_weights())
[array([[-0.21281983, -0.10157181],
[ 0.08139528, -0.74373454],
[-0.05763601, -0.22512633],
[-1.2909224 , 1.3671893 ],
[ 0.04205126, 0.8585134 ],
[-0.36352643, -0.06510309],
[-1.1543103 , 1.2757866 ],
[-0.4628375 , -0.11004866],
[-0.37497628, -0.13042518],
[-0.8186402 , -0.25072148],
[-1.4916872 , -0.37151438],
[ 0.49916327, -0.04988626],
[ 0.1420159 , -0.20927243],
[ 0.3338182 , 0.22187263],
[-1.2657392 , 0.05437269],
[-0.9314528 , 0.399455 ],
[-0.39114612, 0.34221852],
[-0.26668847, -0.24921793],
[ 0.07881838, 0.02725873],
[-0.9276161 , 0.86684674],
[-0.83217156, -0.1329859 ],
[-0.24029939, 0.03876249],
[-1.203158 , 1.4351958 ],
[-0.0646081 , 1.5157334 ],
[ 2.5468788 , -3.486414 ],
[ 0.16601779, -2.224186 ],
[ 3.3256993 , -3.3297658 ],
[ 0.6505888 , 1.0545496 ],
[ 0.670919 , -0.58734 ],
[-0.11943289, 0.0223447 ],
[ 0.2683273 , -0.93258554],
[ 0.2979921 , -0.6718159 ],
[ 0.04215759, -0.11906405],
[ 0.9371995 , -0.03606919],
[ 0.6748611 , -1.4049692 ],
[-0.40292063, 0.43167916],
[ 1.0439144 , 0.20460351],
[ 0.51698035, -0.0682429 ],
[ 0.11260268, -0.26121467],
[ 0.23744704, -0.37326622],
[ 1.057007 , -2.2303345 ],
[ 1.1900942 , -1.2668478 ],
[ 0.36424193, -0.04130829],
[ 1.1576945 , -0.85325146],
[ 0.47151244, -0.9230963 ],
[ 3.093585 , -0.18473561],
[ 0.29042947, -0.2738163 ],
[ 0.28310764, -0.40340564],
[ 0.23464772, -0.12674215],
[ 0.8780015 , 0.4846296 ],
[ 0.02900665, -0.3931549 ],
[-1.7615016 , 0.39485896],
[-2.1185894 , 2.3174436 ],
[-0.19490592, 0.12009343],
[-0.52110606, 4.430523 ],
[-0.35501477, 2.9799716 ],
[-0.0560343 , 0.17795672]], dtype=float32), array([ 0.8921996, -0.6820642], dtype=float32), array([[-4.9392695],
[ 4.8738103]], dtype=float32), array([-1.1994672], dtype=float32)]