import pandas as pd
import numpy as np
import seaborn as sns
import matplotlib.pyplot as plt
import warnings
# NOTE(review): this silences ALL warnings globally (including pandas
# deprecation notices) — consider narrowing the filter.
warnings.filterwarnings('ignore')
# Load the inventory dataset from the working directory.
df = pd.read_csv('./inventario.csv')
df.head()
df.dtypes
# 'Order' is a row-sequence column with no predictive value — drop it in place.
df.drop('Order', axis=1, inplace=True)
df.head()
df.describe()
# NOTE(review): later cells use `df_train` and `df_test`, which are never
# defined in this chunk — presumably `df` is split by File_Type in a cell
# not shown here; confirm before running end-to-end.
# --- Categorical encoding -------------------------------------------------
# Inspect the column dtypes of the training frame.
df_train.dtypes
# MarketingType is a two-level category: encode 'S' -> 0 and 'D' -> 1 on
# both the train and test frames, then cast the column to integer dtype.
_marketing_map = {'S': 0, 'D': 1}
for _frame in (df_train, df_test):
    _frame['MarketingType'] = _frame['MarketingType'].replace(_marketing_map)
    _frame['MarketingType'] = _frame['MarketingType'].astype('int')
# Per-column histograms of the training data.
df_train.hist(figsize=(20, 15), bins=100)
plt.show()
# Correlation heatmaps: signed correlations first, then absolute values
# (the latter makes weak-vs-strong relationships easier to compare).
corr_matrix = df_train.corr()
for matrix, lower_bound in ((corr_matrix, -1), (corr_matrix.abs(), 0)):
    plt.figure(figsize=(10, 8))
    sns.heatmap(matrix, vmin=lower_bound, vmax=1)
    plt.show()
# --- Split predictors / target and balance the classes --------------------
# Both SoldFlag and SoldCount are potential targets, so neither may appear
# among the predictors.
X = df_train.drop(columns=['SoldFlag', 'SoldCount'])
y = df_train[['SoldFlag']]
# Class distribution before resampling.
y.hist()
plt.show()
#!pip install imblearn
from imblearn.over_sampling import RandomOverSampler
ros = RandomOverSampler(random_state=0)
X_resampled, y_resampled = ros.fit_resample(X, y)
# Rewrap the resampled arrays as DataFrames with the original column names.
X = pd.DataFrame(columns=X.columns, data=X_resampled)
y = pd.DataFrame(columns=y.columns, data=y_resampled)
# Class distribution after resampling (should now be balanced).
y.hist()
plt.show()  # BUGFIX: was plt.plot(), which draws nothing — plt.show() displays the figure
# Hold out 20% of the resampled data for validation.
from sklearn.model_selection import train_test_split
X_train, X_val, y_train, y_val = train_test_split(X, y, test_size=0.2, random_state=42)
# Robust scaling (median / IQR), fitted only on the training split so that
# no validation statistics leak into the scaler.
from sklearn import preprocessing
scaler = preprocessing.RobustScaler()
scaler.fit(X_train.values)
normalized_train_X, normalized_val_X = (
    scaler.transform(split.values) for split in (X_train, X_val)
)
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense, Dropout
# BUGFIX: the function body below had lost its indentation in the notebook
# export (SyntaxError as written) — restored here.
def clasificador(n_var, drop_out):
    """Build a small binary-classification MLP.

    Args:
        n_var: number of input features (input dimension of the first layer).
        drop_out: dropout rate applied after each hidden layer.

    Returns:
        An uncompiled keras Sequential model ending in a sigmoid unit.
    """
    model = Sequential()
    model.add(Dense(10, input_dim=n_var, activation='relu'))
    model.add(Dropout(drop_out))
    # input_dim is only meaningful on the first layer, so it is omitted here.
    model.add(Dense(10, activation='relu'))
    model.add(Dropout(drop_out))
    model.add(Dense(1, activation='sigmoid'))
    return model
n_epochs = 100  # training epochs
n_batch = 100   # mini-batch size
n_var = normalized_train_X.shape[1]  # number of input features after scaling
# Do the hyperparameters need to be justified??
2021-10-03 01:27:58.358296: W tensorflow/stream_executor/platform/default/dso_loader.cc:60] Could not load dynamic library 'libcudart.so.11.0'; dlerror: libcudart.so.11.0: cannot open shared object file: No such file or directory
2021-10-03 01:27:58.358350: I tensorflow/stream_executor/cuda/cudart_stub.cc:29] Ignore above cudart dlerror if you do not have a GPU set up on your machine.
# Instantiate the classifier with a 0.1 dropout rate and print its layers.
modelo_cd = clasificador(n_var, 0.1)
modelo_cd.summary()
Model: "sequential"
_________________________________________________________________
Layer (type) Output Shape Param #
=================================================================
dense (Dense) (None, 10) 100
_________________________________________________________________
dropout (Dropout) (None, 10) 0
_________________________________________________________________
dense_1 (Dense) (None, 10) 110
_________________________________________________________________
dropout_1 (Dropout) (None, 10) 0
_________________________________________________________________
dense_2 (Dense) (None, 1) 11
=================================================================
Total params: 221
Trainable params: 221
Non-trainable params: 0
_________________________________________________________________
2021-10-03 01:28:03.374970: I tensorflow/compiler/jit/xla_cpu_device.cc:41] Not creating XLA devices, tf_xla_enable_xla_devices not set
2021-10-03 01:28:03.375242: W tensorflow/stream_executor/platform/default/dso_loader.cc:60] Could not load dynamic library 'libcuda.so.1'; dlerror: libcuda.so.1: cannot open shared object file: No such file or directory
2021-10-03 01:28:03.375265: W tensorflow/stream_executor/cuda/cuda_driver.cc:326] failed call to cuInit: UNKNOWN ERROR (303)
2021-10-03 01:28:03.375290: I tensorflow/stream_executor/cuda/cuda_diagnostics.cc:156] kernel driver does not appear to be running on this host (p-baae438c-c908-4602-8c0a-12c5c956e45b): /proc/driver/nvidia/version does not exist
2021-10-03 01:28:03.375556: I tensorflow/core/platform/cpu_feature_guard.cc:142] This TensorFlow binary is optimized with oneAPI Deep Neural Network Library (oneDNN) to use the following CPU instructions in performance-critical operations: AVX2 FMA
To enable them in other operations, rebuild TensorFlow with the appropriate compiler flags.
2021-10-03 01:28:03.375818: I tensorflow/compiler/jit/xla_gpu_device.cc:99] Not creating XLA devices, tf_xla_enable_xla_devices not set
# Compile for binary classification; track accuracy, precision and recall.
modelo_cd.compile(loss='binary_crossentropy', optimizer = 'adam', metrics=['acc', 'Precision', 'Recall'])
# Train; Keras holds out a further 20% of the training data for the
# per-epoch validation metrics reported below.
history_cd = modelo_cd.fit(normalized_train_X, y_train, validation_split=0.2, epochs = n_epochs, verbose=1, batch_size=n_batch)
2021-10-03 01:28:03.501052: I tensorflow/compiler/mlir/mlir_graph_optimization_pass.cc:116] None of the MLIR optimization passes are enabled (registered 2)
2021-10-03 01:28:03.520156: I tensorflow/core/platform/profile_utils/cpu_utils.cc:112] CPU Frequency: 2199870000 Hz
Epoch 1/100
807/807 [==============================] - 4s 3ms/step - loss: 0.6591 - acc: 0.6145 - precision: 0.6248 - recall: 0.5789 - val_loss: 0.5955 - val_acc: 0.6893 - val_precision: 0.6900 - val_recall: 0.6913
Epoch 2/100
807/807 [==============================] - 2s 2ms/step - loss: 0.6077 - acc: 0.6828 - precision: 0.6803 - recall: 0.6931 - val_loss: 0.5898 - val_acc: 0.6910 - val_precision: 0.6886 - val_recall: 0.7010
Epoch 3/100
807/807 [==============================] - 2s 2ms/step - loss: 0.5994 - acc: 0.6870 - precision: 0.6867 - recall: 0.6879 - val_loss: 0.5889 - val_acc: 0.6940 - val_precision: 0.6887 - val_recall: 0.7118
Epoch 4/100
807/807 [==============================] - 2s 2ms/step - loss: 0.5968 - acc: 0.6896 - precision: 0.6884 - recall: 0.6913 - val_loss: 0.5869 - val_acc: 0.6943 - val_precision: 0.6937 - val_recall: 0.6996
Epoch 5/100
807/807 [==============================] - 2s 2ms/step - loss: 0.5955 - acc: 0.6921 - precision: 0.6937 - recall: 0.6939 - val_loss: 0.5865 - val_acc: 0.6950 - val_precision: 0.7059 - val_recall: 0.6722
Epoch 6/100
807/807 [==============================] - 2s 2ms/step - loss: 0.5915 - acc: 0.6939 - precision: 0.7002 - recall: 0.6791 - val_loss: 0.5873 - val_acc: 0.6951 - val_precision: 0.6926 - val_recall: 0.7052
Epoch 7/100
807/807 [==============================] - 2s 2ms/step - loss: 0.5896 - acc: 0.6953 - precision: 0.6991 - recall: 0.6911 - val_loss: 0.5855 - val_acc: 0.6971 - val_precision: 0.7010 - val_recall: 0.6909
Epoch 8/100
807/807 [==============================] - 2s 2ms/step - loss: 0.5912 - acc: 0.6943 - precision: 0.6994 - recall: 0.6855 - val_loss: 0.5851 - val_acc: 0.6961 - val_precision: 0.6974 - val_recall: 0.6965
Epoch 9/100
807/807 [==============================] - 2s 2ms/step - loss: 0.5913 - acc: 0.6935 - precision: 0.6973 - recall: 0.6908 - val_loss: 0.5844 - val_acc: 0.6967 - val_precision: 0.7042 - val_recall: 0.6820
Epoch 10/100
807/807 [==============================] - 2s 2ms/step - loss: 0.5897 - acc: 0.6951 - precision: 0.7020 - recall: 0.6827 - val_loss: 0.5840 - val_acc: 0.6964 - val_precision: 0.6998 - val_recall: 0.6916
Epoch 11/100
807/807 [==============================] - 2s 2ms/step - loss: 0.5881 - acc: 0.6953 - precision: 0.6993 - recall: 0.6868 - val_loss: 0.5840 - val_acc: 0.6965 - val_precision: 0.7033 - val_recall: 0.6832
Epoch 12/100
807/807 [==============================] - 2s 2ms/step - loss: 0.5921 - acc: 0.6924 - precision: 0.7023 - recall: 0.6794 - val_loss: 0.5837 - val_acc: 0.6976 - val_precision: 0.7002 - val_recall: 0.6947
Epoch 13/100
807/807 [==============================] - 2s 2ms/step - loss: 0.5894 - acc: 0.6928 - precision: 0.6970 - recall: 0.6835 - val_loss: 0.5825 - val_acc: 0.6983 - val_precision: 0.7061 - val_recall: 0.6828
Epoch 14/100
807/807 [==============================] - 2s 2ms/step - loss: 0.5896 - acc: 0.6957 - precision: 0.7020 - recall: 0.6876 - val_loss: 0.5827 - val_acc: 0.6974 - val_precision: 0.7043 - val_recall: 0.6840
Epoch 15/100
807/807 [==============================] - 2s 2ms/step - loss: 0.5893 - acc: 0.6966 - precision: 0.7030 - recall: 0.6862 - val_loss: 0.5822 - val_acc: 0.6988 - val_precision: 0.7058 - val_recall: 0.6852
Epoch 16/100
807/807 [==============================] - 2s 2ms/step - loss: 0.5880 - acc: 0.6955 - precision: 0.6991 - recall: 0.6842 - val_loss: 0.5831 - val_acc: 0.6976 - val_precision: 0.6962 - val_recall: 0.7046
Epoch 17/100
807/807 [==============================] - 2s 2ms/step - loss: 0.5880 - acc: 0.6949 - precision: 0.6973 - recall: 0.6884 - val_loss: 0.5819 - val_acc: 0.7005 - val_precision: 0.7067 - val_recall: 0.6891
Epoch 18/100
807/807 [==============================] - 2s 2ms/step - loss: 0.5879 - acc: 0.6965 - precision: 0.7017 - recall: 0.6876 - val_loss: 0.5814 - val_acc: 0.7002 - val_precision: 0.7114 - val_recall: 0.6773
Epoch 19/100
807/807 [==============================] - 2s 2ms/step - loss: 0.5874 - acc: 0.6971 - precision: 0.7030 - recall: 0.6845 - val_loss: 0.5820 - val_acc: 0.6995 - val_precision: 0.7066 - val_recall: 0.6856
Epoch 20/100
807/807 [==============================] - 2s 2ms/step - loss: 0.5859 - acc: 0.6984 - precision: 0.7031 - recall: 0.6883 - val_loss: 0.5839 - val_acc: 0.7010 - val_precision: 0.6974 - val_recall: 0.7137
Epoch 21/100
807/807 [==============================] - 2s 2ms/step - loss: 0.5875 - acc: 0.6971 - precision: 0.7023 - recall: 0.6860 - val_loss: 0.5822 - val_acc: 0.6993 - val_precision: 0.6986 - val_recall: 0.7044
Epoch 22/100
807/807 [==============================] - 2s 2ms/step - loss: 0.5866 - acc: 0.6983 - precision: 0.7038 - recall: 0.6925 - val_loss: 0.5811 - val_acc: 0.7002 - val_precision: 0.7086 - val_recall: 0.6837
Epoch 23/100
807/807 [==============================] - 2s 2ms/step - loss: 0.5895 - acc: 0.6945 - precision: 0.7002 - recall: 0.6861 - val_loss: 0.5820 - val_acc: 0.6996 - val_precision: 0.6992 - val_recall: 0.7040
Epoch 24/100
807/807 [==============================] - 2s 2ms/step - loss: 0.5850 - acc: 0.6997 - precision: 0.7030 - recall: 0.6904 - val_loss: 0.5807 - val_acc: 0.6997 - val_precision: 0.7054 - val_recall: 0.6892
Epoch 25/100
807/807 [==============================] - 2s 2ms/step - loss: 0.5853 - acc: 0.6986 - precision: 0.7014 - recall: 0.6876 - val_loss: 0.5831 - val_acc: 0.6997 - val_precision: 0.6963 - val_recall: 0.7119
Epoch 26/100
807/807 [==============================] - 2s 2ms/step - loss: 0.5865 - acc: 0.6975 - precision: 0.7014 - recall: 0.6880 - val_loss: 0.5814 - val_acc: 0.6999 - val_precision: 0.7019 - val_recall: 0.6985
Epoch 27/100
807/807 [==============================] - 2s 2ms/step - loss: 0.5853 - acc: 0.6975 - precision: 0.7027 - recall: 0.6848 - val_loss: 0.5823 - val_acc: 0.6998 - val_precision: 0.7003 - val_recall: 0.7020
Epoch 28/100
807/807 [==============================] - 2s 2ms/step - loss: 0.5856 - acc: 0.6953 - precision: 0.6999 - recall: 0.6863 - val_loss: 0.5812 - val_acc: 0.7001 - val_precision: 0.7042 - val_recall: 0.6938
Epoch 29/100
807/807 [==============================] - 2s 2ms/step - loss: 0.5862 - acc: 0.6977 - precision: 0.7063 - recall: 0.6874 - val_loss: 0.5803 - val_acc: 0.7014 - val_precision: 0.7119 - val_recall: 0.6799
Epoch 30/100
807/807 [==============================] - 2s 2ms/step - loss: 0.5890 - acc: 0.6959 - precision: 0.7022 - recall: 0.6769 - val_loss: 0.5808 - val_acc: 0.7000 - val_precision: 0.7061 - val_recall: 0.6886
Epoch 31/100
807/807 [==============================] - 2s 2ms/step - loss: 0.5843 - acc: 0.6994 - precision: 0.7052 - recall: 0.6846 - val_loss: 0.5810 - val_acc: 0.6998 - val_precision: 0.7033 - val_recall: 0.6946
Epoch 32/100
807/807 [==============================] - 2s 2ms/step - loss: 0.5869 - acc: 0.6968 - precision: 0.7034 - recall: 0.6884 - val_loss: 0.5806 - val_acc: 0.7006 - val_precision: 0.7080 - val_recall: 0.6865
Epoch 33/100
807/807 [==============================] - 2s 2ms/step - loss: 0.5856 - acc: 0.6970 - precision: 0.7039 - recall: 0.6896 - val_loss: 0.5807 - val_acc: 0.7002 - val_precision: 0.7050 - val_recall: 0.6920
Epoch 34/100
807/807 [==============================] - 2s 2ms/step - loss: 0.5845 - acc: 0.7002 - precision: 0.7039 - recall: 0.6931 - val_loss: 0.5822 - val_acc: 0.6993 - val_precision: 0.6984 - val_recall: 0.7050
Epoch 35/100
807/807 [==============================] - 2s 2ms/step - loss: 0.5863 - acc: 0.6997 - precision: 0.7043 - recall: 0.6934 - val_loss: 0.5807 - val_acc: 0.6996 - val_precision: 0.7033 - val_recall: 0.6940
Epoch 36/100
807/807 [==============================] - 2s 2ms/step - loss: 0.5846 - acc: 0.7006 - precision: 0.7068 - recall: 0.6883 - val_loss: 0.5818 - val_acc: 0.7003 - val_precision: 0.7003 - val_recall: 0.7038
Epoch 37/100
807/807 [==============================] - 2s 2ms/step - loss: 0.5849 - acc: 0.6970 - precision: 0.7016 - recall: 0.6901 - val_loss: 0.5813 - val_acc: 0.7010 - val_precision: 0.7069 - val_recall: 0.6901
Epoch 38/100
807/807 [==============================] - 2s 2ms/step - loss: 0.5861 - acc: 0.6980 - precision: 0.7055 - recall: 0.6824 - val_loss: 0.5821 - val_acc: 0.7017 - val_precision: 0.6982 - val_recall: 0.7143
Epoch 39/100
807/807 [==============================] - 2s 2ms/step - loss: 0.5872 - acc: 0.6966 - precision: 0.6995 - recall: 0.6881 - val_loss: 0.5805 - val_acc: 0.7014 - val_precision: 0.7040 - val_recall: 0.6985
Epoch 40/100
807/807 [==============================] - 2s 2ms/step - loss: 0.5838 - acc: 0.6998 - precision: 0.7031 - recall: 0.6901 - val_loss: 0.5824 - val_acc: 0.7008 - val_precision: 0.6995 - val_recall: 0.7076
Epoch 41/100
807/807 [==============================] - 2s 2ms/step - loss: 0.5862 - acc: 0.6973 - precision: 0.7050 - recall: 0.6882 - val_loss: 0.5810 - val_acc: 0.6996 - val_precision: 0.6995 - val_recall: 0.7035
Epoch 42/100
807/807 [==============================] - 2s 2ms/step - loss: 0.5852 - acc: 0.7002 - precision: 0.7046 - recall: 0.6944 - val_loss: 0.5810 - val_acc: 0.7009 - val_precision: 0.7043 - val_recall: 0.6961
Epoch 43/100
807/807 [==============================] - 2s 2ms/step - loss: 0.5867 - acc: 0.6970 - precision: 0.7022 - recall: 0.6842 - val_loss: 0.5810 - val_acc: 0.7007 - val_precision: 0.6984 - val_recall: 0.7101
Epoch 44/100
807/807 [==============================] - 2s 2ms/step - loss: 0.5820 - acc: 0.7026 - precision: 0.7049 - recall: 0.6952 - val_loss: 0.5806 - val_acc: 0.7003 - val_precision: 0.7021 - val_recall: 0.6993
Epoch 45/100
807/807 [==============================] - 2s 2ms/step - loss: 0.5844 - acc: 0.7003 - precision: 0.7048 - recall: 0.6917 - val_loss: 0.5819 - val_acc: 0.7010 - val_precision: 0.6976 - val_recall: 0.7134
Epoch 46/100
807/807 [==============================] - 2s 2ms/step - loss: 0.5841 - acc: 0.7007 - precision: 0.7047 - recall: 0.6958 - val_loss: 0.5806 - val_acc: 0.6993 - val_precision: 0.7040 - val_recall: 0.6913
Epoch 47/100
807/807 [==============================] - 2s 2ms/step - loss: 0.5854 - acc: 0.7009 - precision: 0.7041 - recall: 0.6908 - val_loss: 0.5809 - val_acc: 0.7002 - val_precision: 0.7020 - val_recall: 0.6992
Epoch 48/100
807/807 [==============================] - 2s 2ms/step - loss: 0.5860 - acc: 0.6976 - precision: 0.7010 - recall: 0.6881 - val_loss: 0.5802 - val_acc: 0.6993 - val_precision: 0.7016 - val_recall: 0.6969
Epoch 49/100
807/807 [==============================] - 2s 2ms/step - loss: 0.5868 - acc: 0.6970 - precision: 0.6999 - recall: 0.6906 - val_loss: 0.5819 - val_acc: 0.7005 - val_precision: 0.6991 - val_recall: 0.7076
Epoch 50/100
807/807 [==============================] - 2s 2ms/step - loss: 0.5829 - acc: 0.7013 - precision: 0.7034 - recall: 0.6922 - val_loss: 0.5800 - val_acc: 0.7000 - val_precision: 0.7035 - val_recall: 0.6948
Epoch 51/100
807/807 [==============================] - 2s 2ms/step - loss: 0.5858 - acc: 0.6986 - precision: 0.7037 - recall: 0.6876 - val_loss: 0.5801 - val_acc: 0.7004 - val_precision: 0.7022 - val_recall: 0.6995
Epoch 52/100
807/807 [==============================] - 2s 2ms/step - loss: 0.5866 - acc: 0.6979 - precision: 0.7009 - recall: 0.6898 - val_loss: 0.5815 - val_acc: 0.7006 - val_precision: 0.6964 - val_recall: 0.7149
Epoch 53/100
807/807 [==============================] - 2s 2ms/step - loss: 0.5874 - acc: 0.6951 - precision: 0.6962 - recall: 0.6932 - val_loss: 0.5801 - val_acc: 0.7004 - val_precision: 0.7020 - val_recall: 0.6998
Epoch 54/100
807/807 [==============================] - 2s 2ms/step - loss: 0.5850 - acc: 0.6991 - precision: 0.7031 - recall: 0.6885 - val_loss: 0.5805 - val_acc: 0.7002 - val_precision: 0.6993 - val_recall: 0.7061
Epoch 55/100
807/807 [==============================] - 2s 2ms/step - loss: 0.5870 - acc: 0.6972 - precision: 0.7034 - recall: 0.6914 - val_loss: 0.5804 - val_acc: 0.6999 - val_precision: 0.6999 - val_recall: 0.7034
Epoch 56/100
807/807 [==============================] - 2s 2ms/step - loss: 0.5877 - acc: 0.6981 - precision: 0.7021 - recall: 0.6887 - val_loss: 0.5804 - val_acc: 0.6995 - val_precision: 0.6944 - val_recall: 0.7160
Epoch 57/100
807/807 [==============================] - 2s 2ms/step - loss: 0.5858 - acc: 0.6975 - precision: 0.7003 - recall: 0.6966 - val_loss: 0.5816 - val_acc: 0.7005 - val_precision: 0.6986 - val_recall: 0.7089
Epoch 58/100
807/807 [==============================] - 2s 2ms/step - loss: 0.5860 - acc: 0.6966 - precision: 0.6976 - recall: 0.6945 - val_loss: 0.5811 - val_acc: 0.7021 - val_precision: 0.6968 - val_recall: 0.7191
Epoch 59/100
807/807 [==============================] - 2s 2ms/step - loss: 0.5823 - acc: 0.7010 - precision: 0.7013 - recall: 0.7000 - val_loss: 0.5802 - val_acc: 0.6996 - val_precision: 0.7010 - val_recall: 0.6995
Epoch 60/100
807/807 [==============================] - 2s 2ms/step - loss: 0.5846 - acc: 0.6997 - precision: 0.7054 - recall: 0.6919 - val_loss: 0.5812 - val_acc: 0.7006 - val_precision: 0.6966 - val_recall: 0.7144
Epoch 61/100
807/807 [==============================] - 2s 2ms/step - loss: 0.5848 - acc: 0.6989 - precision: 0.7031 - recall: 0.6959 - val_loss: 0.5800 - val_acc: 0.6996 - val_precision: 0.7011 - val_recall: 0.6994
Epoch 62/100
807/807 [==============================] - 2s 2ms/step - loss: 0.5841 - acc: 0.7000 - precision: 0.7026 - recall: 0.6926 - val_loss: 0.5810 - val_acc: 0.7002 - val_precision: 0.6980 - val_recall: 0.7094
Epoch 63/100
807/807 [==============================] - 2s 2ms/step - loss: 0.5846 - acc: 0.7004 - precision: 0.7030 - recall: 0.6941 - val_loss: 0.5818 - val_acc: 0.7007 - val_precision: 0.6975 - val_recall: 0.7126
Epoch 64/100
807/807 [==============================] - 2s 2ms/step - loss: 0.5863 - acc: 0.6969 - precision: 0.6988 - recall: 0.6896 - val_loss: 0.5811 - val_acc: 0.7003 - val_precision: 0.6964 - val_recall: 0.7139
Epoch 65/100
807/807 [==============================] - 2s 2ms/step - loss: 0.5828 - acc: 0.7023 - precision: 0.7063 - recall: 0.6981 - val_loss: 0.5801 - val_acc: 0.7005 - val_precision: 0.7012 - val_recall: 0.7022
Epoch 66/100
807/807 [==============================] - 2s 2ms/step - loss: 0.5830 - acc: 0.6992 - precision: 0.7048 - recall: 0.6976 - val_loss: 0.5813 - val_acc: 0.7004 - val_precision: 0.6953 - val_recall: 0.7171
Epoch 67/100
807/807 [==============================] - 2s 2ms/step - loss: 0.5842 - acc: 0.6994 - precision: 0.7010 - recall: 0.6949 - val_loss: 0.5801 - val_acc: 0.7014 - val_precision: 0.7022 - val_recall: 0.7030
Epoch 68/100
807/807 [==============================] - 2s 2ms/step - loss: 0.5852 - acc: 0.6986 - precision: 0.7013 - recall: 0.6941 - val_loss: 0.5801 - val_acc: 0.7007 - val_precision: 0.6994 - val_recall: 0.7076
Epoch 69/100
807/807 [==============================] - 2s 2ms/step - loss: 0.5836 - acc: 0.7000 - precision: 0.7032 - recall: 0.6957 - val_loss: 0.5800 - val_acc: 0.6996 - val_precision: 0.7036 - val_recall: 0.6933
Epoch 70/100
807/807 [==============================] - 2s 2ms/step - loss: 0.5836 - acc: 0.6993 - precision: 0.7030 - recall: 0.6930 - val_loss: 0.5815 - val_acc: 0.7011 - val_precision: 0.6963 - val_recall: 0.7168
Epoch 71/100
807/807 [==============================] - 2s 2ms/step - loss: 0.5836 - acc: 0.7032 - precision: 0.7042 - recall: 0.7012 - val_loss: 0.5828 - val_acc: 0.7015 - val_precision: 0.6943 - val_recall: 0.7237
Epoch 72/100
807/807 [==============================] - 2s 2ms/step - loss: 0.5840 - acc: 0.6990 - precision: 0.7003 - recall: 0.6956 - val_loss: 0.5801 - val_acc: 0.7007 - val_precision: 0.6983 - val_recall: 0.7102
Epoch 73/100
807/807 [==============================] - 2s 2ms/step - loss: 0.5842 - acc: 0.6983 - precision: 0.7018 - recall: 0.6971 - val_loss: 0.5813 - val_acc: 0.6993 - val_precision: 0.6947 - val_recall: 0.7149
Epoch 74/100
807/807 [==============================] - 2s 2ms/step - loss: 0.5851 - acc: 0.6986 - precision: 0.7018 - recall: 0.6945 - val_loss: 0.5797 - val_acc: 0.7011 - val_precision: 0.7004 - val_recall: 0.7062
Epoch 75/100
807/807 [==============================] - 2s 2ms/step - loss: 0.5862 - acc: 0.6986 - precision: 0.7021 - recall: 0.6956 - val_loss: 0.5798 - val_acc: 0.7022 - val_precision: 0.7012 - val_recall: 0.7082
Epoch 76/100
807/807 [==============================] - 2s 2ms/step - loss: 0.5827 - acc: 0.6997 - precision: 0.7023 - recall: 0.6932 - val_loss: 0.5798 - val_acc: 0.7009 - val_precision: 0.6995 - val_recall: 0.7080
Epoch 77/100
807/807 [==============================] - 2s 2ms/step - loss: 0.5859 - acc: 0.6966 - precision: 0.6975 - recall: 0.6945 - val_loss: 0.5808 - val_acc: 0.7014 - val_precision: 0.6994 - val_recall: 0.7099
Epoch 78/100
807/807 [==============================] - 2s 2ms/step - loss: 0.5854 - acc: 0.6984 - precision: 0.6987 - recall: 0.6940 - val_loss: 0.5817 - val_acc: 0.7004 - val_precision: 0.6955 - val_recall: 0.7165
Epoch 79/100
807/807 [==============================] - 2s 2ms/step - loss: 0.5844 - acc: 0.7018 - precision: 0.7051 - recall: 0.7025 - val_loss: 0.5804 - val_acc: 0.7017 - val_precision: 0.7006 - val_recall: 0.7081
Epoch 80/100
807/807 [==============================] - 2s 2ms/step - loss: 0.5829 - acc: 0.7007 - precision: 0.7024 - recall: 0.6981 - val_loss: 0.5818 - val_acc: 0.7018 - val_precision: 0.6967 - val_recall: 0.7182
Epoch 81/100
807/807 [==============================] - 2s 2ms/step - loss: 0.5836 - acc: 0.7002 - precision: 0.7003 - recall: 0.6994 - val_loss: 0.5812 - val_acc: 0.7002 - val_precision: 0.6980 - val_recall: 0.7095
Epoch 82/100
807/807 [==============================] - 2s 2ms/step - loss: 0.5873 - acc: 0.6970 - precision: 0.6993 - recall: 0.6949 - val_loss: 0.5805 - val_acc: 0.7017 - val_precision: 0.7013 - val_recall: 0.7062
Epoch 83/100
807/807 [==============================] - 2s 2ms/step - loss: 0.5817 - acc: 0.7016 - precision: 0.7045 - recall: 0.6979 - val_loss: 0.5812 - val_acc: 0.7020 - val_precision: 0.6980 - val_recall: 0.7155
Epoch 84/100
807/807 [==============================] - 2s 2ms/step - loss: 0.5835 - acc: 0.7019 - precision: 0.7026 - recall: 0.7016 - val_loss: 0.5808 - val_acc: 0.7017 - val_precision: 0.6956 - val_recall: 0.7209
Epoch 85/100
807/807 [==============================] - 2s 2ms/step - loss: 0.5829 - acc: 0.7011 - precision: 0.7017 - recall: 0.7031 - val_loss: 0.5818 - val_acc: 0.7009 - val_precision: 0.6929 - val_recall: 0.7252
Epoch 86/100
807/807 [==============================] - 2s 2ms/step - loss: 0.5852 - acc: 0.6989 - precision: 0.6990 - recall: 0.7023 - val_loss: 0.5805 - val_acc: 0.7012 - val_precision: 0.6998 - val_recall: 0.7081
Epoch 87/100
807/807 [==============================] - 2s 2ms/step - loss: 0.5834 - acc: 0.7004 - precision: 0.7018 - recall: 0.6969 - val_loss: 0.5816 - val_acc: 0.7016 - val_precision: 0.6944 - val_recall: 0.7237
Epoch 88/100
807/807 [==============================] - 2s 2ms/step - loss: 0.5880 - acc: 0.6947 - precision: 0.6959 - recall: 0.6948 - val_loss: 0.5796 - val_acc: 0.7019 - val_precision: 0.6997 - val_recall: 0.7111
Epoch 89/100
807/807 [==============================] - 2s 2ms/step - loss: 0.5835 - acc: 0.7007 - precision: 0.7034 - recall: 0.6985 - val_loss: 0.5820 - val_acc: 0.7010 - val_precision: 0.6947 - val_recall: 0.7207
Epoch 90/100
807/807 [==============================] - 2s 2ms/step - loss: 0.5867 - acc: 0.6971 - precision: 0.6999 - recall: 0.6969 - val_loss: 0.5809 - val_acc: 0.7016 - val_precision: 0.6973 - val_recall: 0.7159
Epoch 91/100
807/807 [==============================] - 2s 2ms/step - loss: 0.5832 - acc: 0.7010 - precision: 0.7016 - recall: 0.7003 - val_loss: 0.5791 - val_acc: 0.7002 - val_precision: 0.7016 - val_recall: 0.7005
Epoch 92/100
807/807 [==============================] - 2s 2ms/step - loss: 0.5845 - acc: 0.6994 - precision: 0.7016 - recall: 0.6944 - val_loss: 0.5799 - val_acc: 0.7009 - val_precision: 0.7013 - val_recall: 0.7035
Epoch 93/100
807/807 [==============================] - 2s 2ms/step - loss: 0.5828 - acc: 0.6999 - precision: 0.7028 - recall: 0.6995 - val_loss: 0.5809 - val_acc: 0.7012 - val_precision: 0.6957 - val_recall: 0.7188
Epoch 94/100
807/807 [==============================] - 2s 2ms/step - loss: 0.5853 - acc: 0.6970 - precision: 0.6986 - recall: 0.6930 - val_loss: 0.5801 - val_acc: 0.6985 - val_precision: 0.6982 - val_recall: 0.7027
Epoch 95/100
807/807 [==============================] - 2s 2ms/step - loss: 0.5830 - acc: 0.7007 - precision: 0.7015 - recall: 0.6983 - val_loss: 0.5799 - val_acc: 0.7017 - val_precision: 0.7001 - val_recall: 0.7091
Epoch 96/100
807/807 [==============================] - 2s 2ms/step - loss: 0.5845 - acc: 0.6993 - precision: 0.7002 - recall: 0.6956 - val_loss: 0.5800 - val_acc: 0.7011 - val_precision: 0.6962 - val_recall: 0.7174
Epoch 97/100
807/807 [==============================] - 2s 2ms/step - loss: 0.5868 - acc: 0.6971 - precision: 0.6975 - recall: 0.6957 - val_loss: 0.5810 - val_acc: 0.7016 - val_precision: 0.6972 - val_recall: 0.7163
Epoch 98/100
807/807 [==============================] - 2s 2ms/step - loss: 0.5853 - acc: 0.6978 - precision: 0.7012 - recall: 0.6974 - val_loss: 0.5803 - val_acc: 0.7023 - val_precision: 0.6987 - val_recall: 0.7149
Epoch 99/100
807/807 [==============================] - 2s 2ms/step - loss: 0.5858 - acc: 0.6982 - precision: 0.6996 - recall: 0.7016 - val_loss: 0.5803 - val_acc: 0.7010 - val_precision: 0.6969 - val_recall: 0.7151
Epoch 100/100
807/807 [==============================] - 2s 2ms/step - loss: 0.5849 - acc: 0.6987 - precision: 0.7009 - recall: 0.6962 - val_loss: 0.5812 - val_acc: 0.7019 - val_precision: 0.6957 - val_recall: 0.7212
# Hard class predictions on the validation split (sigmoid outputs
# thresholded at 0.5 via rounding).
y_pred = modelo_cd.predict(normalized_val_X).round()
#!pip install mlxtend
from sklearn.metrics import confusion_matrix
from mlxtend.plotting import plot_confusion_matrix
# Confusion matrix with absolute counts.
cm = confusion_matrix(y_val, y_pred)
fig, ax = plot_confusion_matrix(
    conf_mat=cm,
    figsize=(6, 6),
    show_absolute=True,
    show_normed=False,
    colorbar=False,
)
plt.title('Matriz de confusión')
plt.show()
# Per-class precision / recall / F1 summary.
from sklearn.metrics import classification_report
print(classification_report(y_val, y_pred))
precision recall f1-score support
0.0 0.71 0.69 0.70 12723
1.0 0.69 0.71 0.70 12477
accuracy 0.70 25200
macro avg 0.70 0.70 0.70 25200
weighted avg 0.70 0.70 0.70 25200
# --- Score the active (test) records --------------------------------------
# Same target columns dropped as for training, so the feature layout matches.
X_test = df_test.drop(columns=['SoldFlag', 'SoldCount'])
normalized_X_test = scaler.transform(X_test.values)
# PERF FIX: the original ran modelo_cd.predict twice on the same input;
# one forward pass yields both the classes and the probabilities.
_test_probs = modelo_cd.predict(normalized_X_test)
# Hard 0/1 class predictions.
y_test_pred = _test_probs.round()
# Sale probabilities rounded to 3 decimals.
y_test_pred_prob = _test_probs.round(3)
# Attach probabilities to the active data and rank by them (1 = most likely).
df_test.loc[:, ['SaleProb']] = y_test_pred_prob
rankings = df_test['SaleProb'].rank(method='min', ascending=False).values
df_test.loc[:, ['Rank']] = rankings
# Ranking table, highest sale probability first.
df_ranks = df_test.sort_values(by=["SaleProb"], ascending=False)[['SaleProb', 'Rank']]
display(df_ranks)
# Rank historical records by units sold, then bucket the ranks into deciles.
df_train.loc[:,['RankPercentile']] = df_train['SoldCount'].rank(method='first')
df_train.loc[:,['decile']] = pd.qcut(df_train['RankPercentile'].values, 10).codes
# Rebuild the feature matrix without the targets or the derived rank columns.
X = df_train.drop(columns=['SoldFlag','SoldCount','RankPercentile','decile']) # not usable for prediction / not present in the active data
# One-hot target: one column per decile.
y_cant = pd.get_dummies(df_train['decile'])
# NOTE(review): `modelo_cd_2` and `normalized_val_X_2` are not defined
# anywhere in this chunk — presumably built in cells not shown; confirm
# they exist before running.
y_pred_2 = modelo_cd_2.predict(normalized_val_X_2)
print('predicciones para una observación de validación:', np.round(y_pred_2[0],3))
predicciones para una observación de validación: [0. 0. 0. 0.084 0.06 0.159 0.26 0.301 0.071 0.064]