import keras
import tensorflow as tf
from keras.datasets import mnist
from keras.models import Sequential
from keras.layers import Dense, Dropout, Flatten
from keras.layers import Conv2D, MaxPooling2D, AveragePooling2D
import numpy as np
import pandas as pd
from sklearn.metrics import classification_report, confusion_matrix
import matplotlib.pyplot as plt
import seaborn as sn
from tensorflow.keras.preprocessing.image import load_img, img_to_array, array_to_img
batch_size = 32 # number of samples processed per training step
num_classes = 2 # the image dataset has 2 classes [dogs and cats]
dropout = 0.5
epochs = 25
lr = 0.001 # learning rate (controls the size of the error-optimization step)
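# Note: lr is defined above but never reaches the optimizer, because model.compile below is
# given the string 'Adam' (default learning rate). A hedged way to actually use it would be:
# optimizer = tf.keras.optimizers.Adam(learning_rate=lr)
# model.compile(optimizer=optimizer, loss='categorical_crossentropy', metrics=['accuracy'])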
base_model = tf.keras.applications.ResNet50(weights='imagenet', include_top=False)
x = base_model.output # take the pre-trained model's output tensor and assign it to the variable x
#add a GlobalAveragePooling2D layer after x and assign this node back to x (so x is the top of the graph again)
x=tf.keras.layers.GlobalAveragePooling2D()(x)
#add a dense layer with 256 neurons and ReLU activation after x; assign this node back to x
x=tf.keras.layers.Dense(256,activation='relu')(x)
#add a dense layer with 128 neurons and ReLU activation after x; assign this node back to x
x=tf.keras.layers.Dense(128,activation='relu')(x)
#add a dense layer with 64 neurons and ReLU activation after x; assign this node back to x
x=tf.keras.layers.Dense(64,activation='relu')(x)
#add a Dropout layer after x; with dropout = 0.5, 50% of the activations are randomly dropped during training
x=tf.keras.layers.Dropout(dropout)(x)
#add a dense layer with 2 neurons (two classes) and softmax activation (probability distribution) after x; assign this node to preds
preds=tf.keras.layers.Dense(2,activation='softmax')(x)
#defining the final model
model=tf.keras.models.Model(inputs=base_model.input,outputs=preds)
model.summary()
Model: "model"
__________________________________________________________________________________________________
Layer (type) Output Shape Param # Connected to
==================================================================================================
input_2 (InputLayer) [(None, None, None, 0
__________________________________________________________________________________________________
conv1_pad (ZeroPadding2D) (None, None, None, 3 0 input_2[0][0]
__________________________________________________________________________________________________
conv1_conv (Conv2D) (None, None, None, 6 9472 conv1_pad[0][0]
__________________________________________________________________________________________________
conv1_bn (BatchNormalization) (None, None, None, 6 256 conv1_conv[0][0]
__________________________________________________________________________________________________
conv1_relu (Activation) (None, None, None, 6 0 conv1_bn[0][0]
__________________________________________________________________________________________________
pool1_pad (ZeroPadding2D) (None, None, None, 6 0 conv1_relu[0][0]
__________________________________________________________________________________________________
pool1_pool (MaxPooling2D) (None, None, None, 6 0 pool1_pad[0][0]
__________________________________________________________________________________________________
conv2_block1_1_conv (Conv2D) (None, None, None, 6 4160 pool1_pool[0][0]
__________________________________________________________________________________________________
conv2_block1_1_bn (BatchNormali (None, None, None, 6 256 conv2_block1_1_conv[0][0]
__________________________________________________________________________________________________
conv2_block1_1_relu (Activation (None, None, None, 6 0 conv2_block1_1_bn[0][0]
__________________________________________________________________________________________________
conv2_block1_2_conv (Conv2D) (None, None, None, 6 36928 conv2_block1_1_relu[0][0]
__________________________________________________________________________________________________
conv2_block1_2_bn (BatchNormali (None, None, None, 6 256 conv2_block1_2_conv[0][0]
__________________________________________________________________________________________________
conv2_block1_2_relu (Activation (None, None, None, 6 0 conv2_block1_2_bn[0][0]
__________________________________________________________________________________________________
conv2_block1_0_conv (Conv2D) (None, None, None, 2 16640 pool1_pool[0][0]
__________________________________________________________________________________________________
conv2_block1_3_conv (Conv2D) (None, None, None, 2 16640 conv2_block1_2_relu[0][0]
__________________________________________________________________________________________________
conv2_block1_0_bn (BatchNormali (None, None, None, 2 1024 conv2_block1_0_conv[0][0]
__________________________________________________________________________________________________
conv2_block1_3_bn (BatchNormali (None, None, None, 2 1024 conv2_block1_3_conv[0][0]
__________________________________________________________________________________________________
conv2_block1_add (Add) (None, None, None, 2 0 conv2_block1_0_bn[0][0]
conv2_block1_3_bn[0][0]
__________________________________________________________________________________________________
conv2_block1_out (Activation) (None, None, None, 2 0 conv2_block1_add[0][0]
__________________________________________________________________________________________________
conv2_block2_1_conv (Conv2D) (None, None, None, 6 16448 conv2_block1_out[0][0]
__________________________________________________________________________________________________
conv2_block2_1_bn (BatchNormali (None, None, None, 6 256 conv2_block2_1_conv[0][0]
__________________________________________________________________________________________________
conv2_block2_1_relu (Activation (None, None, None, 6 0 conv2_block2_1_bn[0][0]
__________________________________________________________________________________________________
conv2_block2_2_conv (Conv2D) (None, None, None, 6 36928 conv2_block2_1_relu[0][0]
__________________________________________________________________________________________________
conv2_block2_2_bn (BatchNormali (None, None, None, 6 256 conv2_block2_2_conv[0][0]
__________________________________________________________________________________________________
conv2_block2_2_relu (Activation (None, None, None, 6 0 conv2_block2_2_bn[0][0]
__________________________________________________________________________________________________
conv2_block2_3_conv (Conv2D) (None, None, None, 2 16640 conv2_block2_2_relu[0][0]
__________________________________________________________________________________________________
conv2_block2_3_bn (BatchNormali (None, None, None, 2 1024 conv2_block2_3_conv[0][0]
__________________________________________________________________________________________________
conv2_block2_add (Add) (None, None, None, 2 0 conv2_block1_out[0][0]
conv2_block2_3_bn[0][0]
__________________________________________________________________________________________________
conv2_block2_out (Activation) (None, None, None, 2 0 conv2_block2_add[0][0]
__________________________________________________________________________________________________
conv2_block3_1_conv (Conv2D) (None, None, None, 6 16448 conv2_block2_out[0][0]
__________________________________________________________________________________________________
conv2_block3_1_bn (BatchNormali (None, None, None, 6 256 conv2_block3_1_conv[0][0]
__________________________________________________________________________________________________
conv2_block3_1_relu (Activation (None, None, None, 6 0 conv2_block3_1_bn[0][0]
__________________________________________________________________________________________________
conv2_block3_2_conv (Conv2D) (None, None, None, 6 36928 conv2_block3_1_relu[0][0]
__________________________________________________________________________________________________
conv2_block3_2_bn (BatchNormali (None, None, None, 6 256 conv2_block3_2_conv[0][0]
__________________________________________________________________________________________________
conv2_block3_2_relu (Activation (None, None, None, 6 0 conv2_block3_2_bn[0][0]
__________________________________________________________________________________________________
conv2_block3_3_conv (Conv2D) (None, None, None, 2 16640 conv2_block3_2_relu[0][0]
__________________________________________________________________________________________________
conv2_block3_3_bn (BatchNormali (None, None, None, 2 1024 conv2_block3_3_conv[0][0]
__________________________________________________________________________________________________
conv2_block3_add (Add) (None, None, None, 2 0 conv2_block2_out[0][0]
conv2_block3_3_bn[0][0]
__________________________________________________________________________________________________
conv2_block3_out (Activation) (None, None, None, 2 0 conv2_block3_add[0][0]
__________________________________________________________________________________________________
conv3_block1_1_conv (Conv2D) (None, None, None, 1 32896 conv2_block3_out[0][0]
__________________________________________________________________________________________________
conv3_block1_1_bn (BatchNormali (None, None, None, 1 512 conv3_block1_1_conv[0][0]
__________________________________________________________________________________________________
conv3_block1_1_relu (Activation (None, None, None, 1 0 conv3_block1_1_bn[0][0]
__________________________________________________________________________________________________
conv3_block1_2_conv (Conv2D) (None, None, None, 1 147584 conv3_block1_1_relu[0][0]
__________________________________________________________________________________________________
conv3_block1_2_bn (BatchNormali (None, None, None, 1 512 conv3_block1_2_conv[0][0]
__________________________________________________________________________________________________
conv3_block1_2_relu (Activation (None, None, None, 1 0 conv3_block1_2_bn[0][0]
__________________________________________________________________________________________________
conv3_block1_0_conv (Conv2D) (None, None, None, 5 131584 conv2_block3_out[0][0]
__________________________________________________________________________________________________
conv3_block1_3_conv (Conv2D) (None, None, None, 5 66048 conv3_block1_2_relu[0][0]
__________________________________________________________________________________________________
conv3_block1_0_bn (BatchNormali (None, None, None, 5 2048 conv3_block1_0_conv[0][0]
__________________________________________________________________________________________________
conv3_block1_3_bn (BatchNormali (None, None, None, 5 2048 conv3_block1_3_conv[0][0]
__________________________________________________________________________________________________
conv3_block1_add (Add) (None, None, None, 5 0 conv3_block1_0_bn[0][0]
conv3_block1_3_bn[0][0]
__________________________________________________________________________________________________
conv3_block1_out (Activation) (None, None, None, 5 0 conv3_block1_add[0][0]
__________________________________________________________________________________________________
conv3_block2_1_conv (Conv2D) (None, None, None, 1 65664 conv3_block1_out[0][0]
__________________________________________________________________________________________________
conv3_block2_1_bn (BatchNormali (None, None, None, 1 512 conv3_block2_1_conv[0][0]
__________________________________________________________________________________________________
conv3_block2_1_relu (Activation (None, None, None, 1 0 conv3_block2_1_bn[0][0]
__________________________________________________________________________________________________
conv3_block2_2_conv (Conv2D) (None, None, None, 1 147584 conv3_block2_1_relu[0][0]
__________________________________________________________________________________________________
conv3_block2_2_bn (BatchNormali (None, None, None, 1 512 conv3_block2_2_conv[0][0]
__________________________________________________________________________________________________
conv3_block2_2_relu (Activation (None, None, None, 1 0 conv3_block2_2_bn[0][0]
__________________________________________________________________________________________________
conv3_block2_3_conv (Conv2D) (None, None, None, 5 66048 conv3_block2_2_relu[0][0]
__________________________________________________________________________________________________
conv3_block2_3_bn (BatchNormali (None, None, None, 5 2048 conv3_block2_3_conv[0][0]
__________________________________________________________________________________________________
conv3_block2_add (Add) (None, None, None, 5 0 conv3_block1_out[0][0]
conv3_block2_3_bn[0][0]
__________________________________________________________________________________________________
conv3_block2_out (Activation) (None, None, None, 5 0 conv3_block2_add[0][0]
__________________________________________________________________________________________________
conv3_block3_1_conv (Conv2D) (None, None, None, 1 65664 conv3_block2_out[0][0]
__________________________________________________________________________________________________
conv3_block3_1_bn (BatchNormali (None, None, None, 1 512 conv3_block3_1_conv[0][0]
__________________________________________________________________________________________________
conv3_block3_1_relu (Activation (None, None, None, 1 0 conv3_block3_1_bn[0][0]
__________________________________________________________________________________________________
conv3_block3_2_conv (Conv2D) (None, None, None, 1 147584 conv3_block3_1_relu[0][0]
__________________________________________________________________________________________________
conv3_block3_2_bn (BatchNormali (None, None, None, 1 512 conv3_block3_2_conv[0][0]
__________________________________________________________________________________________________
conv3_block3_2_relu (Activation (None, None, None, 1 0 conv3_block3_2_bn[0][0]
__________________________________________________________________________________________________
conv3_block3_3_conv (Conv2D) (None, None, None, 5 66048 conv3_block3_2_relu[0][0]
__________________________________________________________________________________________________
conv3_block3_3_bn (BatchNormali (None, None, None, 5 2048 conv3_block3_3_conv[0][0]
__________________________________________________________________________________________________
conv3_block3_add (Add) (None, None, None, 5 0 conv3_block2_out[0][0]
conv3_block3_3_bn[0][0]
__________________________________________________________________________________________________
conv3_block3_out (Activation) (None, None, None, 5 0 conv3_block3_add[0][0]
__________________________________________________________________________________________________
conv3_block4_1_conv (Conv2D) (None, None, None, 1 65664 conv3_block3_out[0][0]
__________________________________________________________________________________________________
conv3_block4_1_bn (BatchNormali (None, None, None, 1 512 conv3_block4_1_conv[0][0]
__________________________________________________________________________________________________
conv3_block4_1_relu (Activation (None, None, None, 1 0 conv3_block4_1_bn[0][0]
__________________________________________________________________________________________________
conv3_block4_2_conv (Conv2D) (None, None, None, 1 147584 conv3_block4_1_relu[0][0]
__________________________________________________________________________________________________
conv3_block4_2_bn (BatchNormali (None, None, None, 1 512 conv3_block4_2_conv[0][0]
__________________________________________________________________________________________________
conv3_block4_2_relu (Activation (None, None, None, 1 0 conv3_block4_2_bn[0][0]
__________________________________________________________________________________________________
conv3_block4_3_conv (Conv2D) (None, None, None, 5 66048 conv3_block4_2_relu[0][0]
__________________________________________________________________________________________________
conv3_block4_3_bn (BatchNormali (None, None, None, 5 2048 conv3_block4_3_conv[0][0]
__________________________________________________________________________________________________
conv3_block4_add (Add) (None, None, None, 5 0 conv3_block3_out[0][0]
conv3_block4_3_bn[0][0]
__________________________________________________________________________________________________
conv3_block4_out (Activation) (None, None, None, 5 0 conv3_block4_add[0][0]
__________________________________________________________________________________________________
conv4_block1_1_conv (Conv2D) (None, None, None, 2 131328 conv3_block4_out[0][0]
__________________________________________________________________________________________________
conv4_block1_1_bn (BatchNormali (None, None, None, 2 1024 conv4_block1_1_conv[0][0]
__________________________________________________________________________________________________
conv4_block1_1_relu (Activation (None, None, None, 2 0 conv4_block1_1_bn[0][0]
__________________________________________________________________________________________________
conv4_block1_2_conv (Conv2D) (None, None, None, 2 590080 conv4_block1_1_relu[0][0]
__________________________________________________________________________________________________
conv4_block1_2_bn (BatchNormali (None, None, None, 2 1024 conv4_block1_2_conv[0][0]
__________________________________________________________________________________________________
conv4_block1_2_relu (Activation (None, None, None, 2 0 conv4_block1_2_bn[0][0]
__________________________________________________________________________________________________
conv4_block1_0_conv (Conv2D) (None, None, None, 1 525312 conv3_block4_out[0][0]
__________________________________________________________________________________________________
conv4_block1_3_conv (Conv2D) (None, None, None, 1 263168 conv4_block1_2_relu[0][0]
__________________________________________________________________________________________________
conv4_block1_0_bn (BatchNormali (None, None, None, 1 4096 conv4_block1_0_conv[0][0]
__________________________________________________________________________________________________
conv4_block1_3_bn (BatchNormali (None, None, None, 1 4096 conv4_block1_3_conv[0][0]
__________________________________________________________________________________________________
conv4_block1_add (Add) (None, None, None, 1 0 conv4_block1_0_bn[0][0]
conv4_block1_3_bn[0][0]
__________________________________________________________________________________________________
conv4_block1_out (Activation) (None, None, None, 1 0 conv4_block1_add[0][0]
__________________________________________________________________________________________________
conv4_block2_1_conv (Conv2D) (None, None, None, 2 262400 conv4_block1_out[0][0]
__________________________________________________________________________________________________
conv4_block2_1_bn (BatchNormali (None, None, None, 2 1024 conv4_block2_1_conv[0][0]
__________________________________________________________________________________________________
conv4_block2_1_relu (Activation (None, None, None, 2 0 conv4_block2_1_bn[0][0]
__________________________________________________________________________________________________
conv4_block2_2_conv (Conv2D) (None, None, None, 2 590080 conv4_block2_1_relu[0][0]
__________________________________________________________________________________________________
conv4_block2_2_bn (BatchNormali (None, None, None, 2 1024 conv4_block2_2_conv[0][0]
__________________________________________________________________________________________________
conv4_block2_2_relu (Activation (None, None, None, 2 0 conv4_block2_2_bn[0][0]
__________________________________________________________________________________________________
conv4_block2_3_conv (Conv2D) (None, None, None, 1 263168 conv4_block2_2_relu[0][0]
__________________________________________________________________________________________________
conv4_block2_3_bn (BatchNormali (None, None, None, 1 4096 conv4_block2_3_conv[0][0]
__________________________________________________________________________________________________
conv4_block2_add (Add) (None, None, None, 1 0 conv4_block1_out[0][0]
conv4_block2_3_bn[0][0]
__________________________________________________________________________________________________
conv4_block2_out (Activation) (None, None, None, 1 0 conv4_block2_add[0][0]
__________________________________________________________________________________________________
conv4_block3_1_conv (Conv2D) (None, None, None, 2 262400 conv4_block2_out[0][0]
__________________________________________________________________________________________________
conv4_block3_1_bn (BatchNormali (None, None, None, 2 1024 conv4_block3_1_conv[0][0]
__________________________________________________________________________________________________
conv4_block3_1_relu (Activation (None, None, None, 2 0 conv4_block3_1_bn[0][0]
__________________________________________________________________________________________________
conv4_block3_2_conv (Conv2D) (None, None, None, 2 590080 conv4_block3_1_relu[0][0]
__________________________________________________________________________________________________
conv4_block3_2_bn (BatchNormali (None, None, None, 2 1024 conv4_block3_2_conv[0][0]
__________________________________________________________________________________________________
conv4_block3_2_relu (Activation (None, None, None, 2 0 conv4_block3_2_bn[0][0]
__________________________________________________________________________________________________
conv4_block3_3_conv (Conv2D) (None, None, None, 1 263168 conv4_block3_2_relu[0][0]
__________________________________________________________________________________________________
conv4_block3_3_bn (BatchNormali (None, None, None, 1 4096 conv4_block3_3_conv[0][0]
__________________________________________________________________________________________________
conv4_block3_add (Add) (None, None, None, 1 0 conv4_block2_out[0][0]
conv4_block3_3_bn[0][0]
__________________________________________________________________________________________________
conv4_block3_out (Activation) (None, None, None, 1 0 conv4_block3_add[0][0]
__________________________________________________________________________________________________
conv4_block4_1_conv (Conv2D) (None, None, None, 2 262400 conv4_block3_out[0][0]
__________________________________________________________________________________________________
conv4_block4_1_bn (BatchNormali (None, None, None, 2 1024 conv4_block4_1_conv[0][0]
__________________________________________________________________________________________________
conv4_block4_1_relu (Activation (None, None, None, 2 0 conv4_block4_1_bn[0][0]
__________________________________________________________________________________________________
conv4_block4_2_conv (Conv2D) (None, None, None, 2 590080 conv4_block4_1_relu[0][0]
__________________________________________________________________________________________________
conv4_block4_2_bn (BatchNormali (None, None, None, 2 1024 conv4_block4_2_conv[0][0]
__________________________________________________________________________________________________
conv4_block4_2_relu (Activation (None, None, None, 2 0 conv4_block4_2_bn[0][0]
__________________________________________________________________________________________________
conv4_block4_3_conv (Conv2D) (None, None, None, 1 263168 conv4_block4_2_relu[0][0]
__________________________________________________________________________________________________
conv4_block4_3_bn (BatchNormali (None, None, None, 1 4096 conv4_block4_3_conv[0][0]
__________________________________________________________________________________________________
conv4_block4_add (Add) (None, None, None, 1 0 conv4_block3_out[0][0]
conv4_block4_3_bn[0][0]
__________________________________________________________________________________________________
conv4_block4_out (Activation) (None, None, None, 1 0 conv4_block4_add[0][0]
__________________________________________________________________________________________________
conv4_block5_1_conv (Conv2D) (None, None, None, 2 262400 conv4_block4_out[0][0]
__________________________________________________________________________________________________
conv4_block5_1_bn (BatchNormali (None, None, None, 2 1024 conv4_block5_1_conv[0][0]
__________________________________________________________________________________________________
conv4_block5_1_relu (Activation (None, None, None, 2 0 conv4_block5_1_bn[0][0]
__________________________________________________________________________________________________
conv4_block5_2_conv (Conv2D) (None, None, None, 2 590080 conv4_block5_1_relu[0][0]
__________________________________________________________________________________________________
conv4_block5_2_bn (BatchNormali (None, None, None, 2 1024 conv4_block5_2_conv[0][0]
__________________________________________________________________________________________________
conv4_block5_2_relu (Activation (None, None, None, 2 0 conv4_block5_2_bn[0][0]
__________________________________________________________________________________________________
conv4_block5_3_conv (Conv2D) (None, None, None, 1 263168 conv4_block5_2_relu[0][0]
__________________________________________________________________________________________________
conv4_block5_3_bn (BatchNormali (None, None, None, 1 4096 conv4_block5_3_conv[0][0]
__________________________________________________________________________________________________
conv4_block5_add (Add) (None, None, None, 1 0 conv4_block4_out[0][0]
conv4_block5_3_bn[0][0]
__________________________________________________________________________________________________
conv4_block5_out (Activation) (None, None, None, 1 0 conv4_block5_add[0][0]
__________________________________________________________________________________________________
conv4_block6_1_conv (Conv2D) (None, None, None, 2 262400 conv4_block5_out[0][0]
__________________________________________________________________________________________________
conv4_block6_1_bn (BatchNormali (None, None, None, 2 1024 conv4_block6_1_conv[0][0]
__________________________________________________________________________________________________
conv4_block6_1_relu (Activation (None, None, None, 2 0 conv4_block6_1_bn[0][0]
__________________________________________________________________________________________________
conv4_block6_2_conv (Conv2D) (None, None, None, 2 590080 conv4_block6_1_relu[0][0]
__________________________________________________________________________________________________
conv4_block6_2_bn (BatchNormali (None, None, None, 2 1024 conv4_block6_2_conv[0][0]
__________________________________________________________________________________________________
conv4_block6_2_relu (Activation (None, None, None, 2 0 conv4_block6_2_bn[0][0]
__________________________________________________________________________________________________
conv4_block6_3_conv (Conv2D) (None, None, None, 1 263168 conv4_block6_2_relu[0][0]
__________________________________________________________________________________________________
conv4_block6_3_bn (BatchNormali (None, None, None, 1 4096 conv4_block6_3_conv[0][0]
__________________________________________________________________________________________________
conv4_block6_add (Add) (None, None, None, 1 0 conv4_block5_out[0][0]
conv4_block6_3_bn[0][0]
__________________________________________________________________________________________________
conv4_block6_out (Activation) (None, None, None, 1 0 conv4_block6_add[0][0]
__________________________________________________________________________________________________
conv5_block1_1_conv (Conv2D) (None, None, None, 5 524800 conv4_block6_out[0][0]
__________________________________________________________________________________________________
conv5_block1_1_bn (BatchNormali (None, None, None, 5 2048 conv5_block1_1_conv[0][0]
__________________________________________________________________________________________________
conv5_block1_1_relu (Activation (None, None, None, 5 0 conv5_block1_1_bn[0][0]
__________________________________________________________________________________________________
conv5_block1_2_conv (Conv2D) (None, None, None, 5 2359808 conv5_block1_1_relu[0][0]
__________________________________________________________________________________________________
conv5_block1_2_bn (BatchNormali (None, None, None, 5 2048 conv5_block1_2_conv[0][0]
__________________________________________________________________________________________________
conv5_block1_2_relu (Activation (None, None, None, 5 0 conv5_block1_2_bn[0][0]
__________________________________________________________________________________________________
conv5_block1_0_conv (Conv2D) (None, None, None, 2 2099200 conv4_block6_out[0][0]
__________________________________________________________________________________________________
conv5_block1_3_conv (Conv2D) (None, None, None, 2 1050624 conv5_block1_2_relu[0][0]
__________________________________________________________________________________________________
conv5_block1_0_bn (BatchNormali (None, None, None, 2 8192 conv5_block1_0_conv[0][0]
__________________________________________________________________________________________________
conv5_block1_3_bn (BatchNormali (None, None, None, 2 8192 conv5_block1_3_conv[0][0]
__________________________________________________________________________________________________
conv5_block1_add (Add) (None, None, None, 2 0 conv5_block1_0_bn[0][0]
conv5_block1_3_bn[0][0]
__________________________________________________________________________________________________
conv5_block1_out (Activation) (None, None, None, 2 0 conv5_block1_add[0][0]
__________________________________________________________________________________________________
conv5_block2_1_conv (Conv2D) (None, None, None, 5 1049088 conv5_block1_out[0][0]
__________________________________________________________________________________________________
conv5_block2_1_bn (BatchNormali (None, None, None, 5 2048 conv5_block2_1_conv[0][0]
__________________________________________________________________________________________________
conv5_block2_1_relu (Activation (None, None, None, 5 0 conv5_block2_1_bn[0][0]
__________________________________________________________________________________________________
conv5_block2_2_conv (Conv2D) (None, None, None, 5 2359808 conv5_block2_1_relu[0][0]
__________________________________________________________________________________________________
conv5_block2_2_bn (BatchNormali (None, None, None, 5 2048 conv5_block2_2_conv[0][0]
__________________________________________________________________________________________________
conv5_block2_2_relu (Activation (None, None, None, 5 0 conv5_block2_2_bn[0][0]
__________________________________________________________________________________________________
conv5_block2_3_conv (Conv2D) (None, None, None, 2 1050624 conv5_block2_2_relu[0][0]
__________________________________________________________________________________________________
conv5_block2_3_bn (BatchNormali (None, None, None, 2 8192 conv5_block2_3_conv[0][0]
__________________________________________________________________________________________________
conv5_block2_add (Add) (None, None, None, 2 0 conv5_block1_out[0][0]
conv5_block2_3_bn[0][0]
__________________________________________________________________________________________________
conv5_block2_out (Activation) (None, None, None, 2 0 conv5_block2_add[0][0]
__________________________________________________________________________________________________
conv5_block3_1_conv (Conv2D) (None, None, None, 5 1049088 conv5_block2_out[0][0]
__________________________________________________________________________________________________
conv5_block3_1_bn (BatchNormali (None, None, None, 5 2048 conv5_block3_1_conv[0][0]
__________________________________________________________________________________________________
conv5_block3_1_relu (Activation (None, None, None, 5 0 conv5_block3_1_bn[0][0]
__________________________________________________________________________________________________
conv5_block3_2_conv (Conv2D) (None, None, None, 5 2359808 conv5_block3_1_relu[0][0]
__________________________________________________________________________________________________
conv5_block3_2_bn (BatchNormali (None, None, None, 5 2048 conv5_block3_2_conv[0][0]
__________________________________________________________________________________________________
conv5_block3_2_relu (Activation (None, None, None, 5 0 conv5_block3_2_bn[0][0]
__________________________________________________________________________________________________
conv5_block3_3_conv (Conv2D) (None, None, None, 2 1050624 conv5_block3_2_relu[0][0]
__________________________________________________________________________________________________
conv5_block3_3_bn (BatchNormali (None, None, None, 2 8192 conv5_block3_3_conv[0][0]
__________________________________________________________________________________________________
conv5_block3_add (Add) (None, None, None, 2 0 conv5_block2_out[0][0]
conv5_block3_3_bn[0][0]
__________________________________________________________________________________________________
conv5_block3_out (Activation) (None, None, None, 2 0 conv5_block3_add[0][0]
__________________________________________________________________________________________________
global_average_pooling2d (Globa (None, 2048) 0 conv5_block3_out[0][0]
__________________________________________________________________________________________________
dense (Dense) (None, 256) 524544 global_average_pooling2d[0][0]
__________________________________________________________________________________________________
dense_1 (Dense) (None, 128) 32896 dense[0][0]
__________________________________________________________________________________________________
dense_2 (Dense) (None, 64) 8256 dense_1[0][0]
__________________________________________________________________________________________________
dropout (Dropout) (None, 64) 0 dense_2[0][0]
__________________________________________________________________________________________________
dense_3 (Dense) (None, 2) 130 dropout[0][0]
==================================================================================================
Total params: 24,153,538
Trainable params: 24,100,418
Non-trainable params: 53,120
__________________________________________________________________________________________________
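# freeze the pre-trained ResNet50 layers; only the Dense layers added above keep learning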
for l in model.layers:
    if l.name.split('_')[0] != 'dense':
        l.trainable=False
    else:
        l.trainable=True
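# A quick sanity check (illustrative, not part of the original run) would be to count how many
# layers remain trainable after the loop above:
# print(sum(l.trainable for l in model.layers), 'of', len(model.layers), 'layers are trainable')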
#initialize the generator that will load the training samples, applying InceptionV3's preprocessing function
train_data_gen = tf.keras.preprocessing.image.ImageDataGenerator(preprocessing_function=tf.keras.applications.inception_v3.preprocess_input)
#initialize the generator that will load the test samples, applying InceptionV3's preprocessing function
test_data_gen = tf.keras.preprocessing.image.ImageDataGenerator(preprocessing_function=tf.keras.applications.inception_v3.preprocess_input)
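# Note: the backbone is ResNet50, yet the generators above apply InceptionV3 preprocessing.
# A hedged alternative (not what produced the results below) is ResNet50's own function:
# train_data_gen = tf.keras.preprocessing.image.ImageDataGenerator(
#     preprocessing_function=tf.keras.applications.resnet50.preprocess_input)
# test_data_gen = tf.keras.preprocessing.image.ImageDataGenerator(
#     preprocessing_function=tf.keras.applications.resnet50.preprocess_input)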
#defining the source of the training images. This directory must contain one sub-folder per class, named exactly after the class [TRAIN]
train_generator = train_data_gen.flow_from_directory('archive/dataset/training_set',
                                                     target_size=(28, 28),
                                                     batch_size=batch_size,
                                                     shuffle=True)
#defining the source of the test images. This directory must contain one sub-folder per class, named exactly after the class [TEST]
test_generator = test_data_gen.flow_from_directory('archive/dataset/test_set',
                                                   target_size=(28, 28),
                                                   batch_size=16,
                                                   shuffle=True)
Found 8000 images belonging to 2 classes.
Found 2000 images belonging to 2 classes.
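# Note: 28x28 is far below ResNet50's usual input size; the network downsamples by a factor
# of 32, so the final feature map collapses to 1x1 and accuracy suffers. A hedged variant
# (assumed, not what produced the logs below) would use the native 224x224 resolution:
# train_generator = train_data_gen.flow_from_directory('archive/dataset/training_set',
#                                                      target_size=(224, 224),
#                                                      batch_size=batch_size,
#                                                      shuffle=True)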
#defining the optimizer, loss function and metric
model.compile(optimizer='Adam',loss='categorical_crossentropy',metrics=['accuracy'])
#defining the number of steps per epoch
step_size_train=train_generator.n//train_generator.batch_size
step_size_test = test_generator.n//test_generator.batch_size
#training and validating the model
history = model.fit_generator(generator=train_generator,
                              steps_per_epoch=step_size_train,
                              epochs=epochs,
                              validation_data=test_generator,
                              validation_steps=step_size_test)
/shared-libs/python3.7/py/lib/python3.7/site-packages/tensorflow/python/keras/engine/training.py:1844: UserWarning: `Model.fit_generator` is deprecated and will be removed in a future version. Please use `Model.fit`, which supports generators.
warnings.warn('`Model.fit_generator` is deprecated and '
Epoch 1/25
250/250 [==============================] - 60s 228ms/step - loss: 0.7130 - accuracy: 0.5081 - val_loss: 0.6883 - val_accuracy: 0.5825
Epoch 2/25
250/250 [==============================] - 53s 213ms/step - loss: 0.6883 - accuracy: 0.5402 - val_loss: 0.6731 - val_accuracy: 0.5970
Epoch 3/25
250/250 [==============================] - 53s 212ms/step - loss: 0.6734 - accuracy: 0.5918 - val_loss: 0.6585 - val_accuracy: 0.6095
Epoch 4/25
250/250 [==============================] - 53s 213ms/step - loss: 0.6618 - accuracy: 0.6065 - val_loss: 0.6440 - val_accuracy: 0.6340
Epoch 5/25
250/250 [==============================] - 53s 213ms/step - loss: 0.6431 - accuracy: 0.6233 - val_loss: 0.6421 - val_accuracy: 0.6410
Epoch 6/25
250/250 [==============================] - 54s 215ms/step - loss: 0.6436 - accuracy: 0.6323 - val_loss: 0.6413 - val_accuracy: 0.6380
Epoch 7/25
250/250 [==============================] - 53s 213ms/step - loss: 0.6399 - accuracy: 0.6482 - val_loss: 0.6509 - val_accuracy: 0.6060
Epoch 8/25
250/250 [==============================] - 54s 217ms/step - loss: 0.6307 - accuracy: 0.6444 - val_loss: 0.6377 - val_accuracy: 0.6480
Epoch 9/25
250/250 [==============================] - 54s 216ms/step - loss: 0.6311 - accuracy: 0.6470 - val_loss: 0.6336 - val_accuracy: 0.6515
Epoch 10/25
250/250 [==============================] - 54s 215ms/step - loss: 0.6331 - accuracy: 0.6389 - val_loss: 0.6317 - val_accuracy: 0.6465
Epoch 11/25
250/250 [==============================] - 53s 212ms/step - loss: 0.6305 - accuracy: 0.6463 - val_loss: 0.6854 - val_accuracy: 0.5965
Epoch 12/25
250/250 [==============================] - 53s 213ms/step - loss: 0.6312 - accuracy: 0.6438 - val_loss: 0.6419 - val_accuracy: 0.6280
Epoch 13/25
250/250 [==============================] - 54s 216ms/step - loss: 0.6209 - accuracy: 0.6639 - val_loss: 0.6311 - val_accuracy: 0.6435
Epoch 14/25
250/250 [==============================] - 53s 211ms/step - loss: 0.6275 - accuracy: 0.6582 - val_loss: 0.6380 - val_accuracy: 0.6400
Epoch 15/25
250/250 [==============================] - 54s 215ms/step - loss: 0.6181 - accuracy: 0.6632 - val_loss: 0.6268 - val_accuracy: 0.6515
Epoch 16/25
250/250 [==============================] - 53s 214ms/step - loss: 0.6194 - accuracy: 0.6610 - val_loss: 0.6575 - val_accuracy: 0.6160
Epoch 17/25
250/250 [==============================] - 53s 212ms/step - loss: 0.6250 - accuracy: 0.6533 - val_loss: 0.6255 - val_accuracy: 0.6610
Epoch 18/25
250/250 [==============================] - 68s 274ms/step - loss: 0.6162 - accuracy: 0.6614 - val_loss: 0.6349 - val_accuracy: 0.6415
Epoch 19/25
250/250 [==============================] - 54s 214ms/step - loss: 0.6119 - accuracy: 0.6710 - val_loss: 0.6266 - val_accuracy: 0.6595
Epoch 20/25
250/250 [==============================] - 53s 214ms/step - loss: 0.6086 - accuracy: 0.6784 - val_loss: 0.6344 - val_accuracy: 0.6400
Epoch 21/25
250/250 [==============================] - 53s 214ms/step - loss: 0.6135 - accuracy: 0.6533 - val_loss: 0.6484 - val_accuracy: 0.6400
Epoch 22/25
250/250 [==============================] - 54s 214ms/step - loss: 0.6103 - accuracy: 0.6662 - val_loss: 0.6307 - val_accuracy: 0.6545
Epoch 23/25
250/250 [==============================] - 53s 214ms/step - loss: 0.6005 - accuracy: 0.6719 - val_loss: 0.6474 - val_accuracy: 0.6315
Epoch 24/25
250/250 [==============================] - 53s 213ms/step - loss: 0.6058 - accuracy: 0.6707 - val_loss: 0.6245 - val_accuracy: 0.6520
Epoch 25/25
250/250 [==============================] - 53s 214ms/step - loss: 0.6054 - accuracy: 0.6649 - val_loss: 0.6357 - val_accuracy: 0.6520
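# As the deprecation warning above notes, Model.fit accepts generators directly in TF 2.x;
# an equivalent (assumed) call would be:
# history = model.fit(train_generator,
#                     steps_per_epoch=step_size_train,
#                     epochs=epochs,
#                     validation_data=test_generator,
#                     validation_steps=step_size_test)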
results = model.evaluate_generator(test_generator, 15)
print('Acc: %.3f, Loss: %.3f' % (results[1], results[0]))
/shared-libs/python3.7/py/lib/python3.7/site-packages/tensorflow/python/keras/engine/training.py:1877: UserWarning: `Model.evaluate_generator` is deprecated and will be removed in a future version. Please use `Model.evaluate`, which supports generators.
warnings.warn('`Model.evaluate_generator` is deprecated and '
Acc: 0.663, Loss: 0.676
plt.title('Loss')
plt.plot(history.history['loss'], label='train')
plt.plot(history.history['val_loss'], label='test')
plt.legend()
plt.show()
# Creating plots to visualise the results
print()
print()
plt.title('Accuracy')
plt.plot(history.history['accuracy'], label='train')
plt.plot(history.history['val_accuracy'], label='test')
plt.legend()
plt.show()
predictions = model.predict_generator(test_generator)
y_pred = (predictions > 0.5)
print(y_pred.argmax(axis=1))
classification = classification_report(test_generator.classes, y_pred.argmax(axis=1), target_names=['cats', 'dogs'])
print('----------------CLASSIFICATION--------------')
print(classification)
print('----------------Confusion Matrix--------------')
matrix = confusion_matrix(test_generator.classes, y_pred.argmax(axis=1))
df_cm = pd.DataFrame(matrix, index=[i for i in range(2)],
                     columns=[i for i in range(2)])
plt.figure(figsize = (10,7))
sn.heatmap(df_cm, annot=True, linewidths=2.5)
/shared-libs/python3.7/py/lib/python3.7/site-packages/tensorflow/python/keras/engine/training.py:1905: UserWarning: `Model.predict_generator` is deprecated and will be removed in a future version. Please use `Model.predict`, which supports generators.
warnings.warn('`Model.predict_generator` is deprecated and '
[0 1 1 ... 1 0 1]
----------------CLASSIFICATION--------------
              precision    recall  f1-score   support

        cats       0.50      0.41      0.45      1000
        dogs       0.50      0.60      0.54      1000

    accuracy                           0.50      2000
   macro avg       0.50      0.50      0.50      2000
weighted avg       0.50      0.50      0.50      2000
----------------Confusion Matrix--------------
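# Note on the 0.50 scores above: test_generator was built with shuffle=True, so
# test_generator.classes (labels in directory order) does not line up with the order in which
# the predictions were produced, which makes the report and the confusion matrix essentially
# random. A hedged fix (assumed, not what generated the output above) is an unshuffled
# generator together with the non-deprecated Model.predict:
# eval_generator = test_data_gen.flow_from_directory('archive/dataset/test_set',
#                                                    target_size=(28, 28),
#                                                    batch_size=16,
#                                                    shuffle=False)
# predictions = model.predict(eval_generator)
# y_pred = predictions.argmax(axis=1)
# print(classification_report(eval_generator.classes, y_pred, target_names=['cats', 'dogs']))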