from tensorflow.keras.datasets import cifar10
import matplotlib.pyplot as plt
import tensorflow as tf
import numpy as np

# Load CIFAR-10: 50,000 training and 10,000 test images of shape 32x32x3, with integer labels 0-9.
(x_train, y_train), (x_test, y_test) = cifar10.load_data()

# Display an arbitrary training image to sanity-check the data.
plt.imshow(x_train[462])
# Standardize pixel values using the training-set mean and standard deviation.
# The same training statistics are applied to the test set to avoid data leakage.
x_train_n = (x_train - np.mean(x_train)) / np.std(x_train)
x_test_n = (x_test - np.mean(x_train)) / np.std(x_train)

# Note: imshow clips standardized float values to the [0, 1] range, so this rendering looks distorted.
plt.imshow(x_train_n[462])
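# Optional sketch (not in the original): to view the standardized image without clipping,
# rescale that single image back to [0, 1] before calling imshow.
img = x_train_n[462]
plt.imshow((img - img.min()) / (img.max() - img.min()))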
from tensorflow.keras.layers import Dense, Conv2D, MaxPooling2D, Flatten
from tensorflow.keras import Sequential
import pandas as pd

# Confirm the number of distinct labels: CIFAR-10 has 10 classes.
df = pd.DataFrame(y_train, columns=['train'])
df['train'].unique().shape
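# Optional check (not in the original): label counts and human-readable class names.
# The name list below follows the standard CIFAR-10 label ordering; expect 5,000 images per class.
class_names = ['airplane', 'automobile', 'bird', 'cat', 'deer',
               'dog', 'frog', 'horse', 'ship', 'truck']
labels, counts = np.unique(y_train, return_counts=True)
print(dict(zip([class_names[i] for i in labels], counts)))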
# Baseline model: a single convolutional layer followed by a fully connected classifier.
model = Sequential()
model.add(Conv2D(filters=32, kernel_size=(3, 3), activation='relu', input_shape=[32, 32, 3]))
model.add(Flatten())
model.add(Dense(units=300, activation='relu'))
# The final layer outputs raw logits (no softmax), so the loss is built with from_logits=True.
model.add(Dense(units=10))

model.compile(optimizer='sgd',
              loss=tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True),
              metrics=['accuracy'])

x_train_n.shape
model.summary()

model.fit(x_train_n, y_train, epochs=10, batch_size=32, validation_data=(x_test_n, y_test))
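# Optional follow-up sketch (not in the original): report test-set loss and accuracy after training.
test_loss, test_acc = model.evaluate(x_test_n, y_test, verbose=0)
print(f'baseline test accuracy: {test_acc:.3f}')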
# model4: a deeper variant with two convolutional layers and two hidden dense layers.
model4 = Sequential()
model4.add(Conv2D(filters=32, kernel_size=(3, 3), activation='relu', input_shape=[32, 32, 3]))
model4.add(Conv2D(filters=32, kernel_size=(3, 3), activation='relu'))
model4.add(Flatten())
model4.add(Dense(units=300, activation='relu'))
model4.add(Dense(units=300, activation='relu'))
model4.add(Dense(units=10))

model4.compile(optimizer='sgd',
               loss=tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True),
               metrics=['accuracy'])
model4.summary()

model4.fit(x_train_n, y_train, epochs=10, batch_size=32, validation_data=(x_test_n, y_test))
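# Optional comparison (not in the original): evaluate both trained models on the same
# standardized test set to see whether the extra depth helps.
for name, m in [('baseline', model), ('deeper', model4)]:
    loss, acc = m.evaluate(x_test_n, y_test, verbose=0)
    print(f'{name}: test accuracy = {acc:.3f}')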
# Re-print the baseline model's summary (unchanged by training) for comparison with model4.
model.summary()
# model2: same two-conv structure as model4, but with 2x2 max pooling after the first
# convolution, which shrinks the feature maps and the parameter count of the Flatten/Dense part.
model2 = Sequential()
model2.add(Conv2D(filters=32, kernel_size=(3, 3), activation='relu', input_shape=[32, 32, 3]))
model2.add(MaxPooling2D(pool_size=(2, 2)))
model2.add(Conv2D(filters=32, kernel_size=(3, 3), activation='relu'))
model2.add(Flatten())
model2.add(Dense(units=300, activation='relu'))
model2.add(Dense(units=300, activation='relu'))
model2.add(Dense(units=10))

# model2 is only summarized here (not compiled or trained); the summary shows the effect of pooling.
model2.summary()
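# Optional sketch (not in the original): compare total parameter counts of the architectures
# defined so far using Keras' count_params().
for name, m in [('no pooling (model4)', model4), ('2x2 pooling (model2)', model2)]:
    print(f'{name}: {m.count_params():,} parameters')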
# model3: identical to model2 except for a more aggressive 4x4 pooling window, which
# reduces the feature maps (and the downstream dense parameters) even further.
model3 = Sequential()
model3.add(Conv2D(filters=32, kernel_size=(3, 3), activation='relu', input_shape=[32, 32, 3]))
model3.add(MaxPooling2D(pool_size=(4, 4)))
model3.add(Conv2D(filters=32, kernel_size=(3, 3), activation='relu'))
model3.add(Flatten())
model3.add(Dense(units=300, activation='relu'))
model3.add(Dense(units=300, activation='relu'))
model3.add(Dense(units=10))
model3.summary()

model3.compile(optimizer='sgd',
               loss=tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True),
               metrics=['accuracy'])
model3.fit(x_train_n, y_train, epochs=10, batch_size=32, validation_data=(x_test_n, y_test))
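# Minimal prediction sketch (not in the original): because the models output logits,
# apply softmax before reading off class probabilities. class_names is assumed to come
# from the optional check above; define it first if that cell was skipped.
logits = model3.predict(x_test_n[:1])           # shape (1, 10), raw logits
probs = tf.nn.softmax(logits, axis=-1).numpy()  # convert logits to probabilities
pred = int(np.argmax(probs, axis=-1)[0])
print('predicted:', class_names[pred], '| true:', class_names[int(y_test[0])])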