import tensorflow as tf
(x_train, y_train), (x_test, y_test) = tf.keras.datasets.fashion_mnist.load_data()
import matplotlib.pyplot as plt
plt.imshow(x_train[0], cmap='gray')  # show the first training image
plt.show()
import pandas as pd
df = pd.DataFrame(y_train, columns=["a"])
len(df["a"].unique())  # number of distinct labels: 10
from tensorflow.keras.layers import Dense
layer1 = Dense(units=300, activation='sigmoid')
layer2 = Dense(units=100, activation='sigmoid')
layer3 = Dense(units=10, activation='softmax')  # 10 output units, one per class
from tensorflow.keras import Sequential
model = Sequential()
model.add(layer1)
model.add(layer2)
model.add(layer3)
model.compile(optimizer="sgd",loss="sparse_categorical_crossentropy")
from sklearn.preprocessing import MinMaxScaler
import numpy as np
# flatten each 28x28 image into a 784-dimensional vector
vec_x_train = x_train.reshape(len(x_train), -1)
#scaler = MinMaxScaler()
vec_x_train.shape
history = model.fit(vec_x_train, y_train, epochs=10)
plt.plot(history.history['loss'])
plt.show()
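# Hedged sketch: evaluate the sigmoid model on the held-out test split.
# vec_x_test is a name introduced here (it is not defined anywhere above); it
# mirrors the flattening applied to the training images.
vec_x_test = x_test.reshape(len(x_test), -1)
test_loss = model.evaluate(vec_x_test, y_test)
print(test_loss)  # only the loss is returned, since compile() requested no metrics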
model1 = Sequential()  # same 300/100/10 architecture, but with ReLU hidden layers
model1.add(Dense(units=300,activation='relu'))
model1.add(Dense(units=100,activation='relu'))
model1.add(Dense(units=10,activation='softmax'))
model1.compile(optimizer="sgd",loss="sparse_categorical_crossentropy")
history1 = model1.fit(vec_x_train, y_train, epochs=10)
plt.plot(history1.history['loss'])
plt.show()
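# Optional sketch: overlay the sigmoid and ReLU loss curves from the two runs
# above on one set of axes, to make the comparison easier to read.
plt.plot(history.history['loss'], label='sigmoid (model)')
plt.plot(history1.history['loss'], label='relu (model1)')
plt.legend()
plt.show()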
lecun_normal = tf.keras.initializers.LecunNormal()  # instantiate the initializer, not just the class
lecun_normal
model2 = Sequential()  # SELU activations paired with LeCun-normal initialization
model2.add(Dense(units=300,kernel_initializer=lecun_normal,activation='selu'))
model2.add(Dense(units=100,kernel_initializer=lecun_normal,activation='selu'))
model2.add(Dense(units=10,activation='softmax'))
model2.compile(optimizer="sgd",loss= "sparse_categorical_crossentropy")
history2 = model2.fit(vec_x_train, y_train, epochs=10)
plt.plot(history2.history['loss'])
plt.show()
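# Side note, as a sketch: Keras also accepts the initializer by its string name,
# so the SELU layers above could be written without building LecunNormal by hand.
# alt_layer is a hypothetical name introduced only for this illustration.
alt_layer = Dense(units=300, kernel_initializer='lecun_normal', activation='selu')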
model3 = Sequential()  # wider sigmoid network: 800 and 500 hidden units
model3.add(Dense(units=800,activation='sigmoid'))
model3.add(Dense(units=500,activation='sigmoid'))
model3.add(Dense(units=10,activation='softmax'))
model3.compile(optimizer="sgd",loss="sparse_categorical_crossentropy")
history3 = model3.fit(vec_x_train, y_train, epochs=10)
plt.plot(history3.history['loss'])
plt.show()
from tensorflow.keras import initializers
initializer = tf.keras.initializers.Zeros()
model4 = Sequential()
model4.add(Dense(units=800,activation='sigmoid',kernel_initializer=initializer))
model4.add(Dense(units=500,activation='sigmoid'))
model4.add(Dense(units=10,activation='softmax'))
model4.compile(optimizer="sgd",loss="sparse_categorical_crossentropy")
history4 = model4.fit(vec_x_train, y_train, epochs=10)
plt.plot(history4.history['loss'])
plt.show()
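# Sketch: fit() has built the layers by now, so summary() reports the parameter
# counts of the wider network; the first Dense layer alone has
# 784*800 + 800 = 628,000 parameters.
model4.summary()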
from tensorflow.keras.optimizers import SGD
initializer = tf.keras.initializers.Zeros()
model5 = Sequential()  # same zero-initialized wide network, now with an explicit learning rate
model5.add(Dense(units=800,activation='sigmoid',kernel_initializer=initializer))
model5.add(Dense(units=500,activation='sigmoid'))
model5.add(Dense(units=10,activation='softmax'))
opt = SGD(learning_rate=0.1)  # 'lr' is the deprecated argument name
model5.compile(optimizer=opt,loss="sparse_categorical_crossentropy")
history5 = model5.fit(vec_x_train, y_train, epochs=10)
plt.plot(history5.history['loss'])
plt.show()
initializer = tf.keras.initializers.Zeros()
model6 = Sequential()  # same network, learning rate 0.01
model6.add(Dense(units=800,activation='sigmoid',kernel_initializer=initializer))
model6.add(Dense(units=500,activation='sigmoid'))
model6.add(Dense(units=10,activation='softmax'))
opt = SGD(learning_rate=0.01)
model6.compile(optimizer=opt,loss="sparse_categorical_crossentropy")
history6 = model6.fit(vec_x_train, y_train, epochs=10)
plt.plot(history6.history['loss'])
plt.show()
initializer = tf.keras.initializers.Zeros()
model7 = Sequential()  # repeat of the lr=0.1 run
model7.add(Dense(units=800,activation='sigmoid',kernel_initializer=initializer))
model7.add(Dense(units=500,activation='sigmoid'))
model7.add(Dense(units=10,activation='softmax'))
opt = SGD(learning_rate=0.1)
model7.compile(optimizer=opt,loss="sparse_categorical_crossentropy")
history7 = model7.fit(vec_x_train, y_train, epochs=10)
plt.plot(history7.history['loss'])
plt.show()
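# Optional sketch: compare the three constant-learning-rate runs above
# (lr=0.1, lr=0.01, and the repeated lr=0.1) on a single plot.
plt.plot(history5.history['loss'], label='lr=0.1 (model5)')
plt.plot(history6.history['loss'], label='lr=0.01 (model6)')
plt.plot(history7.history['loss'], label='lr=0.1 repeat (model7)')
plt.legend()
plt.show()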
initializer = tf.keras.initializers.Zeros()
model8 = Sequential()  # very large starting learning rate, reduced on plateau by the callback below
model8.add(Dense(units=800,activation='sigmoid',kernel_initializer=initializer))
model8.add(Dense(units=500,activation='sigmoid'))
model8.add(Dense(units=10,activation='softmax'))
opt = SGD(learning_rate=9)  # very large starting rate
model8.compile(optimizer=opt,loss="sparse_categorical_crossentropy")
reduce_lr = tf.keras.callbacks.ReduceLROnPlateau(monitor='loss', min_lr=0.001)  # shrink the learning rate when the training loss plateaus
history8 = model8.fit(vec_x_train, y_train, epochs=10, callbacks=[reduce_lr])
plt.plot(history8.history['loss'])
plt.show()
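# Sketch: ReduceLROnPlateau typically records the learning rate it applied each
# epoch in the history logs (the key is 'lr' in older Keras versions and
# 'learning_rate' in newer ones), which shows when it backed off from the very
# large starting rate. The key lookup below is an assumption about that naming.
lr_key = 'lr' if 'lr' in history8.history else 'learning_rate'
plt.plot(history8.history[lr_key])
plt.show()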