#imports
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Conv2D, MaxPooling2D, Flatten, Dense, Dropout
from tensorflow.keras.backend import clear_session
!pip install optuna
import optuna
#Add the model 
#Add the model
def create_model(trial):
    """Build a binary-classification CNN whose architecture is sampled from *trial*.

    Samples the number of conv layers, activation, two dropout rates, the
    dense-layer width, and Conv2D filter/kernel/stride settings from the
    Optuna trial, then assembles a Sequential model ending in a single
    sigmoid unit.

    Args:
        trial: an ``optuna.Trial`` used to suggest hyperparameters.

    Returns:
        An uncompiled ``tensorflow.keras.Sequential`` model.
    """
    num_layers = trial.suggest_int("num_layers", 1, 5)
    activation = trial.suggest_categorical("activation", ["relu", "linear", "selu", "elu", "exponential"])
    # suggest_float replaces the deprecated suggest_uniform /
    # suggest_discrete_uniform (both removed in Optuna 3.x).  Parameter names
    # are kept identical so existing study records stay compatible.
    dropout_rate0 = trial.suggest_float("dropout_rate0", 0.0, 0.5)
    dropout_rate1 = trial.suggest_float("dropout_rate1", 0.0, 0.5)
    mid_units = int(trial.suggest_float("mid_units", 100, 300, step=100))
    filters = trial.suggest_categorical("filters", [32, 64])
    # NOTE(review): both choices are 3, so this parameter is effectively fixed;
    # likely intended as [3, 5] — confirm before widening the search space.
    kernel_size = trial.suggest_categorical("kernel_size", [3, 3])
    strides = trial.suggest_categorical("strides", [1, 2])

    classifier = Sequential()
    # Step 1 - convolution layers.  Only the first layer declares input_shape.
    classifier.add(
        Conv2D(
            filters=filters,
            kernel_size=kernel_size,
            strides=strides,
            activation=activation,
            input_shape=(64, 64, 3),
        )
    )
    classifier.add(MaxPooling2D(pool_size=(2, 2)))
    # Additional conv layers share the same sampled filter/kernel/stride/activation.
    for _ in range(1, num_layers):
        classifier.add(
            Conv2D(
                filters=filters,
                kernel_size=kernel_size,
                strides=strides,
                activation=activation,
            )
        )
    classifier.add(MaxPooling2D(pool_size=(2, 2)))
    classifier.add(Dropout(dropout_rate0))
    classifier.add(Flatten())
    classifier.add(Dense(units=mid_units, activation=activation))
    classifier.add(Dropout(dropout_rate1))
    # Single sigmoid output for binary (cat vs dog) classification.
    classifier.add(Dense(units=1, activation='sigmoid'))
    return classifier
#image augumentation
#image augmentation
# Import from tensorflow.keras for consistency with the model imports above;
# mixing the standalone `keras` package with `tensorflow.keras` risks
# incompatible versions of the same classes.
from tensorflow.keras.preprocessing.image import ImageDataGenerator

#Data Preparation
# Training images: rescale pixel values to [0, 1] plus light augmentation.
train_datagen = ImageDataGenerator(rescale = 1./255,
                                   shear_range = 0.2,
                                   zoom_range = 0.2,
                                   horizontal_flip = True)
# Validation images: rescaling only — no augmentation on held-out data.
test_datagen = ImageDataGenerator(rescale = 1./255)
# 64x64 matches the model's input_shape; class_mode='binary' yields 0/1 labels.
training_set = train_datagen.flow_from_directory('cats_and_dogs_filtered/train',
                                                 target_size = (64, 64),
                                                 batch_size = 32,
                                                 class_mode = 'binary')
test_set = test_datagen.flow_from_directory('cats_and_dogs_filtered/validation',
                                            target_size = (64, 64),
                                            batch_size = 32,
                                            class_mode = 'binary')
#create objective 
def objective(trial):
    """Optuna objective: train a sampled CNN for one epoch, return 1 - val accuracy.

    Args:
        trial: an ``optuna.Trial``; supplies the optimizer choice and, via
            ``create_model``, the architecture hyperparameters.

    Returns:
        ``1 - val_accuracy`` of the last epoch, so Optuna's default
        minimization drives validation accuracy up.
    """
    # Free graph/memory state left over from the previous trial's model.
    clear_session()
    optimizer = trial.suggest_categorical("optimizer", ["sgd", "adam", "rmsprop", "adadelta", "adagrad", "adamax"])
    classifier = create_model(trial)
    classifier.compile(optimizer = optimizer, loss = 'binary_crossentropy', metrics = ['accuracy'])
    # Model.fit accepts generators directly; fit_generator was deprecated and
    # removed in TF 2.x.  https://keras.io/api/models/model_training_apis/
    history = classifier.fit(training_set,
                             steps_per_epoch = 30, # num_samples // batch_size
                             epochs = 1, # entire iteration over dataset
                             validation_data = test_set,
                             validation_steps = 20)
    # Tune against held-out data: selecting on *training* accuracy (as the
    # original did) rewards overfitting rather than generalization.
    return 1 - history.history["val_accuracy"][-1]
#perform study
#perform study
# Default direction is "minimize", matching the 1 - accuracy objective.
study = optuna.create_study()
study.optimize(objective, n_trials=5)

# Report the results.  (The original report lines carried stray leading
# indentation at module level, which is an IndentationError in plain Python.)
print("Number of finished trials: {}".format(len(study.trials)))
print("Best trial:")
trial = study.best_trial
print("  Value: {}".format(trial.value))
print("  Params: ")
for key, value in trial.params.items():
    print("    {}: {}".format(key, value))