import pandas as pd
import numpy as np
# Make numpy values easier to read.
np.set_printoptions(precision=3, suppress=True)
import tensorflow as tf
from tensorflow.keras import layers
abalone_train = pd.read_csv(
    "https://storage.googleapis.com/download.tensorflow.org/data/abalone_train.csv",
    names=["Length", "Diameter", "Height", "Whole weight", "Shucked weight",
           "Viscera weight", "Shell weight", "Age"])
abalone_train.head()
   Length  Diameter  ...
0   0.435     0.335  ...
1   0.585     0.450  ...
2   0.655     0.510  ...
3   0.545     0.425  ...
4   0.545     0.420  ...
abalone_features = abalone_train.copy()
abalone_labels = abalone_features.pop('Age')
abalone_features = np.array(abalone_features)
abalone_features
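As a quick sanity check before training, the shapes of the feature matrix and the label vector can be confirmed (a minimal sketch; the exact row count depends on the downloaded CSV):
# After popping 'Age' there should be 7 numeric feature columns per example.
print(abalone_features.shape)  # (num_examples, 7)
print(abalone_labels.shape)    # (num_examples,)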
abalone_model = tf.keras.Sequential()
abalone_model.add(layers.Dense(64))
abalone_model.add(layers.Dense(32))
abalone_model.add(layers.Dense(1))
abalone_model.compile(optimizer=tf.optimizers.Adam(), loss=tf.losses.MeanSquaredError())
abalone_model.fit(abalone_features, abalone_labels, epochs=100)
Epoch 1/100
104/104 [==============================] - 1s 3ms/step - loss: 62.5985
Epoch 2/100
104/104 [==============================] - 0s 2ms/step - loss: 8.7691
Epoch 3/100
104/104 [==============================] - 0s 2ms/step - loss: 7.0980
Epoch 4/100
104/104 [==============================] - 0s 4ms/step - loss: 6.4565
Epoch 5/100
104/104 [==============================] - 0s 2ms/step - loss: 6.1961
...
Epoch 100/100
104/104 [==============================] - 0s 5ms/step - loss: 4.8190
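If a held-out split is available, the same preprocessing can be reused to score the trained model. The sketch below assumes a companion abalone_test.csv sits alongside the training file; that URL is an assumption, not something shown above.
# Assumed URL: a test split stored next to the training CSV.
abalone_eval = pd.read_csv(
    "https://storage.googleapis.com/download.tensorflow.org/data/abalone_test.csv",
    names=["Length", "Diameter", "Height", "Whole weight", "Shucked weight",
           "Viscera weight", "Shell weight", "Age"])
eval_labels = abalone_eval.pop('Age')
eval_features = np.array(abalone_eval)
# evaluate() reports the mean squared error on the held-out examples.
abalone_model.evaluate(eval_features, eval_labels)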
abalone_model.summary()
Model: "sequential"
_________________________________________________________________
Layer (type)                 Output Shape              Param #
=================================================================
dense (Dense)                (None, 64)                512
_________________________________________________________________
dense_1 (Dense)              (None, 32)                2080
_________________________________________________________________
dense_2 (Dense)              (None, 1)                 33
=================================================================
Total params: 2,625
Trainable params: 2,625
Non-trainable params: 0
_________________________________________________________________
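The parameter counts follow directly from the layer sizes: the first Dense layer sees 7 input features, so it holds 7 × 64 weights + 64 biases = 512 parameters; the second holds 64 × 32 + 32 = 2,080; the output layer holds 32 × 1 + 1 = 33, giving 2,625 in total.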
abalone_test = np.array([[0.545,0.130,0.120,0.3515,0.1145,0.0665,0.1600]])
abalone_test
def predict_age_test(in_data):
    # Return the model's prediction, or None if no input was given.
    abalone_age = None
    if in_data is not None:
        abalone_age = abalone_model.predict(in_data)
    else:
        print("The input is empty...")
    return abalone_age
print("The age is " + str(predict_age_test(abalone_test)[[0]])+ "years")
The age is [[8.158]]years
# Install Gradio.
# On a local machine this only needs to be done once;
# on Colab, rerun it every time the runtime is reset or the notebook is reopened.
! pip install gradio
import gradio as gr
# A wrapper function that builds the model input from the slider values and formats the prediction.
def predict_age(f1, f2, f3, f4, f5, f6, f7):
    input_feature = np.array([[f1, f2, f3, f4, f5, f6, f7]])
    abalone_age = abalone_model.predict(input_feature)
    return "The approx. age of the abalone is " + str(abalone_age[0][0])
#Build input slider options
inputs_app = [gr.inputs.Slider(0,1, step=0.001, label='Length', default=0.545),
gr.inputs.Slider(0,1, step=0.001, label='Diameter', default=0.13),
gr.inputs.Slider(0,1, step=0.001, label='Height', default=0.12),
gr.inputs.Slider(0,1, step=0.001, label='Whole weight', default=0.351),
gr.inputs.Slider(0,1, step=0.001, label='Shucked weight', default=0.115),
gr.inputs.Slider(0,1, step=0.001, label='Viscera weight', default=0.067),
gr.inputs.Slider(0,1, step=0.001, label='Shell weight', default=0.16)]
#Build output
outputs_app = ["text"]
#Build the interface
age_predictor_app = gr.Interface(fn=predict_age,
inputs=inputs_app,
outputs=outputs_app,
live=True,
theme="dark-peach",
description="Enter parameters using sliders provided to predict the age of abalone."
)
#Launch the app
age_predictor_app.launch(share=True)
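With share=True, Gradio serves the app locally and also creates a temporary public link, so the slider-based predictor can be opened from another device while the notebook keeps running.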