import tensorflow as tf
print(tf.__version__)
2.4.1
import numpy as np
import matplotlib.pyplot as plt
def plot_series(time, series, format="-", start=0, end=None):
    plt.plot(time[start:end], series[start:end], format)
    plt.xlabel("Time")
    plt.ylabel("Value")
    plt.grid(True)
!wget --no-check-certificate \
https://raw.githubusercontent.com/jbrownlee/Datasets/master/daily-min-temperatures.csv \
-O /tmp/daily-min-temperatures.csv
--2021-06-07 18:29:47-- https://raw.githubusercontent.com/jbrownlee/Datasets/master/daily-min-temperatures.csv
Resolving raw.githubusercontent.com (raw.githubusercontent.com)... 185.199.111.133, 185.199.108.133, 185.199.109.133, ...
Connecting to raw.githubusercontent.com (raw.githubusercontent.com)|185.199.111.133|:443... connected.
HTTP request sent, awaiting response... 200 OK
Length: 67921 (66K) [text/plain]
Saving to: ‘/tmp/daily-min-temperatures.csv’
/tmp/daily-min-temp 100%[===================>] 66.33K --.-KB/s in 0.001s
2021-06-07 18:29:47 (48.7 MB/s) - ‘/tmp/daily-min-temperatures.csv’ saved [67921/67921]
import csv
time_step = []
temps = []
with open('/tmp/daily-min-temperatures.csv') as csvfile:
    reader = csv.reader(csvfile, delimiter=',')
    next(reader)  # skip the header row (Date,Temp)
    step = 0
    for row in reader:
        temps.append(float(row[1]))
        time_step.append(step)
        step += 1
series = np.array(temps)
time = np.array(time_step)
plt.figure(figsize=(10, 6))
plot_series(time, series)
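## Side note, a minimal sketch not in the original notebook: the same load can
## be done with pandas, assuming the CSV header is Date,Temp as in the
## published dataset.
import pandas as pd
df = pd.read_csv('/tmp/daily-min-temperatures.csv')
series_pd = df['Temp'].to_numpy(dtype=np.float32)
assert np.allclose(series_pd, series)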
## Parameters for the sliding-window technique
split_time = 2500
window_size = 30
batch_size = 32
shuffle_buffer_size = 1000
## Split the dataset into training and validation sets
time_t = time[:split_time]
x_t = series[:split_time]
time_v = time[split_time:]
x_v = series[split_time:]
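## Quick sanity check (illustrative; the dataset has 3650 daily rows, 1981-1990):
print(len(x_t), len(x_v))  # expected: 2500 1150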
def windowed_dataset(series, window_size, batch_size, shuffle_buffer):
    series = tf.expand_dims(series, axis=-1)
    ds = tf.data.Dataset.from_tensor_slices(series)
    ds = ds.window(window_size + 1, shift=1, drop_remainder=True)
    ds = ds.flat_map(lambda w: w.batch(window_size + 1))
    ds = ds.shuffle(shuffle_buffer)
    ds = ds.map(lambda w: (w[:-1], w[1:]))  # features, targets shifted one step
    return ds.batch(batch_size).prefetch(1)
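## Illustrative shape check, not part of the exercise: every element of the
## dataset is a (features, targets) pair, targets being the features shifted
## one step ahead.
demo = windowed_dataset(np.arange(10, dtype=np.float32),
                        window_size=4, batch_size=2, shuffle_buffer=5)
for x, y in demo.take(1):
    print(x.shape, y.shape)  # expected: (2, 4, 1) (2, 4, 1)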
def model_forecast(model, series, window_size):
    ds = tf.data.Dataset.from_tensor_slices(series)
    # shift=1 gives one window per time step; without it tf.data defaults the
    # shift to the window size, producing non-overlapping windows
    ds = ds.window(window_size, shift=1, drop_remainder=True)
    ds = ds.flat_map(lambda window: window.batch(window_size))
    ds = ds.batch(32).prefetch(1)
    forecast = model.predict(ds, verbose=1)
    return forecast
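## Note: because the network below returns sequences, model_forecast yields an
## array of shape [n_windows, window_size, 1], with
## n_windows = len(series) - window_size + 1 given the shift of 1.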
tf.keras.backend.clear_session()
tf.random.set_seed(51)
np.random.seed(51)
window_size = 64
batch_size = 256
train_set = windowed_dataset(x_t, window_size, batch_size, shuffle_buffer_size)
model = tf.keras.models.Sequential([
    tf.keras.layers.Conv1D(filters=32, kernel_size=5,
                           strides=1, padding="causal",
                           activation="relu",
                           input_shape=[None, 1]),
    tf.keras.layers.LSTM(64, return_sequences=True),
    tf.keras.layers.LSTM(64, return_sequences=True),
    tf.keras.layers.Dense(30),
    tf.keras.layers.Dense(10),
    tf.keras.layers.Dense(1),
    tf.keras.layers.Lambda(lambda x: x * 400)
])
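## The final Lambda layer rescales the outputs by 400 so the preceding layers
## can operate on small values; for this series (roughly 0-26 °C) a smaller
## factor would arguably be a more natural choice.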
## your code to create the lr_schedule variable here
# Note: this sweep ramps the learning rate linearly, 5e-9 per epoch starting
# at 0 (epoch is 0-based), which matches the scheduler messages logged below.
lr_schedule = tf.keras.callbacks.LearningRateScheduler(lambda epoch: 1e-8 * 10 * (epoch / 20), verbose=1)
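## For reference, the learning-rate finder usually seen in this course sweeps
## exponentially rather than linearly; a sketch (lr_schedule_exp is a
## hypothetical alternative name, not used below):
lr_schedule_exp = tf.keras.callbacks.LearningRateScheduler(
    lambda epoch: 1e-8 * 10 ** (epoch / 20), verbose=1)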
model.compile(loss=tf.keras.losses.Huber(),
              optimizer=tf.keras.optimizers.SGD(learning_rate=1e-8, momentum=0.9),
              metrics=["mae"])
history = model.fit(train_set, epochs=100, callbacks=[lr_schedule])
Epoch 1/100
Epoch 00001: LearningRateScheduler reducing learning rate to 0.0.
10/10 [==============================] - 8s 423ms/step - loss: 114.8675 - mae: 115.3671
Epoch 2/100
Epoch 00002: LearningRateScheduler reducing learning rate to 5e-09.
10/10 [==============================] - 5s 425ms/step - loss: 114.5800 - mae: 115.0796
Epoch 3/100
Epoch 00003: LearningRateScheduler reducing learning rate to 1e-08.
10/10 [==============================] - 4s 420ms/step - loss: 111.0869 - mae: 111.5865
Epoch 4/100
Epoch 00004: LearningRateScheduler reducing learning rate to 1.5e-08.
10/10 [==============================] - 5s 455ms/step - loss: 102.3569 - mae: 102.8567
Epoch 5/100
Epoch 00005: LearningRateScheduler reducing learning rate to 2e-08.
10/10 [==============================] - 4s 415ms/step - loss: 88.4683 - mae: 88.9680
Epoch 6/100
Epoch 00006: LearningRateScheduler reducing learning rate to 2.5e-08.
10/10 [==============================] - 4s 420ms/step - loss: 69.9235 - mae: 70.4234
Epoch 7/100
Epoch 00007: LearningRateScheduler reducing learning rate to 3e-08.
10/10 [==============================] - 4s 418ms/step - loss: 46.9920 - mae: 47.4917
Epoch 8/100
Epoch 00008: LearningRateScheduler reducing learning rate to 3.4999999999999996e-08.
10/10 [==============================] - 5s 423ms/step - loss: 22.5335 - mae: 23.0290
Epoch 9/100
Epoch 00009: LearningRateScheduler reducing learning rate to 4e-08.
10/10 [==============================] - 4s 416ms/step - loss: 17.0329 - mae: 17.5274
Epoch 10/100
Epoch 00010: LearningRateScheduler reducing learning rate to 4.5e-08.
10/10 [==============================] - 5s 427ms/step - loss: 16.4659 - mae: 16.9602
Epoch 11/100
Epoch 00011: LearningRateScheduler reducing learning rate to 5e-08.
10/10 [==============================] - 5s 420ms/step - loss: 14.3301 - mae: 14.8219
Epoch 12/100
Epoch 00012: LearningRateScheduler reducing learning rate to 5.5e-08.
10/10 [==============================] - 4s 417ms/step - loss: 13.3915 - mae: 13.8826
Epoch 13/100
Epoch 00013: LearningRateScheduler reducing learning rate to 6e-08.
10/10 [==============================] - 5s 419ms/step - loss: 12.3807 - mae: 12.8719
Epoch 14/100
Epoch 00014: LearningRateScheduler reducing learning rate to 6.5e-08.
10/10 [==============================] - 4s 419ms/step - loss: 11.6439 - mae: 12.1341
Epoch 15/100
Epoch 00015: LearningRateScheduler reducing learning rate to 6.999999999999999e-08.
10/10 [==============================] - 5s 427ms/step - loss: 10.8585 - mae: 11.3484
Epoch 16/100
Epoch 00016: LearningRateScheduler reducing learning rate to 7.5e-08.
10/10 [==============================] - 4s 415ms/step - loss: 10.3662 - mae: 10.8553
Epoch 17/100
Epoch 00017: LearningRateScheduler reducing learning rate to 8e-08.
10/10 [==============================] - 5s 434ms/step - loss: 9.8107 - mae: 10.2990
Epoch 18/100
Epoch 00018: LearningRateScheduler reducing learning rate to 8.5e-08.
10/10 [==============================] - 4s 411ms/step - loss: 9.4318 - mae: 9.9200
Epoch 19/100
Epoch 00019: LearningRateScheduler reducing learning rate to 9e-08.
10/10 [==============================] - 4s 417ms/step - loss: 9.1240 - mae: 9.6120
Epoch 20/100
Epoch 00020: LearningRateScheduler reducing learning rate to 9.499999999999999e-08.
10/10 [==============================] - 4s 413ms/step - loss: 8.8290 - mae: 9.3166
Epoch 21/100
Epoch 00021: LearningRateScheduler reducing learning rate to 1e-07.
10/10 [==============================] - 4s 414ms/step - loss: 8.6099 - mae: 9.0973
Epoch 22/100
Epoch 00022: LearningRateScheduler reducing learning rate to 1.05e-07.
10/10 [==============================] - 5s 417ms/step - loss: 8.3510 - mae: 8.8382
Epoch 23/100
Epoch 00023: LearningRateScheduler reducing learning rate to 1.1e-07.
10/10 [==============================] - 4s 416ms/step - loss: 8.1361 - mae: 8.6227
Epoch 24/100
Epoch 00024: LearningRateScheduler reducing learning rate to 1.1499999999999998e-07.
10/10 [==============================] - 5s 423ms/step - loss: 7.9339 - mae: 8.4204
Epoch 25/100
Epoch 00025: LearningRateScheduler reducing learning rate to 1.2e-07.
10/10 [==============================] - 4s 415ms/step - loss: 7.7562 - mae: 8.2430
Epoch 26/100
Epoch 00026: LearningRateScheduler reducing learning rate to 1.25e-07.
10/10 [==============================] - 4s 419ms/step - loss: 7.5885 - mae: 8.0749
Epoch 27/100
Epoch 00027: LearningRateScheduler reducing learning rate to 1.3e-07.
10/10 [==============================] - 4s 413ms/step - loss: 7.3754 - mae: 7.8615
Epoch 28/100
Epoch 00028: LearningRateScheduler reducing learning rate to 1.35e-07.
10/10 [==============================] - 5s 430ms/step - loss: 7.2499 - mae: 7.7350
Epoch 29/100
Epoch 00029: LearningRateScheduler reducing learning rate to 1.3999999999999998e-07.
10/10 [==============================] - 4s 420ms/step - loss: 7.1448 - mae: 7.6302
Epoch 30/100
Epoch 00030: LearningRateScheduler reducing learning rate to 1.45e-07.
10/10 [==============================] - 5s 428ms/step - loss: 6.9414 - mae: 7.4262
Epoch 31/100
Epoch 00031: LearningRateScheduler reducing learning rate to 1.5e-07.
10/10 [==============================] - 4s 417ms/step - loss: 6.8107 - mae: 7.2954
Epoch 32/100
Epoch 00032: LearningRateScheduler reducing learning rate to 1.55e-07.
10/10 [==============================] - 4s 417ms/step - loss: 6.7215 - mae: 7.2059
Epoch 33/100
Epoch 00033: LearningRateScheduler reducing learning rate to 1.6e-07.
10/10 [==============================] - 4s 415ms/step - loss: 6.5802 - mae: 7.0642
Epoch 34/100
Epoch 00034: LearningRateScheduler reducing learning rate to 1.6499999999999998e-07.
10/10 [==============================] - 5s 421ms/step - loss: 6.4655 - mae: 6.9495
Epoch 35/100
Epoch 00035: LearningRateScheduler reducing learning rate to 1.7e-07.
10/10 [==============================] - 5s 419ms/step - loss: 6.3354 - mae: 6.8190
Epoch 36/100
Epoch 00036: LearningRateScheduler reducing learning rate to 1.75e-07.
10/10 [==============================] - 4s 410ms/step - loss: 6.2017 - mae: 6.6847
Epoch 37/100
Epoch 00037: LearningRateScheduler reducing learning rate to 1.8e-07.
10/10 [==============================] - 5s 429ms/step - loss: 6.1225 - mae: 6.6049
Epoch 38/100
Epoch 00038: LearningRateScheduler reducing learning rate to 1.85e-07.
10/10 [==============================] - 5s 423ms/step - loss: 6.0007 - mae: 6.4825
Epoch 39/100
Epoch 00039: LearningRateScheduler reducing learning rate to 1.8999999999999998e-07.
10/10 [==============================] - 5s 426ms/step - loss: 5.8324 - mae: 6.3135
Epoch 40/100
Epoch 00040: LearningRateScheduler reducing learning rate to 1.9499999999999999e-07.
10/10 [==============================] - 5s 424ms/step - loss: 5.7879 - mae: 6.2685
Epoch 41/100
Epoch 00041: LearningRateScheduler reducing learning rate to 2e-07.
10/10 [==============================] - 5s 427ms/step - loss: 5.6621 - mae: 6.1422
Epoch 42/100
Epoch 00042: LearningRateScheduler reducing learning rate to 2.0499999999999997e-07.
10/10 [==============================] - 5s 418ms/step - loss: 5.5409 - mae: 6.0206
Epoch 43/100
Epoch 00043: LearningRateScheduler reducing learning rate to 2.1e-07.
10/10 [==============================] - 4s 417ms/step - loss: 5.4634 - mae: 5.9430
Epoch 44/100
Epoch 00044: LearningRateScheduler reducing learning rate to 2.1499999999999998e-07.
10/10 [==============================] - 4s 411ms/step - loss: 5.3894 - mae: 5.8686
Epoch 45/100
Epoch 00045: LearningRateScheduler reducing learning rate to 2.2e-07.
10/10 [==============================] - 4s 415ms/step - loss: 5.2563 - mae: 5.7349
Epoch 46/100
Epoch 00046: LearningRateScheduler reducing learning rate to 2.25e-07.
10/10 [==============================] - 5s 422ms/step - loss: 5.1664 - mae: 5.6446
Epoch 47/100
Epoch 00047: LearningRateScheduler reducing learning rate to 2.2999999999999997e-07.
10/10 [==============================] - 5s 424ms/step - loss: 5.0903 - mae: 5.5680
Epoch 48/100
Epoch 00048: LearningRateScheduler reducing learning rate to 2.35e-07.
10/10 [==============================] - 4s 420ms/step - loss: 4.9809 - mae: 5.4583
Epoch 49/100
Epoch 00049: LearningRateScheduler reducing learning rate to 2.4e-07.
10/10 [==============================] - 4s 410ms/step - loss: 4.8931 - mae: 5.3699
Epoch 50/100
Epoch 00050: LearningRateScheduler reducing learning rate to 2.45e-07.
10/10 [==============================] - 5s 423ms/step - loss: 4.8179 - mae: 5.2941
Epoch 51/100
Epoch 00051: LearningRateScheduler reducing learning rate to 2.5e-07.
10/10 [==============================] - 4s 412ms/step - loss: 4.7272 - mae: 5.2022
Epoch 52/100
Epoch 00052: LearningRateScheduler reducing learning rate to 2.55e-07.
10/10 [==============================] - 4s 411ms/step - loss: 4.6597 - mae: 5.1348
Epoch 53/100
Epoch 00053: LearningRateScheduler reducing learning rate to 2.6e-07.
10/10 [==============================] - 5s 424ms/step - loss: 4.5685 - mae: 5.0433
Epoch 54/100
Epoch 00054: LearningRateScheduler reducing learning rate to 2.65e-07.
10/10 [==============================] - 4s 417ms/step - loss: 4.5165 - mae: 4.9912
Epoch 55/100
Epoch 00055: LearningRateScheduler reducing learning rate to 2.7e-07.
10/10 [==============================] - 5s 431ms/step - loss: 4.4296 - mae: 4.9038
Epoch 56/100
Epoch 00056: LearningRateScheduler reducing learning rate to 2.75e-07.
10/10 [==============================] - 4s 415ms/step - loss: 4.3579 - mae: 4.8313
Epoch 57/100
Epoch 00057: LearningRateScheduler reducing learning rate to 2.7999999999999997e-07.
10/10 [==============================] - 5s 421ms/step - loss: 4.3049 - mae: 4.7780
Epoch 58/100
Epoch 00058: LearningRateScheduler reducing learning rate to 2.8499999999999997e-07.
10/10 [==============================] - 4s 413ms/step - loss: 4.2127 - mae: 4.6852
Epoch 59/100
Epoch 00059: LearningRateScheduler reducing learning rate to 2.9e-07.
10/10 [==============================] - 5s 423ms/step - loss: 4.1759 - mae: 4.6487
Epoch 60/100
Epoch 00060: LearningRateScheduler reducing learning rate to 2.95e-07.
10/10 [==============================] - 4s 414ms/step - loss: 4.0790 - mae: 4.5513
Epoch 61/100
Epoch 00061: LearningRateScheduler reducing learning rate to 3e-07.
10/10 [==============================] - 4s 413ms/step - loss: 4.0134 - mae: 4.4849
Epoch 62/100
Epoch 00062: LearningRateScheduler reducing learning rate to 3.05e-07.
10/10 [==============================] - 4s 412ms/step - loss: 3.9778 - mae: 4.4492
Epoch 63/100
Epoch 00063: LearningRateScheduler reducing learning rate to 3.1e-07.
10/10 [==============================] - 4s 416ms/step - loss: 3.8932 - mae: 4.3638
Epoch 64/100
Epoch 00064: LearningRateScheduler reducing learning rate to 3.15e-07.
10/10 [==============================] - 4s 411ms/step - loss: 3.8364 - mae: 4.3070
Epoch 65/100
Epoch 00065: LearningRateScheduler reducing learning rate to 3.2e-07.
10/10 [==============================] - 4s 415ms/step - loss: 3.7954 - mae: 4.2657
Epoch 66/100
Epoch 00066: LearningRateScheduler reducing learning rate to 3.2499999999999996e-07.
10/10 [==============================] - 4s 419ms/step - loss: 3.7200 - mae: 4.1898
Epoch 67/100
Epoch 00067: LearningRateScheduler reducing learning rate to 3.2999999999999996e-07.
10/10 [==============================] - 4s 412ms/step - loss: 3.6899 - mae: 4.1593
Epoch 68/100
Epoch 00068: LearningRateScheduler reducing learning rate to 3.3499999999999997e-07.
10/10 [==============================] - 5s 431ms/step - loss: 3.6232 - mae: 4.0924
Epoch 69/100
Epoch 00069: LearningRateScheduler reducing learning rate to 3.4e-07.
10/10 [==============================] - 4s 417ms/step - loss: 3.5696 - mae: 4.0381
Epoch 70/100
Epoch 00070: LearningRateScheduler reducing learning rate to 3.45e-07.
10/10 [==============================] - 5s 422ms/step - loss: 3.5072 - mae: 3.9756
Epoch 71/100
Epoch 00071: LearningRateScheduler reducing learning rate to 3.5e-07.
10/10 [==============================] - 5s 418ms/step - loss: 3.4622 - mae: 3.9298
Epoch 72/100
Epoch 00072: LearningRateScheduler reducing learning rate to 3.55e-07.
10/10 [==============================] - 4s 419ms/step - loss: 3.4199 - mae: 3.8875
Epoch 73/100
Epoch 00073: LearningRateScheduler reducing learning rate to 3.6e-07.
10/10 [==============================] - 4s 414ms/step - loss: 3.3491 - mae: 3.8161
Epoch 74/100
Epoch 00074: LearningRateScheduler reducing learning rate to 3.65e-07.
10/10 [==============================] - 5s 416ms/step - loss: 3.3136 - mae: 3.7807
Epoch 75/100
Epoch 00075: LearningRateScheduler reducing learning rate to 3.7e-07.
10/10 [==============================] - 4s 419ms/step - loss: 3.2845 - mae: 3.7518
Epoch 76/100
Epoch 00076: LearningRateScheduler reducing learning rate to 3.75e-07.
10/10 [==============================] - 4s 416ms/step - loss: 3.2290 - mae: 3.6961
Epoch 77/100
Epoch 00077: LearningRateScheduler reducing learning rate to 3.7999999999999996e-07.
10/10 [==============================] - 5s 423ms/step - loss: 3.2103 - mae: 3.6774
Epoch 78/100
Epoch 00078: LearningRateScheduler reducing learning rate to 3.8499999999999997e-07.
10/10 [==============================] - 4s 418ms/step - loss: 3.1822 - mae: 3.6488
Epoch 79/100
Epoch 00079: LearningRateScheduler reducing learning rate to 3.8999999999999997e-07.
10/10 [==============================] - 4s 413ms/step - loss: 3.1356 - mae: 3.6019
Epoch 80/100
Epoch 00080: LearningRateScheduler reducing learning rate to 3.95e-07.
10/10 [==============================] - 4s 414ms/step - loss: 3.1072 - mae: 3.5732
Epoch 81/100
Epoch 00081: LearningRateScheduler reducing learning rate to 4e-07.
10/10 [==============================] - 4s 419ms/step - loss: 3.0636 - mae: 3.5290
Epoch 82/100
Epoch 00082: LearningRateScheduler reducing learning rate to 4.05e-07.
10/10 [==============================] - 5s 424ms/step - loss: 3.0524 - mae: 3.5179
Epoch 83/100
Epoch 00083: LearningRateScheduler reducing learning rate to 4.0999999999999994e-07.
10/10 [==============================] - 4s 418ms/step - loss: 2.9982 - mae: 3.4633
Epoch 84/100
Epoch 00084: LearningRateScheduler reducing learning rate to 4.15e-07.
10/10 [==============================] - 5s 415ms/step - loss: 2.9807 - mae: 3.4455
Epoch 85/100
Epoch 00085: LearningRateScheduler reducing learning rate to 4.2e-07.
10/10 [==============================] - 4s 409ms/step - loss: 2.9612 - mae: 3.4263
Epoch 86/100
Epoch 00086: LearningRateScheduler reducing learning rate to 4.2499999999999995e-07.
10/10 [==============================] - 4s 416ms/step - loss: 2.9152 - mae: 3.3794
Epoch 87/100
Epoch 00087: LearningRateScheduler reducing learning rate to 4.2999999999999996e-07.
10/10 [==============================] - 4s 412ms/step - loss: 2.9111 - mae: 3.3758
Epoch 88/100
Epoch 00088: LearningRateScheduler reducing learning rate to 4.3499999999999996e-07.
10/10 [==============================] - 5s 423ms/step - loss: 2.8789 - mae: 3.3436
Epoch 89/100
Epoch 00089: LearningRateScheduler reducing learning rate to 4.4e-07.
10/10 [==============================] - 4s 413ms/step - loss: 2.8544 - mae: 3.3189
Epoch 90/100
Epoch 00090: LearningRateScheduler reducing learning rate to 4.45e-07.
10/10 [==============================] - 5s 426ms/step - loss: 2.8453 - mae: 3.3095
Epoch 91/100
Epoch 00091: LearningRateScheduler reducing learning rate to 4.5e-07.
10/10 [==============================] - 4s 417ms/step - loss: 2.8248 - mae: 3.2899
Epoch 92/100
Epoch 00092: LearningRateScheduler reducing learning rate to 4.55e-07.
10/10 [==============================] - 4s 412ms/step - loss: 2.7913 - mae: 3.2548
Epoch 93/100
Epoch 00093: LearningRateScheduler reducing learning rate to 4.5999999999999994e-07.
10/10 [==============================] - 5s 417ms/step - loss: 2.7680 - mae: 3.2321
Epoch 94/100
Epoch 00094: LearningRateScheduler reducing learning rate to 4.65e-07.
10/10 [==============================] - 5s 422ms/step - loss: 2.7545 - mae: 3.2182
Epoch 95/100
Epoch 00095: LearningRateScheduler reducing learning rate to 4.7e-07.
10/10 [==============================] - 5s 435ms/step - loss: 2.7045 - mae: 3.1683
Epoch 96/100
Epoch 00096: LearningRateScheduler reducing learning rate to 4.75e-07.
10/10 [==============================] - 4s 416ms/step - loss: 2.7025 - mae: 3.1661
Epoch 97/100
Epoch 00097: LearningRateScheduler reducing learning rate to 4.8e-07.
10/10 [==============================] - 5s 428ms/step - loss: 2.6898 - mae: 3.1528
Epoch 98/100
Epoch 00098: LearningRateScheduler reducing learning rate to 4.849999999999999e-07.
10/10 [==============================] - 4s 413ms/step - loss: 2.6802 - mae: 3.1442
Epoch 99/100
Epoch 00099: LearningRateScheduler reducing learning rate to 4.9e-07.
10/10 [==============================] - 4s 421ms/step - loss: 2.6465 - mae: 3.1089
Epoch 100/100
Epoch 00100: LearningRateScheduler reducing learning rate to 4.95e-07.
10/10 [==============================] - 4s 410ms/step - loss: 2.6350 - mae: 3.0976
# Loss vs. learning rate on a log x-axis; note the sweep above only reached
# ~5e-7, so most of the 1e-8..1e-4 range shown here stays empty.
plt.semilogx(history.history["lr"], history.history["loss"])
plt.axis([1e-8, 1e-4, 0, 60])
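## A minimal sketch (not from the original notebook) for reading off a usable
## learning rate: take the LR at minimum loss and back off by an order of
## magnitude to stay in the smoothly decreasing region.
lrs = np.array(history.history["lr"])
losses = np.array(history.history["loss"])
print("LR at min loss:", lrs[np.argmin(losses)],
      "-> conservative pick:", lrs[np.argmin(losses)] / 10)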
tf.keras.backend.clear_session()
tf.random.set_seed(51)
np.random.seed(51)
# Note: window_size=60 is passed only to windowed_dataset here; the global
# window_size variable stays 64 and is what model_forecast uses later.
train_set = windowed_dataset(x_t, window_size=60, batch_size=100, shuffle_buffer=shuffle_buffer_size)
model = tf.keras.models.Sequential([
    tf.keras.layers.Conv1D(filters=60, kernel_size=5,
                           strides=1, padding="causal",
                           activation="relu",
                           input_shape=[None, 1]),
    tf.keras.layers.LSTM(64, return_sequences=True),
    tf.keras.layers.LSTM(64, return_sequences=True),
    tf.keras.layers.Dense(30),
    tf.keras.layers.Dense(10),
    tf.keras.layers.Dense(1),
    tf.keras.layers.Lambda(lambda x: x * 400)
])
model.compile(loss=tf.keras.losses.Huber(),
              optimizer=tf.keras.optimizers.SGD(learning_rate=1e-5, momentum=0.9),
              metrics=["mae"])
history = model.fit(train_set, epochs=150)
Epoch 1/150
25/25 [==============================] - 10s 222ms/step - loss: 63.9024 - mae: 64.4014
Epoch 2/150
25/25 [==============================] - 6s 226ms/step - loss: 27.1890 - mae: 27.6856
Epoch 3/150
25/25 [==============================] - 6s 217ms/step - loss: 26.3741 - mae: 26.8738
Epoch 4/150
25/25 [==============================] - 6s 222ms/step - loss: 20.3019 - mae: 20.8014
Epoch 5/150
25/25 [==============================] - 6s 217ms/step - loss: 16.0599 - mae: 16.5591
Epoch 6/150
25/25 [==============================] - 6s 218ms/step - loss: 12.5092 - mae: 13.0076
Epoch 7/150
25/25 [==============================] - 6s 219ms/step - loss: 11.4753 - mae: 11.9734
Epoch 8/150
25/25 [==============================] - 6s 219ms/step - loss: 10.2815 - mae: 10.7791
Epoch 9/150
25/25 [==============================] - 6s 225ms/step - loss: 8.6659 - mae: 9.1628
Epoch 10/150
25/25 [==============================] - 6s 219ms/step - loss: 8.4001 - mae: 8.8977
Epoch 11/150
25/25 [==============================] - 6s 223ms/step - loss: 7.7691 - mae: 8.2655
Epoch 12/150
25/25 [==============================] - 6s 219ms/step - loss: 6.6498 - mae: 7.1446
Epoch 13/150
25/25 [==============================] - 6s 222ms/step - loss: 6.4552 - mae: 6.9494
Epoch 14/150
25/25 [==============================] - 6s 224ms/step - loss: 6.1524 - mae: 6.6449
Epoch 15/150
25/25 [==============================] - 6s 217ms/step - loss: 5.2840 - mae: 5.7735
Epoch 16/150
25/25 [==============================] - 6s 219ms/step - loss: 5.4354 - mae: 5.9257
Epoch 17/150
25/25 [==============================] - 6s 220ms/step - loss: 4.8719 - mae: 5.3576
Epoch 18/150
25/25 [==============================] - 6s 220ms/step - loss: 2.2579 - mae: 2.7196
Epoch 19/150
25/25 [==============================] - 6s 220ms/step - loss: 1.7702 - mae: 2.2202
Epoch 20/150
25/25 [==============================] - 6s 220ms/step - loss: 1.7074 - mae: 2.1565
Epoch 21/150
25/25 [==============================] - 6s 223ms/step - loss: 1.6922 - mae: 2.1397
Epoch 22/150
25/25 [==============================] - 6s 218ms/step - loss: 1.6800 - mae: 2.1267
Epoch 23/150
25/25 [==============================] - 6s 230ms/step - loss: 1.6525 - mae: 2.0985
Epoch 24/150
25/25 [==============================] - 6s 219ms/step - loss: 1.6284 - mae: 2.0738
Epoch 25/150
25/25 [==============================] - 6s 223ms/step - loss: 1.6275 - mae: 2.0731
Epoch 26/150
25/25 [==============================] - 6s 221ms/step - loss: 1.6015 - mae: 2.0461
Epoch 27/150
25/25 [==============================] - 6s 218ms/step - loss: 1.5993 - mae: 2.0437
Epoch 28/150
25/25 [==============================] - 6s 218ms/step - loss: 1.5834 - mae: 2.0269
Epoch 29/150
25/25 [==============================] - 6s 217ms/step - loss: 1.5797 - mae: 2.0227
Epoch 30/150
25/25 [==============================] - 6s 223ms/step - loss: 1.5729 - mae: 2.0157
Epoch 31/150
25/25 [==============================] - 6s 216ms/step - loss: 1.5708 - mae: 2.0137
Epoch 32/150
25/25 [==============================] - 6s 219ms/step - loss: 1.5757 - mae: 2.0198
Epoch 33/150
25/25 [==============================] - 6s 222ms/step - loss: 1.5581 - mae: 2.0014
Epoch 34/150
25/25 [==============================] - 6s 222ms/step - loss: 1.5593 - mae: 2.0021
Epoch 35/150
25/25 [==============================] - 6s 224ms/step - loss: 1.5427 - mae: 1.9849
Epoch 36/150
25/25 [==============================] - 6s 219ms/step - loss: 1.5427 - mae: 1.9843
Epoch 37/150
25/25 [==============================] - 6s 221ms/step - loss: 1.5506 - mae: 1.9942
Epoch 38/150
25/25 [==============================] - 6s 217ms/step - loss: 1.5418 - mae: 1.9841
Epoch 39/150
25/25 [==============================] - 6s 220ms/step - loss: 1.5420 - mae: 1.9848
Epoch 40/150
25/25 [==============================] - 6s 223ms/step - loss: 1.5399 - mae: 1.9823
Epoch 41/150
25/25 [==============================] - 6s 216ms/step - loss: 1.5319 - mae: 1.9736
Epoch 42/150
25/25 [==============================] - 6s 220ms/step - loss: 1.5340 - mae: 1.9765
Epoch 43/150
25/25 [==============================] - 6s 218ms/step - loss: 1.5340 - mae: 1.9772
Epoch 44/150
25/25 [==============================] - 6s 223ms/step - loss: 1.5463 - mae: 1.9899
Epoch 45/150
25/25 [==============================] - 6s 221ms/step - loss: 1.5225 - mae: 1.9638
Epoch 46/150
25/25 [==============================] - 6s 219ms/step - loss: 1.5247 - mae: 1.9665
Epoch 47/150
25/25 [==============================] - 6s 220ms/step - loss: 1.5264 - mae: 1.9683
Epoch 48/150
25/25 [==============================] - 6s 217ms/step - loss: 1.5269 - mae: 1.9688
Epoch 49/150
25/25 [==============================] - 6s 220ms/step - loss: 1.5337 - mae: 1.9764
Epoch 50/150
25/25 [==============================] - 6s 217ms/step - loss: 1.5295 - mae: 1.9719
Epoch 51/150
25/25 [==============================] - 6s 223ms/step - loss: 1.5203 - mae: 1.9618
Epoch 52/150
25/25 [==============================] - 6s 216ms/step - loss: 1.5205 - mae: 1.9613
Epoch 53/150
25/25 [==============================] - 6s 221ms/step - loss: 1.5183 - mae: 1.9595
Epoch 54/150
25/25 [==============================] - 6s 221ms/step - loss: 1.5122 - mae: 1.9530
Epoch 55/150
25/25 [==============================] - 6s 221ms/step - loss: 1.5115 - mae: 1.9522
Epoch 56/150
25/25 [==============================] - 6s 221ms/step - loss: 1.5185 - mae: 1.9593
Epoch 57/150
25/25 [==============================] - 6s 218ms/step - loss: 1.5257 - mae: 1.9672
Epoch 58/150
25/25 [==============================] - 6s 219ms/step - loss: 1.5198 - mae: 1.9618
Epoch 59/150
25/25 [==============================] - 6s 232ms/step - loss: 1.5158 - mae: 1.9569
Epoch 60/150
25/25 [==============================] - 6s 219ms/step - loss: 1.5062 - mae: 1.9463
Epoch 61/150
25/25 [==============================] - 6s 222ms/step - loss: 1.5170 - mae: 1.9581
Epoch 62/150
25/25 [==============================] - 6s 218ms/step - loss: 1.5122 - mae: 1.9525
Epoch 63/150
25/25 [==============================] - 6s 221ms/step - loss: 1.5102 - mae: 1.9508
Epoch 64/150
25/25 [==============================] - 6s 217ms/step - loss: 1.5023 - mae: 1.9430
Epoch 65/150
25/25 [==============================] - 6s 222ms/step - loss: 1.5377 - mae: 1.9815
Epoch 66/150
25/25 [==============================] - 6s 221ms/step - loss: 1.5059 - mae: 1.9461
Epoch 67/150
25/25 [==============================] - 6s 223ms/step - loss: 1.5226 - mae: 1.9639
Epoch 68/150
25/25 [==============================] - 6s 219ms/step - loss: 1.5061 - mae: 1.9465
Epoch 69/150
25/25 [==============================] - 6s 221ms/step - loss: 1.5045 - mae: 1.9442
Epoch 70/150
25/25 [==============================] - 6s 220ms/step - loss: 1.5093 - mae: 1.9502
Epoch 71/150
25/25 [==============================] - 6s 217ms/step - loss: 1.4992 - mae: 1.9391
Epoch 72/150
25/25 [==============================] - 6s 221ms/step - loss: 1.5075 - mae: 1.9484
Epoch 73/150
25/25 [==============================] - 6s 222ms/step - loss: 1.4958 - mae: 1.9353
Epoch 74/150
25/25 [==============================] - 6s 218ms/step - loss: 1.5092 - mae: 1.9503
Epoch 75/150
25/25 [==============================] - 6s 225ms/step - loss: 1.5091 - mae: 1.9495
Epoch 76/150
25/25 [==============================] - 6s 220ms/step - loss: 1.5059 - mae: 1.9471
Epoch 77/150
25/25 [==============================] - 6s 222ms/step - loss: 1.5131 - mae: 1.9540
Epoch 78/150
25/25 [==============================] - 6s 222ms/step - loss: 1.5039 - mae: 1.9440
Epoch 79/150
25/25 [==============================] - 6s 220ms/step - loss: 1.5039 - mae: 1.9444
Epoch 80/150
25/25 [==============================] - 6s 220ms/step - loss: 1.5046 - mae: 1.9448
Epoch 81/150
25/25 [==============================] - 6s 218ms/step - loss: 1.5032 - mae: 1.9432
Epoch 82/150
25/25 [==============================] - 6s 224ms/step - loss: 1.5093 - mae: 1.9498
Epoch 83/150
25/25 [==============================] - 6s 218ms/step - loss: 1.5105 - mae: 1.9505
Epoch 84/150
25/25 [==============================] - 6s 221ms/step - loss: 1.5068 - mae: 1.9473
Epoch 85/150
25/25 [==============================] - 6s 218ms/step - loss: 1.5101 - mae: 1.9512
Epoch 86/150
25/25 [==============================] - 6s 223ms/step - loss: 1.4994 - mae: 1.9393
Epoch 87/150
25/25 [==============================] - 6s 225ms/step - loss: 1.5086 - mae: 1.9489
Epoch 88/150
25/25 [==============================] - 6s 218ms/step - loss: 1.5012 - mae: 1.9412
Epoch 89/150
25/25 [==============================] - 6s 221ms/step - loss: 1.5040 - mae: 1.9438
Epoch 90/150
25/25 [==============================] - 6s 217ms/step - loss: 1.5061 - mae: 1.9466
Epoch 91/150
25/25 [==============================] - 6s 219ms/step - loss: 1.4949 - mae: 1.9345
Epoch 92/150
25/25 [==============================] - 6s 220ms/step - loss: 1.4973 - mae: 1.9376
Epoch 93/150
25/25 [==============================] - 6s 222ms/step - loss: 1.4943 - mae: 1.9342
Epoch 94/150
25/25 [==============================] - 6s 219ms/step - loss: 1.5018 - mae: 1.9419
Epoch 95/150
25/25 [==============================] - 6s 221ms/step - loss: 1.4937 - mae: 1.9332
Epoch 96/150
25/25 [==============================] - 6s 221ms/step - loss: 1.5037 - mae: 1.9444
Epoch 97/150
25/25 [==============================] - 6s 220ms/step - loss: 1.5012 - mae: 1.9409
Epoch 98/150
25/25 [==============================] - 6s 224ms/step - loss: 1.5035 - mae: 1.9435
Epoch 99/150
25/25 [==============================] - 6s 217ms/step - loss: 1.4997 - mae: 1.9402
Epoch 100/150
25/25 [==============================] - 6s 223ms/step - loss: 1.5002 - mae: 1.9397
Epoch 101/150
25/25 [==============================] - 6s 218ms/step - loss: 1.5086 - mae: 1.9483
Epoch 102/150
25/25 [==============================] - 6s 217ms/step - loss: 1.4930 - mae: 1.9324
Epoch 103/150
25/25 [==============================] - 6s 223ms/step - loss: 1.4985 - mae: 1.9385
Epoch 104/150
25/25 [==============================] - 6s 220ms/step - loss: 1.4925 - mae: 1.9317
Epoch 105/150
25/25 [==============================] - 6s 220ms/step - loss: 1.4950 - mae: 1.9345
Epoch 106/150
25/25 [==============================] - 6s 218ms/step - loss: 1.4938 - mae: 1.9338
Epoch 107/150
25/25 [==============================] - 6s 223ms/step - loss: 1.4932 - mae: 1.9325
Epoch 108/150
25/25 [==============================] - 6s 224ms/step - loss: 1.5016 - mae: 1.9422
Epoch 109/150
25/25 [==============================] - 7s 270ms/step - loss: 1.4995 - mae: 1.9401
Epoch 110/150
25/25 [==============================] - 6s 219ms/step - loss: 1.4977 - mae: 1.9366
Epoch 111/150
25/25 [==============================] - 6s 217ms/step - loss: 1.4920 - mae: 1.9311
Epoch 112/150
25/25 [==============================] - 6s 219ms/step - loss: 1.4946 - mae: 1.9338
Epoch 113/150
25/25 [==============================] - 8s 317ms/step - loss: 1.4933 - mae: 1.9324
Epoch 114/150
25/25 [==============================] - 10s 396ms/step - loss: 1.4945 - mae: 1.9339
Epoch 115/150
25/25 [==============================] - 10s 385ms/step - loss: 1.4854 - mae: 1.9244
Epoch 116/150
25/25 [==============================] - 10s 386ms/step - loss: 1.4938 - mae: 1.9328
Epoch 117/150
25/25 [==============================] - 6s 218ms/step - loss: 1.4955 - mae: 1.9348
Epoch 118/150
25/25 [==============================] - 6s 219ms/step - loss: 1.4922 - mae: 1.9310
Epoch 119/150
25/25 [==============================] - 6s 219ms/step - loss: 1.4892 - mae: 1.9282
Epoch 120/150
25/25 [==============================] - 6s 233ms/step - loss: 1.4991 - mae: 1.9390
Epoch 121/150
25/25 [==============================] - 6s 221ms/step - loss: 1.5030 - mae: 1.9433
Epoch 122/150
25/25 [==============================] - 6s 219ms/step - loss: 1.4952 - mae: 1.9341
Epoch 123/150
25/25 [==============================] - 6s 216ms/step - loss: 1.5045 - mae: 1.9442
Epoch 124/150
25/25 [==============================] - 6s 221ms/step - loss: 1.4898 - mae: 1.9288
Epoch 125/150
25/25 [==============================] - 6s 222ms/step - loss: 1.4919 - mae: 1.9310
Epoch 126/150
25/25 [==============================] - 6s 226ms/step - loss: 1.4964 - mae: 1.9357
Epoch 127/150
25/25 [==============================] - 6s 228ms/step - loss: 1.4959 - mae: 1.9345
Epoch 128/150
25/25 [==============================] - 6s 226ms/step - loss: 1.4957 - mae: 1.9354
Epoch 129/150
25/25 [==============================] - 6s 220ms/step - loss: 1.4946 - mae: 1.9338
Epoch 130/150
25/25 [==============================] - 6s 232ms/step - loss: 1.5073 - mae: 1.9469
Epoch 131/150
25/25 [==============================] - 6s 226ms/step - loss: 1.4961 - mae: 1.9353
Epoch 132/150
25/25 [==============================] - 6s 221ms/step - loss: 1.4977 - mae: 1.9368
Epoch 133/150
25/25 [==============================] - 6s 229ms/step - loss: 1.4832 - mae: 1.9218
Epoch 134/150
25/25 [==============================] - 6s 228ms/step - loss: 1.4963 - mae: 1.9351
Epoch 135/150
25/25 [==============================] - 6s 228ms/step - loss: 1.4949 - mae: 1.9333
Epoch 136/150
25/25 [==============================] - 6s 232ms/step - loss: 1.4931 - mae: 1.9322
Epoch 137/150
25/25 [==============================] - 6s 221ms/step - loss: 1.4926 - mae: 1.9313
Epoch 138/150
25/25 [==============================] - 6s 222ms/step - loss: 1.4952 - mae: 1.9343
Epoch 139/150
25/25 [==============================] - 6s 234ms/step - loss: 1.4914 - mae: 1.9302
Epoch 140/150
25/25 [==============================] - 6s 220ms/step - loss: 1.4976 - mae: 1.9367
Epoch 141/150
25/25 [==============================] - 6s 234ms/step - loss: 1.5002 - mae: 1.9400
Epoch 142/150
25/25 [==============================] - 6s 230ms/step - loss: 1.4864 - mae: 1.9243
Epoch 143/150
25/25 [==============================] - 6s 225ms/step - loss: 1.4888 - mae: 1.9275
Epoch 144/150
25/25 [==============================] - 6s 226ms/step - loss: 1.5002 - mae: 1.9391
Epoch 145/150
25/25 [==============================] - 6s 223ms/step - loss: 1.4884 - mae: 1.9271
Epoch 146/150
25/25 [==============================] - 6s 222ms/step - loss: 1.4929 - mae: 1.9319
Epoch 147/150
25/25 [==============================] - 6s 233ms/step - loss: 1.4946 - mae: 1.9336
Epoch 148/150
25/25 [==============================] - 6s 225ms/step - loss: 1.4944 - mae: 1.9333
Epoch 149/150
25/25 [==============================] - 6s 219ms/step - loss: 1.4894 - mae: 1.9283
Epoch 150/150
25/25 [==============================] - 6s 221ms/step - loss: 1.4874 - mae: 1.9256
rnn_forecast = model_forecast(model, x_v[..., np.newaxis], window_size)
# The model predicts a whole sequence per window; keep the last step of each
# window so the result lines up one value per time step.
rnn_forecast = rnn_forecast[:, -1, 0]
# (In the full-series variant of this exercise the slice would instead be
#  rnn_forecast[split_time - window_size:-1, -1, 0].)
2/2 [==============================] - 0s 34ms/step
plt.figure(figsize=(10, 6))
plot_series(time_v, x_v)
# The first forecast covers the window ending at index window_size - 1,
# so align the time axis accordingly.
plot_series(time_v[window_size - 1:], rnn_forecast)
tf.keras.metrics.mean_absolute_error(x_v[window_size - 1:], rnn_forecast).numpy()
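## Hedged sanity check, not in the original exercise: a naive persistence
## forecast (predict tomorrow = today) gives a floor the RNN should beat.
naive_mae = tf.keras.metrics.mean_absolute_error(x_v[1:], x_v[:-1]).numpy()
print("naive persistence MAE:", naive_mae)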
#-----------------------------------------------------------
# Retrieve the per-epoch training results recorded by model.fit
#-----------------------------------------------------------
loss = history.history['loss']
epochs = range(len(loss))  # number of training epochs
#------------------------------------------------
# Training loss per epoch (fit received no validation data,
# so only the training curve is available)
#------------------------------------------------
plt.plot(epochs, loss, 'r')
plt.title('Training loss')
plt.xlabel("Epochs")
plt.ylabel("Loss")
plt.legend(["Loss"])
plt.figure()
#------------------------------------------------
# Zoomed view of the training loss per epoch
#------------------------------------------------
zoomed_loss = loss[20:]
zoomed_epochs = range(20, 150)
## your code for the zoomed plot of exercise 8 here
plt.plot(zoomed_epochs, zoomed_loss)
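## Optional, illustrative: a 5-epoch moving average makes the plateau in the
## zoomed loss curve easier to see (the window length is an arbitrary choice).
smooth = np.convolve(zoomed_loss, np.ones(5) / 5, mode='valid')
plt.plot(zoomed_epochs[:len(smooth)], smooth)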