pip install -r requirements.txt
Requirement already satisfied: covidcast in /root/venv/lib/python3.7/site-packages (from -r requirements.txt (line 1)) (0.1.5)
Requirement already satisfied: tqdm in /shared-libs/python3.7/py/lib/python3.7/site-packages (from covidcast->-r requirements.txt (line 1)) (4.62.3)
Requirement already satisfied: pandas in /shared-libs/python3.7/py/lib/python3.7/site-packages (from covidcast->-r requirements.txt (line 1)) (1.2.5)
Requirement already satisfied: delphi-epidata>=0.0.11 in /root/venv/lib/python3.7/site-packages (from covidcast->-r requirements.txt (line 1)) (0.3.2)
Requirement already satisfied: numpy in /shared-libs/python3.7/py/lib/python3.7/site-packages (from covidcast->-r requirements.txt (line 1)) (1.19.5)
Requirement already satisfied: matplotlib in /shared-libs/python3.7/py/lib/python3.7/site-packages (from covidcast->-r requirements.txt (line 1)) (3.4.3)
Requirement already satisfied: geopandas in /root/venv/lib/python3.7/site-packages (from covidcast->-r requirements.txt (line 1)) (0.10.2)
Requirement already satisfied: descartes in /root/venv/lib/python3.7/site-packages (from covidcast->-r requirements.txt (line 1)) (1.1.0)
Requirement already satisfied: epiweeks in /root/venv/lib/python3.7/site-packages (from covidcast->-r requirements.txt (line 1)) (2.1.3)
Requirement already satisfied: imageio-ffmpeg in /root/venv/lib/python3.7/site-packages (from covidcast->-r requirements.txt (line 1)) (0.4.5)
Requirement already satisfied: imageio in /root/venv/lib/python3.7/site-packages (from covidcast->-r requirements.txt (line 1)) (2.13.1)
Requirement already satisfied: requests in /shared-libs/python3.7/py/lib/python3.7/site-packages (from covidcast->-r requirements.txt (line 1)) (2.26.0)
Requirement already satisfied: pytz>=2017.3 in /shared-libs/python3.7/py/lib/python3.7/site-packages (from pandas->covidcast->-r requirements.txt (line 1)) (2021.3)
Requirement already satisfied: python-dateutil>=2.7.3 in /shared-libs/python3.7/py-core/lib/python3.7/site-packages (from pandas->covidcast->-r requirements.txt (line 1)) (2.8.2)
Requirement already satisfied: tenacity in /shared-libs/python3.7/py/lib/python3.7/site-packages (from delphi-epidata>=0.0.11->covidcast->-r requirements.txt (line 1)) (8.0.1)
Requirement already satisfied: aiohttp in /shared-libs/python3.7/py-core/lib/python3.7/site-packages (from delphi-epidata>=0.0.11->covidcast->-r requirements.txt (line 1)) (3.8.0)
Requirement already satisfied: pyparsing>=2.2.1 in /shared-libs/python3.7/py-core/lib/python3.7/site-packages (from matplotlib->covidcast->-r requirements.txt (line 1)) (2.4.7)
Requirement already satisfied: kiwisolver>=1.0.1 in /shared-libs/python3.7/py/lib/python3.7/site-packages (from matplotlib->covidcast->-r requirements.txt (line 1)) (1.3.2)
Requirement already satisfied: cycler>=0.10 in /shared-libs/python3.7/py/lib/python3.7/site-packages (from matplotlib->covidcast->-r requirements.txt (line 1)) (0.11.0)
Requirement already satisfied: pillow>=6.2.0 in /shared-libs/python3.7/py/lib/python3.7/site-packages (from matplotlib->covidcast->-r requirements.txt (line 1)) (8.4.0)
Requirement already satisfied: pyproj>=2.2.0 in /root/venv/lib/python3.7/site-packages (from geopandas->covidcast->-r requirements.txt (line 1)) (3.2.1)
Requirement already satisfied: shapely>=1.6 in /root/venv/lib/python3.7/site-packages (from geopandas->covidcast->-r requirements.txt (line 1)) (1.8.0)
Requirement already satisfied: fiona>=1.8 in /shared-libs/python3.7/py/lib/python3.7/site-packages (from geopandas->covidcast->-r requirements.txt (line 1)) (1.8.20)
Requirement already satisfied: idna<4,>=2.5; python_version >= "3" in /shared-libs/python3.7/py-core/lib/python3.7/site-packages (from requests->covidcast->-r requirements.txt (line 1)) (3.3)
Requirement already satisfied: certifi>=2017.4.17 in /shared-libs/python3.7/py/lib/python3.7/site-packages (from requests->covidcast->-r requirements.txt (line 1)) (2021.10.8)
Requirement already satisfied: charset-normalizer~=2.0.0; python_version >= "3" in /shared-libs/python3.7/py-core/lib/python3.7/site-packages (from requests->covidcast->-r requirements.txt (line 1)) (2.0.7)
Requirement already satisfied: urllib3<1.27,>=1.21.1 in /shared-libs/python3.7/py/lib/python3.7/site-packages (from requests->covidcast->-r requirements.txt (line 1)) (1.26.7)
Requirement already satisfied: six>=1.5 in /shared-libs/python3.7/py-core/lib/python3.7/site-packages (from python-dateutil>=2.7.3->pandas->covidcast->-r requirements.txt (line 1)) (1.16.0)
Requirement already satisfied: asynctest==0.13.0; python_version < "3.8" in /shared-libs/python3.7/py-core/lib/python3.7/site-packages (from aiohttp->delphi-epidata>=0.0.11->covidcast->-r requirements.txt (line 1)) (0.13.0)
Requirement already satisfied: aiosignal>=1.1.2 in /shared-libs/python3.7/py-core/lib/python3.7/site-packages (from aiohttp->delphi-epidata>=0.0.11->covidcast->-r requirements.txt (line 1)) (1.2.0)
Requirement already satisfied: multidict<7.0,>=4.5 in /shared-libs/python3.7/py-core/lib/python3.7/site-packages (from aiohttp->delphi-epidata>=0.0.11->covidcast->-r requirements.txt (line 1)) (5.2.0)
Requirement already satisfied: typing-extensions>=3.7.4; python_version < "3.8" in /shared-libs/python3.7/py-core/lib/python3.7/site-packages (from aiohttp->delphi-epidata>=0.0.11->covidcast->-r requirements.txt (line 1)) (3.10.0.2)
Requirement already satisfied: attrs>=17.3.0 in /shared-libs/python3.7/py-core/lib/python3.7/site-packages (from aiohttp->delphi-epidata>=0.0.11->covidcast->-r requirements.txt (line 1)) (21.2.0)
Requirement already satisfied: yarl<2.0,>=1.0 in /shared-libs/python3.7/py-core/lib/python3.7/site-packages (from aiohttp->delphi-epidata>=0.0.11->covidcast->-r requirements.txt (line 1)) (1.7.2)
Requirement already satisfied: async-timeout<5.0,>=4.0.0a3 in /shared-libs/python3.7/py-core/lib/python3.7/site-packages (from aiohttp->delphi-epidata>=0.0.11->covidcast->-r requirements.txt (line 1)) (4.0.1)
Requirement already satisfied: frozenlist>=1.1.1 in /shared-libs/python3.7/py-core/lib/python3.7/site-packages (from aiohttp->delphi-epidata>=0.0.11->covidcast->-r requirements.txt (line 1)) (1.2.0)
Requirement already satisfied: setuptools in /root/venv/lib/python3.7/site-packages (from fiona>=1.8->geopandas->covidcast->-r requirements.txt (line 1)) (47.1.0)
Requirement already satisfied: munch in /shared-libs/python3.7/py/lib/python3.7/site-packages (from fiona>=1.8->geopandas->covidcast->-r requirements.txt (line 1)) (2.5.0)
Requirement already satisfied: cligj>=0.5 in /shared-libs/python3.7/py/lib/python3.7/site-packages (from fiona>=1.8->geopandas->covidcast->-r requirements.txt (line 1)) (0.7.2)
Requirement already satisfied: click>=4.0 in /shared-libs/python3.7/py/lib/python3.7/site-packages (from fiona>=1.8->geopandas->covidcast->-r requirements.txt (line 1)) (8.0.3)
Requirement already satisfied: click-plugins>=1.0 in /shared-libs/python3.7/py/lib/python3.7/site-packages (from fiona>=1.8->geopandas->covidcast->-r requirements.txt (line 1)) (1.1.1)
Requirement already satisfied: importlib-metadata; python_version < "3.8" in /shared-libs/python3.7/py-core/lib/python3.7/site-packages (from click>=4.0->fiona>=1.8->geopandas->covidcast->-r requirements.txt (line 1)) (4.8.2)
Requirement already satisfied: zipp>=0.5 in /shared-libs/python3.7/py-core/lib/python3.7/site-packages (from importlib-metadata; python_version < "3.8"->click>=4.0->fiona>=1.8->geopandas->covidcast->-r requirements.txt (line 1)) (3.6.0)
WARNING: You are using pip version 20.1.1; however, version 21.3.1 is available.
You should consider upgrading via the '/usr/local/bin/python -m pip install --upgrade pip' command.
Note: you may need to restart the kernel to use updated packages.
# Start writing code here...
import pandas as pd
import numpy as np
from datetime import date
from datetime import timedelta
import covidcast
import math
from sklearn.model_selection import TimeSeriesSplit
import torch
from torch.utils.data import TensorDataset
from torch import optim
from sklearn.metrics import mean_squared_error
import matplotlib.pyplot as plt
def getCounty(df, county):
    """Return the rows of *df* belonging to one county.

    Parameters
    ----------
    df : pandas.DataFrame
        Must contain a "fips" column (county FIPS code, same dtype as *county*).
    county : str or int
        FIPS code to select.

    Returns
    -------
    pandas.DataFrame
        The subset of rows where df["fips"] == county (possibly empty).
    """
    # NOTE: the original export lost this line's indentation; only formatting
    # is restored here — the selection logic is unchanged.
    return df[df["fips"] == county]
import torch
from torch import nn
# Any Pytorch's network class is an extension of the torch.nn.Module parent class.
# To define a network class, you need to define at least 2 methods: an __init__() method (constructor) and a forward() method
class LSTM(nn.Module):
    """Single-output LSTM regressor: LSTM -> Linear -> ReLU -> Linear.

    The recurrent layer uses ``hidden_dim * 2`` units; a linear layer then
    projects the final hidden state down to ``hidden_dim`` before a scalar
    prediction head.
    """

    def __init__(self, input_dim, hidden_dim, n_layers):
        """
        Parameters
        ----------
        input_dim : int
            Number of features per time step.
        hidden_dim : int
            Base hidden size; the LSTM itself runs with 2x this width.
        n_layers : int
            Number of stacked LSTM layers.
        """
        super().__init__()
        # batch_first=True: inputs are (batch, seq_len, input_dim).
        self.lstm = nn.LSTM(input_size=input_dim,
                            hidden_size=hidden_dim * 2,
                            num_layers=n_layers,
                            batch_first=True)
        self.linear = nn.Linear(hidden_dim * 2, hidden_dim)
        self.relu = nn.ReLU()
        self.predictor = nn.Linear(hidden_dim, 1)
        # NOTE(review): the original also constructed unused nn.Sigmoid and
        # nn.Dropout(0.5) modules that forward() never called; removed here.
        # Neither holds parameters, so model initialization is unchanged.

    def forward(self, input):
        """Run the network.

        Returns the prediction built from the final hidden state; with the
        default n_layers the output shape is (n_layers, batch, 1), so callers
        reshape targets to match (as the training loop in this file does).
        """
        lstm_out, (hidden, cell) = self.lstm(input)
        hidden = self.linear(hidden)
        hidden = self.relu(hidden)
        out = self.predictor(hidden)
        return out
# ---- Hyperparameters and training-data preparation -------------------------
# NOTE(review): the original had a stray bare expression `county_avg_valid_MSEs`
# here, referencing a name that is only bound later — it would raise NameError
# at import time, so it has been removed.
cv = 5            # number of TimeSeriesSplit folds
EPOCHS = 100      # epochs per fold
lrs = [0.001, 0.01, 0.1, 1]   # learning rates to cross-validate over
HIDDEN_DIM = 4

# Everything is read as `object` first, then selectively cast to float below.
train_data = pd.read_csv("training_data.csv", dtype=object, index_col=False)
train_data['time'] = pd.to_datetime(train_data['time'])

# Lagged feature columns plus the target (dailyCases_t).
headers = ['dailyCases_chng_t-1', 'dailyCases_chng_t-2',
           'outpatient_cli_t-1', 'outpatient_cli_t-2', 'hospitalAdm_claim_t-1',
           'hospitalAdm_claim_t-2', 'googleSym_sum_t-1', 'googleSym_sum_t-2',
           'doctorVisits_t-1', 'doctorVisits_t-2', 'dailyCases_t']

# Saved so predictions can be shifted back to the original scale later.
mean_dailyCases = train_data['dailyCases_t'].astype(float).mean()

# Mean-center every feature column (and the target) in place.
for column_name in headers:
    train_data[column_name] = train_data[column_name].astype(float)
    train_data[column_name] = train_data[column_name] - train_data[column_name].mean()

# Target vector (mean-centered daily cases).
y = train_data["dailyCases_t"]
y = np.array(y, dtype=np.float32)

# Feature matrix: drop target and non-feature columns, then reshape to
# (samples, seq_len=1, features) for the batch-first LSTM.
X = train_data.drop(columns=['dailyCases_t', 'time', 'fips'])
X = np.array(X, dtype=np.float32)
X = X.reshape(X.shape[0], 1, X.shape[1])
# ---- Cross-validate the LSTM over the candidate learning rates -------------
# For each lr: run a 5-fold time-series CV, train a fresh model per fold, and
# record the mean validation error across folds.
county_avg_valid_MSEs = []
for lr in lrs:
    print("\n*********** CURRENT LEARNING RATE :", lr, "***********")
    i = 0
    tscv = TimeSeriesSplit(n_splits=cv)
    avg_valid_mse = 0.0
    for train_index, valid_index in tscv.split(X):
        X_train, X_valid = X[train_index], X[valid_index]
        y_train, y_valid = y[train_index], y[valid_index]
        training_set = TensorDataset(torch.from_numpy(X_train),
                                     torch.from_numpy(y_train))
        # shuffle=False keeps temporal order within each fold.
        training_dataloader = torch.utils.data.DataLoader(training_set, batch_size=16, shuffle=False)
        valid_set = TensorDataset(torch.from_numpy(X_valid),
                                  torch.from_numpy(y_valid))
        validation_dataloader = torch.utils.data.DataLoader(valid_set, batch_size=16, shuffle=False)
        print("SPLIT", i + 1, ":")
        LEARNING_RATE = lr
        # Fresh model and optimizer per fold so folds don't leak state.
        model = LSTM(input_dim=10, hidden_dim=HIDDEN_DIM, n_layers=1)
        optimizer = torch.optim.SGD(model.parameters(), lr=LEARNING_RATE)
        loss_fn = nn.MSELoss()
        model.train()
        for epoch in range(EPOCHS):
            epoch_loss = 0.0
            train_prediction = []
            train_ground_truth = []
            for inputs, labels in training_dataloader:
                optimizer.zero_grad()
                output = model(inputs)
                # Optimize RMSE per batch; labels reshaped to match the
                # (n_layers, batch, 1) output of LSTM.forward.
                loss = torch.sqrt(loss_fn(output, torch.reshape(labels, output.shape)))
                loss.backward()
                optimizer.step()
                epoch_loss += loss.item()
                train_prediction = np.concatenate((train_prediction, torch.flatten(output).detach().numpy()))
                train_ground_truth = np.concatenate((train_ground_truth, labels.detach().numpy()))
            # Despite the variable name, this is an RMSE (sqrt of MSE).
            train_mse = np.sqrt(mean_squared_error(train_prediction, train_ground_truth))
            print("Epoch", epoch, ", Loss:", epoch_loss, ", Train RMSE:", train_mse)
        # Evaluate this fold on its held-out tail.
        model.eval()
        valid_prediction = []
        valid_ground_truth = []
        with torch.set_grad_enabled(False):
            for inputs, labels in validation_dataloader:
                output = model(inputs)
                valid_prediction = np.concatenate((valid_prediction, torch.flatten(output).detach().numpy()))
                valid_ground_truth = np.concatenate((valid_ground_truth, labels.detach().numpy()))
        # NOTE(review): also an RMSE; the printed label "Validation MSE" is
        # kept as-is to preserve the program's output byte-for-byte.
        valid_mse = np.sqrt(mean_squared_error(valid_prediction, valid_ground_truth))
        avg_valid_mse += valid_mse
        print("Validation MSE: ", valid_mse)
        plt.plot(np.arange(len(valid_prediction)), valid_prediction, label="prediction")
        plt.plot(np.arange(len(valid_ground_truth)), valid_ground_truth, label="ground truth")
        plt.ylabel("Daily Cases")
        plt.title("Cross Validation Result")
        plt.legend()
        plt.show()
        i += 1
        # (A commented-out test-set evaluation block that ran on the last
        # fold was removed here; it was dead code.)
    # Average validation error for this learning rate across the cv folds.
    avg_valid_mse /= cv
    county_avg_valid_MSEs.append(avg_valid_mse)
    print("lr:", lr)
    print("Average validation MSE:", avg_valid_mse)
    print()
*********** CURRENT LEARNING RATE : 0.001 ***********
SPLIT 1 :
Epoch 0 , Loss: 22796.41227722168 , Train RMSE: 436.3850854403946
Epoch 1 , Loss: 22794.334579467773 , Train RMSE: 436.3548383315615
Epoch 2 , Loss: 22792.295989990234 , Train RMSE: 436.32525009791743
Epoch 3 , Loss: 22790.27507019043 , Train RMSE: 436.2959523705273
Epoch 4 , Loss: 22788.2557220459 , Train RMSE: 436.2667027789174
Epoch 5 , Loss: 22786.22785949707 , Train RMSE: 436.2373541042305
Epoch 6 , Loss: 22784.187957763672 , Train RMSE: 436.20785873694695
Epoch 7 , Loss: 22782.134826660156 , Train RMSE: 436.1781764768053
Epoch 8 , Loss: 22780.066833496094 , Train RMSE: 436.14829929240756
Epoch 9 , Loss: 22777.9786529541 , Train RMSE: 436.1181437232085
Epoch 10 , Loss: 22775.874588012695 , Train RMSE: 436.0877502393756
Epoch 11 , Loss: 22773.752685546875 , Train RMSE: 436.05709171092775
Epoch 12 , Loss: 22771.60905456543 , Train RMSE: 436.0261228326133
Epoch 13 , Loss: 22769.440887451172 , Train RMSE: 435.9948021022412
Epoch 14 , Loss: 22767.246475219727 , Train RMSE: 435.96310053806377
Epoch 15 , Loss: 22765.022583007812 , Train RMSE: 435.93096954332276
Epoch 16 , Loss: 22762.764862060547 , Train RMSE: 435.8983588720767
Epoch 17 , Loss: 22760.470489501953 , Train RMSE: 435.8652212605451
Epoch 18 , Loss: 22758.13540649414 , Train RMSE: 435.83149601399344
Epoch 19 , Loss: 22755.755737304688 , Train RMSE: 435.7971234486429
Epoch 20 , Loss: 22753.326126098633 , Train RMSE: 435.76204043026837
Epoch 21 , Loss: 22750.842254638672 , Train RMSE: 435.7261736542678
Epoch 22 , Loss: 22748.298400878906 , Train RMSE: 435.6894460212213
Epoch 23 , Loss: 22745.68878173828 , Train RMSE: 435.6517745013152
Epoch 24 , Loss: 22743.007110595703 , Train RMSE: 435.6130707837448
Epoch 25 , Loss: 22740.2470703125 , Train RMSE: 435.57323931124074
Epoch 26 , Loss: 22737.40106201172 , Train RMSE: 435.53217266394773
Epoch 27 , Loss: 22734.460510253906 , Train RMSE: 435.4897548169898
Epoch 28 , Loss: 22731.416885375977 , Train RMSE: 435.4458579908143
Epoch 29 , Loss: 22728.260208129883 , Train RMSE: 435.4003459138222
Epoch 30 , Loss: 22724.98013305664 , Train RMSE: 435.3530691526919
Epoch 31 , Loss: 22721.565475463867 , Train RMSE: 435.30386376868955
Epoch 32 , Loss: 22718.00326538086 , Train RMSE: 435.25254750161105
Epoch 33 , Loss: 22714.279663085938 , Train RMSE: 435.1989207286292
Epoch 34 , Loss: 22710.379516601562 , Train RMSE: 435.1427780103157
Epoch 35 , Loss: 22706.28646850586 , Train RMSE: 435.0838845112961
Epoch 36 , Loss: 22701.98304748535 , Train RMSE: 435.0219835807146
Epoch 37 , Loss: 22697.449768066406 , Train RMSE: 434.95679788165296
Epoch 38 , Loss: 22692.663116455078 , Train RMSE: 434.88799976181986
Epoch 39 , Loss: 22687.600662231445 , Train RMSE: 434.81526641234075
Epoch 40 , Loss: 22682.2364654541 , Train RMSE: 434.7382240250083
Epoch 41 , Loss: 22676.54051208496 , Train RMSE: 434.6564464166778
Epoch 42 , Loss: 22670.4806060791 , Train RMSE: 434.5694696003321
Epoch 43 , Loss: 22664.021270751953 , Train RMSE: 434.476779249891
Epoch 44 , Loss: 22657.122756958008 , Train RMSE: 434.3777934201398
Epoch 45 , Loss: 22649.74169921875 , Train RMSE: 434.27188644277373
Epoch 46 , Loss: 22641.827728271484 , Train RMSE: 434.1583229578812
Epoch 47 , Loss: 22633.325485229492 , Train RMSE: 434.0362733318162
Epoch 48 , Loss: 22624.17189025879 , Train RMSE: 433.9048131541747
Epoch 49 , Loss: 22614.29591369629 , Train RMSE: 433.76288953926854
Epoch 50 , Loss: 22603.617935180664 , Train RMSE: 433.60931666103215
Epoch 51 , Loss: 22592.048141479492 , Train RMSE: 433.4427554925553
Epoch 52 , Loss: 22579.48406982422 , Train RMSE: 433.2617066778555
Epoch 53 , Loss: 22565.813262939453 , Train RMSE: 433.0644812690233
Epoch 54 , Loss: 22550.913055419922 , Train RMSE: 432.8492585791314
Epoch 55 , Loss: 22534.649642944336 , Train RMSE: 432.6140694083321
Epoch 56 , Loss: 22516.8709564209 , Train RMSE: 432.3566790577466
Epoch 57 , Loss: 22497.406143188477 , Train RMSE: 432.0745671201899
Epoch 58 , Loss: 22476.062530517578 , Train RMSE: 431.7649016699002
Epoch 59 , Loss: 22452.622802734375 , Train RMSE: 431.4244911125071
Epoch 60 , Loss: 22426.84391784668 , Train RMSE: 431.0497591479313
Epoch 61 , Loss: 22398.457244873047 , Train RMSE: 430.6367652240897
Epoch 62 , Loss: 22367.167602539062 , Train RMSE: 430.1811623526758
Epoch 63 , Loss: 22332.644241333008 , Train RMSE: 429.6780495321737
Epoch 64 , Loss: 22294.521591186523 , Train RMSE: 429.1220538619404
Epoch 65 , Loss: 22252.365615844727 , Train RMSE: 428.50665660751315
Epoch 66 , Loss: 22205.65769958496 , Train RMSE: 427.82389137660675
Epoch 67 , Loss: 22153.755142211914 , Train RMSE: 427.06364918018716
Epoch 68 , Loss: 22095.843688964844 , Train RMSE: 426.2127904252229
Epoch 69 , Loss: 22030.863891601562 , Train RMSE: 425.2538763571649
Epoch 70 , Loss: 21957.567443847656 , Train RMSE: 424.17119674327415
Epoch 71 , Loss: 21874.492385864258 , Train RMSE: 422.94700421205215
Epoch 72 , Loss: 21780.148406982422 , Train RMSE: 421.557521356485
Epoch 73 , Loss: 21673.473052978516 , Train RMSE: 419.99273639779176
Epoch 74 , Loss: 21553.085510253906 , Train RMSE: 418.2361361045707
Epoch 75 , Loss: 21416.851791381836 , Train RMSE: 416.25964159691597
Epoch 76 , Loss: 21261.827880859375 , Train RMSE: 414.02398270629055
Epoch 77 , Loss: 21084.54278564453 , Train RMSE: 411.4742016451341
Epoch 78 , Loss: 20881.748413085938 , Train RMSE: 408.5677994580381
Epoch 79 , Loss: 20650.17318725586 , Train RMSE: 405.2728135300582
Epoch 80 , Loss: 20385.27695465088 , Train RMSE: 401.5442761262026
Epoch 81 , Loss: 20079.8367767334 , Train RMSE: 397.28786878620974
Epoch 82 , Loss: 19723.976676940918 , Train RMSE: 392.3674997104283
Epoch 83 , Loss: 19306.452224731445 , Train RMSE: 386.68264355339994
Epoch 84 , Loss: 18813.888999938965 , Train RMSE: 380.12547077588704
Epoch 85 , Loss: 18237.6459274292 , Train RMSE: 372.6230762956668
Epoch 86 , Loss: 17576.117805480957 , Train RMSE: 364.2431268741891
Epoch 87 , Loss: 16829.265670776367 , Train RMSE: 355.14865734742216
Epoch 88 , Loss: 15998.738258361816 , Train RMSE: 345.5401983298639
Epoch 89 , Loss: 15087.707633972168 , Train RMSE: 335.6778027047966
Epoch 90 , Loss: 14104.078704833984 , Train RMSE: 325.9113845232822
Epoch 91 , Loss: 13072.54672241211 , Train RMSE: 317.01808517403845
Epoch 92 , Loss: 12047.184211730957 , Train RMSE: 309.8605199049391
Epoch 93 , Loss: 11098.994106292725 , Train RMSE: 305.1187828215289
Epoch 94 , Loss: 10308.932712554932 , Train RMSE: 303.0769153429877
Epoch 95 , Loss: 9806.55578994751 , Train RMSE: 303.21146544094705
Epoch 96 , Loss: 9645.734302520752 , Train RMSE: 303.58718929314455
Epoch 97 , Loss: 9632.36516571045 , Train RMSE: 303.69560667793786
Epoch 98 , Loss: 9607.597551345825 , Train RMSE: 303.5541388631407
Epoch 99 , Loss: 9593.283862113953 , Train RMSE: 303.34618847416306
Validation MSE: 3148.666230203476
SPLIT 2 :
Epoch 0 , Loss: 100946.00406646729 , Train RMSE: 2217.814087476548
Epoch 1 , Loss: 100941.93753814697 , Train RMSE: 2217.8197653978355
Epoch 2 , Loss: 100938.018409729 , Train RMSE: 2217.825678400472
Epoch 3 , Loss: 100934.13819885254 , Train RMSE: 2217.8316487703696
Epoch 4 , Loss: 100930.27665710449 , Train RMSE: 2217.8376489586126
Epoch 5 , Loss: 100926.42720794678 , Train RMSE: 2217.8436675976395
Epoch 6 , Loss: 100922.58046722412 , Train RMSE: 2217.849699234268
Epoch 7 , Loss: 100918.73414611816 , Train RMSE: 2217.85573840119
Epoch 8 , Loss: 100914.89111328125 , Train RMSE: 2217.8617861593953
Epoch 9 , Loss: 100911.05081939697 , Train RMSE: 2217.86784192286
Epoch 10 , Loss: 100907.21034240723 , Train RMSE: 2217.8739003477203
Epoch 11 , Loss: 100903.37370300293 , Train RMSE: 2217.8799637246602
Epoch 12 , Loss: 100899.534034729 , Train RMSE: 2217.886027421456
Epoch 13 , Loss: 100895.6950302124 , Train RMSE: 2217.8920897941207
Epoch 14 , Loss: 100891.85866546631 , Train RMSE: 2217.8981508275065
Epoch 15 , Loss: 100888.02332305908 , Train RMSE: 2217.9042172304466
Epoch 16 , Loss: 100884.18297576904 , Train RMSE: 2217.9102877417636
Epoch 17 , Loss: 100880.35037231445 , Train RMSE: 2217.9163637817783
Epoch 18 , Loss: 100876.51676177979 , Train RMSE: 2217.922441619177
Epoch 19 , Loss: 100872.68405914307 , Train RMSE: 2217.928522239893
Epoch 20 , Loss: 100868.85127258301 , Train RMSE: 2217.934608238731
Epoch 21 , Loss: 100865.01850128174 , Train RMSE: 2217.940697980482
Epoch 22 , Loss: 100861.18809509277 , Train RMSE: 2217.9467925141803
Epoch 23 , Loss: 100857.35716247559 , Train RMSE: 2217.952888806965
Epoch 24 , Loss: 100853.52938842773 , Train RMSE: 2217.9589922288333
Epoch 25 , Loss: 100849.69888305664 , Train RMSE: 2217.9650988125336
Epoch 26 , Loss: 100845.87044525146 , Train RMSE: 2217.971209803709
Epoch 27 , Loss: 100842.04315948486 , Train RMSE: 2217.977326517295
Epoch 28 , Loss: 100838.21920776367 , Train RMSE: 2217.983445501035
Epoch 29 , Loss: 100834.39311981201 , Train RMSE: 2217.989567500712
Epoch 30 , Loss: 100830.56883239746 , Train RMSE: 2217.9956950588507
Epoch 31 , Loss: 100826.74443054199 , Train RMSE: 2218.0018244315784
Epoch 32 , Loss: 100822.92106628418 , Train RMSE: 2218.007957662072
Epoch 33 , Loss: 100819.10005950928 , Train RMSE: 2218.014093052398
Epoch 34 , Loss: 100815.27700805664 , Train RMSE: 2218.0202264321892
Epoch 35 , Loss: 100811.45584869385 , Train RMSE: 2218.026362441891
Epoch 36 , Loss: 100807.63710021973 , Train RMSE: 2218.032503394386
Epoch 37 , Loss: 100803.81693267822 , Train RMSE: 2218.038653545855
Epoch 38 , Loss: 100799.99768829346 , Train RMSE: 2218.044810645991
Epoch 39 , Loss: 100796.17963409424 , Train RMSE: 2218.050976892478
Epoch 40 , Loss: 100792.36280059814 , Train RMSE: 2218.057148659068
Epoch 41 , Loss: 100788.5479888916 , Train RMSE: 2218.063332641929
Epoch 42 , Loss: 100784.73257446289 , Train RMSE: 2218.0695319939505
Epoch 43 , Loss: 100780.91900634766 , Train RMSE: 2218.0757449678285
Epoch 44 , Loss: 100777.10600280762 , Train RMSE: 2218.081962702432
Epoch 45 , Loss: 100773.29529571533 , Train RMSE: 2218.0881876130275
Epoch 46 , Loss: 100769.48460388184 , Train RMSE: 2218.09442121313
Epoch 47 , Loss: 100765.67455291748 , Train RMSE: 2218.1006609024676
Epoch 48 , Loss: 100761.86585998535 , Train RMSE: 2218.106905329589
Epoch 49 , Loss: 100758.0567855835 , Train RMSE: 2218.11315278865
Epoch 50 , Loss: 100754.24844360352 , Train RMSE: 2218.1194057509183
Epoch 51 , Loss: 100750.4439086914 , Train RMSE: 2218.1256634388633
Epoch 52 , Loss: 100746.63620758057 , Train RMSE: 2218.1319232614705
Epoch 53 , Loss: 100742.83074951172 , Train RMSE: 2218.138183690085
Epoch 54 , Loss: 100739.02727508545 , Train RMSE: 2218.1444476486768
Epoch 55 , Loss: 100735.2227859497 , Train RMSE: 2218.1507143299864
Epoch 56 , Loss: 100731.4174118042 , Train RMSE: 2218.156983909012
Epoch 57 , Loss: 100727.61506652832 , Train RMSE: 2218.163255228207
Epoch 58 , Loss: 100723.8134841919 , Train RMSE: 2218.169527916853
Epoch 59 , Loss: 100720.01207733154 , Train RMSE: 2218.1758023683847
Epoch 60 , Loss: 100716.21277618408 , Train RMSE: 2218.1820780887565
Epoch 61 , Loss: 100712.4132232666 , Train RMSE: 2218.1883558420473
Epoch 62 , Loss: 100708.61162567139 , Train RMSE: 2218.1946352363643
Epoch 63 , Loss: 100704.81278991699 , Train RMSE: 2218.2009161561773
Epoch 64 , Loss: 100701.0150604248 , Train RMSE: 2218.207199608205
Epoch 65 , Loss: 100697.21739959717 , Train RMSE: 2218.2134838724887
Epoch 66 , Loss: 100693.42176818848 , Train RMSE: 2218.219769434111
Epoch 67 , Loss: 100689.62588500977 , Train RMSE: 2218.2260563317172
Epoch 68 , Loss: 100685.83001708984 , Train RMSE: 2218.2323444189988
Epoch 69 , Loss: 100682.03656768799 , Train RMSE: 2218.238634030877
Epoch 70 , Loss: 100678.24195098877 , Train RMSE: 2218.2449251762782
Epoch 71 , Loss: 100674.4492263794 , Train RMSE: 2218.25121701245
Epoch 72 , Loss: 100670.65464019775 , Train RMSE: 2218.2575102278
Epoch 73 , Loss: 100666.86423492432 , Train RMSE: 2218.26380432589
Epoch 74 , Loss: 100663.07192230225 , Train RMSE: 2218.2700996959343
Epoch 75 , Loss: 100659.28117370605 , Train RMSE: 2218.2763965814115
Epoch 76 , Loss: 100655.49017333984 , Train RMSE: 2218.2826947353806
Epoch 77 , Loss: 100651.70013427734 , Train RMSE: 2218.288993932999
Epoch 78 , Loss: 100647.91415405273 , Train RMSE: 2218.2952946191626
Epoch 79 , Loss: 100644.12338256836 , Train RMSE: 2218.3015961527967
Epoch 80 , Loss: 100640.33749389648 , Train RMSE: 2218.307898574018
Epoch 81 , Loss: 100636.55026245117 , Train RMSE: 2218.3142026245064
Epoch 82 , Loss: 100632.76430511475 , Train RMSE: 2218.320507962476
Epoch 83 , Loss: 100628.97807312012 , Train RMSE: 2218.3268146160017
Epoch 84 , Loss: 100625.19527435303 , Train RMSE: 2218.333122709493
Epoch 85 , Loss: 100621.4105758667 , Train RMSE: 2218.3394317010952
Epoch 86 , Loss: 100617.62642669678 , Train RMSE: 2218.3457418052494
Epoch 87 , Loss: 100613.84399414062 , Train RMSE: 2218.352053226163
Epoch 88 , Loss: 100610.06308746338 , Train RMSE: 2218.358365981836
Epoch 89 , Loss: 100606.28094482422 , Train RMSE: 2218.3646795582254
Epoch 90 , Loss: 100602.50166320801 , Train RMSE: 2218.3709944145003
Epoch 91 , Loss: 100598.71911621094 , Train RMSE: 2218.377310997089
Epoch 92 , Loss: 100594.94119262695 , Train RMSE: 2218.3836285414154
Epoch 93 , Loss: 100591.163230896 , Train RMSE: 2218.3899472762982
Epoch 94 , Loss: 100587.38506317139 , Train RMSE: 2218.3962673242595
Epoch 95 , Loss: 100583.60804748535 , Train RMSE: 2218.402588586664
Epoch 96 , Loss: 100579.82902526855 , Train RMSE: 2218.4089108404205
Epoch 97 , Loss: 100576.05358886719 , Train RMSE: 2218.4152342251905
Epoch 98 , Loss: 100572.2769165039 , Train RMSE: 2218.421558606474
Epoch 99 , Loss: 100568.50367736816 , Train RMSE: 2218.4278841053037
Validation MSE: 1041.920736931548
SPLIT 3 :
Epoch 0 , Loss: 139387.1459197998 , Train RMSE: 1908.3075947139123
Epoch 1 , Loss: 139378.23355865479 , Train RMSE: 1908.3159890212694
Epoch 2 , Loss: 139369.96193695068 , Train RMSE: 1908.324227572443
Epoch 3 , Loss: 139362.6386795044 , Train RMSE: 1908.3323957342227
Epoch 4 , Loss: 139355.50775909424 , Train RMSE: 1908.3403790737345
Epoch 5 , Loss: 139348.4270477295 , Train RMSE: 1908.3482764781215
Epoch 6 , Loss: 139341.35047149658 , Train RMSE: 1908.3560411215897
Epoch 7 , Loss: 139334.25902557373 , Train RMSE: 1908.36366518398
Epoch 8 , Loss: 139327.13027191162 , Train RMSE: 1908.371175373862
Epoch 9 , Loss: 139319.94234466553 , Train RMSE: 1908.3785321052562
Epoch 10 , Loss: 139312.6816253662 , Train RMSE: 1908.3857866843375
Epoch 11 , Loss: 139305.31845855713 , Train RMSE: 1908.3929244510782
Epoch 12 , Loss: 139297.82556152344 , Train RMSE: 1908.3999264307522
Epoch 13 , Loss: 139290.1674118042 , Train RMSE: 1908.406776437352
Epoch 14 , Loss: 139282.30710601807 , Train RMSE: 1908.4134419892173
Epoch 15 , Loss: 139274.19721984863 , Train RMSE: 1908.4200592479658
Epoch 16 , Loss: 139265.78148651123 , Train RMSE: 1908.4266455469237
Epoch 17 , Loss: 139256.98252868652 , Train RMSE: 1908.4331662683082
Epoch 18 , Loss: 139247.68952941895 , Train RMSE: 1908.4395947028108
Epoch 19 , Loss: 139237.7704925537 , Train RMSE: 1908.445893898625
Epoch 20 , Loss: 139227.05823516846 , Train RMSE: 1908.4520337531196
Epoch 21 , Loss: 139215.303855896 , Train RMSE: 1908.457974629024
Epoch 22 , Loss: 139202.1747894287 , Train RMSE: 1908.463670362057
Epoch 23 , Loss: 139187.2149734497 , Train RMSE: 1908.4690352172154
Epoch 24 , Loss: 139169.90966796875 , Train RMSE: 1908.4739667297176
Epoch 25 , Loss: 139149.57637786865 , Train RMSE: 1908.4783854214209
Epoch 26 , Loss: 139125.28646087646 , Train RMSE: 1908.4819552886615
Epoch 27 , Loss: 139095.66682434082 , Train RMSE: 1908.4841367477563
Epoch 28 , Loss: 139058.76165771484 , Train RMSE: 1908.4841338793133
Epoch 29 , Loss: 139011.80798339844 , Train RMSE: 1908.4805732657799
Epoch 30 , Loss: 138950.9083328247 , Train RMSE: 1908.4714295621197
Epoch 31 , Loss: 138870.58100891113 , Train RMSE: 1908.4538063651262
Epoch 32 , Loss: 138763.19649505615 , Train RMSE: 1908.4235587434528
Epoch 33 , Loss: 138618.1540298462 , Train RMSE: 1908.3749404599857
Epoch 34 , Loss: 138420.74890899658 , Train RMSE: 1908.2999386720535
Epoch 35 , Loss: 138150.42740631104 , Train RMSE: 1908.1871600417128
Epoch 36 , Loss: 137777.57973480225 , Train RMSE: 1908.0196185272353
Epoch 37 , Loss: 137263.22706604004 , Train RMSE: 1907.7906612978022
Epoch 38 , Loss: 136547.68334960938 , Train RMSE: 1907.4958588511824
Epoch 39 , Loss: 135525.5799560547 , Train RMSE: 1907.2631456685422
Epoch 40 , Loss: 134015.0623626709 , Train RMSE: 1906.7959625004119
Epoch 41 , Loss: 131912.3191833496 , Train RMSE: 1906.1402135785336
Epoch 42 , Loss: 129084.94177246094 , Train RMSE: 1905.506253826872
Epoch 43 , Loss: 125369.77531433105 , Train RMSE: 1905.1442192054556
Epoch 44 , Loss: 120989.87842941284 , Train RMSE: 1905.6254044420984
Epoch 45 , Loss: 116892.59642791748 , Train RMSE: 1907.3960500182848
Epoch 46 , Loss: 114149.64581489563 , Train RMSE: 1909.7084558917122
Epoch 47 , Loss: 112856.5626449585 , Train RMSE: 1910.8656427042397
Epoch 48 , Loss: 112716.48776435852 , Train RMSE: 1910.1330192506302
Epoch 49 , Loss: 112562.49839687347 , Train RMSE: 1909.5156273223251
Epoch 50 , Loss: 111643.1778717041 , Train RMSE: 1909.028504591843
Epoch 51 , Loss: 111846.32903146744 , Train RMSE: 1909.2394613612853
Epoch 52 , Loss: 111035.83574390411 , Train RMSE: 1905.6845030355314
Epoch 53 , Loss: 110502.75431442261 , Train RMSE: 1903.2353931177406
Epoch 54 , Loss: 110249.67029190063 , Train RMSE: 1901.8791242090083
Epoch 55 , Loss: 110209.69612121582 , Train RMSE: 1899.8871328072366
Epoch 56 , Loss: 111010.67318725586 , Train RMSE: 1901.0812178433807
Epoch 57 , Loss: 110452.19287824631 , Train RMSE: 1900.3612314072257
Epoch 58 , Loss: 109986.15550804138 , Train RMSE: 1901.312765787445
Epoch 59 , Loss: 109415.24810314178 , Train RMSE: 1899.6635423486962
Epoch 60 , Loss: 109002.70430850983 , Train RMSE: 1896.8009607900722
Epoch 61 , Loss: 109248.13582992554 , Train RMSE: 1894.2856689755142
Epoch 62 , Loss: 109033.25793790817 , Train RMSE: 1892.5397202031995
Epoch 63 , Loss: 108756.08491134644 , Train RMSE: 1892.3579905441609
Epoch 64 , Loss: 108851.46716403961 , Train RMSE: 1892.5439847924436
Epoch 65 , Loss: 108568.6691942215 , Train RMSE: 1891.7000468938975
Epoch 66 , Loss: 108445.26519012451 , Train RMSE: 1891.9490002179343
Epoch 67 , Loss: 108337.1490240097 , Train RMSE: 1891.2860758981262
Epoch 68 , Loss: 108287.23099803925 , Train RMSE: 1891.1829254621623
Epoch 69 , Loss: 108181.49100017548 , Train RMSE: 1890.9901778672686
Epoch 70 , Loss: 108248.66276741028 , Train RMSE: 1891.0241440414486
Epoch 71 , Loss: 108905.18262767792 , Train RMSE: 1891.1506503173753
Epoch 72 , Loss: 109147.51825714111 , Train RMSE: 1893.6082285026334
Epoch 73 , Loss: 108406.38805961609 , Train RMSE: 1891.3668935650792
Epoch 74 , Loss: 108225.15711021423 , Train RMSE: 1890.9381503163013
Epoch 75 , Loss: 108148.3670964241 , Train RMSE: 1890.5912652480079
Epoch 76 , Loss: 108020.68972921371 , Train RMSE: 1890.3229775766044
Epoch 77 , Loss: 107961.41882419586 , Train RMSE: 1890.5490464404256
Epoch 78 , Loss: 107915.5451130867 , Train RMSE: 1889.933155396529
Epoch 79 , Loss: 107936.17499256134 , Train RMSE: 1890.1615097877507
Epoch 80 , Loss: 107903.18646287918 , Train RMSE: 1890.0365097787776
Epoch 81 , Loss: 107924.87171173096 , Train RMSE: 1889.9420877136497
Epoch 82 , Loss: 107823.73799276352 , Train RMSE: 1889.658052497388
Epoch 83 , Loss: 107843.92726755142 , Train RMSE: 1889.7711245968194
Epoch 84 , Loss: 107845.55022478104 , Train RMSE: 1889.743113565029
Epoch 85 , Loss: 107799.0309369564 , Train RMSE: 1889.672072810371
Epoch 86 , Loss: 107857.31848311424 , Train RMSE: 1889.5823626368547
Epoch 87 , Loss: 107786.25341272354 , Train RMSE: 1889.696659011734
Epoch 88 , Loss: 107729.65000104904 , Train RMSE: 1889.5133469172126
Epoch 89 , Loss: 107751.2125582695 , Train RMSE: 1889.3930826243623
Epoch 90 , Loss: 107783.20434141159 , Train RMSE: 1889.547192676103
Epoch 91 , Loss: 107826.15364611149 , Train RMSE: 1889.2148929367886
Epoch 92 , Loss: 107697.27901434898 , Train RMSE: 1889.3392041942707
Epoch 93 , Loss: 107675.05185943842 , Train RMSE: 1889.3338244606857
Epoch 94 , Loss: 107717.09985041618 , Train RMSE: 1889.291714789716
Epoch 95 , Loss: 107723.7155098319 , Train RMSE: 1889.0690797030752
Epoch 96 , Loss: 107636.46718025208 , Train RMSE: 1889.1590985864123
Epoch 97 , Loss: 107716.49869680405 , Train RMSE: 1889.0843389979154
Epoch 98 , Loss: 107684.37836098671 , Train RMSE: 1889.0098014769706
Epoch 99 , Loss: 107684.29921448231 , Train RMSE: 1889.0182663491087
Validation MSE: 990.1610693795592
SPLIT 4 :
Epoch 0 , Loss: 177703.64317321777 , Train RMSE: 1727.8661673204454
Epoch 1 , Loss: 177682.6796951294 , Train RMSE: 1727.8749588355256
Epoch 2 , Loss: 177662.68496704102 , Train RMSE: 1727.8838252717114
Epoch 3 , Loss: 177642.47187042236 , Train RMSE: 1727.8923135045172
Epoch 4 , Loss: 177621.11289215088 , Train RMSE: 1727.900697500432
Epoch 5 , Loss: 177597.75469207764 , Train RMSE: 1727.9097017790389
Epoch 6 , Loss: 177571.1919631958 , Train RMSE: 1727.9195458127267
Epoch 7 , Loss: 177539.29663848877 , Train RMSE: 1727.9291221846756
Epoch 8 , Loss: 177498.60857391357 , Train RMSE: 1727.9371121951292
Epoch 9 , Loss: 177443.72602844238 , Train RMSE: 1727.9413225767303
Epoch 10 , Loss: 177366.47006225586 , Train RMSE: 1727.9380894485732
Epoch 11 , Loss: 177253.11638641357 , Train RMSE: 1727.9211336294693
Epoch 12 , Loss: 177079.3825531006 , Train RMSE: 1727.8781402095362
Epoch 13 , Loss: 176803.73922729492 , Train RMSE: 1727.7874154041308
Epoch 14 , Loss: 176358.6184539795 , Train RMSE: 1727.615267550685
Epoch 15 , Loss: 175632.55196380615 , Train RMSE: 1727.3091891480815
Epoch 16 , Loss: 174433.8791885376 , Train RMSE: 1726.7839167859872
Epoch 17 , Loss: 172441.01399230957 , Train RMSE: 1725.9210658814097
Epoch 18 , Loss: 169204.15478515625 , Train RMSE: 1724.6606373301292
Epoch 19 , Loss: 164260.77169036865 , Train RMSE: 1723.2346863892442
Epoch 20 , Loss: 157655.41847610474 , Train RMSE: 1722.5477017041273
Epoch 21 , Loss: 151168.2907485962 , Train RMSE: 1723.8689673827344
Epoch 22 , Loss: 147233.23332595825 , Train RMSE: 1726.192291704878
Epoch 23 , Loss: 145851.80979919434 , Train RMSE: 1726.8733341400184
Epoch 24 , Loss: 145055.49792671204 , Train RMSE: 1726.0201418932013
Epoch 25 , Loss: 144458.6329984665 , Train RMSE: 1726.2654464379025
Epoch 26 , Loss: 144500.5804977417 , Train RMSE: 1726.2478371110842
Epoch 27 , Loss: 143929.61714363098 , Train RMSE: 1726.3322514422766
Epoch 28 , Loss: 143435.4101486206 , Train RMSE: 1725.6906672220753
Epoch 29 , Loss: 142951.89756774902 , Train RMSE: 1724.2688360864136
Epoch 30 , Loss: 142578.53325033188 , Train RMSE: 1719.5496897313149
Epoch 31 , Loss: 142058.17116355896 , Train RMSE: 1713.5228668480406
Epoch 32 , Loss: 141224.68942070007 , Train RMSE: 1711.5772208431406
Epoch 33 , Loss: 141090.91549873352 , Train RMSE: 1710.4996278768865
Epoch 34 , Loss: 140746.02486419678 , Train RMSE: 1710.5162433448065
Epoch 35 , Loss: 140434.18155002594 , Train RMSE: 1710.160406702865
Epoch 36 , Loss: 139915.9701013565 , Train RMSE: 1709.5382788250029
Epoch 37 , Loss: 140120.062582016 , Train RMSE: 1709.7188219640968
Epoch 38 , Loss: 139818.29942703247 , Train RMSE: 1709.4371085887353
Epoch 39 , Loss: 140085.99992465973 , Train RMSE: 1709.7468266696558
Epoch 40 , Loss: 139999.54315567017 , Train RMSE: 1709.59576954595
Epoch 41 , Loss: 139929.37838363647 , Train RMSE: 1709.530625195793
Epoch 42 , Loss: 139950.9524755478 , Train RMSE: 1709.5701090720656
Epoch 43 , Loss: 139890.813331604 , Train RMSE: 1709.5360103614769
Epoch 44 , Loss: 139943.5057516098 , Train RMSE: 1709.9453875991549
Epoch 45 , Loss: 139848.0223646164 , Train RMSE: 1709.522701849949
Epoch 46 , Loss: 139861.9233341217 , Train RMSE: 1709.587407061076
Epoch 47 , Loss: 139874.56549263 , Train RMSE: 1709.5534062078495
Epoch 48 , Loss: 139741.5787343979 , Train RMSE: 1709.3285452514954
Epoch 49 , Loss: 139655.3586359024 , Train RMSE: 1709.2506146140904
Epoch 50 , Loss: 140191.1314687729 , Train RMSE: 1709.6278878193602
Epoch 51 , Loss: 139959.02155590057 , Train RMSE: 1709.5585012426857
Epoch 52 , Loss: 139775.44981575012 , Train RMSE: 1709.5306817927374
Epoch 53 , Loss: 139592.6457901001 , Train RMSE: 1709.3856515499244
Epoch 54 , Loss: 139781.21109485626 , Train RMSE: 1709.4808752059844
Epoch 55 , Loss: 139699.88434123993 , Train RMSE: 1709.433685436376
Epoch 56 , Loss: 139638.45738601685 , Train RMSE: 1709.4408590191622
Epoch 57 , Loss: 139539.0966615677 , Train RMSE: 1709.3937342431977
Epoch 58 , Loss: 139632.44041919708 , Train RMSE: 1710.0521562024587
Epoch 59 , Loss: 139516.5595960617 , Train RMSE: 1709.7372896794848
Epoch 60 , Loss: 139565.054104805 , Train RMSE: 1709.8537493571682
Epoch 61 , Loss: 139402.35672855377 , Train RMSE: 1709.3051046000921
Epoch 62 , Loss: 139414.37590789795 , Train RMSE: 1709.4178132773084
Epoch 63 , Loss: 139338.1035194397 , Train RMSE: 1709.1903473826735
Epoch 64 , Loss: 139665.65264511108 , Train RMSE: 1709.2652869138597
Epoch 65 , Loss: 139464.48265266418 , Train RMSE: 1709.0766743093693
Epoch 66 , Loss: 139352.48969459534 , Train RMSE: 1709.0053505257952
Epoch 67 , Loss: 139408.12833213806 , Train RMSE: 1709.006673424666
Epoch 68 , Loss: 139264.91625404358 , Train RMSE: 1709.0454907742476
Epoch 69 , Loss: 139544.60017681122 , Train RMSE: 1709.1474414826855
Epoch 70 , Loss: 139364.67805671692 , Train RMSE: 1708.8176355627768
Epoch 71 , Loss: 139203.81050014496 , Train RMSE: 1708.7119679947202
Epoch 72 , Loss: 139419.07761859894 , Train RMSE: 1708.7728146351394
Epoch 73 , Loss: 139285.80828762054 , Train RMSE: 1708.6869467609263
Epoch 74 , Loss: 139324.61524009705 , Train RMSE: 1709.1146200568617
Epoch 75 , Loss: 139168.51697540283 , Train RMSE: 1708.7577909224299
Epoch 76 , Loss: 139144.77280521393 , Train RMSE: 1708.6537820077936
Epoch 77 , Loss: 139108.9020638466 , Train RMSE: 1708.5363189413863
Epoch 78 , Loss: 139343.71337747574 , Train RMSE: 1708.7519591777966
Epoch 79 , Loss: 139242.655936718 , Train RMSE: 1708.6761188497242
Epoch 80 , Loss: 139092.51531362534 , Train RMSE: 1708.5768212786018
Epoch 81 , Loss: 139056.21225690842 , Train RMSE: 1708.4326524459718
Epoch 82 , Loss: 139078.21816587448 , Train RMSE: 1708.577677686539
Epoch 83 , Loss: 139018.56160783768 , Train RMSE: 1708.3990517632017
Epoch 84 , Loss: 139022.66366386414 , Train RMSE: 1708.4912018793898
Epoch 85 , Loss: 139028.9262084961 , Train RMSE: 1708.618944688025
Epoch 86 , Loss: 138923.55860853195 , Train RMSE: 1708.1968021121834
Epoch 87 , Loss: 139068.65333652496 , Train RMSE: 1708.7354469368693
Epoch 88 , Loss: 138909.20517158508 , Train RMSE: 1708.2214010943178
Epoch 89 , Loss: 138903.3711886406 , Train RMSE: 1708.2024009904217
Epoch 90 , Loss: 138981.66336393356 , Train RMSE: 1708.336918797544
Epoch 91 , Loss: 138953.917116642 , Train RMSE: 1708.5994257524935
Epoch 92 , Loss: 138887.7227549553 , Train RMSE: 1708.3550941944038
Epoch 93 , Loss: 138848.49419879913 , Train RMSE: 1708.2047347930195
Epoch 94 , Loss: 138856.7601184845 , Train RMSE: 1708.221532691102
Epoch 95 , Loss: 138804.28978347778 , Train RMSE: 1708.0689185861797
Epoch 96 , Loss: 138799.73120832443 , Train RMSE: 1708.0460998878132
Epoch 97 , Loss: 138774.6876115799 , Train RMSE: 1707.970855397156
Epoch 98 , Loss: 138774.04697084427 , Train RMSE: 1707.9535305508593
Epoch 99 , Loss: 138721.80940151215 , Train RMSE: 1707.7064501455798
Validation MSE: 244.69447021971024
SPLIT 5 :
Epoch 0 , Loss: 200841.46083831787 , Train RMSE: 1556.8185461700446
Epoch 1 , Loss: 200815.1174621582 , Train RMSE: 1556.8228698574342
Epoch 2 , Loss: 200788.55078125 , Train RMSE: 1556.8273392875506
Epoch 3 , Loss: 200761.315864563 , Train RMSE: 1556.83157144876
Epoch 4 , Loss: 200732.8596343994 , Train RMSE: 1556.835059412846
Epoch 5 , Loss: 200702.4384613037 , Train RMSE: 1556.8372124351145
Epoch 6 , Loss: 200669.04151153564 , Train RMSE: 1556.837731234016
Epoch 7 , Loss: 200631.22382354736 , Train RMSE: 1556.8363965877732
Epoch 8 , Loss: 200586.77493286133 , Train RMSE: 1556.8320661758203
Epoch 9 , Loss: 200532.1484222412 , Train RMSE: 1556.8227272489476
Epoch 10 , Loss: 200461.481300354 , Train RMSE: 1556.804955130148
Epoch 11 , Loss: 200363.42778778076 , Train RMSE: 1556.77208577455
Epoch 12 , Loss: 200217.91019439697 , Train RMSE: 1556.7143637646886
Epoch 13 , Loss: 199992.05429840088 , Train RMSE: 1556.619281332345
Epoch 14 , Loss: 199623.40802001953 , Train RMSE: 1556.4567459390662
Epoch 15 , Loss: 198985.6636581421 , Train RMSE: 1556.1651725815802
Epoch 16 , Loss: 197805.73209381104 , Train RMSE: 1555.6384313477756
Epoch 17 , Loss: 195597.2057647705 , Train RMSE: 1554.6255407093768
Epoch 18 , Loss: 191594.02893829346 , Train RMSE: 1552.839962285489
Epoch 19 , Loss: 184279.45170593262 , Train RMSE: 1550.51279546418
Epoch 20 , Loss: 172739.39219665527 , Train RMSE: 1548.3741738921997
Epoch 21 , Loss: 161000.7494239807 , Train RMSE: 1550.3465430618064
Epoch 22 , Loss: 155305.3298110962 , Train RMSE: 1554.101665228625
Epoch 23 , Loss: 153865.90326976776 , Train RMSE: 1553.715955783301
Epoch 24 , Loss: 154098.41479873657 , Train RMSE: 1556.3737344619256
Epoch 25 , Loss: 153303.6851978302 , Train RMSE: 1556.5488477307888
Epoch 26 , Loss: 153350.69384527206 , Train RMSE: 1555.4345211539699
Epoch 27 , Loss: 153050.08416080475 , Train RMSE: 1555.599974646605
Epoch 28 , Loss: 154405.1197528839 , Train RMSE: 1555.393903444832
Epoch 29 , Loss: 154168.18363189697 , Train RMSE: 1557.5298420754343
Epoch 30 , Loss: 153144.3367767334 , Train RMSE: 1557.51520885263
Epoch 31 , Loss: 152806.6045742035 , Train RMSE: 1555.9184123840955
Epoch 32 , Loss: 152159.9497566223 , Train RMSE: 1555.4088290597022
Epoch 33 , Loss: 151892.75086021423 , Train RMSE: 1554.4251080373965
Epoch 34 , Loss: 151616.6933517456 , Train RMSE: 1553.1705848956806
Epoch 35 , Loss: 150125.9283103943 , Train RMSE: 1543.831044774014
Epoch 36 , Loss: 150039.6467666626 , Train RMSE: 1545.9005269000852
Epoch 37 , Loss: 149894.2292251587 , Train RMSE: 1539.6154206815736
Epoch 38 , Loss: 149300.57194519043 , Train RMSE: 1537.883255751917
Epoch 39 , Loss: 149657.52759361267 , Train RMSE: 1536.7282827276542
Epoch 40 , Loss: 148706.51040172577 , Train RMSE: 1535.098137082546
Epoch 41 , Loss: 148886.28439712524 , Train RMSE: 1534.5279182755603
Epoch 42 , Loss: 148524.81416225433 , Train RMSE: 1534.3573721203784
Epoch 43 , Loss: 148456.04913330078 , Train RMSE: 1534.556778786095
Epoch 44 , Loss: 148086.30085611343 , Train RMSE: 1534.6987848822469
Epoch 45 , Loss: 148292.96767807007 , Train RMSE: 1536.8658998115682
Epoch 46 , Loss: 148072.13656902313 , Train RMSE: 1537.3384466032
Epoch 47 , Loss: 148288.10250091553 , Train RMSE: 1539.9072166071041
Epoch 48 , Loss: 147955.2229013443 , Train RMSE: 1536.1767667465083
Epoch 49 , Loss: 147604.46684885025 , Train RMSE: 1533.619455527678
Epoch 50 , Loss: 147735.35841560364 , Train RMSE: 1533.2905237761104
Epoch 51 , Loss: 147558.35104608536 , Train RMSE: 1532.9442181039897
Epoch 52 , Loss: 147687.26228618622 , Train RMSE: 1533.9293433036073
Epoch 53 , Loss: 147342.3912987709 , Train RMSE: 1533.312690044546
Epoch 54 , Loss: 147507.23407268524 , Train RMSE: 1532.8147987802736
Epoch 55 , Loss: 147359.74680662155 , Train RMSE: 1533.0669802850196
Epoch 56 , Loss: 147308.78686380386 , Train RMSE: 1533.4990926472747
Epoch 57 , Loss: 147451.04820919037 , Train RMSE: 1535.6924438096758
Epoch 58 , Loss: 147441.98946762085 , Train RMSE: 1532.90606810298
Epoch 59 , Loss: 147320.80126190186 , Train RMSE: 1533.7352876927641
Epoch 60 , Loss: 147134.72250509262 , Train RMSE: 1532.7031417158294
Epoch 61 , Loss: 147411.64437294006 , Train RMSE: 1536.9515558679902
Epoch 62 , Loss: 147059.36505031586 , Train RMSE: 1532.7608129601163
Epoch 63 , Loss: 146937.4921336174 , Train RMSE: 1532.450272940337
Epoch 64 , Loss: 146942.05032634735 , Train RMSE: 1533.0049899431958
Epoch 65 , Loss: 147065.258207798 , Train RMSE: 1532.541867624397
Epoch 66 , Loss: 146698.59706020355 , Train RMSE: 1531.9759390429338
Epoch 67 , Loss: 146878.4085536003 , Train RMSE: 1532.495905979447
Epoch 68 , Loss: 146973.59911108017 , Train RMSE: 1532.7837813254048
Epoch 69 , Loss: 146915.73461580276 , Train RMSE: 1532.6388464580114
Epoch 70 , Loss: 146799.80571174622 , Train RMSE: 1532.6555404026315
Epoch 71 , Loss: 146549.37248945236 , Train RMSE: 1531.7212358819163
Epoch 72 , Loss: 147041.01999473572 , Train RMSE: 1532.7072583604997
Epoch 73 , Loss: 146849.61901140213 , Train RMSE: 1532.0127184020057
Epoch 74 , Loss: 146821.90120315552 , Train RMSE: 1531.8034001356252
Epoch 75 , Loss: 146625.72479224205 , Train RMSE: 1531.8282054917295
Epoch 76 , Loss: 146825.3487212658 , Train RMSE: 1532.1102772906606
Epoch 77 , Loss: 146658.9848368168 , Train RMSE: 1531.753729771631
Epoch 78 , Loss: 146700.1023800373 , Train RMSE: 1531.7861882966426
Epoch 79 , Loss: 146571.07693314552 , Train RMSE: 1531.7715504204452
Epoch 80 , Loss: 146413.94442367554 , Train RMSE: 1531.8559086872572
Epoch 81 , Loss: 146525.87835144997 , Train RMSE: 1531.8134288373894
Epoch 82 , Loss: 146558.06453728676 , Train RMSE: 1531.5558131781952
Epoch 83 , Loss: 146596.03841924667 , Train RMSE: 1532.4137231215273
Epoch 84 , Loss: 146581.98211812973 , Train RMSE: 1532.6404294296924
Epoch 85 , Loss: 146515.56551265717 , Train RMSE: 1531.8510814402055
Epoch 86 , Loss: 146593.21073293686 , Train RMSE: 1532.42413846465
Epoch 87 , Loss: 146545.2699713707 , Train RMSE: 1533.182312235497
Epoch 88 , Loss: 146886.02795124054 , Train RMSE: 1533.2875008691196
Epoch 89 , Loss: 146569.42057037354 , Train RMSE: 1531.3803085939214
Epoch 90 , Loss: 146274.82229423523 , Train RMSE: 1531.1741442328569
Epoch 91 , Loss: 146411.24681425095 , Train RMSE: 1531.666805624516
Epoch 92 , Loss: 146536.75448274612 , Train RMSE: 1531.6566002148984
Epoch 93 , Loss: 146403.5322947502 , Train RMSE: 1532.3353882724925
Epoch 94 , Loss: 146340.17367458344 , Train RMSE: 1532.043754150265
Epoch 95 , Loss: 146338.82418251038 , Train RMSE: 1531.369285149959
Epoch 96 , Loss: 146198.04714250565 , Train RMSE: 1531.1420953386696
Epoch 97 , Loss: 146361.4757757187 , Train RMSE: 1530.985644996684
Epoch 98 , Loss: 146221.394261837 , Train RMSE: 1530.9355150295416
Epoch 99 , Loss: 146071.71461963654 , Train RMSE: 1530.7212304068576
Validation MSE: 253.05453822168565
lr: 0.001
Average validation MSE: 1135.699408991196
*********** CURRENT LEARNING RATE : 0.01 ***********
SPLIT 1 :
Epoch 0 , Loss: 22799.906631469727 , Train RMSE: 436.4433900700297
Epoch 1 , Loss: 22779.257781982422 , Train RMSE: 436.14391066992613
Epoch 2 , Loss: 22759.62094116211 , Train RMSE: 435.86144354798796
Epoch 3 , Loss: 22736.52001953125 , Train RMSE: 435.52833500191707
Epoch 4 , Loss: 22706.16050720215 , Train RMSE: 435.09063816845395
Epoch 5 , Loss: 22656.295288085938 , Train RMSE: 434.3721229982564
Epoch 6 , Loss: 22549.643493652344 , Train RMSE: 432.83317685030033
Epoch 7 , Loss: 22244.715286254883 , Train RMSE: 428.4179161238381
Epoch 8 , Loss: 20975.504684448242 , Train RMSE: 410.2484865718282
Epoch 9 , Loss: 15718.738578796387 , Train RMSE: 345.5513150258832
Epoch 10 , Loss: 9884.262706756592 , Train RMSE: 299.89179187765313
Epoch 11 , Loss: 9828.223876953125 , Train RMSE: 302.2739313104668
Epoch 12 , Loss: 9763.105424880981 , Train RMSE: 303.6638304959867
Epoch 13 , Loss: 9632.232348442078 , Train RMSE: 302.691384317404
Epoch 14 , Loss: 9554.735370635986 , Train RMSE: 301.54458587995947
Epoch 15 , Loss: 9479.005550384521 , Train RMSE: 300.10530595186003
Epoch 16 , Loss: 9525.960063934326 , Train RMSE: 301.4459241416045
Epoch 17 , Loss: 9606.645166397095 , Train RMSE: 299.50637096525725
Epoch 18 , Loss: 9517.554017066956 , Train RMSE: 299.57473362675097
Epoch 19 , Loss: 9497.737384796143 , Train RMSE: 299.4252592660078
Epoch 20 , Loss: 9459.896014213562 , Train RMSE: 297.95682665402455
Epoch 21 , Loss: 9431.948023796082 , Train RMSE: 297.19716039165183
Epoch 22 , Loss: 9432.656785964966 , Train RMSE: 297.173153093431
Epoch 23 , Loss: 9320.245825767517 , Train RMSE: 295.37768161376675
Epoch 24 , Loss: 9319.715731620789 , Train RMSE: 295.2696161101436
Epoch 25 , Loss: 9578.34008693695 , Train RMSE: 293.0801963011151
Epoch 26 , Loss: 9288.657178878784 , Train RMSE: 291.61979414916925
Epoch 27 , Loss: 9240.113403320312 , Train RMSE: 290.87394275463737
Epoch 28 , Loss: 9402.796920776367 , Train RMSE: 292.58900516171184
Epoch 29 , Loss: 9500.32055091858 , Train RMSE: 291.7970250871926
Epoch 30 , Loss: 9277.778765678406 , Train RMSE: 289.45112658667756
Epoch 31 , Loss: 9214.930975914001 , Train RMSE: 289.87722927751327
Epoch 32 , Loss: 9255.305139541626 , Train RMSE: 289.4937396775851
Epoch 33 , Loss: 9143.291792869568 , Train RMSE: 289.64351075781894
Epoch 34 , Loss: 9050.32285785675 , Train RMSE: 288.1549701276506
Epoch 35 , Loss: 9064.28210735321 , Train RMSE: 288.14345426174515
Epoch 36 , Loss: 9009.242853164673 , Train RMSE: 287.3635787679352
Epoch 37 , Loss: 8994.549129486084 , Train RMSE: 287.293670430646
Epoch 38 , Loss: 9076.336929321289 , Train RMSE: 289.47878089430844
Epoch 39 , Loss: 8961.157891273499 , Train RMSE: 287.4528434131084
Epoch 40 , Loss: 9031.879699707031 , Train RMSE: 288.55794173604426
Epoch 41 , Loss: 8996.547567367554 , Train RMSE: 287.6932385221772
Epoch 42 , Loss: 8994.081324577332 , Train RMSE: 287.6035929957724
Epoch 43 , Loss: 8900.30848121643 , Train RMSE: 284.90912565494466
Epoch 44 , Loss: 8846.698167800903 , Train RMSE: 283.90014372817495
Epoch 45 , Loss: 8984.175178527832 , Train RMSE: 285.1597211534103
Epoch 46 , Loss: 9011.879692077637 , Train RMSE: 285.3949596465392
Epoch 47 , Loss: 8931.021109580994 , Train RMSE: 284.51090972639344
Epoch 48 , Loss: 8971.21388053894 , Train RMSE: 286.51727546135334
Epoch 49 , Loss: 8901.62110042572 , Train RMSE: 285.78584720481973
Epoch 50 , Loss: 8927.150569915771 , Train RMSE: 285.2016435461033
Epoch 51 , Loss: 8815.5842294693 , Train RMSE: 283.22991249453236
Epoch 52 , Loss: 8840.85022354126 , Train RMSE: 283.2579238577679
Epoch 53 , Loss: 8742.730890274048 , Train RMSE: 282.5113262368031
Epoch 54 , Loss: 8917.650019645691 , Train RMSE: 284.60043887065
Epoch 55 , Loss: 8785.959261894226 , Train RMSE: 282.60914382559065
Epoch 56 , Loss: 8806.033007621765 , Train RMSE: 283.16837933524073
Epoch 57 , Loss: 8766.065865516663 , Train RMSE: 282.23875542209703
Epoch 58 , Loss: 8833.845128059387 , Train RMSE: 282.888627388233
Epoch 59 , Loss: 8757.468074798584 , Train RMSE: 282.308280069253
Epoch 60 , Loss: 8724.902222633362 , Train RMSE: 281.9540176893193
Epoch 61 , Loss: 8748.278244018555 , Train RMSE: 282.262784122209
Epoch 62 , Loss: 8751.817562103271 , Train RMSE: 282.1412580629348
Epoch 63 , Loss: 8714.539870262146 , Train RMSE: 282.1737682909228
Epoch 64 , Loss: 8675.794695854187 , Train RMSE: 281.5045637588395
Epoch 65 , Loss: 8772.199058532715 , Train RMSE: 283.6006688065911
Epoch 66 , Loss: 8702.263939857483 , Train RMSE: 282.0389190845225
Epoch 67 , Loss: 8702.389868736267 , Train RMSE: 282.3291828588954
Epoch 68 , Loss: 8746.19427204132 , Train RMSE: 283.016255806427
Epoch 69 , Loss: 8683.681595802307 , Train RMSE: 282.0865941995817
Epoch 70 , Loss: 8734.043154716492 , Train RMSE: 284.22437102811756
Epoch 71 , Loss: 8652.815969467163 , Train RMSE: 283.17235794173143
Epoch 72 , Loss: 8744.186141967773 , Train RMSE: 284.64701037172784
Epoch 73 , Loss: 8713.699577331543 , Train RMSE: 283.6900618749892
Epoch 74 , Loss: 8719.469307899475 , Train RMSE: 283.6022083196397
Epoch 75 , Loss: 8687.953272819519 , Train RMSE: 281.92195851685403
Epoch 76 , Loss: 8580.022290229797 , Train RMSE: 280.4514578818028
Epoch 77 , Loss: 8636.025116920471 , Train RMSE: 281.61209650222594
Epoch 78 , Loss: 8609.435914039612 , Train RMSE: 280.80457586982294
Epoch 79 , Loss: 8616.094277381897 , Train RMSE: 281.1131479637908
Epoch 80 , Loss: 8707.247985839844 , Train RMSE: 281.70296058205247
Epoch 81 , Loss: 8621.024751663208 , Train RMSE: 280.9199332564378
Epoch 82 , Loss: 8602.571161270142 , Train RMSE: 280.61465188405964
Epoch 83 , Loss: 8567.082639694214 , Train RMSE: 280.54413631856653
Epoch 84 , Loss: 8588.886638641357 , Train RMSE: 280.3878925533334
Epoch 85 , Loss: 8573.623141288757 , Train RMSE: 280.5603161632953
Epoch 86 , Loss: 8535.581883430481 , Train RMSE: 280.32077214358674
Epoch 87 , Loss: 8560.887813568115 , Train RMSE: 280.1021548809975
Epoch 88 , Loss: 8542.37801361084 , Train RMSE: 280.6353014467104
Epoch 89 , Loss: 8508.749969482422 , Train RMSE: 279.5294548181201
Epoch 90 , Loss: 8549.123002052307 , Train RMSE: 281.81640606472104
Epoch 91 , Loss: 8556.77750492096 , Train RMSE: 281.7740782968419
Epoch 92 , Loss: 8524.19342327118 , Train RMSE: 281.85972963718024
Epoch 93 , Loss: 8519.28735256195 , Train RMSE: 281.61143674081217
Epoch 94 , Loss: 8463.246740341187 , Train RMSE: 280.2269889382205
Epoch 95 , Loss: 8433.968922615051 , Train RMSE: 279.31473428231925
Epoch 96 , Loss: 8447.189128875732 , Train RMSE: 279.1716510109192
Epoch 97 , Loss: 8399.657117843628 , Train RMSE: 279.03618237058106
Epoch 98 , Loss: 8393.106966972351 , Train RMSE: 278.89870510696295
Epoch 99 , Loss: 8383.163986206055 , Train RMSE: 278.6940789692822
Validation MSE: 3121.411072576693
SPLIT 2 :
Epoch 0 , Loss: 100977.5295791626 , Train RMSE: 2217.803889510142
Epoch 1 , Loss: 100932.33023071289 , Train RMSE: 2217.8746442836446
Epoch 2 , Loss: 100881.29886627197 , Train RMSE: 2217.977904116139
Epoch 3 , Loss: 100771.5072479248 , Train RMSE: 2218.1530929396467
Epoch 4 , Loss: 100168.65486907959 , Train RMSE: 2218.814341231997
Epoch 5 , Loss: 94221.3624458313 , Train RMSE: 2223.6915235471265
Epoch 6 , Loss: 80650.53218269348 , Train RMSE: 2209.050634523864
Epoch 7 , Loss: 79624.62441682816 , Train RMSE: 2204.6141979960557
Epoch 8 , Loss: 79653.32564306259 , Train RMSE: 2202.4655866165053
Epoch 9 , Loss: 79235.52448225021 , Train RMSE: 2202.772269859246
Epoch 10 , Loss: 79173.06826925278 , Train RMSE: 2201.584108387838
Epoch 11 , Loss: 78937.3399553299 , Train RMSE: 2201.4291999095476
Epoch 12 , Loss: 78718.48985767365 , Train RMSE: 2200.983263215771
Epoch 13 , Loss: 78667.87189817429 , Train RMSE: 2200.7699589845897
Epoch 14 , Loss: 78920.35786151886 , Train RMSE: 2201.1637295076566
Epoch 15 , Loss: 79096.06112146378 , Train RMSE: 2201.454713474515
Epoch 16 , Loss: 79186.03863620758 , Train RMSE: 2206.698072827221
Epoch 17 , Loss: 78885.38524723053 , Train RMSE: 2202.360102488418
Epoch 18 , Loss: 78989.97595691681 , Train RMSE: 2201.264228290994
Epoch 19 , Loss: 78867.67780256271 , Train RMSE: 2200.2632396793747
Epoch 20 , Loss: 78670.08255290985 , Train RMSE: 2199.645916019042
Epoch 21 , Loss: 78991.55416488647 , Train RMSE: 2199.803630656815
Epoch 22 , Loss: 78776.66606616974 , Train RMSE: 2199.261016535642
Epoch 23 , Loss: 78668.82297372818 , Train RMSE: 2198.600084770937
Epoch 24 , Loss: 78673.28491973877 , Train RMSE: 2198.6080671589434
Epoch 25 , Loss: 78370.59263563156 , Train RMSE: 2197.6400574873405
Epoch 26 , Loss: 78555.19654607773 , Train RMSE: 2197.186319495991
Epoch 27 , Loss: 78459.22798681259 , Train RMSE: 2195.9185361813516
Epoch 28 , Loss: 78338.84225416183 , Train RMSE: 2194.3655571360673
Epoch 29 , Loss: 78319.32540273666 , Train RMSE: 2193.4580924830548
Epoch 30 , Loss: 78013.50615692139 , Train RMSE: 2191.423501373361
Epoch 31 , Loss: 78102.97440338135 , Train RMSE: 2190.0809825107353
Epoch 32 , Loss: 78016.55406475067 , Train RMSE: 2187.5921181086997
Epoch 33 , Loss: 77827.58124399185 , Train RMSE: 2184.7039829645387
Epoch 34 , Loss: 77696.14175891876 , Train RMSE: 2182.0639838110756
Epoch 35 , Loss: 77464.42664003372 , Train RMSE: 2179.1843061021214
Epoch 36 , Loss: 77432.94205856323 , Train RMSE: 2176.1987490839274
Epoch 37 , Loss: 77273.53170108795 , Train RMSE: 2173.1767365973747
Epoch 38 , Loss: 77453.66186618805 , Train RMSE: 2169.4755723580947
Epoch 39 , Loss: 77007.15256404877 , Train RMSE: 2166.5598979938973
Epoch 40 , Loss: 77022.56964588165 , Train RMSE: 2161.901170985448
Epoch 41 , Loss: 77311.15465021133 , Train RMSE: 2181.5589754368048
Epoch 42 , Loss: 77233.21022176743 , Train RMSE: 2165.573036438626
Epoch 43 , Loss: 77023.5628156662 , Train RMSE: 2155.958392055669
Epoch 44 , Loss: 76770.95002126694 , Train RMSE: 2153.6829084021097
Epoch 45 , Loss: 76343.77674293518 , Train RMSE: 2149.250662279223
Epoch 46 , Loss: 76802.92531013489 , Train RMSE: 2157.7392331747287
Epoch 47 , Loss: 76953.3056883812 , Train RMSE: 2159.1586510716784
Epoch 48 , Loss: 76501.56322622299 , Train RMSE: 2143.54355861904
Epoch 49 , Loss: 76385.17024421692 , Train RMSE: 2139.7357881678126
Epoch 50 , Loss: 76191.913128376 , Train RMSE: 2137.1701280964385
Epoch 51 , Loss: 76281.57748889923 , Train RMSE: 2137.993129307081
Epoch 52 , Loss: 76311.16516637802 , Train RMSE: 2136.0477575963932
Epoch 53 , Loss: 76380.03922462463 , Train RMSE: 2134.898738519003
Epoch 54 , Loss: 76364.88437604904 , Train RMSE: 2137.2310339842506
Epoch 55 , Loss: 76304.89320468903 , Train RMSE: 2136.235808262961
Epoch 56 , Loss: 76725.47729110718 , Train RMSE: 2138.0815813977188
Epoch 57 , Loss: 76252.28131818771 , Train RMSE: 2136.0594932482686
Epoch 58 , Loss: 76123.38378810883 , Train RMSE: 2134.0803331777533
Epoch 59 , Loss: 76176.36302757263 , Train RMSE: 2139.9486713224032
Epoch 60 , Loss: 76432.57605218887 , Train RMSE: 2144.055184270298
Epoch 61 , Loss: 75921.51760673523 , Train RMSE: 2135.5967881528313
Epoch 62 , Loss: 76334.36074256897 , Train RMSE: 2142.2675444922684
Epoch 63 , Loss: 76438.30641460419 , Train RMSE: 2128.182457141037
Epoch 64 , Loss: 76490.97525072098 , Train RMSE: 2124.952497458811
Epoch 65 , Loss: 76611.64214134216 , Train RMSE: 2127.9971085008065
Epoch 66 , Loss: 76292.0030207634 , Train RMSE: 2128.8159942724246
Epoch 67 , Loss: 75674.01779317856 , Train RMSE: 2124.4506612484934
Epoch 68 , Loss: 76060.32291269302 , Train RMSE: 2128.7802710403835
Epoch 69 , Loss: 76137.12929058075 , Train RMSE: 2126.3858255743526
Epoch 70 , Loss: 76238.89690065384 , Train RMSE: 2128.268153578758
Epoch 71 , Loss: 75887.85321474075 , Train RMSE: 2121.2706704442226
Epoch 72 , Loss: 77273.86271810532 , Train RMSE: 2161.2576141985774
Epoch 73 , Loss: 76278.57624340057 , Train RMSE: 2154.45546963909
Epoch 74 , Loss: 76186.44014072418 , Train RMSE: 2148.8005732795164
Epoch 75 , Loss: 76716.81284618378 , Train RMSE: 2156.017173436216
Epoch 76 , Loss: 76167.5751504898 , Train RMSE: 2148.045448041844
Epoch 77 , Loss: 76816.01431131363 , Train RMSE: 2148.540589205353
Epoch 78 , Loss: 76055.39194059372 , Train RMSE: 2144.6354552997127
Epoch 79 , Loss: 75996.83701562881 , Train RMSE: 2142.920172102972
Epoch 80 , Loss: 75965.85517501831 , Train RMSE: 2133.781027608494
Epoch 81 , Loss: 75887.99757814407 , Train RMSE: 2138.7904628026827
Epoch 82 , Loss: 75798.0834183693 , Train RMSE: 2139.0941021850595
Epoch 83 , Loss: 75662.38830518723 , Train RMSE: 2134.6906119382033
Epoch 84 , Loss: 76453.5772819519 , Train RMSE: 2143.7881572597157
Epoch 85 , Loss: 75522.8311290741 , Train RMSE: 2133.553137824636
Epoch 86 , Loss: 76066.0401186943 , Train RMSE: 2130.9822439434815
Epoch 87 , Loss: 76045.17729187012 , Train RMSE: 2134.6746521650825
Epoch 88 , Loss: 76252.09538078308 , Train RMSE: 2134.3466628025426
Epoch 89 , Loss: 76219.2887673378 , Train RMSE: 2134.9256263044385
Epoch 90 , Loss: 75445.65058660507 , Train RMSE: 2129.0509789851308
Epoch 91 , Loss: 75481.697763443 , Train RMSE: 2127.6983392416705
Epoch 92 , Loss: 75596.66252851486 , Train RMSE: 2130.5265558282763
Epoch 93 , Loss: 75527.04618024826 , Train RMSE: 2128.344977134849
Epoch 94 , Loss: 75848.05881118774 , Train RMSE: 2130.8864591723445
Epoch 95 , Loss: 75579.67036914825 , Train RMSE: 2124.976279069787
Epoch 96 , Loss: 75458.23670721054 , Train RMSE: 2123.867522519671
Epoch 97 , Loss: 76001.4595913887 , Train RMSE: 2129.2923088016782
Epoch 98 , Loss: 75828.72694683075 , Train RMSE: 2131.3043272634536
Epoch 99 , Loss: 75717.10836601257 , Train RMSE: 2134.2343430874184
Validation MSE: 897.3527205902647
SPLIT 3 :
Epoch 0 , Loss: 139286.80043792725 , Train RMSE: 1908.436709363872
Epoch 1 , Loss: 138732.7706451416 , Train RMSE: 1908.3692627194455
Epoch 2 , Loss: 127548.85746479034 , Train RMSE: 1909.7902166960139
Epoch 3 , Loss: 111468.57168626785 , Train RMSE: 1899.4818208630275
Epoch 4 , Loss: 109804.00307178497 , Train RMSE: 1893.6958262107783
Epoch 5 , Loss: 110160.21851062775 , Train RMSE: 1890.7418799095747
Epoch 6 , Loss: 110330.10809850693 , Train RMSE: 1891.2062451249633
Epoch 7 , Loss: 110770.6572175026 , Train RMSE: 1895.352237748129
Epoch 8 , Loss: 109513.98555755615 , Train RMSE: 1894.6145203354242
Epoch 9 , Loss: 109807.04251194 , Train RMSE: 1891.3966029957662
Epoch 10 , Loss: 109975.54908657074 , Train RMSE: 1891.5269364504898
Epoch 11 , Loss: 110552.30869054794 , Train RMSE: 1891.8343947707713
Epoch 12 , Loss: 110581.51898002625 , Train RMSE: 1891.7171728409453
Epoch 13 , Loss: 110290.90747737885 , Train RMSE: 1891.6811172222995
Epoch 14 , Loss: 110622.4748802185 , Train RMSE: 1892.3824324653513
Epoch 15 , Loss: 110116.91529417038 , Train RMSE: 1891.7634175698822
Epoch 16 , Loss: 109896.37095403671 , Train RMSE: 1891.268031523298
Epoch 17 , Loss: 109615.41405248642 , Train RMSE: 1890.6211369932096
Epoch 18 , Loss: 109601.85356235504 , Train RMSE: 1890.8591446899018
Epoch 19 , Loss: 109535.03321838379 , Train RMSE: 1891.1765393789508
Epoch 20 , Loss: 109456.66956758499 , Train RMSE: 1890.4811448754187
Epoch 21 , Loss: 109753.08200979233 , Train RMSE: 1891.5868199493777
Epoch 22 , Loss: 109462.10460472107 , Train RMSE: 1890.920866992324
Epoch 23 , Loss: 111517.90560483932 , Train RMSE: 1891.6285698393554
Epoch 24 , Loss: 109478.4484128952 , Train RMSE: 1890.5600085142908
Epoch 25 , Loss: 109147.89395236969 , Train RMSE: 1890.4912714978814
Epoch 26 , Loss: 109398.59986686707 , Train RMSE: 1891.2844676675786
Epoch 27 , Loss: 109183.52985191345 , Train RMSE: 1890.1720562516468
Epoch 28 , Loss: 109223.69391870499 , Train RMSE: 1890.6437419200677
Epoch 29 , Loss: 109476.1321644783 , Train RMSE: 1890.4147751967816
Epoch 30 , Loss: 109477.10419988632 , Train RMSE: 1890.1222647156767
Epoch 31 , Loss: 109544.69196367264 , Train RMSE: 1890.0107661848301
Epoch 32 , Loss: 109466.55400943756 , Train RMSE: 1890.17155031926
Epoch 33 , Loss: 109612.03197479248 , Train RMSE: 1889.9733748106846
Epoch 34 , Loss: 110345.41434669495 , Train RMSE: 1890.2933541478167
Epoch 35 , Loss: 109512.57274246216 , Train RMSE: 1890.1454466670727
Epoch 36 , Loss: 109919.35340499878 , Train RMSE: 1889.7597659157962
Epoch 37 , Loss: 109663.5445022583 , Train RMSE: 1889.7620030212688
Epoch 38 , Loss: 109521.82453393936 , Train RMSE: 1889.7607870859265
Epoch 39 , Loss: 109721.44216632843 , Train RMSE: 1889.7289828221624
Epoch 40 , Loss: 109512.74019765854 , Train RMSE: 1890.7401584674283
Epoch 41 , Loss: 109410.22319698334 , Train RMSE: 1889.9246946442122
Epoch 42 , Loss: 109357.57664632797 , Train RMSE: 1889.6219349440098
Epoch 43 , Loss: 109309.25554084778 , Train RMSE: 1889.7636481878585
Epoch 44 , Loss: 109173.99715709686 , Train RMSE: 1889.691590952887
Epoch 45 , Loss: 109307.23270893097 , Train RMSE: 1889.441120548804
Epoch 46 , Loss: 109365.58892202377 , Train RMSE: 1890.2391177115755
Epoch 47 , Loss: 109246.20501852036 , Train RMSE: 1889.6623049504176
Epoch 48 , Loss: 109158.06557750702 , Train RMSE: 1889.2160088813862
Epoch 49 , Loss: 109422.66307497025 , Train RMSE: 1891.4772016275044
Epoch 50 , Loss: 109328.7802605629 , Train RMSE: 1890.4778489183857
Epoch 51 , Loss: 109168.91748571396 , Train RMSE: 1889.4692726889189
Epoch 52 , Loss: 109020.92992210388 , Train RMSE: 1889.22294655913
Epoch 53 , Loss: 109044.35025548935 , Train RMSE: 1889.2282394114
Epoch 54 , Loss: 108949.58886289597 , Train RMSE: 1889.046420385351
Epoch 55 , Loss: 109140.30921459198 , Train RMSE: 1889.2181892971182
Epoch 56 , Loss: 109184.74109458923 , Train RMSE: 1889.068095083385
Epoch 57 , Loss: 109066.39306259155 , Train RMSE: 1889.1711456893338
Epoch 58 , Loss: 109188.71326208115 , Train RMSE: 1888.9217106927651
Epoch 59 , Loss: 109146.56220054626 , Train RMSE: 1888.7760143755006
Epoch 60 , Loss: 109050.1450381279 , Train RMSE: 1888.7306478779608
Epoch 61 , Loss: 109197.0080742836 , Train RMSE: 1890.2511635571516
Epoch 62 , Loss: 109080.26251411438 , Train RMSE: 1889.1620246454004
Epoch 63 , Loss: 108901.06986093521 , Train RMSE: 1888.5193800467332
Epoch 64 , Loss: 108930.79332876205 , Train RMSE: 1888.4066926112262
Epoch 65 , Loss: 108849.0525560379 , Train RMSE: 1888.3366000770193
Epoch 66 , Loss: 109061.23136806488 , Train RMSE: 1889.1375435596208
Epoch 67 , Loss: 108955.44487047195 , Train RMSE: 1888.82395381574
Epoch 68 , Loss: 108841.39781999588 , Train RMSE: 1888.8013833649616
Epoch 69 , Loss: 108695.58436059952 , Train RMSE: 1888.5483975770908
Epoch 70 , Loss: 108747.8703866005 , Train RMSE: 1888.2848440526514
Epoch 71 , Loss: 108781.50776433945 , Train RMSE: 1888.4838081736389
Epoch 72 , Loss: 108733.11577939987 , Train RMSE: 1888.0434123193263
Epoch 73 , Loss: 108782.00975942612 , Train RMSE: 1887.747183013852
Epoch 74 , Loss: 108775.2673778534 , Train RMSE: 1887.6500949753536
Epoch 75 , Loss: 108641.65398073196 , Train RMSE: 1887.2258513996828
Epoch 76 , Loss: 108931.16062450409 , Train RMSE: 1888.42010716029
Epoch 77 , Loss: 108652.87294960022 , Train RMSE: 1886.5793613940282
Epoch 78 , Loss: 108419.12900781631 , Train RMSE: 1883.9940718243138
Epoch 79 , Loss: 108054.20083284378 , Train RMSE: 1881.0748655067064
Epoch 80 , Loss: 107663.62688159943 , Train RMSE: 1876.6115623216176
Epoch 81 , Loss: 107029.91270208359 , Train RMSE: 1869.8775630417513
Epoch 82 , Loss: 106369.61899089813 , Train RMSE: 1861.7583992893663
Epoch 83 , Loss: 105752.33974075317 , Train RMSE: 1851.8343498990141
Epoch 84 , Loss: 105405.60453748703 , Train RMSE: 1844.9765540139665
Epoch 85 , Loss: 104722.57124090195 , Train RMSE: 1833.2155622251796
Epoch 86 , Loss: 105134.74619960785 , Train RMSE: 1843.551230392426
Epoch 87 , Loss: 104456.67539834976 , Train RMSE: 1833.6239270955323
Epoch 88 , Loss: 104156.9293756485 , Train RMSE: 1827.9776293505017
Epoch 89 , Loss: 104870.74425411224 , Train RMSE: 1827.3739635619897
Epoch 90 , Loss: 104423.89760017395 , Train RMSE: 1829.088807558275
Epoch 91 , Loss: 103861.31671714783 , Train RMSE: 1822.6049291082516
Epoch 92 , Loss: 103229.17822265625 , Train RMSE: 1808.5447651071693
Epoch 93 , Loss: 103797.70475816727 , Train RMSE: 1805.0726933312083
Epoch 94 , Loss: 103727.0947523117 , Train RMSE: 1803.0394176148777
Epoch 95 , Loss: 103383.86450433731 , Train RMSE: 1807.7838936477958
Epoch 96 , Loss: 103948.60172510147 , Train RMSE: 1809.0581246648796
Epoch 97 , Loss: 105159.34617233276 , Train RMSE: 1822.6502644953894
Epoch 98 , Loss: 106868.5828948021 , Train RMSE: 1861.4756766733335
Epoch 99 , Loss: 106532.83451223373 , Train RMSE: 1858.0657469078233
Validation MSE: 852.168509322817
SPLIT 4 :
Epoch 0 , Loss: 177662.83177947998 , Train RMSE: 1727.8967716731433
Epoch 1 , Loss: 177551.0899810791 , Train RMSE: 1727.9421805575428
Epoch 2 , Loss: 177435.88761138916 , Train RMSE: 1727.9534732285474
Epoch 3 , Loss: 177307.96648406982 , Train RMSE: 1727.8713959856982
Epoch 4 , Loss: 177142.00135803223 , Train RMSE: 1727.524952317978
Epoch 5 , Loss: 176881.71676635742 , Train RMSE: 1726.5054361449734
Epoch 6 , Loss: 176428.19980621338 , Train RMSE: 1724.2995505734968
Epoch 7 , Loss: 175467.84030914307 , Train RMSE: 1718.081298105923
Epoch 8 , Loss: 174727.94757080078 , Train RMSE: 1714.7586086643064
Epoch 9 , Loss: 173858.7081451416 , Train RMSE: 1705.8936174951468
Epoch 10 , Loss: 173831.91931915283 , Train RMSE: 1709.119995675561
Epoch 11 , Loss: 171243.71424102783 , Train RMSE: 1683.58014908949
Epoch 12 , Loss: 171584.2713470459 , Train RMSE: 1683.94926704864
Epoch 13 , Loss: 172573.25284576416 , Train RMSE: 1699.2445610115997
Epoch 14 , Loss: 172029.10553741455 , Train RMSE: 1691.607240420269
Epoch 15 , Loss: 172648.84857940674 , Train RMSE: 1694.7206235625918
Epoch 16 , Loss: 171236.60744476318 , Train RMSE: 1686.3310556434074
Epoch 17 , Loss: 170983.54917144775 , Train RMSE: 1680.9792575505085
Epoch 18 , Loss: 168247.05044555664 , Train RMSE: 1659.0113857944127
Epoch 19 , Loss: 168744.45038604736 , Train RMSE: 1675.6462692523687
Epoch 20 , Loss: 167620.6153869629 , Train RMSE: 1642.2720439015916
Epoch 21 , Loss: 168707.50873565674 , Train RMSE: 1665.5898660392234
Epoch 22 , Loss: 167019.9323501587 , Train RMSE: 1642.205435340731
Epoch 23 , Loss: 167531.4001159668 , Train RMSE: 1643.527595427679
Epoch 24 , Loss: 166899.2838821411 , Train RMSE: 1637.3701327749504
Epoch 25 , Loss: 167794.1138534546 , Train RMSE: 1660.4218205944542
Epoch 26 , Loss: 167260.23722839355 , Train RMSE: 1647.481973010552
Epoch 27 , Loss: 166710.79375457764 , Train RMSE: 1634.6211101520812
Epoch 28 , Loss: 167203.5002670288 , Train RMSE: 1654.0482546818043
Epoch 29 , Loss: 166271.2137451172 , Train RMSE: 1639.1549541230377
Epoch 30 , Loss: 167306.1157836914 , Train RMSE: 1640.2659586806017
Epoch 31 , Loss: 170095.74098205566 , Train RMSE: 1677.8189338297168
Epoch 32 , Loss: 166237.38875579834 , Train RMSE: 1651.2442646047855
Epoch 33 , Loss: 165937.40279388428 , Train RMSE: 1643.0466390293093
Epoch 34 , Loss: 164556.62802886963 , Train RMSE: 1639.4390623958832
Epoch 35 , Loss: 166782.13034820557 , Train RMSE: 1664.1604722336508
Epoch 36 , Loss: 165270.7373046875 , Train RMSE: 1639.507669354187
Epoch 37 , Loss: 165119.56061553955 , Train RMSE: 1637.8174894453027
Epoch 38 , Loss: 163638.16541290283 , Train RMSE: 1624.373737054982
Epoch 39 , Loss: 163355.21212005615 , Train RMSE: 1622.596538119358
Epoch 40 , Loss: 163562.21920776367 , Train RMSE: 1614.5130795555854
Epoch 41 , Loss: 164130.51147460938 , Train RMSE: 1625.3372195699521
Epoch 42 , Loss: 164126.67768096924 , Train RMSE: 1626.9546727457698
Epoch 43 , Loss: 163404.92789459229 , Train RMSE: 1625.1260696194702
Epoch 44 , Loss: 164108.4793395996 , Train RMSE: 1636.6486355030556
Epoch 45 , Loss: 163690.93516540527 , Train RMSE: 1631.4860668633662
Epoch 46 , Loss: 163578.53261566162 , Train RMSE: 1624.815714308044
Epoch 47 , Loss: 161921.09252166748 , Train RMSE: 1614.2415601891578
Epoch 48 , Loss: 162383.43656158447 , Train RMSE: 1612.6065327864203
Epoch 49 , Loss: 161417.42123413086 , Train RMSE: 1607.3469165058007
Epoch 50 , Loss: 163114.0863647461 , Train RMSE: 1614.385211120933
Epoch 51 , Loss: 165762.85150909424 , Train RMSE: 1672.7199783395704
Epoch 52 , Loss: 166235.54334259033 , Train RMSE: 1673.4011352360667
Epoch 53 , Loss: 162985.77110290527 , Train RMSE: 1634.2913003683543
Epoch 54 , Loss: 164097.40912628174 , Train RMSE: 1627.9190529542386
Epoch 55 , Loss: 162854.71426391602 , Train RMSE: 1634.228332070691
Epoch 56 , Loss: 162419.21900177002 , Train RMSE: 1634.5223690932758
Epoch 57 , Loss: 161886.9716720581 , Train RMSE: 1618.8718285881457
Epoch 58 , Loss: 161340.36995697021 , Train RMSE: 1625.2974325480513
Epoch 59 , Loss: 162374.7239227295 , Train RMSE: 1641.1190712538082
Epoch 60 , Loss: 161118.1169128418 , Train RMSE: 1629.004611857885
Epoch 61 , Loss: 161401.034034729 , Train RMSE: 1626.8257822126927
Epoch 62 , Loss: 161319.60326385498 , Train RMSE: 1629.8822098874703
Epoch 63 , Loss: 161173.73920440674 , Train RMSE: 1621.5502830404282
Epoch 64 , Loss: 160493.0726699829 , Train RMSE: 1630.5208901629746
Epoch 65 , Loss: 160578.29216003418 , Train RMSE: 1628.0871495789158
Epoch 66 , Loss: 162786.51007080078 , Train RMSE: 1643.180056621578
Epoch 67 , Loss: 160523.6289138794 , Train RMSE: 1650.857860760464
Epoch 68 , Loss: 161907.01035308838 , Train RMSE: 1677.1496616898196
Epoch 69 , Loss: 158698.16955566406 , Train RMSE: 1615.6910711675077
Epoch 70 , Loss: 158700.2930984497 , Train RMSE: 1617.9911105306833
Epoch 71 , Loss: 157697.47261810303 , Train RMSE: 1610.1672273483275
Epoch 72 , Loss: 158709.93013000488 , Train RMSE: 1613.9950610595793
Epoch 73 , Loss: 157858.03875732422 , Train RMSE: 1615.664868486791
Epoch 74 , Loss: 157957.702003479 , Train RMSE: 1622.251401972293
Epoch 75 , Loss: 157349.69986724854 , Train RMSE: 1613.756264214139
Epoch 76 , Loss: 155861.78894805908 , Train RMSE: 1603.6875236916371
Epoch 77 , Loss: 164338.3147277832 , Train RMSE: 1646.80108924826
Epoch 78 , Loss: 162579.4017868042 , Train RMSE: 1642.2803758536313
Epoch 79 , Loss: 159949.2027053833 , Train RMSE: 1638.7643305399042
Epoch 80 , Loss: 159879.13317871094 , Train RMSE: 1634.55838598633
Epoch 81 , Loss: 158819.10279083252 , Train RMSE: 1629.713145574715
Epoch 82 , Loss: 158165.05877685547 , Train RMSE: 1617.2601056844728
Epoch 83 , Loss: 157427.2027206421 , Train RMSE: 1608.342595648461
Epoch 84 , Loss: 159177.1597518921 , Train RMSE: 1637.700855534519
Epoch 85 , Loss: 158872.18955230713 , Train RMSE: 1634.9882657022297
Epoch 86 , Loss: 157209.44925689697 , Train RMSE: 1614.4438000106245
Epoch 87 , Loss: 157665.47479248047 , Train RMSE: 1626.5896412506086
Epoch 88 , Loss: 157439.8485107422 , Train RMSE: 1611.7528612301255
Epoch 89 , Loss: 156704.91324615479 , Train RMSE: 1610.3615519818661
Epoch 90 , Loss: 156813.29695892334 , Train RMSE: 1621.321248204659
Epoch 91 , Loss: 158694.83532714844 , Train RMSE: 1655.7036544388054
Epoch 92 , Loss: 156110.58025360107 , Train RMSE: 1644.0272947624453
Epoch 93 , Loss: 156132.2738647461 , Train RMSE: 1644.0950873797053
Epoch 94 , Loss: 157766.9688949585 , Train RMSE: 1647.681111597217
Epoch 95 , Loss: 154725.19899749756 , Train RMSE: 1621.806750949488
Epoch 96 , Loss: 154662.10175323486 , Train RMSE: 1619.4379000338445
Epoch 97 , Loss: 155771.21617889404 , Train RMSE: 1623.8082570678048
Epoch 98 , Loss: 154467.49674987793 , Train RMSE: 1613.198128725157
Epoch 99 , Loss: 154203.7494506836 , Train RMSE: 1605.002162421331
Validation MSE: 484.4880880066243
SPLIT 5 :
Epoch 0 , Loss: 200761.75942230225 , Train RMSE: 1556.6820548420621
Epoch 1 , Loss: 188383.9383468628 , Train RMSE: 1549.9509835186873
Epoch 2 , Loss: 150726.4263253212 , Train RMSE: 1544.3663622750341
Epoch 3 , Loss: 151328.2592215538 , Train RMSE: 1536.9008517465638
Epoch 4 , Loss: 151393.01340007782 , Train RMSE: 1538.860930564612
Epoch 5 , Loss: 151172.95166444778 , Train RMSE: 1540.0054927201209
Epoch 6 , Loss: 151118.85581207275 , Train RMSE: 1541.2503908787985
Epoch 7 , Loss: 150769.94789218903 , Train RMSE: 1536.3292056402404
Epoch 8 , Loss: 150604.71154403687 , Train RMSE: 1538.2215413756712
Epoch 9 , Loss: 149166.28014278412 , Train RMSE: 1531.5436364718619
Epoch 10 , Loss: 149345.72271633148 , Train RMSE: 1525.3098641348065
Epoch 11 , Loss: 147082.09562778473 , Train RMSE: 1518.4377224083883
Epoch 12 , Loss: 147910.72353076935 , Train RMSE: 1522.4724965344878
Epoch 13 , Loss: 145852.53953266144 , Train RMSE: 1504.5292475622348
Epoch 14 , Loss: 144750.73809051514 , Train RMSE: 1504.6473124470592
Epoch 15 , Loss: 145933.38794612885 , Train RMSE: 1500.4658902772833
Epoch 16 , Loss: 146761.9013710022 , Train RMSE: 1493.8901183813334
Epoch 17 , Loss: 146527.0992155075 , Train RMSE: 1500.1944451907011
Epoch 18 , Loss: 146943.25439834595 , Train RMSE: 1504.9240437153503
Epoch 19 , Loss: 146101.30221366882 , Train RMSE: 1497.051098085746
Epoch 20 , Loss: 144424.39337158203 , Train RMSE: 1479.9425305489249
Epoch 21 , Loss: 146376.62587738037 , Train RMSE: 1504.9307670419958
Epoch 22 , Loss: 143719.3786277771 , Train RMSE: 1479.3116868106122
Epoch 23 , Loss: 148202.5830898285 , Train RMSE: 1496.6723077658035
Epoch 24 , Loss: 144346.90702056885 , Train RMSE: 1486.49204665092
Epoch 25 , Loss: 145755.8110513687 , Train RMSE: 1489.3759950136189
Epoch 26 , Loss: 143667.031686306 , Train RMSE: 1484.2439699193494
Epoch 27 , Loss: 150460.9432501793 , Train RMSE: 1531.178698652903
Epoch 28 , Loss: 147379.58095788956 , Train RMSE: 1513.3167780388656
Epoch 29 , Loss: 147822.4088997841 , Train RMSE: 1499.8895101525375
Epoch 30 , Loss: 145208.72267103195 , Train RMSE: 1474.4203927570497
Epoch 31 , Loss: 144695.79705142975 , Train RMSE: 1483.9433638760884
Epoch 32 , Loss: 145263.50650787354 , Train RMSE: 1477.487680699881
Epoch 33 , Loss: 145151.61273527145 , Train RMSE: 1483.6388094262866
Epoch 34 , Loss: 143769.24123954773 , Train RMSE: 1488.213595856374
Epoch 35 , Loss: 144760.72209882736 , Train RMSE: 1475.4941439024767
Epoch 36 , Loss: 145937.08255004883 , Train RMSE: 1460.9804679112513
Epoch 37 , Loss: 143441.66600131989 , Train RMSE: 1470.1679642551596
Epoch 38 , Loss: 145727.48076963425 , Train RMSE: 1486.5367044457576
Epoch 39 , Loss: 142878.9433813095 , Train RMSE: 1465.4320792152985
Epoch 40 , Loss: 146688.41315841675 , Train RMSE: 1493.8760856012689
Epoch 41 , Loss: 143413.73638486862 , Train RMSE: 1471.9155027348322
Epoch 42 , Loss: 142782.0533208847 , Train RMSE: 1465.950552292196
Epoch 43 , Loss: 142330.85575151443 , Train RMSE: 1461.9357157801571
Epoch 44 , Loss: 147921.33730506897 , Train RMSE: 1501.0051110559943
Epoch 45 , Loss: 142488.99150919914 , Train RMSE: 1476.045829489882
Epoch 46 , Loss: 145457.00381231308 , Train RMSE: 1499.4061338643996
Epoch 47 , Loss: 146602.77242994308 , Train RMSE: 1504.4148368460906
Epoch 48 , Loss: 145627.0125284195 , Train RMSE: 1497.3768554741155
Epoch 49 , Loss: 145747.54239320755 , Train RMSE: 1493.8811041779295
Epoch 50 , Loss: 145049.4379749298 , Train RMSE: 1494.8463544725885
Epoch 51 , Loss: 146707.64212560654 , Train RMSE: 1498.7184472696083
Epoch 52 , Loss: 145769.33247995377 , Train RMSE: 1489.7250110000614
Epoch 53 , Loss: 146710.05905008316 , Train RMSE: 1499.9111145536194
Epoch 54 , Loss: 145512.48141098022 , Train RMSE: 1491.7236051793725
Epoch 55 , Loss: 145442.99702501297 , Train RMSE: 1490.9881692124643
Epoch 56 , Loss: 145042.53610372543 , Train RMSE: 1485.9801017455059
Epoch 57 , Loss: 144984.43835496902 , Train RMSE: 1488.589302753649
Epoch 58 , Loss: 145012.6517715454 , Train RMSE: 1484.5966763961694
Epoch 59 , Loss: 144505.0905828476 , Train RMSE: 1481.9787232582341
Epoch 60 , Loss: 145374.66188812256 , Train RMSE: 1487.3830573828545
Epoch 61 , Loss: 145021.3064994812 , Train RMSE: 1481.248993889893
Epoch 62 , Loss: 144777.58164453506 , Train RMSE: 1482.99271219581
Epoch 63 , Loss: 144678.18285655975 , Train RMSE: 1484.7218376702124
Epoch 64 , Loss: 144752.12030792236 , Train RMSE: 1481.0508165498466
Epoch 65 , Loss: 145342.3269352913 , Train RMSE: 1493.8499828496042
Epoch 66 , Loss: 144791.1400899887 , Train RMSE: 1483.3570285871615
Epoch 67 , Loss: 144558.63522052765 , Train RMSE: 1483.9960079165076
Epoch 68 , Loss: 143272.16964960098 , Train RMSE: 1478.6797655858659
Epoch 69 , Loss: 144272.7544322014 , Train RMSE: 1488.063271300358
Epoch 70 , Loss: 144627.0834569931 , Train RMSE: 1489.8764872072047
Epoch 71 , Loss: 145185.60084295273 , Train RMSE: 1485.709043884526
Epoch 72 , Loss: 147110.78586912155 , Train RMSE: 1488.5255438506645
Epoch 73 , Loss: 145675.60663032532 , Train RMSE: 1474.8986068897557
Epoch 74 , Loss: 145481.84074354172 , Train RMSE: 1480.1183163373241
Epoch 75 , Loss: 146382.77929782867 , Train RMSE: 1492.511309383482
Epoch 76 , Loss: 145118.9232196808 , Train RMSE: 1478.7678515545638
Epoch 77 , Loss: 144278.9205942154 , Train RMSE: 1483.7118343712234
Epoch 78 , Loss: 144668.0098748207 , Train RMSE: 1482.628967403826
Epoch 79 , Loss: 144227.68866109848 , Train RMSE: 1481.2839939390485
Epoch 80 , Loss: 144675.12390947342 , Train RMSE: 1481.9689748615995
Epoch 81 , Loss: 144994.4046344757 , Train RMSE: 1484.9478156178627
Epoch 82 , Loss: 146619.5221643448 , Train RMSE: 1493.3566949656777
Epoch 83 , Loss: 145024.7155623436 , Train RMSE: 1483.9138911869734
Epoch 84 , Loss: 143833.3257408142 , Train RMSE: 1472.1762932870727
Epoch 85 , Loss: 144513.2700190544 , Train RMSE: 1479.2179180709536
Epoch 86 , Loss: 144089.50219345093 , Train RMSE: 1479.7417065301684
Epoch 87 , Loss: 145282.00221300125 , Train RMSE: 1484.847791881387
Epoch 88 , Loss: 144498.5345029831 , Train RMSE: 1476.1848734723324
Epoch 89 , Loss: 143962.4541592598 , Train RMSE: 1473.5866832291872
Epoch 90 , Loss: 143977.36490345 , Train RMSE: 1475.176231943484
Epoch 91 , Loss: 143737.96846413612 , Train RMSE: 1478.2121945238882
Epoch 92 , Loss: 144262.30963993073 , Train RMSE: 1492.0332697872464
Epoch 93 , Loss: 143577.64018440247 , Train RMSE: 1476.534803381017
Epoch 94 , Loss: 143480.69869947433 , Train RMSE: 1482.5622985023813
Epoch 95 , Loss: 143162.10299563408 , Train RMSE: 1477.206201330601
Epoch 96 , Loss: 142388.7717986107 , Train RMSE: 1474.7551574114652
Epoch 97 , Loss: 142956.6926665306 , Train RMSE: 1471.6878302975188
Epoch 98 , Loss: 142640.89346718788 , Train RMSE: 1473.108302740015
Epoch 99 , Loss: 142734.32826185226 , Train RMSE: 1473.2429150830794
Validation MSE: 279.9705893546578
lr: 0.01
Average validation MSE: 1127.0781959702113
*********** CURRENT LEARNING RATE : 0.1 ***********
SPLIT 1 :
Epoch 0 , Loss: 20917.774948120117 , Train RMSE: 420.5677069970247
Epoch 1 , Loss: 11462.689611434937 , Train RMSE: 312.1062029093793
Epoch 2 , Loss: 11169.378131866455 , Train RMSE: 311.71211431190306
Epoch 3 , Loss: 11081.380369186401 , Train RMSE: 309.8822196585954
Epoch 4 , Loss: 11230.677770614624 , Train RMSE: 314.8837926702386
Epoch 5 , Loss: 11080.347379684448 , Train RMSE: 312.85975369970146
Epoch 6 , Loss: 10778.252208709717 , Train RMSE: 310.1994197816228
Epoch 7 , Loss: 10566.777807235718 , Train RMSE: 304.6104580435741
Epoch 8 , Loss: 10613.06915473938 , Train RMSE: 309.08002154997126
Epoch 9 , Loss: 10564.80251121521 , Train RMSE: 308.6119516106459
Epoch 10 , Loss: 10347.941966056824 , Train RMSE: 305.6958673090712
Epoch 11 , Loss: 10379.960021972656 , Train RMSE: 302.8464984842918
Epoch 12 , Loss: 10256.993984222412 , Train RMSE: 295.86880152300347
Epoch 13 , Loss: 10366.475128173828 , Train RMSE: 300.70747317926725
Epoch 14 , Loss: 15073.412137031555 , Train RMSE: 356.1540143246918
Epoch 15 , Loss: 12506.687789916992 , Train RMSE: 318.6888730525189
Epoch 16 , Loss: 11283.891901016235 , Train RMSE: 315.16062741716985
Epoch 17 , Loss: 10712.17841720581 , Train RMSE: 308.0757367588447
Epoch 18 , Loss: 10523.385908126831 , Train RMSE: 305.49124283684114
Epoch 19 , Loss: 10263.783372879028 , Train RMSE: 305.81497440877564
Epoch 20 , Loss: 10268.310693740845 , Train RMSE: 295.5953057849366
Epoch 21 , Loss: 10183.623534202576 , Train RMSE: 295.45141889277744
Epoch 22 , Loss: 10328.774978637695 , Train RMSE: 296.01778052376227
Epoch 23 , Loss: 10145.78289604187 , Train RMSE: 293.8242335931716
Epoch 24 , Loss: 10068.735326766968 , Train RMSE: 293.3613782288163
Epoch 25 , Loss: 9912.471450805664 , Train RMSE: 291.27664038780193
Epoch 26 , Loss: 9736.063455581665 , Train RMSE: 289.87980646780267
Epoch 27 , Loss: 10108.161727905273 , Train RMSE: 294.76474919342854
Epoch 28 , Loss: 10049.508483886719 , Train RMSE: 293.43056947811044
Epoch 29 , Loss: 10058.551577568054 , Train RMSE: 293.0512413974849
Epoch 30 , Loss: 10026.44542503357 , Train RMSE: 291.878152758062
Epoch 31 , Loss: 9841.056503295898 , Train RMSE: 291.5202590527425
Epoch 32 , Loss: 9986.477558135986 , Train RMSE: 292.6592594601905
Epoch 33 , Loss: 9788.604011535645 , Train RMSE: 289.6930090812193
Epoch 34 , Loss: 9776.291654586792 , Train RMSE: 289.5561134899318
Epoch 35 , Loss: 9693.15979385376 , Train RMSE: 288.8095157764365
Epoch 36 , Loss: 9679.98438835144 , Train RMSE: 288.9985948563311
Epoch 37 , Loss: 9668.113592147827 , Train RMSE: 288.7411861648985
Epoch 38 , Loss: 9669.784999847412 , Train RMSE: 289.3508266936354
Epoch 39 , Loss: 9686.453298568726 , Train RMSE: 288.5495285798786
Epoch 40 , Loss: 9665.220865249634 , Train RMSE: 289.02414621368195
Epoch 41 , Loss: 9655.003618240356 , Train RMSE: 288.6383359912911
Epoch 42 , Loss: 9686.98549079895 , Train RMSE: 288.68051358006534
Epoch 43 , Loss: 9684.279439926147 , Train RMSE: 288.9566271194487
Epoch 44 , Loss: 9706.05394744873 , Train RMSE: 287.6675789982569
Epoch 45 , Loss: 9863.056894302368 , Train RMSE: 292.8687710958111
Epoch 46 , Loss: 9857.654438018799 , Train RMSE: 292.4754092936462
Epoch 47 , Loss: 9925.399698257446 , Train RMSE: 289.78108489798404
Epoch 48 , Loss: 10016.817240715027 , Train RMSE: 293.59158443863254
Epoch 49 , Loss: 9952.491598129272 , Train RMSE: 293.3143789607802
Epoch 50 , Loss: 9826.520603179932 , Train RMSE: 288.00836131361297
Epoch 51 , Loss: 9865.712911605835 , Train RMSE: 289.2124798849765
Epoch 52 , Loss: 9740.425189971924 , Train RMSE: 288.11181965885834
Epoch 53 , Loss: 9891.982164382935 , Train RMSE: 290.15460328225186
Epoch 54 , Loss: 9750.472368240356 , Train RMSE: 287.9018827776087
Epoch 55 , Loss: 9728.564437866211 , Train RMSE: 287.2119397796194
Epoch 56 , Loss: 9597.745586395264 , Train RMSE: 285.83052153563176
Epoch 57 , Loss: 9702.971759796143 , Train RMSE: 286.6866921853785
Epoch 58 , Loss: 9625.969931602478 , Train RMSE: 286.142143505715
Epoch 59 , Loss: 9628.678712844849 , Train RMSE: 285.4566325136091
Epoch 60 , Loss: 9640.120697021484 , Train RMSE: 285.3833912070446
Epoch 61 , Loss: 9649.942832946777 , Train RMSE: 285.5098468526016
Epoch 62 , Loss: 9766.711223602295 , Train RMSE: 285.4448351196244
Epoch 63 , Loss: 9877.569187164307 , Train RMSE: 284.36271947831375
Epoch 64 , Loss: 9671.810638427734 , Train RMSE: 281.5619193880684
Epoch 65 , Loss: 9598.653944015503 , Train RMSE: 280.48489223310213
Epoch 66 , Loss: 9824.45866394043 , Train RMSE: 281.6805250166716
Epoch 67 , Loss: 9757.030437469482 , Train RMSE: 279.9743709676268
Epoch 68 , Loss: 9712.576362609863 , Train RMSE: 278.21647404333027
Epoch 69 , Loss: 10015.837102890015 , Train RMSE: 284.90835083106623
Epoch 70 , Loss: 10058.896385192871 , Train RMSE: 287.1875313335428
Epoch 71 , Loss: 9961.971632003784 , Train RMSE: 287.1890195868855
Epoch 72 , Loss: 10026.885759353638 , Train RMSE: 285.1729377815489
Epoch 73 , Loss: 9831.194857597351 , Train RMSE: 281.17895138021873
Epoch 74 , Loss: 10030.627174377441 , Train RMSE: 287.7845171353473
Epoch 75 , Loss: 9920.724004745483 , Train RMSE: 282.5394461843387
Epoch 76 , Loss: 9949.42332649231 , Train RMSE: 283.67571446482475
Epoch 77 , Loss: 9737.112087249756 , Train RMSE: 281.3196886076679
Epoch 78 , Loss: 9737.200946807861 , Train RMSE: 280.0801135405457
Epoch 79 , Loss: 9648.987997055054 , Train RMSE: 280.4733715029553
Epoch 80 , Loss: 9704.98230934143 , Train RMSE: 280.7893339084469
Epoch 81 , Loss: 9675.64044380188 , Train RMSE: 279.42549590559224
Epoch 82 , Loss: 9619.740129470825 , Train RMSE: 278.48297845102223
Epoch 83 , Loss: 9704.594366073608 , Train RMSE: 278.5273969500525
Epoch 84 , Loss: 9677.481460571289 , Train RMSE: 277.33657823929815
Epoch 85 , Loss: 9803.115934371948 , Train RMSE: 280.8734203450622
Epoch 86 , Loss: 9838.461330413818 , Train RMSE: 281.1649500965034
Epoch 87 , Loss: 9679.020490646362 , Train RMSE: 278.59803344303924
Epoch 88 , Loss: 9626.752197265625 , Train RMSE: 277.6556757358584
Epoch 89 , Loss: 9808.922006607056 , Train RMSE: 278.86412517668487
Epoch 90 , Loss: 9586.971942901611 , Train RMSE: 276.3663661047732
Epoch 91 , Loss: 9586.444135665894 , Train RMSE: 276.96904823769734
Epoch 92 , Loss: 9616.830820083618 , Train RMSE: 278.0012592457868
Epoch 93 , Loss: 9561.860330581665 , Train RMSE: 276.6259464935041
Epoch 94 , Loss: 9654.399461746216 , Train RMSE: 277.3368149454042
Epoch 95 , Loss: 9608.172981262207 , Train RMSE: 276.43375496101083
Epoch 96 , Loss: 9543.5749168396 , Train RMSE: 278.96095913743017
Epoch 97 , Loss: 9447.925922393799 , Train RMSE: 276.4900856033524
Epoch 98 , Loss: 9795.101660728455 , Train RMSE: 278.19891755935805
Epoch 99 , Loss: 9701.150342941284 , Train RMSE: 279.58615833277764
Validation MSE: 3093.9797247277747
SPLIT 2 :
Epoch 0 , Loss: 91070.66991996765 , Train RMSE: 2213.120823739071
Epoch 1 , Loss: 83618.02127933502 , Train RMSE: 2208.1718985777625
Epoch 2 , Loss: 81481.87370109558 , Train RMSE: 2205.138877365548
Epoch 3 , Loss: 80500.89483737946 , Train RMSE: 2204.4117357693754
Epoch 4 , Loss: 80264.17115592957 , Train RMSE: 2202.701377967137
Epoch 5 , Loss: 79828.6758480072 , Train RMSE: 2201.295838818331
Epoch 6 , Loss: 80080.07267379761 , Train RMSE: 2202.5498843651994
Epoch 7 , Loss: 80578.43777561188 , Train RMSE: 2203.829339280689
Epoch 8 , Loss: 79828.82182073593 , Train RMSE: 2201.0945661760047
Epoch 9 , Loss: 79731.14007949829 , Train RMSE: 2200.6231662828955
Epoch 10 , Loss: 80186.7758846283 , Train RMSE: 2202.545171919543
Epoch 11 , Loss: 79999.09363937378 , Train RMSE: 2200.1640135535345
Epoch 12 , Loss: 79499.64983081818 , Train RMSE: 2199.3685011465705
Epoch 13 , Loss: 79838.46708869934 , Train RMSE: 2199.6752737046886
Epoch 14 , Loss: 79534.26223659515 , Train RMSE: 2199.5002484549714
Epoch 15 , Loss: 79564.1815404892 , Train RMSE: 2200.0527030733283
Epoch 16 , Loss: 79997.08107471466 , Train RMSE: 2202.9580516343453
Epoch 17 , Loss: 79967.98669624329 , Train RMSE: 2200.8575992356105
Epoch 18 , Loss: 79353.28551578522 , Train RMSE: 2199.207719735
Epoch 19 , Loss: 79406.06896686554 , Train RMSE: 2198.6659574680293
Epoch 20 , Loss: 79428.19214344025 , Train RMSE: 2198.7916104208234
Epoch 21 , Loss: 79489.874833107 , Train RMSE: 2198.5763056399105
Epoch 22 , Loss: 79311.22248363495 , Train RMSE: 2198.3591764612183
Epoch 23 , Loss: 79275.8368396759 , Train RMSE: 2198.3701694062256
Epoch 24 , Loss: 79154.99729919434 , Train RMSE: 2197.723042180955
Epoch 25 , Loss: 78861.68374061584 , Train RMSE: 2197.825464175705
Epoch 26 , Loss: 79117.54964733124 , Train RMSE: 2197.58609296269
Epoch 27 , Loss: 79171.16626262665 , Train RMSE: 2197.648040711301
Epoch 28 , Loss: 78944.35424137115 , Train RMSE: 2197.240296280799
Epoch 29 , Loss: 79720.82208061218 , Train RMSE: 2200.4532881590044
Epoch 30 , Loss: 79574.49718952179 , Train RMSE: 2199.478596521276
Epoch 31 , Loss: 79375.86703681946 , Train RMSE: 2198.70822909108
Epoch 32 , Loss: 79026.35611152649 , Train RMSE: 2197.3674191526716
Epoch 33 , Loss: 78995.9763431549 , Train RMSE: 2196.797283247883
Epoch 34 , Loss: 78944.16287231445 , Train RMSE: 2196.83157769589
Epoch 35 , Loss: 78919.81137275696 , Train RMSE: 2196.9418731424225
Epoch 36 , Loss: 78824.32132720947 , Train RMSE: 2196.538925770718
Epoch 37 , Loss: 78706.20929527283 , Train RMSE: 2196.469350538197
Epoch 38 , Loss: 78550.97065019608 , Train RMSE: 2196.6286478257693
Epoch 39 , Loss: 79087.24819850922 , Train RMSE: 2196.5567078191784
Epoch 40 , Loss: 78594.48157119751 , Train RMSE: 2196.26237500028
Epoch 41 , Loss: 78518.55679893494 , Train RMSE: 2195.84491766483
Epoch 42 , Loss: 78578.59195518494 , Train RMSE: 2195.9447652560243
Epoch 43 , Loss: 78560.10495471954 , Train RMSE: 2195.601024575077
Epoch 44 , Loss: 78538.79143285751 , Train RMSE: 2195.755581560633
Epoch 45 , Loss: 78833.5177230835 , Train RMSE: 2195.5880429199633
Epoch 46 , Loss: 78613.138463974 , Train RMSE: 2195.369388234872
Epoch 47 , Loss: 78525.62776374817 , Train RMSE: 2195.2748552197727
Epoch 48 , Loss: 78542.97611236572 , Train RMSE: 2195.1823115892726
Epoch 49 , Loss: 78712.09916687012 , Train RMSE: 2195.1778183632237
Epoch 50 , Loss: 78582.84887313843 , Train RMSE: 2194.966059803852
Epoch 51 , Loss: 78684.50124645233 , Train RMSE: 2195.247854227544
Epoch 52 , Loss: 79289.16347122192 , Train RMSE: 2195.3701555472167
Epoch 53 , Loss: 79311.77970027924 , Train RMSE: 2198.9980301154246
Epoch 54 , Loss: 79030.03386688232 , Train RMSE: 2192.5342275291537
Epoch 55 , Loss: 78226.8792514801 , Train RMSE: 2185.6116221690972
Epoch 56 , Loss: 78308.68999004364 , Train RMSE: 2179.228045925318
Epoch 57 , Loss: 77482.85591506958 , Train RMSE: 2159.287238299467
Epoch 58 , Loss: 76017.85664081573 , Train RMSE: 2101.3509480341418
Epoch 59 , Loss: 75917.73464107513 , Train RMSE: 2080.979108497058
Epoch 60 , Loss: 75849.33053684235 , Train RMSE: 2058.73861866725
Epoch 61 , Loss: 75051.62574481964 , Train RMSE: 2027.4455866471926
Epoch 62 , Loss: 75666.84098339081 , Train RMSE: 2019.8458047683037
Epoch 63 , Loss: 76162.94061946869 , Train RMSE: 2071.7448430029845
Epoch 64 , Loss: 76074.40830516815 , Train RMSE: 2068.0204997584915
Epoch 65 , Loss: 75492.87502288818 , Train RMSE: 2062.5556579089257
Epoch 66 , Loss: 75067.3827381134 , Train RMSE: 2029.1582149206608
Epoch 67 , Loss: 75685.6296415329 , Train RMSE: 2047.6683536712173
Epoch 68 , Loss: 75920.5316619873 , Train RMSE: 2062.589222064473
Epoch 69 , Loss: 76482.3086900711 , Train RMSE: 2077.4318765032217
Epoch 70 , Loss: 76891.12564563751 , Train RMSE: 2072.3123530367566
Epoch 71 , Loss: 76157.39959526062 , Train RMSE: 2082.6757346621566
Epoch 72 , Loss: 75123.8534412384 , Train RMSE: 2051.0670973412393
Epoch 73 , Loss: 76911.89911460876 , Train RMSE: 2093.857206243286
Epoch 74 , Loss: 76122.54975128174 , Train RMSE: 2077.7666044708976
Epoch 75 , Loss: 77116.333486557 , Train RMSE: 2086.8786236091532
Epoch 76 , Loss: 76399.78081989288 , Train RMSE: 2086.0119668497596
Epoch 77 , Loss: 76193.73405838013 , Train RMSE: 2070.9066771766015
Epoch 78 , Loss: 76074.4680223465 , Train RMSE: 2074.688674178489
Epoch 79 , Loss: 75924.05840206146 , Train RMSE: 2066.5787182566482
Epoch 80 , Loss: 76049.15074157715 , Train RMSE: 2067.225036531208
Epoch 81 , Loss: 76284.70001888275 , Train RMSE: 2070.6060240958795
Epoch 82 , Loss: 75430.34290504456 , Train RMSE: 2033.7010781481995
Epoch 83 , Loss: 76288.02730369568 , Train RMSE: 2033.2027404845599
Epoch 84 , Loss: 75386.28080368042 , Train RMSE: 2008.8128694477973
Epoch 85 , Loss: 76397.0642337799 , Train RMSE: 2054.022911839751
Epoch 86 , Loss: 76284.71172046661 , Train RMSE: 2050.6327124434188
Epoch 87 , Loss: 76414.9325428009 , Train RMSE: 2055.8535915260577
Epoch 88 , Loss: 76683.73726177216 , Train RMSE: 2056.5932447649484
Epoch 89 , Loss: 75941.59346008301 , Train RMSE: 2058.6272945382866
Epoch 90 , Loss: 76036.31006240845 , Train RMSE: 2047.5367150811128
Epoch 91 , Loss: 76125.88550376892 , Train RMSE: 2046.6548459286114
Epoch 92 , Loss: 75891.4223356247 , Train RMSE: 2037.4532776378078
Epoch 93 , Loss: 76148.3844909668 , Train RMSE: 2040.8733606579183
Epoch 94 , Loss: 75971.29865932465 , Train RMSE: 2038.572130038878
Epoch 95 , Loss: 75564.50365161896 , Train RMSE: 2025.689684291519
Epoch 96 , Loss: 75831.99816894531 , Train RMSE: 2033.633954645833
Epoch 97 , Loss: 76245.2852525711 , Train RMSE: 2058.743096771931
Epoch 98 , Loss: 75904.48940753937 , Train RMSE: 2049.867755667424
Epoch 99 , Loss: 75681.5436410904 , Train RMSE: 2038.989098636414
Validation MSE: 851.3031167600765
SPLIT 3 :
Epoch 0 , Loss: 127750.9834318161 , Train RMSE: 1900.3427738487344
Epoch 1 , Loss: 112365.40730285645 , Train RMSE: 1890.6837966234486
Epoch 2 , Loss: 110774.25145816803 , Train RMSE: 1889.1775614555693
Epoch 3 , Loss: 110731.54971218109 , Train RMSE: 1889.2964564082286
Epoch 4 , Loss: 111052.22537612915 , Train RMSE: 1889.4128007589086
Epoch 5 , Loss: 110741.05465126038 , Train RMSE: 1888.7960813863358
Epoch 6 , Loss: 111015.96981048584 , Train RMSE: 1888.8058911397422
Epoch 7 , Loss: 110888.43939495087 , Train RMSE: 1888.5926165677292
Epoch 8 , Loss: 111866.55029773712 , Train RMSE: 1894.6961752276977
Epoch 9 , Loss: 111526.27204322815 , Train RMSE: 1889.0316011987836
Epoch 10 , Loss: 110806.82676792145 , Train RMSE: 1887.6566548167868
Epoch 11 , Loss: 110710.71578788757 , Train RMSE: 1887.7411776843787
Epoch 12 , Loss: 110743.15710163116 , Train RMSE: 1887.6490225408224
Epoch 13 , Loss: 110603.58093833923 , Train RMSE: 1887.8536013446396
Epoch 14 , Loss: 110133.05205345154 , Train RMSE: 1886.9260973208218
Epoch 15 , Loss: 110830.70838451385 , Train RMSE: 1887.2009027164138
Epoch 16 , Loss: 111587.50476455688 , Train RMSE: 1887.9600851830517
Epoch 17 , Loss: 110413.02398395538 , Train RMSE: 1886.7361079075854
Epoch 18 , Loss: 110195.93394088745 , Train RMSE: 1886.3956518529585
Epoch 19 , Loss: 110495.91509723663 , Train RMSE: 1886.7192509391184
Epoch 20 , Loss: 110128.46838951111 , Train RMSE: 1886.1585269410132
Epoch 21 , Loss: 109921.02233600616 , Train RMSE: 1885.9554749531612
Epoch 22 , Loss: 109918.88159656525 , Train RMSE: 1886.728111629253
Epoch 23 , Loss: 109754.52938270569 , Train RMSE: 1885.2209751496196
Epoch 24 , Loss: 109922.091173172 , Train RMSE: 1885.2565966253007
Epoch 25 , Loss: 109923.99482154846 , Train RMSE: 1885.0056449585493
Epoch 26 , Loss: 109774.90415668488 , Train RMSE: 1884.5819551938544
Epoch 27 , Loss: 110139.77152633667 , Train RMSE: 1884.4298990761267
Epoch 28 , Loss: 109736.87585449219 , Train RMSE: 1884.25300961329
Epoch 29 , Loss: 109394.30563926697 , Train RMSE: 1883.5850025039197
Epoch 30 , Loss: 109548.26626586914 , Train RMSE: 1884.0449730974378
Epoch 31 , Loss: 109839.58185100555 , Train RMSE: 1884.17545444376
Epoch 32 , Loss: 109715.5097913742 , Train RMSE: 1883.3155453648656
Epoch 33 , Loss: 109485.84704971313 , Train RMSE: 1883.1103076551833
Epoch 34 , Loss: 109532.68430137634 , Train RMSE: 1883.2772912219773
Epoch 35 , Loss: 109332.21099185944 , Train RMSE: 1882.8438920298327
Epoch 36 , Loss: 109444.53399658203 , Train RMSE: 1882.3599561464887
Epoch 37 , Loss: 109158.60062789917 , Train RMSE: 1881.8904718081017
Epoch 38 , Loss: 109207.79626274109 , Train RMSE: 1881.7245899010506
Epoch 39 , Loss: 109342.19756221771 , Train RMSE: 1881.5175058879036
Epoch 40 , Loss: 109089.18039894104 , Train RMSE: 1880.9809881449341
Epoch 41 , Loss: 109158.84303760529 , Train RMSE: 1880.773001010526
Epoch 42 , Loss: 109080.04160261154 , Train RMSE: 1880.5789311695078
Epoch 43 , Loss: 109095.28658819199 , Train RMSE: 1880.3126353423047
Epoch 44 , Loss: 108936.07181978226 , Train RMSE: 1879.925276462328
Epoch 45 , Loss: 108817.76999664307 , Train RMSE: 1879.6006851384057
Epoch 46 , Loss: 109001.74654150009 , Train RMSE: 1879.442215182748
Epoch 47 , Loss: 109035.10995769501 , Train RMSE: 1879.497171359641
Epoch 48 , Loss: 109142.37367391586 , Train RMSE: 1878.950798929189
Epoch 49 , Loss: 108984.80559682846 , Train RMSE: 1878.9408596655169
Epoch 50 , Loss: 108939.99304294586 , Train RMSE: 1878.6084005935222
Epoch 51 , Loss: 109135.44103527069 , Train RMSE: 1878.3836904598238
Epoch 52 , Loss: 109000.95997810364 , Train RMSE: 1878.1369443330232
Epoch 53 , Loss: 109080.80594110489 , Train RMSE: 1877.9423881144485
Epoch 54 , Loss: 109376.90438461304 , Train RMSE: 1879.2883857592044
Epoch 55 , Loss: 109031.01831054688 , Train RMSE: 1878.3220199162474
Epoch 56 , Loss: 109148.81608867645 , Train RMSE: 1879.7151167139984
Epoch 57 , Loss: 109360.92395496368 , Train RMSE: 1877.5287985557
Epoch 58 , Loss: 109044.22749710083 , Train RMSE: 1877.1314605073558
Epoch 59 , Loss: 108968.6573395729 , Train RMSE: 1876.86900254954
Epoch 60 , Loss: 108784.40779924393 , Train RMSE: 1876.35201099312
Epoch 61 , Loss: 108809.99599266052 , Train RMSE: 1876.8024517589222
Epoch 62 , Loss: 108854.59953784943 , Train RMSE: 1875.9474897180298
Epoch 63 , Loss: 108446.04972267151 , Train RMSE: 1875.5357942455673
Epoch 64 , Loss: 108636.67486953735 , Train RMSE: 1875.5756920928673
Epoch 65 , Loss: 108703.42511081696 , Train RMSE: 1875.3990511626746
Epoch 66 , Loss: 108795.33005094528 , Train RMSE: 1875.2894656872254
Epoch 67 , Loss: 108750.81900119781 , Train RMSE: 1875.8494243593427
Epoch 68 , Loss: 108722.08740711212 , Train RMSE: 1874.9236752337954
Epoch 69 , Loss: 108675.4470834732 , Train RMSE: 1875.4066073191725
Epoch 70 , Loss: 108620.64500284195 , Train RMSE: 1874.0183937310924
Epoch 71 , Loss: 108594.97408533096 , Train RMSE: 1873.833610842172
Epoch 72 , Loss: 108744.2575082779 , Train RMSE: 1873.9583816198294
Epoch 73 , Loss: 108652.02199935913 , Train RMSE: 1873.4174884251347
Epoch 74 , Loss: 108569.04634094238 , Train RMSE: 1873.2769019682949
Epoch 75 , Loss: 108631.41253852844 , Train RMSE: 1873.3155667059923
Epoch 76 , Loss: 108924.70776081085 , Train RMSE: 1873.635996514128
Epoch 77 , Loss: 108686.41708660126 , Train RMSE: 1873.4047461083499
Epoch 78 , Loss: 108473.81508731842 , Train RMSE: 1872.97979609231
Epoch 79 , Loss: 109255.41822433472 , Train RMSE: 1874.7981987944074
Epoch 80 , Loss: 109232.12706565857 , Train RMSE: 1874.0119794817952
Epoch 81 , Loss: 108564.11864757538 , Train RMSE: 1872.3524641762106
Epoch 82 , Loss: 108714.95271492004 , Train RMSE: 1872.4485618093072
Epoch 83 , Loss: 108578.89013147354 , Train RMSE: 1872.2295708727927
Epoch 84 , Loss: 108737.07223939896 , Train RMSE: 1872.1794066259079
Epoch 85 , Loss: 108950.93133354187 , Train RMSE: 1871.9242310859386
Epoch 86 , Loss: 108627.51586389542 , Train RMSE: 1871.8177544891162
Epoch 87 , Loss: 108866.58578395844 , Train RMSE: 1871.701998503629
Epoch 88 , Loss: 108830.72422409058 , Train RMSE: 1871.6241012913529
Epoch 89 , Loss: 108420.57050704956 , Train RMSE: 1871.1434701404526
Epoch 90 , Loss: 108489.44223642349 , Train RMSE: 1870.8913549554043
Epoch 91 , Loss: 108505.52163410187 , Train RMSE: 1870.853690025723
Epoch 92 , Loss: 108584.26443815231 , Train RMSE: 1870.549103152534
Epoch 93 , Loss: 108514.22982978821 , Train RMSE: 1872.019731464195
Epoch 94 , Loss: 108692.1576833725 , Train RMSE: 1871.7427284814135
Epoch 95 , Loss: 108525.73517417908 , Train RMSE: 1871.4198991364055
Epoch 96 , Loss: 108482.50328826904 , Train RMSE: 1870.854440506363
Epoch 97 , Loss: 108376.42386817932 , Train RMSE: 1870.4243581802618
Epoch 98 , Loss: 108584.46891784668 , Train RMSE: 1870.3030411699945
Epoch 99 , Loss: 108426.92388296127 , Train RMSE: 1870.2433629712257
Validation MSE: 973.6815636168232
SPLIT 4 :
Epoch 0 , Loss: 154182.92692565918 , Train RMSE: 1716.475646304597
Epoch 1 , Loss: 144460.1906003952 , Train RMSE: 1710.735152254143
Epoch 2 , Loss: 143055.9467935562 , Train RMSE: 1706.5453986968948
Epoch 3 , Loss: 138669.36567878723 , Train RMSE: 1671.3311725920191
Epoch 4 , Loss: 137243.04672813416 , Train RMSE: 1643.5290906861837
Epoch 5 , Loss: 135850.13815498352 , Train RMSE: 1588.0847052894276
Epoch 6 , Loss: 138019.49679660797 , Train RMSE: 1625.2418389710242
Epoch 7 , Loss: 134518.2858171463 , Train RMSE: 1589.8512986784533
Epoch 8 , Loss: 133254.21072483063 , Train RMSE: 1553.8414318574057
Epoch 9 , Loss: 132509.10808944702 , Train RMSE: 1552.1087851268626
Epoch 10 , Loss: 133956.61100959778 , Train RMSE: 1608.5711827642742
Epoch 11 , Loss: 135073.3640499115 , Train RMSE: 1598.3953938627
Epoch 12 , Loss: 137012.63409996033 , Train RMSE: 1637.3525537580936
Epoch 13 , Loss: 135302.0084218979 , Train RMSE: 1641.6128849715533
Epoch 14 , Loss: 134765.1537542343 , Train RMSE: 1620.916183525347
Epoch 15 , Loss: 136255.4179353714 , Train RMSE: 1599.491136907531
Epoch 16 , Loss: 134427.21127605438 , Train RMSE: 1573.7713356434385
Epoch 17 , Loss: 132539.1710319519 , Train RMSE: 1548.1572270581278
Epoch 18 , Loss: 137086.9270772934 , Train RMSE: 1604.4973509317995
Epoch 19 , Loss: 136060.9539680481 , Train RMSE: 1595.8550121012256
Epoch 20 , Loss: 136630.66908454895 , Train RMSE: 1613.1854513534072
Epoch 21 , Loss: 137576.95251369476 , Train RMSE: 1626.8765336941804
Epoch 22 , Loss: 136273.2513408661 , Train RMSE: 1612.5099506508886
Epoch 23 , Loss: 137303.85158729553 , Train RMSE: 1617.2410172358652
Epoch 24 , Loss: 135221.44330883026 , Train RMSE: 1597.5572311825613
Epoch 25 , Loss: 136623.90905189514 , Train RMSE: 1588.9209710648108
Epoch 26 , Loss: 135072.61515712738 , Train RMSE: 1580.2410795276558
Epoch 27 , Loss: 135646.6924381256 , Train RMSE: 1585.2312490281206
Epoch 28 , Loss: 135662.13587093353 , Train RMSE: 1581.6246959596963
Epoch 29 , Loss: 136672.08658504486 , Train RMSE: 1614.315230221001
Epoch 30 , Loss: 135700.32126140594 , Train RMSE: 1597.3737556280234
Epoch 31 , Loss: 135957.56529426575 , Train RMSE: 1596.3679807737378
Epoch 32 , Loss: 134583.56739616394 , Train RMSE: 1587.0280637899189
Epoch 33 , Loss: 135326.827354908 , Train RMSE: 1591.5092524051054
Epoch 34 , Loss: 135153.64213085175 , Train RMSE: 1590.0666107652946
Epoch 35 , Loss: 135342.2779865265 , Train RMSE: 1591.7289568748172
Epoch 36 , Loss: 135400.0432252884 , Train RMSE: 1590.4244702240337
Epoch 37 , Loss: 134703.04191303253 , Train RMSE: 1601.050400215632
Epoch 38 , Loss: 134600.70218658447 , Train RMSE: 1600.04233336782
Epoch 39 , Loss: 132289.6436958313 , Train RMSE: 1577.6372579572665
Epoch 40 , Loss: 133466.6855688095 , Train RMSE: 1577.446884970752
Epoch 41 , Loss: 131655.8594083786 , Train RMSE: 1585.8161209585203
Epoch 42 , Loss: 136292.00493240356 , Train RMSE: 1660.6904347580864
Epoch 43 , Loss: 136652.46982002258 , Train RMSE: 1649.839339003298
Epoch 44 , Loss: 132930.31834506989 , Train RMSE: 1597.2847446080084
Epoch 45 , Loss: 133273.76307487488 , Train RMSE: 1592.7791420273231
Epoch 46 , Loss: 132579.80545520782 , Train RMSE: 1580.8767560381912
Epoch 47 , Loss: 133564.7254962921 , Train RMSE: 1581.3135346902707
Epoch 48 , Loss: 133045.3554649353 , Train RMSE: 1580.839293218191
Epoch 49 , Loss: 132694.45888137817 , Train RMSE: 1582.22851640972
Epoch 50 , Loss: 133083.2799062729 , Train RMSE: 1590.6058030577894
Epoch 51 , Loss: 133229.6532459259 , Train RMSE: 1588.8177407142225
Epoch 52 , Loss: 132959.54647541046 , Train RMSE: 1584.7946965776493
Epoch 53 , Loss: 134099.98691749573 , Train RMSE: 1611.9473256568804
Epoch 54 , Loss: 134143.85144519806 , Train RMSE: 1615.687746971473
Epoch 55 , Loss: 135524.4830236435 , Train RMSE: 1632.8304910564525
Epoch 56 , Loss: 134676.00113010406 , Train RMSE: 1624.667807362969
Epoch 57 , Loss: 136896.8270969391 , Train RMSE: 1665.5814198067062
Epoch 58 , Loss: 134066.7017660141 , Train RMSE: 1614.6209810329983
Epoch 59 , Loss: 134707.2498278618 , Train RMSE: 1631.130717272512
Epoch 60 , Loss: 134578.3506860733 , Train RMSE: 1628.3015170392314
Epoch 61 , Loss: 144793.24233055115 , Train RMSE: 1720.4462323143596
Epoch 62 , Loss: 142781.39167308807 , Train RMSE: 1713.366030007177
Epoch 63 , Loss: 143176.2244758606 , Train RMSE: 1716.0424753396303
Epoch 64 , Loss: 142674.04709243774 , Train RMSE: 1715.6117757118561
Epoch 65 , Loss: 140619.55202579498 , Train RMSE: 1703.1828854078033
Epoch 66 , Loss: 140301.33477210999 , Train RMSE: 1693.578449414302
Epoch 67 , Loss: 141380.97640895844 , Train RMSE: 1694.8181301254572
Epoch 68 , Loss: 141619.15636348724 , Train RMSE: 1698.3394430279654
Epoch 69 , Loss: 140384.5922794342 , Train RMSE: 1692.909207778386
Epoch 70 , Loss: 137402.02068042755 , Train RMSE: 1667.8541037459197
Epoch 71 , Loss: 135862.86074543 , Train RMSE: 1649.5742946461155
Epoch 72 , Loss: 134862.125207901 , Train RMSE: 1630.7914322028912
Epoch 73 , Loss: 134655.35104179382 , Train RMSE: 1626.5133385885285
Epoch 74 , Loss: 134539.80845165253 , Train RMSE: 1627.0078078385243
Epoch 75 , Loss: 134474.85406303406 , Train RMSE: 1624.286279441861
Epoch 76 , Loss: 136847.98800373077 , Train RMSE: 1657.3571684027834
Epoch 77 , Loss: 134648.3596830368 , Train RMSE: 1629.7124319113077
Epoch 78 , Loss: 134997.97085285187 , Train RMSE: 1629.4279690215253
Epoch 79 , Loss: 134835.80741882324 , Train RMSE: 1631.7272157005211
Epoch 80 , Loss: 135396.287191391 , Train RMSE: 1631.2660786069837
Epoch 81 , Loss: 136440.1431913376 , Train RMSE: 1630.1660566676446
Epoch 82 , Loss: 136291.9587173462 , Train RMSE: 1627.3570558316862
Epoch 83 , Loss: 135755.9950838089 , Train RMSE: 1622.1383712595882
Epoch 84 , Loss: 134253.2473640442 , Train RMSE: 1589.7546191713109
Epoch 85 , Loss: 135264.4176416397 , Train RMSE: 1599.3563063206439
Epoch 86 , Loss: 133766.0669670105 , Train RMSE: 1583.5418805333254
Epoch 87 , Loss: 133296.3004655838 , Train RMSE: 1578.02792482727
Epoch 88 , Loss: 132940.46115589142 , Train RMSE: 1584.7332339644597
Epoch 89 , Loss: 132818.35837554932 , Train RMSE: 1586.1668852578914
Epoch 90 , Loss: 133160.25147247314 , Train RMSE: 1578.741071451186
Epoch 91 , Loss: 132476.20457077026 , Train RMSE: 1570.8668506045058
Epoch 92 , Loss: 132967.11843299866 , Train RMSE: 1571.849095666212
Epoch 93 , Loss: 132984.84032392502 , Train RMSE: 1570.562985112782
Epoch 94 , Loss: 133002.9495382309 , Train RMSE: 1570.1056861755114
Epoch 95 , Loss: 133110.84215974808 , Train RMSE: 1574.8162553329744
Epoch 96 , Loss: 133076.8372335434 , Train RMSE: 1571.981063675
Epoch 97 , Loss: 132883.2351064682 , Train RMSE: 1569.6860757812378
Epoch 98 , Loss: 133199.34941196442 , Train RMSE: 1574.6460475755766
Epoch 99 , Loss: 133022.201795578 , Train RMSE: 1570.5672345422813
Validation MSE: 636.3565713744723
SPLIT 5 :
Epoch 0 , Loss: 165301.6389875412 , Train RMSE: 1543.6045835241357
Epoch 1 , Loss: 153166.98238658905 , Train RMSE: 1534.7936202270992
Epoch 2 , Loss: 151989.41808700562 , Train RMSE: 1532.6266998090387
Epoch 3 , Loss: 153382.3267364502 , Train RMSE: 1533.074910841225
Epoch 4 , Loss: 153351.26922035217 , Train RMSE: 1535.377689369667
Epoch 5 , Loss: 155170.8021183014 , Train RMSE: 1533.9744702151097
Epoch 6 , Loss: 151045.91622924805 , Train RMSE: 1532.014298199603
Epoch 7 , Loss: 150896.15311527252 , Train RMSE: 1531.9666217510169
Epoch 8 , Loss: 150174.8413362503 , Train RMSE: 1531.8419380646847
Epoch 9 , Loss: 150372.35407161713 , Train RMSE: 1531.2041119014718
Epoch 10 , Loss: 149956.16645622253 , Train RMSE: 1530.5417385325168
Epoch 11 , Loss: 149633.78826856613 , Train RMSE: 1530.118103559867
Epoch 12 , Loss: 149152.40175914764 , Train RMSE: 1529.6610333259032
Epoch 13 , Loss: 148959.48688602448 , Train RMSE: 1529.6019148032367
Epoch 14 , Loss: 149397.14738368988 , Train RMSE: 1529.772282507306
Epoch 15 , Loss: 149006.68344020844 , Train RMSE: 1528.5555773747053
Epoch 16 , Loss: 148927.68391990662 , Train RMSE: 1528.2191533286114
Epoch 17 , Loss: 148815.54065322876 , Train RMSE: 1527.9280823374788
Epoch 18 , Loss: 148790.70684862137 , Train RMSE: 1527.899479058005
Epoch 19 , Loss: 148652.75728273392 , Train RMSE: 1527.250211915522
Epoch 20 , Loss: 148544.22031402588 , Train RMSE: 1526.794042467843
Epoch 21 , Loss: 149072.3914527893 , Train RMSE: 1527.1944710962352
Epoch 22 , Loss: 148927.58607769012 , Train RMSE: 1526.5608041859214
Epoch 23 , Loss: 149099.86813545227 , Train RMSE: 1526.68907013944
Epoch 24 , Loss: 148641.6527452469 , Train RMSE: 1526.0823256841134
Epoch 25 , Loss: 148878.3501815796 , Train RMSE: 1525.6020135230897
Epoch 26 , Loss: 148265.57377052307 , Train RMSE: 1524.873043226054
Epoch 27 , Loss: 148302.09444332123 , Train RMSE: 1524.652070197878
Epoch 28 , Loss: 148078.20086812973 , Train RMSE: 1524.2461107430927
Epoch 29 , Loss: 148455.4166841507 , Train RMSE: 1524.636368577884
Epoch 30 , Loss: 148492.9834280014 , Train RMSE: 1524.196842128793
Epoch 31 , Loss: 147882.83511447906 , Train RMSE: 1523.4404958874725
Epoch 32 , Loss: 147944.02011585236 , Train RMSE: 1523.030574696776
Epoch 33 , Loss: 147642.73818588257 , Train RMSE: 1522.3969936326132
Epoch 34 , Loss: 148083.1738653183 , Train RMSE: 1522.6686516659593
Epoch 35 , Loss: 147791.3406944275 , Train RMSE: 1522.0475452225276
Epoch 36 , Loss: 147514.9809846878 , Train RMSE: 1521.507691018979
Epoch 37 , Loss: 147457.4848165512 , Train RMSE: 1521.629138425478
Epoch 38 , Loss: 147357.1605644226 , Train RMSE: 1520.9333704331855
Epoch 39 , Loss: 147153.9740304947 , Train RMSE: 1520.714012397631
Epoch 40 , Loss: 147592.41446781158 , Train RMSE: 1521.3937530492549
Epoch 41 , Loss: 147470.56601667404 , Train RMSE: 1520.2705376680915
Epoch 42 , Loss: 147291.23868465424 , Train RMSE: 1520.665414779685
Epoch 43 , Loss: 147184.83572769165 , Train RMSE: 1520.1869849107431
Epoch 44 , Loss: 147152.5245628357 , Train RMSE: 1519.4991647605327
Epoch 45 , Loss: 147092.558678627 , Train RMSE: 1519.5200471954954
Epoch 46 , Loss: 147000.8532857895 , Train RMSE: 1519.0780967829808
Epoch 47 , Loss: 146547.47847795486 , Train RMSE: 1518.385312424929
Epoch 48 , Loss: 146350.5971775055 , Train RMSE: 1517.8886401923796
Epoch 49 , Loss: 146319.89885663986 , Train RMSE: 1517.4378456689624
Epoch 50 , Loss: 146697.71240377426 , Train RMSE: 1517.0856948888168
Epoch 51 , Loss: 146536.07935857773 , Train RMSE: 1516.9502110788862
Epoch 52 , Loss: 146434.97538232803 , Train RMSE: 1516.7121648946334
Epoch 53 , Loss: 146515.19668006897 , Train RMSE: 1516.3704453022392
Epoch 54 , Loss: 146942.5150680542 , Train RMSE: 1516.694566120916
Epoch 55 , Loss: 146479.4742951393 , Train RMSE: 1515.7924739229452
Epoch 56 , Loss: 146866.9710559845 , Train RMSE: 1515.6519684076775
Epoch 57 , Loss: 146670.0061416626 , Train RMSE: 1515.6139118970614
Epoch 58 , Loss: 146694.90527963638 , Train RMSE: 1515.1910244716214
Epoch 59 , Loss: 146755.6991186142 , Train RMSE: 1515.1146808328633
Epoch 60 , Loss: 146339.49671268463 , Train RMSE: 1514.6573093308145
Epoch 61 , Loss: 145914.34958934784 , Train RMSE: 1514.2187832734983
Epoch 62 , Loss: 146195.50868034363 , Train RMSE: 1513.743900353134
Epoch 63 , Loss: 146684.77969932556 , Train RMSE: 1514.0843376755104
Epoch 64 , Loss: 146306.36186408997 , Train RMSE: 1513.5605540928866
Epoch 65 , Loss: 146319.14402747154 , Train RMSE: 1513.0131335012538
Epoch 66 , Loss: 146027.68187618256 , Train RMSE: 1512.6395922753848
Epoch 67 , Loss: 146348.2565727234 , Train RMSE: 1512.769536724488
Epoch 68 , Loss: 146228.3824596405 , Train RMSE: 1512.405832794121
Epoch 69 , Loss: 146406.96658706665 , Train RMSE: 1512.430042105054
Epoch 70 , Loss: 146269.5499343872 , Train RMSE: 1512.2482392035472
Epoch 71 , Loss: 146146.47835063934 , Train RMSE: 1511.7013183625268
Epoch 72 , Loss: 146258.88388061523 , Train RMSE: 1511.4751365594643
Epoch 73 , Loss: 145889.04223537445 , Train RMSE: 1511.1848377994381
Epoch 74 , Loss: 145659.52145671844 , Train RMSE: 1510.768763419305
Epoch 75 , Loss: 145555.0877122879 , Train RMSE: 1510.5201946337809
Epoch 76 , Loss: 145497.80428171158 , Train RMSE: 1509.9702545005841
Epoch 77 , Loss: 145914.2471165657 , Train RMSE: 1510.245092804744
Epoch 78 , Loss: 145941.3837146759 , Train RMSE: 1510.287243122594
Epoch 79 , Loss: 147391.61669540405 , Train RMSE: 1515.3153536752009
Epoch 80 , Loss: 147367.11747074127 , Train RMSE: 1513.6827500277393
Epoch 81 , Loss: 146417.38381576538 , Train RMSE: 1511.828835599568
Epoch 82 , Loss: 147310.31017279625 , Train RMSE: 1512.3749705634873
Epoch 83 , Loss: 146044.30707359314 , Train RMSE: 1512.3600879766338
Epoch 84 , Loss: 145684.59380340576 , Train RMSE: 1510.4094536560754
Epoch 85 , Loss: 145976.1914615631 , Train RMSE: 1510.7446198129776
Epoch 86 , Loss: 145811.79034805298 , Train RMSE: 1508.5870727754195
Epoch 87 , Loss: 145950.4449415207 , Train RMSE: 1508.4720345756987
Epoch 88 , Loss: 145913.97814941406 , Train RMSE: 1509.0108965077575
Epoch 89 , Loss: 146346.44422769547 , Train RMSE: 1510.0083960026384
Epoch 90 , Loss: 145798.00806951523 , Train RMSE: 1507.494204584877
Epoch 91 , Loss: 145738.14912366867 , Train RMSE: 1509.0672582007144
Epoch 92 , Loss: 145895.503323555 , Train RMSE: 1507.968267468605
Epoch 93 , Loss: 145978.94840717316 , Train RMSE: 1507.0653118802643
Epoch 94 , Loss: 146038.23714494705 , Train RMSE: 1509.0538106248796
Epoch 95 , Loss: 145481.9156551361 , Train RMSE: 1508.57927253465
Epoch 96 , Loss: 145921.5199432373 , Train RMSE: 1507.4964131511756
Epoch 97 , Loss: 146041.02560806274 , Train RMSE: 1507.662313251798
Epoch 98 , Loss: 146255.461892128 , Train RMSE: 1507.7284654766363
Epoch 99 , Loss: 145947.90774154663 , Train RMSE: 1506.7634813702632
Validation MSE: 289.218393851035
lr: 0.1
Average validation MSE: 1168.9078740660364
*********** CURRENT LEARNING RATE : 1 ***********
SPLIT 1 :
Epoch 0 , Loss: 24235.486366271973 , Train RMSE: 579.9832962933765
Epoch 1 , Loss: 20256.452339172363 , Train RMSE: 402.0470024748964
Epoch 2 , Loss: 19005.407188415527 , Train RMSE: 386.0562903299664
Epoch 3 , Loss: 17849.885772705078 , Train RMSE: 373.3969227375402
Epoch 4 , Loss: 16739.64178466797 , Train RMSE: 362.3203567928439
Epoch 5 , Loss: 15814.007591247559 , Train RMSE: 354.6920122504829
Epoch 6 , Loss: 15041.090576171875 , Train RMSE: 349.8807150757564
Epoch 7 , Loss: 14410.505638122559 , Train RMSE: 347.0039837211674
Epoch 8 , Loss: 13792.02507019043 , Train RMSE: 344.1940236631773
Epoch 9 , Loss: 13487.501434326172 , Train RMSE: 347.82034836493966
Epoch 10 , Loss: 13133.761074066162 , Train RMSE: 348.97996775336986
Epoch 11 , Loss: 12908.154209136963 , Train RMSE: 350.9808715678061
Epoch 12 , Loss: 12745.773971557617 , Train RMSE: 353.16542701939596
Epoch 13 , Loss: 12631.1097946167 , Train RMSE: 355.34689065317104
Epoch 14 , Loss: 12552.848718643188 , Train RMSE: 357.3868018025022
Epoch 15 , Loss: 12503.826124191284 , Train RMSE: 359.1583527733361
Epoch 16 , Loss: 12476.30115032196 , Train RMSE: 360.55365375756367
Epoch 17 , Loss: 12460.66750240326 , Train RMSE: 361.6224246844677
Epoch 18 , Loss: 12451.268798828125 , Train RMSE: 362.45555566859514
Epoch 19 , Loss: 12445.445735931396 , Train RMSE: 363.1092531869811
Epoch 20 , Loss: 12441.781385421753 , Train RMSE: 363.6217523624482
Epoch 21 , Loss: 12439.448600769043 , Train RMSE: 364.0223365266072
Epoch 22 , Loss: 12437.945503234863 , Train RMSE: 364.3343596895571
Epoch 23 , Loss: 12436.965044021606 , Train RMSE: 364.5766278105956
Epoch 24 , Loss: 12436.316701889038 , Train RMSE: 364.76425226288245
Epoch 25 , Loss: 12435.88189125061 , Train RMSE: 364.90925241612155
Epoch 26 , Loss: 12435.58477973938 , Train RMSE: 365.0211038978694
Epoch 27 , Loss: 12435.378747940063 , Train RMSE: 365.1072478417266
Epoch 28 , Loss: 12435.234338760376 , Train RMSE: 365.17354971356303
Epoch 29 , Loss: 12435.130416870117 , Train RMSE: 365.2245270212989
Epoch 30 , Loss: 12435.056095123291 , Train RMSE: 365.2636557920885
Epoch 31 , Loss: 12435.00145149231 , Train RMSE: 365.29371339207927
Epoch 32 , Loss: 12434.960681915283 , Train RMSE: 365.31677762742004
Epoch 33 , Loss: 12434.93137550354 , Train RMSE: 365.3344827786553
Epoch 34 , Loss: 12434.909154891968 , Train RMSE: 365.3480837370842
Epoch 35 , Loss: 12434.891767501831 , Train RMSE: 365.35850174185634
Epoch 36 , Loss: 12434.87963294983 , Train RMSE: 365.36651561405176
Epoch 37 , Loss: 12434.869607925415 , Train RMSE: 365.3726545550851
Epoch 38 , Loss: 12434.862312316895 , Train RMSE: 365.37734564596144
Epoch 39 , Loss: 12434.856893539429 , Train RMSE: 365.38094104479904
Epoch 40 , Loss: 12434.852800369263 , Train RMSE: 365.3837181087879
Epoch 41 , Loss: 12434.849060058594 , Train RMSE: 365.3858689433466
Epoch 42 , Loss: 12434.846906661987 , Train RMSE: 365.3875118191636
Epoch 43 , Loss: 12434.844497680664 , Train RMSE: 365.3887558451322
Epoch 44 , Loss: 12434.843389511108 , Train RMSE: 365.38971263888425
Epoch 45 , Loss: 12434.842100143433 , Train RMSE: 365.39043580714724
Epoch 46 , Loss: 12434.840885162354 , Train RMSE: 365.3909841155425
Epoch 47 , Loss: 12434.840196609497 , Train RMSE: 365.3913902849314
Epoch 48 , Loss: 12434.839796066284 , Train RMSE: 365.3916889704461
Epoch 49 , Loss: 12434.839931488037 , Train RMSE: 365.39190847676895
Epoch 50 , Loss: 12434.839778900146 , Train RMSE: 365.39208203204447
Epoch 51 , Loss: 12434.839214324951 , Train RMSE: 365.3922181645481
Epoch 52 , Loss: 12434.838947296143 , Train RMSE: 365.3923241699777
Epoch 53 , Loss: 12434.838836669922 , Train RMSE: 365.3924019034655
Epoch 54 , Loss: 12434.838623046875 , Train RMSE: 365.39246386855604
Epoch 55 , Loss: 12434.838562011719 , Train RMSE: 365.392511644847
Epoch 56 , Loss: 12434.8387966156 , Train RMSE: 365.3925507209029
Epoch 57 , Loss: 12434.838771820068 , Train RMSE: 365.39257935580315
Epoch 58 , Loss: 12434.838542938232 , Train RMSE: 365.39259449018533
Epoch 59 , Loss: 12434.838472366333 , Train RMSE: 365.3926042592161
Epoch 60 , Loss: 12434.838474273682 , Train RMSE: 365.3926140282492
Epoch 61 , Loss: 12434.838325500488 , Train RMSE: 365.3926237918732
Epoch 62 , Loss: 12434.838325500488 , Train RMSE: 365.3926237918732
Epoch 63 , Loss: 12434.838325500488 , Train RMSE: 365.3926237918732
Epoch 64 , Loss: 12434.838325500488 , Train RMSE: 365.3926237918732
Epoch 65 , Loss: 12434.838325500488 , Train RMSE: 365.3926237918732
Epoch 66 , Loss: 12434.838325500488 , Train RMSE: 365.3926237918732
Epoch 67 , Loss: 12434.838325500488 , Train RMSE: 365.3926237918732
Epoch 68 , Loss: 12434.838325500488 , Train RMSE: 365.3926237918732
Epoch 69 , Loss: 12434.838325500488 , Train RMSE: 365.3926237918732
Epoch 70 , Loss: 12434.838325500488 , Train RMSE: 365.3926237918732
Epoch 71 , Loss: 12434.838325500488 , Train RMSE: 365.3926237918732
Epoch 72 , Loss: 12434.838325500488 , Train RMSE: 365.3926237918732
Epoch 73 , Loss: 12434.838325500488 , Train RMSE: 365.3926237918732
Epoch 74 , Loss: 12434.838325500488 , Train RMSE: 365.3926237918732
Epoch 75 , Loss: 12434.838325500488 , Train RMSE: 365.3926237918732
Epoch 76 , Loss: 12434.838325500488 , Train RMSE: 365.3926237918732
Epoch 77 , Loss: 12434.838325500488 , Train RMSE: 365.3926237918732
Epoch 78 , Loss: 12434.838325500488 , Train RMSE: 365.3926237918732
Epoch 79 , Loss: 12434.838325500488 , Train RMSE: 365.3926237918732
Epoch 80 , Loss: 12434.838325500488 , Train RMSE: 365.3926237918732
Epoch 81 , Loss: 12434.838325500488 , Train RMSE: 365.3926237918732
Epoch 82 , Loss: 12434.838325500488 , Train RMSE: 365.3926237918732
Epoch 83 , Loss: 12434.838325500488 , Train RMSE: 365.3926237918732
Epoch 84 , Loss: 12434.838325500488 , Train RMSE: 365.3926237918732
Epoch 85 , Loss: 12434.838325500488 , Train RMSE: 365.3926237918732
Epoch 86 , Loss: 12434.838325500488 , Train RMSE: 365.3926237918732
Epoch 87 , Loss: 12434.838325500488 , Train RMSE: 365.3926237918732
Epoch 88 , Loss: 12434.838325500488 , Train RMSE: 365.3926237918732
Epoch 89 , Loss: 12434.838325500488 , Train RMSE: 365.3926237918732
Epoch 90 , Loss: 12434.838325500488 , Train RMSE: 365.3926237918732
Epoch 91 , Loss: 12434.838325500488 , Train RMSE: 365.3926237918732
Epoch 92 , Loss: 12434.838325500488 , Train RMSE: 365.3926237918732
Epoch 93 , Loss: 12434.838325500488 , Train RMSE: 365.3926237918732
Epoch 94 , Loss: 12434.838325500488 , Train RMSE: 365.3926237918732
Epoch 95 , Loss: 12434.838325500488 , Train RMSE: 365.3926237918732
Epoch 96 , Loss: 12434.838325500488 , Train RMSE: 365.3926237918732
Epoch 97 , Loss: 12434.838325500488 , Train RMSE: 365.3926237918732
Epoch 98 , Loss: 12434.838325500488 , Train RMSE: 365.3926237918732
Epoch 99 , Loss: 12434.838325500488 , Train RMSE: 365.3926237918732
Validation MSE: 3217.0081725595505
SPLIT 2 :
Epoch 0 , Loss: 99201.18235015869 , Train RMSE: 2227.1672280671487
Epoch 1 , Loss: 96081.76500701904 , Train RMSE: 2233.8830747942343
Epoch 2 , Loss: 93601.30715179443 , Train RMSE: 2240.918159203218
Epoch 3 , Loss: 91699.1238937378 , Train RMSE: 2247.856969739748
Epoch 4 , Loss: 90283.78225708008 , Train RMSE: 2254.415939328251
Epoch 5 , Loss: 89256.98134994507 , Train RMSE: 2260.3593605835067
Epoch 6 , Loss: 88546.11826324463 , Train RMSE: 2265.4782237935756
Epoch 7 , Loss: 88081.09364700317 , Train RMSE: 2269.7131726308407
Epoch 8 , Loss: 87789.92905807495 , Train RMSE: 2273.126655927638
Epoch 9 , Loss: 87612.34494018555 , Train RMSE: 2275.8187670384023
Epoch 10 , Loss: 87506.01712417603 , Train RMSE: 2277.893792593941
Epoch 11 , Loss: 87443.11848068237 , Train RMSE: 2279.459151424906
Epoch 12 , Loss: 87405.98086547852 , Train RMSE: 2280.6209026550023
Epoch 13 , Loss: 87383.87327194214 , Train RMSE: 2281.4737345763115
Epoch 14 , Loss: 87370.51034164429 , Train RMSE: 2282.095185057053
Epoch 15 , Loss: 87362.27534866333 , Train RMSE: 2282.545672571168
Epoch 16 , Loss: 87357.0885810852 , Train RMSE: 2282.8709049421764
Epoch 17 , Loss: 87353.75127029419 , Train RMSE: 2283.1051201536925
Epoch 18 , Loss: 87351.55449295044 , Train RMSE: 2283.2733936710224
Epoch 19 , Loss: 87350.08666992188 , Train RMSE: 2283.3941120401882
Epoch 20 , Loss: 87349.09202957153 , Train RMSE: 2283.4806264757167
Epoch 21 , Loss: 87348.40260314941 , Train RMSE: 2283.542587413819
Epoch 22 , Loss: 87347.92902755737 , Train RMSE: 2283.5868823061405
Epoch 23 , Loss: 87347.59482192993 , Train RMSE: 2283.6185366053637
Epoch 24 , Loss: 87347.36094665527 , Train RMSE: 2283.6411800246287
Epoch 25 , Loss: 87347.19631195068 , Train RMSE: 2283.6573700761946
Epoch 26 , Loss: 87347.07761764526 , Train RMSE: 2283.668964764609
Epoch 27 , Loss: 87346.99333572388 , Train RMSE: 2283.677222290457
Epoch 28 , Loss: 87346.93252182007 , Train RMSE: 2283.6831303274207
Epoch 29 , Loss: 87346.89167404175 , Train RMSE: 2283.6873587600735
Epoch 30 , Loss: 87346.86380004883 , Train RMSE: 2283.690407758763
Epoch 31 , Loss: 87346.84029006958 , Train RMSE: 2283.6925668921795
Epoch 32 , Loss: 87346.82325363159 , Train RMSE: 2283.694079815601
Epoch 33 , Loss: 87346.81438064575 , Train RMSE: 2283.6951620657087
Epoch 34 , Loss: 87346.8042602539 , Train RMSE: 2283.6959285204493
Epoch 35 , Loss: 87346.79981613159 , Train RMSE: 2283.6964811207567
Epoch 36 , Loss: 87346.79586791992 , Train RMSE: 2283.696884268995
Epoch 37 , Loss: 87346.79280471802 , Train RMSE: 2283.6971842934863
Epoch 38 , Loss: 87346.7898979187 , Train RMSE: 2283.6973995328112
Epoch 39 , Loss: 87346.7883720398 , Train RMSE: 2283.697533776909
Epoch 40 , Loss: 87346.78705596924 , Train RMSE: 2283.697629219547
Epoch 41 , Loss: 87346.78701019287 , Train RMSE: 2283.6976925693853
Epoch 42 , Loss: 87346.78623199463 , Train RMSE: 2283.6977327261598
Epoch 43 , Loss: 87346.78643417358 , Train RMSE: 2283.697764215255
Epoch 44 , Loss: 87346.78653335571 , Train RMSE: 2283.697787243932
Epoch 45 , Loss: 87346.78562164307 , Train RMSE: 2283.6978028924636
Epoch 46 , Loss: 87346.78606033325 , Train RMSE: 2283.697818540997
Epoch 47 , Loss: 87346.78575515747 , Train RMSE: 2283.6978341895324
Epoch 48 , Loss: 87346.78533935547 , Train RMSE: 2283.697842597384
Epoch 49 , Loss: 87346.78482055664 , Train RMSE: 2283.697850421653
Epoch 50 , Loss: 87346.78519439697 , Train RMSE: 2283.697858245922
Epoch 51 , Loss: 87346.7854423523 , Train RMSE: 2283.697866070191
Epoch 52 , Loss: 87346.78549957275 , Train RMSE: 2283.6978666974146
Epoch 53 , Loss: 87346.78549957275 , Train RMSE: 2283.6978666974146
Epoch 54 , Loss: 87346.78549957275 , Train RMSE: 2283.6978666974146
Epoch 55 , Loss: 87346.78549957275 , Train RMSE: 2283.6978666974146
Epoch 56 , Loss: 87346.78549957275 , Train RMSE: 2283.6978666974146
Epoch 57 , Loss: 87346.78549957275 , Train RMSE: 2283.6978666974146
Epoch 58 , Loss: 87346.78549957275 , Train RMSE: 2283.6978666974146
Epoch 59 , Loss: 87346.78549957275 , Train RMSE: 2283.6978666974146
Epoch 60 , Loss: 87346.78549957275 , Train RMSE: 2283.6978666974146
Epoch 61 , Loss: 87346.78549957275 , Train RMSE: 2283.6978666974146
Epoch 62 , Loss: 87346.78549957275 , Train RMSE: 2283.6978666974146
Epoch 63 , Loss: 87346.78549957275 , Train RMSE: 2283.6978666974146
Epoch 64 , Loss: 87346.78549957275 , Train RMSE: 2283.6978666974146
Epoch 65 , Loss: 87346.78549957275 , Train RMSE: 2283.6978666974146
Epoch 66 , Loss: 87346.78549957275 , Train RMSE: 2283.6978666974146
Epoch 67 , Loss: 87346.78549957275 , Train RMSE: 2283.6978666974146
Epoch 68 , Loss: 87346.78549957275 , Train RMSE: 2283.6978666974146
Epoch 69 , Loss: 87346.78549957275 , Train RMSE: 2283.6978666974146
Epoch 70 , Loss: 87346.78549957275 , Train RMSE: 2283.6978666974146
Epoch 71 , Loss: 87346.78549957275 , Train RMSE: 2283.6978666974146
Epoch 72 , Loss: 87346.78549957275 , Train RMSE: 2283.6978666974146
Epoch 73 , Loss: 87346.78549957275 , Train RMSE: 2283.6978666974146
Epoch 74 , Loss: 87346.78549957275 , Train RMSE: 2283.6978666974146
Epoch 75 , Loss: 87346.78549957275 , Train RMSE: 2283.6978666974146
Epoch 76 , Loss: 87346.78549957275 , Train RMSE: 2283.6978666974146
Epoch 77 , Loss: 87346.78549957275 , Train RMSE: 2283.6978666974146
Epoch 78 , Loss: 87346.78549957275 , Train RMSE: 2283.6978666974146
Epoch 79 , Loss: 87346.78549957275 , Train RMSE: 2283.6978666974146
Epoch 80 , Loss: 87346.78549957275 , Train RMSE: 2283.6978666974146
Epoch 81 , Loss: 87346.78549957275 , Train RMSE: 2283.6978666974146
Epoch 82 , Loss: 87346.78549957275 , Train RMSE: 2283.6978666974146
Epoch 83 , Loss: 87346.78549957275 , Train RMSE: 2283.6978666974146
Epoch 84 , Loss: 87346.78549957275 , Train RMSE: 2283.6978666974146
Epoch 85 , Loss: 87346.78549957275 , Train RMSE: 2283.6978666974146
Epoch 86 , Loss: 87346.78549957275 , Train RMSE: 2283.6978666974146
Epoch 87 , Loss: 87346.78549957275 , Train RMSE: 2283.6978666974146
Epoch 88 , Loss: 87346.78549957275 , Train RMSE: 2283.6978666974146
Epoch 89 , Loss: 87346.78549957275 , Train RMSE: 2283.6978666974146
Epoch 90 , Loss: 87346.78549957275 , Train RMSE: 2283.6978666974146
Epoch 91 , Loss: 87346.78549957275 , Train RMSE: 2283.6978666974146
Epoch 92 , Loss: 87346.78549957275 , Train RMSE: 2283.6978666974146
Epoch 93 , Loss: 87346.78549957275 , Train RMSE: 2283.6978666974146
Epoch 94 , Loss: 87346.78549957275 , Train RMSE: 2283.6978666974146
Epoch 95 , Loss: 87346.78549957275 , Train RMSE: 2283.6978666974146
Epoch 96 , Loss: 87346.78549957275 , Train RMSE: 2283.6978666974146
Epoch 97 , Loss: 87346.78549957275 , Train RMSE: 2283.6978666974146
Epoch 98 , Loss: 87346.78549957275 , Train RMSE: 2283.6978666974146
Epoch 99 , Loss: 87346.78549957275 , Train RMSE: 2283.6978666974146
Validation MSE: 1130.38599569927
SPLIT 3 :
Epoch 0 , Loss: 137157.4362411499 , Train RMSE: 1918.392887274745
Epoch 1 , Loss: 131442.375 , Train RMSE: 1925.9473777655674
Epoch 2 , Loss: 127883.52046203613 , Train RMSE: 1935.4214533022507
Epoch 3 , Loss: 125558.15369415283 , Train RMSE: 1944.248990784524
Epoch 4 , Loss: 124155.71323013306 , Train RMSE: 1951.7070845802018
Epoch 5 , Loss: 123378.13036727905 , Train RMSE: 1957.4479311958285
Epoch 6 , Loss: 122975.32355117798 , Train RMSE: 1961.61410092002
Epoch 7 , Loss: 122772.82904052734 , Train RMSE: 1964.5422514157397
Epoch 8 , Loss: 122671.61949157715 , Train RMSE: 1966.5474496081395
Epoch 9 , Loss: 122620.54926300049 , Train RMSE: 1967.8885351834322
Epoch 10 , Loss: 122594.2375164032 , Train RMSE: 1968.7687374807253
Epoch 11 , Loss: 122580.24715995789 , Train RMSE: 1969.338849619357
Epoch 12 , Loss: 122572.50256156921 , Train RMSE: 1969.70480410662
Epoch 13 , Loss: 122568.07320404053 , Train RMSE: 1969.9383742191167
Epoch 14 , Loss: 122565.45489311218 , Train RMSE: 1970.0868979393335
Epoch 15 , Loss: 122563.87489128113 , Train RMSE: 1970.1811089202547
Epoch 16 , Loss: 122562.8586139679 , Train RMSE: 1970.2407975697079
Epoch 17 , Loss: 122562.26800727844 , Train RMSE: 1970.2786493390427
Epoch 18 , Loss: 122561.79924964905 , Train RMSE: 1970.302525545279
Epoch 19 , Loss: 122561.46165847778 , Train RMSE: 1970.3176427536494
Epoch 20 , Loss: 122561.07691001892 , Train RMSE: 1970.3272832166094
Epoch 21 , Loss: 122560.99773597717 , Train RMSE: 1970.333618949853
Epoch 22 , Loss: 122561.52828979492 , Train RMSE: 1970.3379626857532
Epoch 23 , Loss: 122560.43552398682 , Train RMSE: 1970.3394114062714
Epoch 24 , Loss: 122561.20282554626 , Train RMSE: 1970.3416570595987
Epoch 25 , Loss: 122559.21777915955 , Train RMSE: 1970.3414855303913
Epoch 26 , Loss: 122560.70182037354 , Train RMSE: 1970.3440132179078
Epoch 27 , Loss: 119840.9573841095 , Train RMSE: 1900.0425241827115
Epoch 28 , Loss: 122554.92658805847 , Train RMSE: 1970.8331897199962
Epoch 29 , Loss: 122557.1186504364 , Train RMSE: 1970.6519651162241
Epoch 30 , Loss: 122558.62698936462 , Train RMSE: 1970.538059362633
Epoch 31 , Loss: 122559.62565803528 , Train RMSE: 1970.4662906708725
Epoch 32 , Loss: 122560.27226638794 , Train RMSE: 1970.4210384817918
Epoch 33 , Loss: 122560.69017791748 , Train RMSE: 1970.392490893958
Epoch 34 , Loss: 122560.95676994324 , Train RMSE: 1970.3744468887285
Epoch 35 , Loss: 122561.12285232544 , Train RMSE: 1970.3630129401217
Epoch 36 , Loss: 122561.23431968689 , Train RMSE: 1970.35582352101
Epoch 37 , Loss: 122561.29797554016 , Train RMSE: 1970.351302845359
Epoch 38 , Loss: 122561.34528160095 , Train RMSE: 1970.3484891496203
Epoch 39 , Loss: 122561.37031364441 , Train RMSE: 1970.3466813888203
Epoch 40 , Loss: 122561.38812446594 , Train RMSE: 1970.3455381680549
Epoch 41 , Loss: 122561.39938354492 , Train RMSE: 1970.3447888147384
Epoch 42 , Loss: 122561.40717506409 , Train RMSE: 1970.3443300086258
Epoch 43 , Loss: 122561.41074943542 , Train RMSE: 1970.3440580088775
Epoch 44 , Loss: 122561.41335487366 , Train RMSE: 1970.3438813549285
Epoch 45 , Loss: 122561.41485214233 , Train RMSE: 1970.3437636081446
Epoch 46 , Loss: 122561.41525650024 , Train RMSE: 1970.3436642131999
Epoch 47 , Loss: 122561.41608428955 , Train RMSE: 1970.3436048390727
Epoch 48 , Loss: 122561.41678619385 , Train RMSE: 1970.343583062686
Epoch 49 , Loss: 122561.4174861908 , Train RMSE: 1970.3435750554345
Epoch 50 , Loss: 122561.4182472229 , Train RMSE: 1970.343567029502
Epoch 51 , Loss: 122561.4185180664 , Train RMSE: 1970.3435590035695
Epoch 52 , Loss: 122561.4178943634 , Train RMSE: 1970.343550977638
Epoch 53 , Loss: 122561.41800117493 , Train RMSE: 1970.343551009548
Epoch 54 , Loss: 122561.41800117493 , Train RMSE: 1970.343551009548
Epoch 55 , Loss: 122561.41800117493 , Train RMSE: 1970.343551009548
Epoch 56 , Loss: 122561.41800117493 , Train RMSE: 1970.343551009548
Epoch 57 , Loss: 122561.41800117493 , Train RMSE: 1970.343551009548
Epoch 58 , Loss: 122561.41800117493 , Train RMSE: 1970.343551009548
Epoch 59 , Loss: 122561.41800117493 , Train RMSE: 1970.343551009548
Epoch 60 , Loss: 122561.41800117493 , Train RMSE: 1970.343551009548
Epoch 61 , Loss: 122561.41800117493 , Train RMSE: 1970.343551009548
Epoch 62 , Loss: 122561.41800117493 , Train RMSE: 1970.343551009548
Epoch 63 , Loss: 122561.41800117493 , Train RMSE: 1970.343551009548
Epoch 64 , Loss: 122561.41800117493 , Train RMSE: 1970.343551009548
Epoch 65 , Loss: 122561.41800117493 , Train RMSE: 1970.343551009548
Epoch 66 , Loss: 122561.41800117493 , Train RMSE: 1970.343551009548
Epoch 67 , Loss: 122561.41800117493 , Train RMSE: 1970.343551009548
Epoch 68 , Loss: 122561.41800117493 , Train RMSE: 1970.343551009548
Epoch 69 , Loss: 122561.41800117493 , Train RMSE: 1970.343551009548
Epoch 70 , Loss: 122561.41800117493 , Train RMSE: 1970.343551009548
Epoch 71 , Loss: 122561.41800117493 , Train RMSE: 1970.343551009548
Epoch 72 , Loss: 122561.41800117493 , Train RMSE: 1970.343551009548
Epoch 73 , Loss: 122561.41800117493 , Train RMSE: 1970.343551009548
Epoch 74 , Loss: 122561.41800117493 , Train RMSE: 1970.343551009548
Epoch 75 , Loss: 122561.41800117493 , Train RMSE: 1970.343551009548
Epoch 76 , Loss: 122561.41800117493 , Train RMSE: 1970.343551009548
Epoch 77 , Loss: 122561.41800117493 , Train RMSE: 1970.343551009548
Epoch 78 , Loss: 122561.41800117493 , Train RMSE: 1970.343551009548
Epoch 79 , Loss: 122561.41800117493 , Train RMSE: 1970.343551009548
Epoch 80 , Loss: 122561.41800117493 , Train RMSE: 1970.343551009548
Epoch 81 , Loss: 122561.41800117493 , Train RMSE: 1970.343551009548
Epoch 82 , Loss: 122561.41800117493 , Train RMSE: 1970.343551009548
Epoch 83 , Loss: 122561.41800117493 , Train RMSE: 1970.343551009548
Epoch 84 , Loss: 122561.41800117493 , Train RMSE: 1970.343551009548
Epoch 85 , Loss: 122561.41800117493 , Train RMSE: 1970.343551009548
Epoch 86 , Loss: 122561.41800117493 , Train RMSE: 1970.343551009548
Epoch 87 , Loss: 122561.41800117493 , Train RMSE: 1970.343551009548
Epoch 88 , Loss: 122561.41800117493 , Train RMSE: 1970.343551009548
Epoch 89 , Loss: 122561.41800117493 , Train RMSE: 1970.343551009548
Epoch 90 , Loss: 122561.41800117493 , Train RMSE: 1970.343551009548
Epoch 91 , Loss: 122561.41800117493 , Train RMSE: 1970.343551009548
Epoch 92 , Loss: 122561.41800117493 , Train RMSE: 1970.343551009548
Epoch 93 , Loss: 122561.41800117493 , Train RMSE: 1970.343551009548
Epoch 94 , Loss: 122561.41800117493 , Train RMSE: 1970.343551009548
Epoch 95 , Loss: 122561.41800117493 , Train RMSE: 1970.343551009548
Epoch 96 , Loss: 122561.41800117493 , Train RMSE: 1970.343551009548
Epoch 97 , Loss: 122561.41800117493 , Train RMSE: 1970.343551009548
Epoch 98 , Loss: 122561.41800117493 , Train RMSE: 1970.343551009548
Epoch 99 , Loss: 122561.41800117493 , Train RMSE: 1970.343551009548
Validation MSE: 1097.2308549301952
SPLIT 4 :
Epoch 0 , Loss: 173423.7348022461 , Train RMSE: 1737.1680769606905
Epoch 1 , Loss: 166401.8028564453 , Train RMSE: 1748.9215669213074
Epoch 2 , Loss: 162202.7147064209 , Train RMSE: 1760.5838475775226
Epoch 3 , Loss: 159993.73078918457 , Train RMSE: 1770.2414400864918
Epoch 4 , Loss: 158378.84259033203 , Train RMSE: 1774.2683729967093
Epoch 5 , Loss: 158985.8708305359 , Train RMSE: 1781.7672633751224
Epoch 6 , Loss: 158324.4240989685 , Train RMSE: 1784.160003552456
Epoch 7 , Loss: 158235.91749572754 , Train RMSE: 1785.9247011548619
Epoch 8 , Loss: 158905.534740448 , Train RMSE: 1788.792149575603
Epoch 9 , Loss: 158168.46074676514 , Train RMSE: 1787.877991104177
Epoch 10 , Loss: 158163.07460784912 , Train RMSE: 1788.0880592543085
Epoch 11 , Loss: 158160.1572113037 , Train RMSE: 1788.208149818616
Epoch 12 , Loss: 158158.54028320312 , Train RMSE: 1788.2767005012697
Epoch 13 , Loss: 158157.63902282715 , Train RMSE: 1788.3158381137825
Epoch 14 , Loss: 158157.1254119873 , Train RMSE: 1788.3380828951692
Epoch 15 , Loss: 158156.8380355835 , Train RMSE: 1788.3507752299204
Epoch 16 , Loss: 158156.67401885986 , Train RMSE: 1788.3579998928367
Epoch 17 , Loss: 158156.5819015503 , Train RMSE: 1788.3620949502194
Epoch 18 , Loss: 158156.53239440918 , Train RMSE: 1788.3644849926468
Epoch 19 , Loss: 158156.50004196167 , Train RMSE: 1788.3658692850124
Epoch 20 , Loss: 158156.4803237915 , Train RMSE: 1788.366595050347
Epoch 21 , Loss: 158156.46980667114 , Train RMSE: 1788.3670078436369
Epoch 22 , Loss: 158156.468875885 , Train RMSE: 1788.3672535739267
Epoch 23 , Loss: 158156.461769104 , Train RMSE: 1788.3674023227065
Epoch 24 , Loss: 158156.46098327637 , Train RMSE: 1788.3674671293625
Epoch 25 , Loss: 158156.45944595337 , Train RMSE: 1788.3675133761237
Epoch 26 , Loss: 158156.45945739746 , Train RMSE: 1788.3675220072828
Epoch 27 , Loss: 158156.4588356018 , Train RMSE: 1788.367529329215
Epoch 28 , Loss: 158156.4588356018 , Train RMSE: 1788.367529329215
Epoch 29 , Loss: 158156.4588356018 , Train RMSE: 1788.367529329215
Epoch 30 , Loss: 158156.4588356018 , Train RMSE: 1788.367529329215
Epoch 31 , Loss: 158156.4588356018 , Train RMSE: 1788.367529329215
Epoch 32 , Loss: 158156.4588356018 , Train RMSE: 1788.367529329215
Epoch 33 , Loss: 158156.4588356018 , Train RMSE: 1788.367529329215
Epoch 34 , Loss: 158156.4588356018 , Train RMSE: 1788.367529329215
Epoch 35 , Loss: 158156.4588356018 , Train RMSE: 1788.367529329215
Epoch 36 , Loss: 158156.4588356018 , Train RMSE: 1788.367529329215
Epoch 37 , Loss: 158156.4588356018 , Train RMSE: 1788.367529329215
Epoch 38 , Loss: 158156.4588356018 , Train RMSE: 1788.367529329215
Epoch 39 , Loss: 158156.4588356018 , Train RMSE: 1788.367529329215
Epoch 40 , Loss: 158156.4588356018 , Train RMSE: 1788.367529329215
Epoch 41 , Loss: 158156.4588356018 , Train RMSE: 1788.367529329215
Epoch 42 , Loss: 158156.4588356018 , Train RMSE: 1788.367529329215
Epoch 43 , Loss: 158156.4588356018 , Train RMSE: 1788.367529329215
Epoch 44 , Loss: 158156.4588356018 , Train RMSE: 1788.367529329215
Epoch 45 , Loss: 158156.4588356018 , Train RMSE: 1788.367529329215
Epoch 46 , Loss: 158156.4588356018 , Train RMSE: 1788.367529329215
Epoch 47 , Loss: 158156.4588356018 , Train RMSE: 1788.367529329215
Epoch 48 , Loss: 158156.4588356018 , Train RMSE: 1788.367529329215
Epoch 49 , Loss: 158156.4588356018 , Train RMSE: 1788.367529329215
Epoch 50 , Loss: 158156.4588356018 , Train RMSE: 1788.367529329215
Epoch 51 , Loss: 158156.4588356018 , Train RMSE: 1788.367529329215
Epoch 52 , Loss: 158156.4588356018 , Train RMSE: 1788.367529329215
Epoch 53 , Loss: 158156.4588356018 , Train RMSE: 1788.367529329215
Epoch 54 , Loss: 158156.4588356018 , Train RMSE: 1788.367529329215
Epoch 55 , Loss: 158156.4588356018 , Train RMSE: 1788.367529329215
Epoch 56 , Loss: 158156.4588356018 , Train RMSE: 1788.367529329215
Epoch 57 , Loss: 158156.4588356018 , Train RMSE: 1788.367529329215
Epoch 58 , Loss: 158156.4588356018 , Train RMSE: 1788.367529329215
Epoch 59 , Loss: 158156.4588356018 , Train RMSE: 1788.367529329215
Epoch 60 , Loss: 158156.4588356018 , Train RMSE: 1788.367529329215
Epoch 61 , Loss: 158156.4588356018 , Train RMSE: 1788.367529329215
Epoch 62 , Loss: 158156.4588356018 , Train RMSE: 1788.367529329215
Epoch 63 , Loss: 158156.4588356018 , Train RMSE: 1788.367529329215
Epoch 64 , Loss: 158156.4588356018 , Train RMSE: 1788.367529329215
Epoch 65 , Loss: 158156.4588356018 , Train RMSE: 1788.367529329215
Epoch 66 , Loss: 158156.4588356018 , Train RMSE: 1788.367529329215
Epoch 67 , Loss: 158156.4588356018 , Train RMSE: 1788.367529329215
Epoch 68 , Loss: 158156.4588356018 , Train RMSE: 1788.367529329215
Epoch 69 , Loss: 158156.4588356018 , Train RMSE: 1788.367529329215
Epoch 70 , Loss: 158156.4588356018 , Train RMSE: 1788.367529329215
Epoch 71 , Loss: 158156.4588356018 , Train RMSE: 1788.367529329215
Epoch 72 , Loss: 158156.4588356018 , Train RMSE: 1788.367529329215
Epoch 73 , Loss: 158156.4588356018 , Train RMSE: 1788.367529329215
Epoch 74 , Loss: 158156.4588356018 , Train RMSE: 1788.367529329215
Epoch 75 , Loss: 158156.4588356018 , Train RMSE: 1788.367529329215
Epoch 76 , Loss: 158156.4588356018 , Train RMSE: 1788.367529329215
Epoch 77 , Loss: 158156.4588356018 , Train RMSE: 1788.367529329215
Epoch 78 , Loss: 158156.4588356018 , Train RMSE: 1788.367529329215
Epoch 79 , Loss: 158156.4588356018 , Train RMSE: 1788.367529329215
Epoch 80 , Loss: 158156.4588356018 , Train RMSE: 1788.367529329215
Epoch 81 , Loss: 158156.4588356018 , Train RMSE: 1788.367529329215
Epoch 82 , Loss: 158156.4588356018 , Train RMSE: 1788.367529329215
Epoch 83 , Loss: 158156.4588356018 , Train RMSE: 1788.367529329215
Epoch 84 , Loss: 158156.4588356018 , Train RMSE: 1788.367529329215
Epoch 85 , Loss: 158156.4588356018 , Train RMSE: 1788.367529329215
Epoch 86 , Loss: 158156.4588356018 , Train RMSE: 1788.367529329215
Epoch 87 , Loss: 158156.4588356018 , Train RMSE: 1788.367529329215
Epoch 88 , Loss: 158156.4588356018 , Train RMSE: 1788.367529329215
Epoch 89 , Loss: 158156.4588356018 , Train RMSE: 1788.367529329215
Epoch 90 , Loss: 158156.4588356018 , Train RMSE: 1788.367529329215
Epoch 91 , Loss: 158156.4588356018 , Train RMSE: 1788.367529329215
Epoch 92 , Loss: 158156.4588356018 , Train RMSE: 1788.367529329215
Epoch 93 , Loss: 158156.4588356018 , Train RMSE: 1788.367529329215
Epoch 94 , Loss: 158156.4588356018 , Train RMSE: 1788.367529329215
Epoch 95 , Loss: 158156.4588356018 , Train RMSE: 1788.367529329215
Epoch 96 , Loss: 158156.4588356018 , Train RMSE: 1788.367529329215
Epoch 97 , Loss: 158156.4588356018 , Train RMSE: 1788.367529329215
Epoch 98 , Loss: 158156.4588356018 , Train RMSE: 1788.367529329215
Epoch 99 , Loss: 158156.4588356018 , Train RMSE: 1788.367529329215
Validation MSE: 209.21438371519102
SPLIT 5 :
Epoch 0 , Loss: 190937.74194335938 , Train RMSE: 1560.7174857701782
Epoch 1 , Loss: 176330.73056793213 , Train RMSE: 1575.0806208798392
Epoch 2 , Loss: 169681.4220237732 , Train RMSE: 1590.3734116842356
Epoch 3 , Loss: 167539.0131072998 , Train RMSE: 1600.805044708447
Epoch 4 , Loss: 167008.33428955078 , Train RMSE: 1606.499351805686
Epoch 5 , Loss: 166900.52795028687 , Train RMSE: 1609.280383032715
Epoch 6 , Loss: 166884.5757331848 , Train RMSE: 1610.535191733652
Epoch 7 , Loss: 166884.0710029602 , Train RMSE: 1611.0766314516723
Epoch 8 , Loss: 166885.02262115479 , Train RMSE: 1611.305188041782
Epoch 9 , Loss: 166885.6272277832 , Train RMSE: 1611.4007640141097
Epoch 10 , Loss: 166885.91590499878 , Train RMSE: 1611.4405362046195
Epoch 11 , Loss: 166886.03873443604 , Train RMSE: 1611.4570475478397
Epoch 12 , Loss: 166886.09368515015 , Train RMSE: 1611.4639287005898
Epoch 13 , Loss: 166886.11750793457 , Train RMSE: 1611.4667986999252
Epoch 14 , Loss: 166886.12603759766 , Train RMSE: 1611.467980074443
Epoch 15 , Loss: 166886.12999343872 , Train RMSE: 1611.468460879845
Epoch 16 , Loss: 166886.13124847412 , Train RMSE: 1611.4686586759624
Epoch 17 , Loss: 166886.13166809082 , Train RMSE: 1611.4687542167387
Epoch 18 , Loss: 166886.13189315796 , Train RMSE: 1611.4687851633114
Epoch 19 , Loss: 166886.1314277649 , Train RMSE: 1611.4687930480502
Epoch 20 , Loss: 166886.1308517456 , Train RMSE: 1611.4687935419101
Epoch 21 , Loss: 166886.1308517456 , Train RMSE: 1611.4687935419101
Epoch 22 , Loss: 166886.1308517456 , Train RMSE: 1611.4687935419101
Epoch 23 , Loss: 166886.1308517456 , Train RMSE: 1611.4687935419101
Epoch 24 , Loss: 166886.1308517456 , Train RMSE: 1611.4687935419101
Epoch 25 , Loss: 166886.1308517456 , Train RMSE: 1611.4687935419101
Epoch 26 , Loss: 166886.1308517456 , Train RMSE: 1611.4687935419101
Epoch 27 , Loss: 166886.1308517456 , Train RMSE: 1611.4687935419101
Epoch 28 , Loss: 166886.1308517456 , Train RMSE: 1611.4687935419101
Epoch 29 , Loss: 166886.1308517456 , Train RMSE: 1611.4687935419101
Epoch 30 , Loss: 166886.1308517456 , Train RMSE: 1611.4687935419101
Epoch 31 , Loss: 166886.1308517456 , Train RMSE: 1611.4687935419101
Epoch 32 , Loss: 166886.1308517456 , Train RMSE: 1611.4687935419101
Epoch 33 , Loss: 166886.1308517456 , Train RMSE: 1611.4687935419101
Epoch 34 , Loss: 166886.1308517456 , Train RMSE: 1611.4687935419101
Epoch 35 , Loss: 166886.1308517456 , Train RMSE: 1611.4687935419101
Epoch 36 , Loss: 166886.1308517456 , Train RMSE: 1611.4687935419101
Epoch 37 , Loss: 166886.1308517456 , Train RMSE: 1611.4687935419101
Epoch 38 , Loss: 166886.1308517456 , Train RMSE: 1611.4687935419101
Epoch 39 , Loss: 166886.1308517456 , Train RMSE: 1611.4687935419101
Epoch 40 , Loss: 166886.1308517456 , Train RMSE: 1611.4687935419101
Epoch 41 , Loss: 166886.1308517456 , Train RMSE: 1611.4687935419101
Epoch 42 , Loss: 166886.1308517456 , Train RMSE: 1611.4687935419101
Epoch 43 , Loss: 166886.1308517456 , Train RMSE: 1611.4687935419101
Epoch 44 , Loss: 166886.1308517456 , Train RMSE: 1611.4687935419101
Epoch 45 , Loss: 166886.1308517456 , Train RMSE: 1611.4687935419101
Epoch 46 , Loss: 166886.1308517456 , Train RMSE: 1611.4687935419101
Epoch 47 , Loss: 166886.1308517456 , Train RMSE: 1611.4687935419101
Epoch 48 , Loss: 166886.1308517456 , Train RMSE: 1611.4687935419101
Epoch 49 , Loss: 166886.1308517456 , Train RMSE: 1611.4687935419101
Epoch 50 , Loss: 166886.1308517456 , Train RMSE: 1611.4687935419101
Epoch 51 , Loss: 166886.1308517456 , Train RMSE: 1611.4687935419101
Epoch 52 , Loss: 166886.1308517456 , Train RMSE: 1611.4687935419101
Epoch 53 , Loss: 166886.1308517456 , Train RMSE: 1611.4687935419101
Epoch 54 , Loss: 166886.1308517456 , Train RMSE: 1611.4687935419101
Epoch 55 , Loss: 166886.1308517456 , Train RMSE: 1611.4687935419101
Epoch 56 , Loss: 166886.1308517456 , Train RMSE: 1611.4687935419101
Epoch 57 , Loss: 166886.1308517456 , Train RMSE: 1611.4687935419101
Epoch 58 , Loss: 166886.1308517456 , Train RMSE: 1611.4687935419101
Epoch 59 , Loss: 166886.1308517456 , Train RMSE: 1611.4687935419101
Epoch 60 , Loss: 166886.1308517456 , Train RMSE: 1611.4687935419101
Epoch 61 , Loss: 166886.1308517456 , Train RMSE: 1611.4687935419101
Epoch 62 , Loss: 166886.1308517456 , Train RMSE: 1611.4687935419101
Epoch 63 , Loss: 166886.1308517456 , Train RMSE: 1611.4687935419101
Epoch 64 , Loss: 166886.1308517456 , Train RMSE: 1611.4687935419101
Epoch 65 , Loss: 166886.1308517456 , Train RMSE: 1611.4687935419101
Epoch 66 , Loss: 166886.1308517456 , Train RMSE: 1611.4687935419101
Epoch 67 , Loss: 166886.1308517456 , Train RMSE: 1611.4687935419101
Epoch 68 , Loss: 166886.1308517456 , Train RMSE: 1611.4687935419101
Epoch 69 , Loss: 166886.1308517456 , Train RMSE: 1611.4687935419101
Epoch 70 , Loss: 166886.1308517456 , Train RMSE: 1611.4687935419101
Epoch 71 , Loss: 166886.1308517456 , Train RMSE: 1611.4687935419101
Epoch 72 , Loss: 166886.1308517456 , Train RMSE: 1611.4687935419101
Epoch 73 , Loss: 166886.1308517456 , Train RMSE: 1611.4687935419101
Epoch 74 , Loss: 166886.1308517456 , Train RMSE: 1611.4687935419101
Epoch 75 , Loss: 166886.1308517456 , Train RMSE: 1611.4687935419101
Epoch 76 , Loss: 166886.1308517456 , Train RMSE: 1611.4687935419101
Epoch 77 , Loss: 166886.1308517456 , Train RMSE: 1611.4687935419101
Epoch 78 , Loss: 166886.1308517456 , Train RMSE: 1611.4687935419101
Epoch 79 , Loss: 166886.1308517456 , Train RMSE: 1611.4687935419101
Epoch 80 , Loss: 166886.1308517456 , Train RMSE: 1611.4687935419101
Epoch 81 , Loss: 166886.1308517456 , Train RMSE: 1611.4687935419101
Epoch 82 , Loss: 166886.1308517456 , Train RMSE: 1611.4687935419101
Epoch 83 , Loss: 166886.1308517456 , Train RMSE: 1611.4687935419101
Epoch 84 , Loss: 166886.1308517456 , Train RMSE: 1611.4687935419101
Epoch 85 , Loss: 166886.1308517456 , Train RMSE: 1611.4687935419101
Epoch 86 , Loss: 166886.1308517456 , Train RMSE: 1611.4687935419101
Epoch 87 , Loss: 166886.1308517456 , Train RMSE: 1611.4687935419101
Epoch 88 , Loss: 166886.1308517456 , Train RMSE: 1611.4687935419101
Epoch 89 , Loss: 166886.1308517456 , Train RMSE: 1611.4687935419101
Epoch 90 , Loss: 166886.1308517456 , Train RMSE: 1611.4687935419101
Epoch 91 , Loss: 166886.1308517456 , Train RMSE: 1611.4687935419101
Epoch 92 , Loss: 166886.1308517456 , Train RMSE: 1611.4687935419101
Epoch 93 , Loss: 166886.1308517456 , Train RMSE: 1611.4687935419101
Epoch 94 , Loss: 166886.1308517456 , Train RMSE: 1611.4687935419101
Epoch 95 , Loss: 166886.1308517456 , Train RMSE: 1611.4687935419101
Epoch 96 , Loss: 166886.1308517456 , Train RMSE: 1611.4687935419101
Epoch 97 , Loss: 166886.1308517456 , Train RMSE: 1611.4687935419101
Epoch 98 , Loss: 166886.1308517456 , Train RMSE: 1611.4687935419101
Epoch 99 , Loss: 166886.1308517456 , Train RMSE: 1611.4687935419101
Validation MSE: 343.4550871671335
lr: 1
Average validation MSE: 1199.4588988142682
Train the model with all counties data and test with all counties data
#Train the model with all data
# Train a final LSTM on the full training set (no CV split), using the same
# hyperparameters as the cross-validation runs above.
cv = 5
EPOCHS = 100
lr = 0.001
HIDDEN_DIM = 4

# Load the training data and mean-center every feature/target column.
train_data = pd.read_csv("training_data.csv", dtype=object, index_col=False)
train_data['time'] = pd.to_datetime(train_data['time'])
headers = ['dailyCases_chng_t-1', 'dailyCases_chng_t-2',
           'outpatient_cli_t-1', 'outpatient_cli_t-2', 'hospitalAdm_claim_t-1',
           'hospitalAdm_claim_t-2', 'googleSym_sum_t-1', 'googleSym_sum_t-2',
           'doctorVisits_t-1', 'doctorVisits_t-2', 'dailyCases_t']
for column_name in headers:
    train_data[column_name] = train_data[column_name].astype(float)
    # NOTE(review): each split/file is centered with its *own* column means
    # (the test cell below does the same) — confirm this is intended rather
    # than reusing the training means.
    train_data[column_name] = train_data[column_name] - train_data[column_name].mean()

y = train_data["dailyCases_t"]
y = np.array(y, dtype=np.float32)
X = train_data.drop(columns=['dailyCases_t', 'time', 'fips'])
X = np.array(X, dtype=np.float32)
# Reshape to (samples, seq_len=1, features) for the LSTM.
X = X.reshape(X.shape[0], 1, X.shape[1])

LEARNING_RATE = lr
training_set = TensorDataset(torch.from_numpy(X),
                             torch.from_numpy(y))
training_dataloader = torch.utils.data.DataLoader(training_set, batch_size=16, shuffle=False)
model = LSTM(input_dim=10, hidden_dim=HIDDEN_DIM, n_layers=1)
# optimizer = torch.optim.Adam(model.parameters(), lr=LEARNING_RATE)
optimizer = torch.optim.SGD(model.parameters(), lr=LEARNING_RATE)
loss_fn = nn.MSELoss()

model.train()
train_MSEs = []  # per-epoch train RMSE (name kept: the plotting cell below uses it)
for epoch in range(EPOCHS):
    epoch_loss = 0.0
    # Collect per-batch outputs and concatenate ONCE per epoch instead of
    # re-concatenating numpy arrays every batch (which is O(n^2) overall).
    pred_chunks = []
    truth_chunks = []
    for inputs, labels in training_dataloader:
        optimizer.zero_grad()
        output = model(inputs)
        # Optimize per-batch RMSE (sqrt of MSELoss).
        loss = torch.sqrt(loss_fn(output, torch.reshape(labels, output.shape)))
        loss.backward()
        optimizer.step()
        epoch_loss += loss.item()
        pred_chunks.append(torch.flatten(output).detach().numpy())
        truth_chunks.append(labels.detach().numpy())
    train_prediction = np.concatenate(pred_chunks)
    train_ground_truth = np.concatenate(truth_chunks)
    # Despite the historical variable name, this value is the train RMSE.
    train_mse = np.sqrt(mean_squared_error(train_prediction, train_ground_truth))
    train_MSEs.append(train_mse)
    print("Epoch", epoch, ", Loss:", epoch_loss, ", Train RMSE:", train_mse)
# Evaluate the trained model on the held-out test set (all counties at once).
test_data = pd.read_csv("test_data.csv", dtype=object, index_col=False)
# Mean of the raw target, used to shift the centered series back for plotting.
mean_dailyCases = test_data['dailyCases_t'].astype(float).mean()
for column_name in headers:
    test_data[column_name] = test_data[column_name].astype(float)
    # NOTE(review): centered with the test set's own means, not the training
    # means — confirm this matches the intended evaluation protocol.
    test_data[column_name] = test_data[column_name] - test_data[column_name].mean()

y_test = test_data["dailyCases_t"]
y_test = np.array(y_test, dtype=np.float32)
X_test = test_data.drop(columns=['dailyCases_t', 'time', 'fips'])
X_test = np.array(X_test, dtype=np.float32)
# (samples, seq_len=1, features), mirroring the training reshape.
X_test = X_test.reshape(X_test.shape[0], 1, X_test.shape[1])
test_set = TensorDataset(torch.from_numpy(X_test),
                         torch.from_numpy(y_test))
test_dataloader = torch.utils.data.DataLoader(test_set, batch_size=16, shuffle=False)

model.eval()
# Accumulate per-batch arrays and concatenate once (avoids O(n^2) growth).
pred_chunks = []
truth_chunks = []
with torch.no_grad():  # inference only; equivalent to set_grad_enabled(False)
    for inputs, labels in test_dataloader:
        output = model(inputs)
        pred_chunks.append(torch.flatten(output).detach().numpy())
        truth_chunks.append(labels.detach().numpy())
test_prediction = np.concatenate(pred_chunks)
test_ground_truth = np.concatenate(truth_chunks)
# RMSE (the printed label is correct; the variable name is historical).
test_mse = np.sqrt(mean_squared_error(test_prediction, test_ground_truth))
print("Test RMSE: ", test_mse)

# Plot centered values shifted back by the raw target mean.
plt.plot(np.arange(len(test_ground_truth)), test_ground_truth + mean_dailyCases, label="ground truth")
plt.plot(np.arange(len(test_prediction)), test_prediction + mean_dailyCases, label="prediction",)
plt.legend()
plt.title("Predict COVID19 cases 9/1/2021-10/1/2021 for 15 counties")
# plt.xlabel("Day")
plt.ylabel("Daily Cases")
plt.savefig("lstm_test_prediction.png")
plt.show()
Epoch 0 , Loss: 225610.56871795654 , Train RMSE: 1433.0296948814046
Epoch 1 , Loss: 225547.68769073486 , Train RMSE: 1433.022587415577
Epoch 2 , Loss: 225470.9746170044 , Train RMSE: 1433.0103488916436
Epoch 3 , Loss: 225370.44682312012 , Train RMSE: 1432.9890076792683
Epoch 4 , Loss: 225224.93056488037 , Train RMSE: 1432.9491128835057
Epoch 5 , Loss: 224990.0918045044 , Train RMSE: 1432.8688197172567
Epoch 6 , Loss: 224564.1753768921 , Train RMSE: 1432.690856202207
Epoch 7 , Loss: 223722.65032958984 , Train RMSE: 1432.2817970108756
Epoch 8 , Loss: 222006.21600341797 , Train RMSE: 1431.3864655494112
Epoch 9 , Loss: 218508.50887298584 , Train RMSE: 1429.5643891954967
Epoch 10 , Loss: 211482.98948669434 , Train RMSE: 1426.1970288809707
Epoch 11 , Loss: 198051.72396087646 , Train RMSE: 1421.2191477363617
Epoch 12 , Loss: 177128.5846824646 , Train RMSE: 1418.0942459106
Epoch 13 , Loss: 164468.40920639038 , Train RMSE: 1422.9167778446708
Epoch 14 , Loss: 162475.31298160553 , Train RMSE: 1421.2973850236579
Epoch 15 , Loss: 161736.56945228577 , Train RMSE: 1419.1080412729932
Epoch 16 , Loss: 161767.3394522667 , Train RMSE: 1414.802322395599
Epoch 17 , Loss: 159592.77378463745 , Train RMSE: 1411.058078541616
Epoch 18 , Loss: 159256.46481275558 , Train RMSE: 1409.8758234408997
Epoch 19 , Loss: 158573.11807394028 , Train RMSE: 1409.2311277440072
Epoch 20 , Loss: 158624.31598186493 , Train RMSE: 1409.7312330332375
Epoch 21 , Loss: 160630.764585495 , Train RMSE: 1409.6848946154826
Epoch 22 , Loss: 160193.59286785126 , Train RMSE: 1415.0475656234796
Epoch 23 , Loss: 159234.4496717453 , Train RMSE: 1410.0690422450707
Epoch 24 , Loss: 158721.14774274826 , Train RMSE: 1410.8295504322432
Epoch 25 , Loss: 158268.23161888123 , Train RMSE: 1410.0918485623747
Epoch 26 , Loss: 158373.82094812393 , Train RMSE: 1410.1508297409591
Epoch 27 , Loss: 158150.12008333206 , Train RMSE: 1409.6110058354582
Epoch 28 , Loss: 158082.3663663864 , Train RMSE: 1408.5929866795793
Epoch 29 , Loss: 157549.21115112305 , Train RMSE: 1408.2251887850202
Epoch 30 , Loss: 159593.82698440552 , Train RMSE: 1409.9128719813375
Epoch 31 , Loss: 160470.98340511322 , Train RMSE: 1420.5441219498045
Epoch 32 , Loss: 158249.98065662384 , Train RMSE: 1411.0549104563004
Epoch 33 , Loss: 158031.29702186584 , Train RMSE: 1409.8130267036297
Epoch 34 , Loss: 157839.07623291016 , Train RMSE: 1411.8984479471658
Epoch 35 , Loss: 157468.46301555634 , Train RMSE: 1411.2878523101795
Epoch 36 , Loss: 157279.2987394333 , Train RMSE: 1410.5736116272055
Epoch 37 , Loss: 157537.01859283447 , Train RMSE: 1409.1304336582834
Epoch 38 , Loss: 158024.84571361542 , Train RMSE: 1409.6642140889933
Epoch 39 , Loss: 157791.8976392746 , Train RMSE: 1406.4718656271036
Epoch 40 , Loss: 157139.85357570648 , Train RMSE: 1406.2813493614904
Epoch 41 , Loss: 157148.9574213028 , Train RMSE: 1406.361398585373
Epoch 42 , Loss: 157499.53990840912 , Train RMSE: 1405.9486789494836
Epoch 43 , Loss: 157251.13499832153 , Train RMSE: 1404.9474550816167
Epoch 44 , Loss: 157114.54120731354 , Train RMSE: 1405.3447196437703
Epoch 45 , Loss: 157538.54975032806 , Train RMSE: 1406.701238397438
Epoch 46 , Loss: 157534.0147447586 , Train RMSE: 1406.5034743441515
Epoch 47 , Loss: 157611.0837879181 , Train RMSE: 1406.5083725438135
Epoch 48 , Loss: 157702.06774044037 , Train RMSE: 1405.5745997279253
Epoch 49 , Loss: 157413.08296108246 , Train RMSE: 1405.8562521636522
Epoch 50 , Loss: 157351.37273406982 , Train RMSE: 1406.0489637639726
Epoch 51 , Loss: 157450.34379673004 , Train RMSE: 1405.7378537950963
Epoch 52 , Loss: 157283.59987068176 , Train RMSE: 1405.372546223269
Epoch 53 , Loss: 157059.25930023193 , Train RMSE: 1405.0149458429814
Epoch 54 , Loss: 156865.33944177628 , Train RMSE: 1404.0000395656934
Epoch 55 , Loss: 157065.51685714722 , Train RMSE: 1404.7008637578672
Epoch 56 , Loss: 156791.1568055153 , Train RMSE: 1404.717490625405
Epoch 57 , Loss: 157140.03999567032 , Train RMSE: 1403.8769300486144
Epoch 58 , Loss: 156862.97569608688 , Train RMSE: 1403.844280154902
Epoch 59 , Loss: 156576.62638902664 , Train RMSE: 1403.6256282213446
Epoch 60 , Loss: 156490.22493314743 , Train RMSE: 1403.6296235199165
Epoch 61 , Loss: 156599.2053117752 , Train RMSE: 1403.9720679070222
Epoch 62 , Loss: 156449.47619485855 , Train RMSE: 1403.7460311573611
Epoch 63 , Loss: 156390.41976165771 , Train RMSE: 1403.84843471877
Epoch 64 , Loss: 156240.3620429039 , Train RMSE: 1403.775265238047
Epoch 65 , Loss: 156062.50327444077 , Train RMSE: 1403.3119276902821
Epoch 66 , Loss: 156111.19936656952 , Train RMSE: 1403.548734355851
Epoch 67 , Loss: 155889.41016292572 , Train RMSE: 1403.4398279372429
Epoch 68 , Loss: 155735.88526916504 , Train RMSE: 1403.7161224602137
Epoch 69 , Loss: 155530.56993818283 , Train RMSE: 1403.3915941008338
Epoch 70 , Loss: 155631.08728647232 , Train RMSE: 1403.520917296445
Epoch 71 , Loss: 155566.4062962532 , Train RMSE: 1403.704848601309
Epoch 72 , Loss: 155658.34515857697 , Train RMSE: 1403.9026516481172
Epoch 73 , Loss: 155388.03846788406 , Train RMSE: 1403.7533295651801
Epoch 74 , Loss: 155616.06320905685 , Train RMSE: 1403.96791272936
Epoch 75 , Loss: 155433.3652396202 , Train RMSE: 1403.6282070772384
Epoch 76 , Loss: 155130.84997558594 , Train RMSE: 1403.1280770787382
Epoch 77 , Loss: 155093.37040424347 , Train RMSE: 1403.344042619623
Epoch 78 , Loss: 155100.19874954224 , Train RMSE: 1403.5353649733902
Epoch 79 , Loss: 155362.70774316788 , Train RMSE: 1403.503964845265
Epoch 80 , Loss: 155096.63173675537 , Train RMSE: 1403.0374602742734
Epoch 81 , Loss: 154947.12256240845 , Train RMSE: 1403.0621907270624
Epoch 82 , Loss: 154882.49226999283 , Train RMSE: 1403.137509728262
Epoch 83 , Loss: 154790.16265439987 , Train RMSE: 1403.1561226142044
Epoch 84 , Loss: 154867.70603752136 , Train RMSE: 1403.327145516166
Epoch 85 , Loss: 154808.56974220276 , Train RMSE: 1402.7466984269934
Epoch 86 , Loss: 154832.20602750778 , Train RMSE: 1403.0865868163455
Epoch 87 , Loss: 154702.86727905273 , Train RMSE: 1402.913669540479
Epoch 88 , Loss: 154673.06339788437 , Train RMSE: 1402.9016074505482
Epoch 89 , Loss: 154893.18458127975 , Train RMSE: 1403.4959541201988
Epoch 90 , Loss: 154870.07267665863 , Train RMSE: 1403.5493840323536
Epoch 91 , Loss: 154788.43430948257 , Train RMSE: 1403.4505303674644
Epoch 92 , Loss: 155183.21197080612 , Train RMSE: 1403.9125113768448
Epoch 93 , Loss: 154555.03306770325 , Train RMSE: 1402.6434699472288
Epoch 94 , Loss: 154488.06871557236 , Train RMSE: 1402.8359310107103
Epoch 95 , Loss: 154420.09363269806 , Train RMSE: 1402.9605089658469
Epoch 96 , Loss: 154869.65060710907 , Train RMSE: 1403.6771783637334
Epoch 97 , Loss: 154728.0365524292 , Train RMSE: 1403.1836292738658
Epoch 98 , Loss: 154718.7209663391 , Train RMSE: 1403.4760440571015
Epoch 99 , Loss: 154654.91313934326 , Train RMSE: 1403.5354679644408
Test RMSE: 872.0604026096225
# Plot the per-epoch training RMSE curve and save it to disk.
epoch_axis = np.arange(EPOCHS)
plt.plot(epoch_axis, train_MSEs)
plt.xlabel("Epoch")
plt.ylabel("Train RMSE")
# Save before show(): some backends clear the figure on show().
plt.savefig("train_RMSE.png")
plt.show()
Test on a single county
# Evaluate the trained model on each county's held-out test data,
# printing the per-county RMSE and plotting prediction vs. ground truth.
counties = ['06001', '06013', '06019', '06029', '06037', '06059', '06065',
            '06067', '06071', '06073', '06075', '06077', '06081', '06085',
            '06111']
# Read the test CSV once, not once per county (it is the same file every time).
all_test_data = pd.read_csv("test_data.csv", dtype=object, index_col=False)
for county in counties:
    # .copy() so the in-place normalization below operates on an independent
    # frame instead of a view of all_test_data (avoids SettingWithCopy issues).
    test_data = all_test_data[all_test_data["fips"] == county].copy()
    # Mean of the raw target; added back later so plots are in actual case counts.
    mean_dailyCases = test_data['dailyCases_t'].astype(float).mean()
    # Mean-center every feature/target column, per county.
    for column_name in headers:
        test_data[column_name] = test_data[column_name].astype(float)
        test_data[column_name] = test_data[column_name] - test_data[column_name].mean()
    y_test = test_data["dailyCases_t"]
    y_test = np.array(y_test, dtype=np.float32)
    X_test = test_data.drop(columns=['dailyCases_t', 'time', 'fips'])
    X_test = np.array(X_test, dtype=np.float32)
    # Reshape to (batch, seq_len=1, n_features) as the LSTM expects.
    X_test = X_test.reshape(X_test.shape[0], 1, X_test.shape[1])
    test_set = TensorDataset(torch.from_numpy(X_test),
                             torch.from_numpy(y_test))
    test_dataloader = torch.utils.data.DataLoader(test_set, batch_size=16, shuffle=False)
    model.eval()
    test_prediction = []
    test_ground_truth = []
    # no_grad() is the idiomatic form of set_grad_enabled(False) for inference.
    with torch.no_grad():
        for inputs, labels in test_dataloader:
            output = model(inputs)
            test_prediction = np.concatenate((test_prediction, torch.flatten(output).detach().numpy()))
            test_ground_truth = np.concatenate((test_ground_truth, labels.detach().numpy()))
    # sqrt of the MSE -> RMSE, matching the printed label.
    test_mse = np.sqrt(mean_squared_error(test_prediction, test_ground_truth))
    print("Test RMSE: ", test_mse)
    # Shift back by the county mean so the plot shows actual daily cases.
    plt.plot(np.arange(len(test_ground_truth)), test_ground_truth + mean_dailyCases,
             label="ground truth", c='r')
    plt.plot(np.arange(len(test_prediction)), test_prediction + mean_dailyCases,
             label="prediction")
    plt.legend()
    plt.xlabel("Day")
    plt.ylabel("Daily Cases")
    plt.title("county " + county)
    plt.show()
Test RMSE: 422.96855915421315
Test RMSE: 389.79688977191597
Test RMSE: 548.7047392266221
Test RMSE: 519.2485649450427
Test RMSE: 2165.628641457177
Test RMSE: 577.7467416936556
Test RMSE: 847.4010750524387
Test RMSE: 625.0115243391487
Test RMSE: 813.5223898441664
Test RMSE: 988.6604761160371
Test RMSE: 336.09873845254344
Test RMSE: 470.7531034449218
Test RMSE: 310.65657127066
Test RMSE: 452.3952963502877
Test RMSE: 351.9262260762237
Train the model with average county data
# Build an "average county" time series: start from county 06013's rows,
# add the corresponding rows of every county listed below column-wise,
# then divide by the total number of counties (len(counties) + 1).
counties = ['06019', '06029', '06037', '06059', '06065',
            '06067', '06071', '06073', '06075', '06077', '06081', '06085']
df = pd.read_csv("data.csv", dtype=object, index_col=False)
df['time'] = pd.to_datetime(df['time'])
time = df["time"]
df = df.drop(columns=["time"])
# Lagged feature columns plus the prediction target 'dailyCases_t'.
headers = ['dailyCases_chng_t-1', 'dailyCases_chng_t-2',
           'outpatient_cli_t-1', 'outpatient_cli_t-2', 'hospitalAdm_claim_t-1',
           'hospitalAdm_claim_t-2', 'googleSym_sum_t-1', 'googleSym_sum_t-2',
           'doctorVisits_t-1', 'doctorVisits_t-2', 'dailyCases_t']
for col in headers:
    df[col] = df[col].astype(float)
# Seed the accumulator with the base county's rows.
avg_county_data = df[df["fips"] == '06013'].drop(columns=["fips"])
for county in counties:
    county_frame = df[df["fips"] == county].drop(columns=["fips"])
    for col in headers:
        # np.array strips the index so addition is positional, not aligned.
        avg_county_data[col] += np.array(county_frame[col])
avg_county_data /= (len(counties) + 1)
# Reattach timestamps; alignment keeps only the base county's row indices.
avg_county_data["time"] = time
avg_county_data
# Train the model with average county data
# ------------------------------------------------------------------
# Walk-forward cross-validated LSTM training on the averaged county
# series: the pre-2021-09-01 rows are split into `cv` expanding
# train/validation folds (TimeSeriesSplit), a fresh model is trained
# per fold, and the model from the FINAL fold is evaluated on the
# 2021-09-01+ hold-out set.
cv = 5                # number of TimeSeriesSplit folds
EPOCHS = 100          # training epochs per fold
lr = 1                # SGD learning rate (NOTE(review): 1.0 is unusually large — confirm intended)
HIDDEN_DIM = 4        # LSTM hidden-state size
data = avg_county_data
# Lagged feature columns plus the prediction target 'dailyCases_t'.
headers = ['dailyCases_chng_t-1', 'dailyCases_chng_t-2',
           'outpatient_cli_t-1', 'outpatient_cli_t-2', 'hospitalAdm_claim_t-1',
           'hospitalAdm_claim_t-2', 'googleSym_sum_t-1', 'googleSym_sum_t-2',
           'doctorVisits_t-1', 'doctorVisits_t-2','dailyCases_t']
# Mean of the raw target; added back later when plotting test predictions.
mean_dailyCases = data['dailyCases_t'].astype(float).mean()
# Mean-center every feature/target column in place.
for column_name in headers:
    data[column_name] = data[column_name].astype(float)
    data[column_name] = data[column_name]-data[column_name].mean()
# Rows from 2021-09-01 onward form the hold-out test set.
idx = data.index[data["time"]==np.datetime64(date(2021,9,1))]
test_data = data.loc[idx[0]:]
y_test = test_data["dailyCases_t"]
y_test = np.array(y_test,dtype=np.float32)
X_test = test_data.drop(columns=['dailyCases_t','time'])
X_test = np.array(X_test,dtype=np.float32)
# Reshape to (batch, seq_len=1, n_features) as the LSTM expects.
X_test = X_test.reshape(X_test.shape[0], 1, X_test.shape[1])
test_set = TensorDataset(torch.from_numpy(X_test),
                         torch.from_numpy(y_test))
test_dataloader = torch.utils.data.DataLoader(test_set, batch_size=16, shuffle=False)
# Everything before the split date is available for CV training.
train_data = data.loc[:idx[0]-1]
y = train_data["dailyCases_t"]
y = np.array(y,dtype=np.float32)
X = train_data.drop(columns=['dailyCases_t','time'])
X = np.array(X,dtype=np.float32)
X = X.reshape(X.shape[0], 1, X.shape[1])
LEARNING_RATE = lr
i=0                                  # fold counter
tscv = TimeSeriesSplit(n_splits=cv)  # expanding-window splits, no shuffling
avg_valid_mse = 0.0
train_RMSEs_nn_avg = []              # per-fold mean of per-epoch train RMSE
valid_RMSEs_nn_avg = []              # per-fold validation RMSE
test_RMSEs_nn_avg = []               # test RMSE (appended on the final fold only)
for train_index, valid_index in tscv.split(X):
    X_train, X_valid = X[train_index], X[valid_index]
    y_train, y_valid = y[train_index], y[valid_index]
    training_set = TensorDataset(torch.from_numpy(X_train),
                                 torch.from_numpy(y_train))
    training_dataloader = torch.utils.data.DataLoader(training_set, batch_size=16, shuffle=False)
    valid_set = TensorDataset(torch.from_numpy(X_valid),
                              torch.from_numpy(y_valid))
    validation_dataloader = torch.utils.data.DataLoader(valid_set, batch_size=16, shuffle=False)
    print("split",i+1,":")
    # Fresh model and optimizer for every fold.
    model = LSTM(input_dim=10, hidden_dim=HIDDEN_DIM,n_layers=1)
    #optimizer = torch.optim.Adam(model.parameters(), lr=LEARNING_RATE)
    optimizer = torch.optim.SGD(model.parameters(), lr=LEARNING_RATE)
    loss_fn = nn.MSELoss()
    model.train()
    train_mse_epoch_sum = 0.0    # accumulates per-epoch RMSE for the fold average
    for epoch in range(EPOCHS):
        # Train the network by filling in this block of code
        epoch_loss = 0.0
        #epoch_loss = []
        #train_mse = 0.0
        train_prediction = []
        train_ground_truth = []
        for inputs, labels in training_dataloader:
            optimizer.zero_grad()
            output = model(inputs)
            # RMSE loss: sqrt of MSE against labels reshaped to match output.
            loss = torch.sqrt(loss_fn(output, torch.reshape(labels,output.shape)))
            loss.backward()
            optimizer.step()
            epoch_loss += loss.item()
            #epoch_loss.append(loss.item())
            #train_MSEs[i].append(loss.item())
            train_prediction = np.concatenate((train_prediction,torch.flatten(output).detach().numpy()))
            train_ground_truth = np.concatenate((train_ground_truth,labels.detach().numpy()))
            # train_MSEs[i].append(mean_squared_error(torch.flatten(output).detach().numpy(), labels.detach().numpy()))
            #train_prediction[i] = np.concatenate((train_prediction[i],torch.flatten(output).detach().numpy()))
            # train_ground_truth[i] = np.concatenate((train_ground_truth[i],labels.detach().numpy()))
            #train_acc += torch.sum(prediction == labels)
            #train_accs.append(train_acc/count)
        #losses.append(np.mean(epoch_loss))
        # NOTE(review): despite the "MSE" name and print label, this is an
        # RMSE (sqrt applied); the label is kept to match existing logs.
        train_mse = np.sqrt(mean_squared_error(train_prediction,train_ground_truth))
        train_mse_epoch_sum += train_mse
        print("Epoch", epoch, ", Loss:", epoch_loss, ", Train MSE:", train_mse)
    train_RMSEs_nn_avg.append(train_mse_epoch_sum/EPOCHS)
    # Validation pass with gradients disabled.
    model.eval()
    valid_mse = 0.0
    #valid_mse = []
    valid_prediction=[]
    valid_ground_truth =[]
    with torch.set_grad_enabled(False):
        for inputs, labels in validation_dataloader:
            output = model(inputs)
            valid_prediction = np.concatenate((valid_prediction,torch.flatten(output).detach().numpy()))
            valid_ground_truth = np.concatenate((valid_ground_truth,labels.detach().numpy()))
            #valid_MSEs[i].append(mean_squared_error(torch.flatten(output).detach().numpy(), labels.detach().numpy()))
            #valid_MSEs[i].append(valid_mse)
    # Again an RMSE despite the name/label.
    valid_mse = np.sqrt(mean_squared_error(valid_prediction,valid_ground_truth))
    avg_valid_mse+=valid_mse
    valid_RMSEs_nn_avg.append(valid_mse)
    print("Validation MSE: ", valid_mse)
    plt.plot(np.arange(len(valid_prediction)),valid_prediction,label="prediction")
    plt.plot(np.arange(len(valid_ground_truth)),valid_ground_truth,label="ground truth")
    plt.legend()
    plt.show()
    i += 1
    # After the last fold, evaluate that fold's model on the hold-out set.
    if i==cv:
        test_prediction=[]
        test_ground_truth =[]
        test_mse = 0.0
        with torch.set_grad_enabled(False):
            for inputs, labels in test_dataloader:
                output = model(inputs)
                test_prediction = np.concatenate((test_prediction,torch.flatten(output).detach().numpy()))
                test_ground_truth = np.concatenate((test_ground_truth,labels.detach().numpy()))
        test_mse = np.sqrt(mean_squared_error(test_prediction,test_ground_truth))
        test_RMSEs_nn_avg.append(test_mse)
        print("Test MSE: ", test_mse)
        # Add the target mean back so the plot is in actual daily-case units.
        plt.plot(np.arange(len(test_prediction)),test_prediction+mean_dailyCases,label="prediction")
        plt.plot(np.arange(len(test_ground_truth)),test_ground_truth+mean_dailyCases,label="ground truth")
        plt.title("Test data Prediction")
        plt.legend()
        plt.show()
avg_valid_mse /= cv
print("Average validation MSE:",avg_valid_mse)
print()
split 1 :
Epoch 0 , Loss: 1328.2661743164062 , Train MSE: 334.71046017281
Epoch 1 , Loss: 1591.6925964355469 , Train MSE: 424.1697317314275
Epoch 2 , Loss: 1305.9246520996094 , Train MSE: 329.11728512936696
Epoch 3 , Loss: 1290.6605834960938 , Train MSE: 325.3159419165538
Epoch 4 , Loss: 1275.4153442382812 , Train MSE: 321.5194512443805
Epoch 5 , Loss: 1260.1893310546875 , Train MSE: 317.7279867793907
Epoch 6 , Loss: 1244.9836730957031 , Train MSE: 313.9417332950636
Epoch 7 , Loss: 1229.7988586425781 , Train MSE: 310.16088159531427
Epoch 8 , Loss: 1214.635757446289 , Train MSE: 306.3856351279638
Epoch 9 , Loss: 1199.4953308105469 , Train MSE: 302.6162030049028
Epoch 10 , Loss: 1184.378402709961 , Train MSE: 298.8528037530241
Epoch 11 , Loss: 1169.2859344482422 , Train MSE: 295.0956674307358
Epoch 12 , Loss: 1154.2188720703125 , Train MSE: 291.3450425678921
Epoch 13 , Loss: 1139.1783905029297 , Train MSE: 287.60117851024967
Epoch 14 , Loss: 1124.16552734375 , Train MSE: 283.86434145152816
Epoch 15 , Loss: 1109.1814727783203 , Train MSE: 280.13482039441175
Epoch 16 , Loss: 1094.227310180664 , Train MSE: 276.4129064957359
Epoch 17 , Loss: 1079.3046875 , Train MSE: 272.6989145841512
Epoch 18 , Loss: 1064.4145812988281 , Train MSE: 268.9931708489399
Epoch 19 , Loss: 1049.5586547851562 , Train MSE: 265.2960201359148
Epoch 20 , Loss: 1034.7384948730469 , Train MSE: 261.6078242781161
Epoch 21 , Loss: 1019.9556121826172 , Train MSE: 257.92897960513943
Epoch 22 , Loss: 1005.2117004394531 , Train MSE: 254.25986689493678
Epoch 23 , Loss: 990.5086669921875 , Train MSE: 250.60092250372193
Epoch 24 , Loss: 975.8483428955078 , Train MSE: 246.9526004040871
Epoch 25 , Loss: 961.2329559326172 , Train MSE: 243.31537380110154
Epoch 26 , Loss: 946.6643981933594 , Train MSE: 239.68974531357753
Epoch 27 , Loss: 932.1451721191406 , Train MSE: 236.07623622777456
Epoch 28 , Loss: 917.6775360107422 , Train MSE: 232.47540224915244
Epoch 29 , Loss: 903.2641906738281 , Train MSE: 228.88784340256566
Epoch 30 , Loss: 888.9078216552734 , Train MSE: 225.31418281857785
Epoch 31 , Loss: 874.6113891601562 , Train MSE: 221.75508609198178
Epoch 32 , Loss: 860.3778991699219 , Train MSE: 218.21123698978076
Epoch 33 , Loss: 846.2106170654297 , Train MSE: 214.68338797919523
Epoch 34 , Loss: 832.1130828857422 , Train MSE: 211.17231319613245
Epoch 35 , Loss: 818.0889739990234 , Train MSE: 207.67883729895613
Epoch 36 , Loss: 804.1422424316406 , Train MSE: 204.20385480088723
Epoch 37 , Loss: 790.2769775390625 , Train MSE: 200.74826464163
Epoch 38 , Loss: 776.4976348876953 , Train MSE: 197.3130603317144
Epoch 39 , Loss: 762.808837890625 , Train MSE: 193.8992578626878
Epoch 40 , Loss: 749.2156677246094 , Train MSE: 190.50797490695135
Epoch 41 , Loss: 735.7232971191406 , Train MSE: 187.1403405483634
Epoch 42 , Loss: 722.3373260498047 , Train MSE: 183.79758523950088
Epoch 43 , Loss: 709.0638275146484 , Train MSE: 180.48102205752355
Epoch 44 , Loss: 695.9088592529297 , Train MSE: 177.19198189504613
Epoch 45 , Loss: 682.8790664672852 , Train MSE: 173.93188892084407
Epoch 46 , Loss: 669.9814071655273 , Train MSE: 170.70226459612707
Epoch 47 , Loss: 657.2230224609375 , Train MSE: 167.50465978772485
Epoch 48 , Loss: 644.6116180419922 , Train MSE: 164.3407279251323
Epoch 49 , Loss: 632.155143737793 , Train MSE: 161.2121919831639
Epoch 50 , Loss: 619.8618774414062 , Train MSE: 158.12085686976044
Epoch 51 , Loss: 607.7403411865234 , Train MSE: 155.06856997905336
Epoch 52 , Loss: 595.7993927001953 , Train MSE: 152.05726462671643
Epoch 53 , Loss: 584.0481491088867 , Train MSE: 149.08893784814944
Epoch 54 , Loss: 572.4958267211914 , Train MSE: 146.16562636539228
Epoch 55 , Loss: 561.1517181396484 , Train MSE: 143.28941826368694
Epoch 56 , Loss: 550.0252304077148 , Train MSE: 140.46241864271371
Epoch 57 , Loss: 539.1257171630859 , Train MSE: 137.6867662936621
Epoch 58 , Loss: 528.4623031616211 , Train MSE: 134.96458943448096
Epoch 59 , Loss: 518.0439453125 , Train MSE: 132.29801547845287
Epoch 60 , Loss: 507.879150390625 , Train MSE: 129.6891248394944
Epoch 61 , Loss: 497.9760284423828 , Train MSE: 127.13995395812861
Epoch 62 , Loss: 488.3419876098633 , Train MSE: 124.65244242697773
Epoch 63 , Loss: 478.9837341308594 , Train MSE: 122.22844845373437
Epoch 64 , Loss: 469.90699768066406 , Train MSE: 119.8696725871335
Epoch 65 , Loss: 461.11678314208984 , Train MSE: 117.57770030097997
Epoch 66 , Loss: 452.616943359375 , Train MSE: 115.35394733214011
Epoch 67 , Loss: 444.4102783203125 , Train MSE: 113.19963715164485
Epoch 68 , Loss: 436.4984664916992 , Train MSE: 111.11580377479234
Epoch 69 , Loss: 428.88197326660156 , Train MSE: 109.1032477296098
Epoch 70 , Loss: 421.56026458740234 , Train MSE: 107.16257757297099
Epoch 71 , Loss: 414.53163146972656 , Train MSE: 105.29415579766211
Epoch 72 , Loss: 407.79344940185547 , Train MSE: 103.49814912057276
Epoch 73 , Loss: 401.34212493896484 , Train MSE: 101.77449049942825
Epoch 74 , Loss: 395.1730041503906 , Train MSE: 100.12286395524808
Epoch 75 , Loss: 389.2810516357422 , Train MSE: 98.54281677268537
Epoch 76 , Loss: 383.66027069091797 , Train MSE: 97.03363915138833
Epoch 77 , Loss: 378.30428314208984 , Train MSE: 95.59446817144614
Epoch 78 , Loss: 373.2062301635742 , Train MSE: 94.22426187122517
Epoch 79 , Loss: 368.35890197753906 , Train MSE: 92.92183963710889
Epoch 80 , Loss: 363.75475311279297 , Train MSE: 91.68582348130003
Epoch 81 , Loss: 359.38607025146484 , Train MSE: 90.5147461912987
Epoch 82 , Loss: 355.2452087402344 , Train MSE: 89.40704568917663
Epoch 83 , Loss: 351.3242950439453 , Train MSE: 88.36102839400115
Epoch 84 , Loss: 347.6154327392578 , Train MSE: 87.37490423555803
Epoch 85 , Loss: 344.1106491088867 , Train MSE: 86.44678598221414
Epoch 86 , Loss: 340.8021697998047 , Train MSE: 85.57474878465567
Epoch 87 , Loss: 337.68236541748047 , Train MSE: 84.75682882446694
Epoch 88 , Loss: 334.7434844970703 , Train MSE: 83.99096971752952
Epoch 89 , Loss: 331.97796630859375 , Train MSE: 83.27509782185625
Epoch 90 , Loss: 329.3783950805664 , Train MSE: 82.6071253475296
Epoch 91 , Loss: 326.9373321533203 , Train MSE: 81.98491350075588
Epoch 92 , Loss: 324.64772033691406 , Train MSE: 81.40636946352788
Epoch 93 , Loss: 322.5024719238281 , Train MSE: 80.86937341539294
Epoch 94 , Loss: 320.4946594238281 , Train MSE: 80.37183281345051
Epoch 95 , Loss: 318.6175994873047 , Train MSE: 79.91166852932024
Epoch 96 , Loss: 316.86458587646484 , Train MSE: 79.48682023303111
Epoch 97 , Loss: 315.2292938232422 , Train MSE: 79.09529807088384
Epoch 98 , Loss: 313.70542907714844 , Train MSE: 78.73513404915668
Epoch 99 , Loss: 312.2869110107422 , Train MSE: 78.40441925235298
Validation MSE: 1531.18086178817
split 2 :
Epoch 0 , Loss: 5849.905258178711 , Train MSE: 939.9531717295616
Epoch 1 , Loss: 6162.233322143555 , Train MSE: 965.1065039390105
Epoch 2 , Loss: 5696.972717285156 , Train MSE: 914.3859335684965
Epoch 3 , Loss: 6865.129867553711 , Train MSE: 1127.3492030153411
Epoch 4 , Loss: 5844.840026855469 , Train MSE: 941.4069739760911
Epoch 5 , Loss: 5843.532409667969 , Train MSE: 941.7497544592626
Epoch 6 , Loss: 5842.239120483398 , Train MSE: 942.0919349761696
Epoch 7 , Loss: 5840.959625244141 , Train MSE: 942.4335040357304
Epoch 8 , Loss: 5839.693557739258 , Train MSE: 942.7744507352872
Epoch 9 , Loss: 5838.44091796875 , Train MSE: 943.1147665676253
Epoch 10 , Loss: 5837.201904296875 , Train MSE: 943.4544418928875
Epoch 11 , Loss: 5835.975891113281 , Train MSE: 943.7934669934072
Epoch 12 , Loss: 5834.762405395508 , Train MSE: 944.1318334444234
Epoch 13 , Loss: 5833.56217956543 , Train MSE: 944.4695306540394
Epoch 14 , Loss: 5832.374771118164 , Train MSE: 944.8065509825922
Epoch 15 , Loss: 5831.199676513672 , Train MSE: 945.1428859643854
Epoch 16 , Loss: 5830.037124633789 , Train MSE: 945.4785291708313
Epoch 17 , Loss: 5828.88688659668 , Train MSE: 945.8134706233699
Epoch 18 , Loss: 5827.748550415039 , Train MSE: 946.1477022372782
Epoch 19 , Loss: 5826.622482299805 , Train MSE: 946.4812182257823
Epoch 20 , Loss: 5825.508514404297 , Train MSE: 946.8140108835237
Epoch 21 , Loss: 5824.405960083008 , Train MSE: 947.146071023259
Epoch 22 , Loss: 5823.314987182617 , Train MSE: 947.4773948855569
Epoch 23 , Loss: 5822.235595703125 , Train MSE: 947.8079743443585
Epoch 24 , Loss: 5821.167526245117 , Train MSE: 948.137803342544
Epoch 25 , Loss: 5820.110748291016 , Train MSE: 948.4668753588451
Epoch 26 , Loss: 5819.0650634765625 , Train MSE: 948.7951844307022
Epoch 27 , Loss: 5818.030227661133 , Train MSE: 949.1227243366882
Epoch 28 , Loss: 5817.006393432617 , Train MSE: 949.4494868950063
Epoch 29 , Loss: 5815.992935180664 , Train MSE: 949.7754713890309
Epoch 30 , Loss: 5814.990280151367 , Train MSE: 950.1006694533859
Epoch 31 , Loss: 5813.998580932617 , Train MSE: 950.4250759463133
Epoch 32 , Loss: 5813.0167236328125 , Train MSE: 950.7486859244852
Epoch 33 , Loss: 5812.045211791992 , Train MSE: 951.0714960521614
Epoch 34 , Loss: 5811.083694458008 , Train MSE: 951.3934966256743
Epoch 35 , Loss: 5810.132431030273 , Train MSE: 951.714690659278
Epoch 36 , Loss: 5809.19140625 , Train MSE: 952.0350670609511
Epoch 37 , Loss: 5808.25944519043 , Train MSE: 952.3546231019016
Epoch 38 , Loss: 5807.337463378906 , Train MSE: 952.6733546522588
Epoch 39 , Loss: 5806.425369262695 , Train MSE: 952.9912585193173
Epoch 40 , Loss: 5805.522247314453 , Train MSE: 953.3083325068731
Epoch 41 , Loss: 5804.628616333008 , Train MSE: 953.624567088239
Epoch 42 , Loss: 5803.7445068359375 , Train MSE: 953.9399635053587
Epoch 43 , Loss: 5802.869689941406 , Train MSE: 954.2545144770785
Epoch 44 , Loss: 5802.003280639648 , Train MSE: 954.5682198128404
Epoch 45 , Loss: 5801.146377563477 , Train MSE: 954.8810739619888
Epoch 46 , Loss: 5800.298263549805 , Train MSE: 955.1930711321166
Epoch 47 , Loss: 5799.458679199219 , Train MSE: 955.5042143161885
Epoch 48 , Loss: 5798.627960205078 , Train MSE: 955.8144951829184
Epoch 49 , Loss: 5797.805816650391 , Train MSE: 956.1239132723955
Epoch 50 , Loss: 5796.992034912109 , Train MSE: 956.4324632474949
Epoch 51 , Loss: 5796.186569213867 , Train MSE: 956.7401473031576
Epoch 52 , Loss: 5795.389694213867 , Train MSE: 957.0469567414638
Epoch 53 , Loss: 5794.600799560547 , Train MSE: 957.3528924342152
Epoch 54 , Loss: 5793.819641113281 , Train MSE: 957.6579491454154
Epoch 55 , Loss: 5793.047332763672 , Train MSE: 957.9621259162313
Epoch 56 , Loss: 5792.282287597656 , Train MSE: 958.2654207208684
Epoch 57 , Loss: 5791.525405883789 , Train MSE: 958.5678325403182
Epoch 58 , Loss: 5790.776138305664 , Train MSE: 958.8693549676902
Epoch 59 , Loss: 5790.034652709961 , Train MSE: 959.1699897352752
Epoch 60 , Loss: 5789.30078125 , Train MSE: 959.4697325346204
Epoch 61 , Loss: 5788.574264526367 , Train MSE: 959.7685795395206
Epoch 62 , Loss: 5787.855438232422 , Train MSE: 960.0665316630246
Epoch 63 , Loss: 5787.1436767578125 , Train MSE: 960.363583851667
Epoch 64 , Loss: 5786.439056396484 , Train MSE: 960.6597390207377
Epoch 65 , Loss: 5785.741943359375 , Train MSE: 960.95499357105
Epoch 66 , Loss: 5785.0518798828125 , Train MSE: 961.2493382348836
Epoch 67 , Loss: 5784.368881225586 , Train MSE: 961.5427824908431
Epoch 68 , Loss: 5783.6927490234375 , Train MSE: 961.8353236438462
Epoch 69 , Loss: 5783.023696899414 , Train MSE: 962.1269484070848
Epoch 70 , Loss: 5782.3614501953125 , Train MSE: 962.4176632268493
Epoch 71 , Loss: 5781.7059326171875 , Train MSE: 962.7074749511147
Epoch 72 , Loss: 5781.056945800781 , Train MSE: 962.9963722462116
Epoch 73 , Loss: 5780.414611816406 , Train MSE: 963.2843540496298
Epoch 74 , Loss: 5779.779067993164 , Train MSE: 963.5714226417579
Epoch 75 , Loss: 5779.149444580078 , Train MSE: 963.8575687556602
Epoch 76 , Loss: 5778.526626586914 , Train MSE: 964.1427949998383
Epoch 77 , Loss: 5777.910232543945 , Train MSE: 964.4271111406338
Epoch 78 , Loss: 5777.299957275391 , Train MSE: 964.7105023661785
Epoch 79 , Loss: 5776.695861816406 , Train MSE: 964.9929726008035
Epoch 80 , Loss: 5776.097763061523 , Train MSE: 965.2745111786385
Epoch 81 , Loss: 5775.50617980957 , Train MSE: 965.5551259035697
Epoch 82 , Loss: 5774.920227050781 , Train MSE: 965.8348183475712
Epoch 83 , Loss: 5774.340301513672 , Train MSE: 966.1135850785274
Epoch 84 , Loss: 5773.766464233398 , Train MSE: 966.3914211093557
Epoch 85 , Loss: 5773.198425292969 , Train MSE: 966.6683327312151
Epoch 86 , Loss: 5772.635818481445 , Train MSE: 966.9443134040478
Epoch 87 , Loss: 5772.0791015625 , Train MSE: 967.2193646958992
Epoch 88 , Loss: 5771.528091430664 , Train MSE: 967.4934842254554
Epoch 89 , Loss: 5770.982955932617 , Train MSE: 967.7666692464286
Epoch 90 , Loss: 5770.442977905273 , Train MSE: 968.0389240782274
Epoch 91 , Loss: 5769.9083251953125 , Train MSE: 968.310246584179
Epoch 92 , Loss: 5769.379837036133 , Train MSE: 968.5806351188019
Epoch 93 , Loss: 5768.856185913086 , Train MSE: 968.8500907382534
Epoch 94 , Loss: 5768.337966918945 , Train MSE: 969.1186077389683
Epoch 95 , Loss: 5767.824691772461 , Train MSE: 969.3861843478779
Epoch 96 , Loss: 5767.316940307617 , Train MSE: 969.6528242185266
Epoch 97 , Loss: 5766.814743041992 , Train MSE: 969.9185247463492
Epoch 98 , Loss: 5766.317230224609 , Train MSE: 970.1832938157527
Epoch 99 , Loss: 5765.824905395508 , Train MSE: 970.4471157720809
Validation MSE: 1057.2320652235235
split 3 :
Epoch 0 , Loss: 8336.762573242188 , Train MSE: 964.9162164317185
Epoch 1 , Loss: 8344.619583129883 , Train MSE: 963.7985154851931
Epoch 2 , Loss: 8398.960403442383 , Train MSE: 966.0012398058932
Epoch 3 , Loss: 8396.276992797852 , Train MSE: 966.5622727920281
Epoch 4 , Loss: 8393.636764526367 , Train MSE: 967.1211427188798
Epoch 5 , Loss: 8391.039276123047 , Train MSE: 967.677835546318
Epoch 6 , Loss: 8388.483215332031 , Train MSE: 968.2323379835406
Epoch 7 , Loss: 8385.968307495117 , Train MSE: 968.7846372000707
Epoch 8 , Loss: 8383.493728637695 , Train MSE: 969.3347199988386
Epoch 9 , Loss: 8381.05827331543 , Train MSE: 969.8825772420187
Epoch 10 , Loss: 8378.661987304688 , Train MSE: 970.4281973320705
Epoch 11 , Loss: 8376.303146362305 , Train MSE: 970.971570342828
Epoch 12 , Loss: 8373.982315063477 , Train MSE: 971.5126859888775
Epoch 13 , Loss: 8371.697967529297 , Train MSE: 972.05153866615
Epoch 14 , Loss: 8369.449584960938 , Train MSE: 972.5881179271448
Epoch 15 , Loss: 8367.236618041992 , Train MSE: 973.1224170131895
Epoch 16 , Loss: 8365.058334350586 , Train MSE: 973.654431633071
Epoch 17 , Loss: 8362.913925170898 , Train MSE: 974.1841521902186
Epoch 18 , Loss: 8360.803680419922 , Train MSE: 974.7115721918424
Epoch 19 , Loss: 8358.725936889648 , Train MSE: 975.2366891220133
Epoch 20 , Loss: 8356.680587768555 , Train MSE: 975.759498156512
Epoch 21 , Loss: 8354.667343139648 , Train MSE: 976.2799922720544
Epoch 22 , Loss: 8352.685028076172 , Train MSE: 976.7981672670558
Epoch 23 , Loss: 8350.733489990234 , Train MSE: 977.3140223849813
Epoch 24 , Loss: 8348.811935424805 , Train MSE: 977.8275510476906
Epoch 25 , Loss: 8346.919998168945 , Train MSE: 978.3387530761352
Epoch 26 , Loss: 8345.05744934082 , Train MSE: 978.8476216641579
Epoch 27 , Loss: 8343.223281860352 , Train MSE: 979.3541592667011
Epoch 28 , Loss: 8341.417556762695 , Train MSE: 979.8583578583832
Epoch 29 , Loss: 8339.639144897461 , Train MSE: 980.3602211170933
Epoch 30 , Loss: 8337.888366699219 , Train MSE: 980.859745047287
Epoch 31 , Loss: 8336.163772583008 , Train MSE: 981.3569303262349
Epoch 32 , Loss: 8334.465744018555 , Train MSE: 981.851777043322
Epoch 33 , Loss: 8332.793106079102 , Train MSE: 982.3442775304416
Epoch 34 , Loss: 8331.146514892578 , Train MSE: 982.8344353566489
Epoch 35 , Loss: 8329.524520874023 , Train MSE: 983.3222498232767
Epoch 36 , Loss: 8327.926940917969 , Train MSE: 983.8077206559057
Epoch 37 , Loss: 8326.353866577148 , Train MSE: 984.2908481040743
Epoch 38 , Loss: 8324.804458618164 , Train MSE: 984.7716313058188
Epoch 39 , Loss: 8323.277938842773 , Train MSE: 985.2500716979397
Epoch 40 , Loss: 8321.775344848633 , Train MSE: 985.7261684781881
Epoch 41 , Loss: 8320.294815063477 , Train MSE: 986.1999216310729
Epoch 42 , Loss: 8318.83674621582 , Train MSE: 986.671330606098
Epoch 43 , Loss: 8317.400314331055 , Train MSE: 987.1403993071619
Epoch 44 , Loss: 8315.985580444336 , Train MSE: 987.6071260678366
Epoch 45 , Loss: 8314.591903686523 , Train MSE: 988.0715159807836
Epoch 46 , Loss: 8313.219223022461 , Train MSE: 988.5335621127377
Epoch 47 , Loss: 8311.866989135742 , Train MSE: 988.9932708529794
Epoch 48 , Loss: 8310.535278320312 , Train MSE: 989.4506417378175
Epoch 49 , Loss: 8309.223129272461 , Train MSE: 989.9056776924749
Epoch 50 , Loss: 8307.930847167969 , Train MSE: 990.3583816173395
Epoch 51 , Loss: 8306.65786743164 , Train MSE: 990.8087512650429
Epoch 52 , Loss: 8305.40396118164 , Train MSE: 991.256787138451
Epoch 53 , Loss: 8304.168212890625 , Train MSE: 991.7024928136127
Epoch 54 , Loss: 8302.951538085938 , Train MSE: 992.1458737254487
Epoch 55 , Loss: 8301.75259399414 , Train MSE: 992.5869205322314
Epoch 56 , Loss: 8300.571655273438 , Train MSE: 993.025643444061
Epoch 57 , Loss: 8299.408325195312 , Train MSE: 993.4620414641034
Epoch 58 , Loss: 8298.262329101562 , Train MSE: 993.8961171238647
Epoch 59 , Loss: 8297.133026123047 , Train MSE: 994.3278689959159
Epoch 60 , Loss: 8296.02116394043 , Train MSE: 994.7573014879378
Epoch 61 , Loss: 8294.925674438477 , Train MSE: 995.184419843544
Epoch 62 , Loss: 8293.846176147461 , Train MSE: 995.609226381484
Epoch 63 , Loss: 8292.783248901367 , Train MSE: 996.0317147717778
Epoch 64 , Loss: 8291.736053466797 , Train MSE: 996.4518936557024
Epoch 65 , Loss: 8290.704376220703 , Train MSE: 996.8697577919311
Epoch 66 , Loss: 8289.688095092773 , Train MSE: 997.2853123082991
Epoch 67 , Loss: 8288.687026977539 , Train MSE: 997.6985595027659
Epoch 68 , Loss: 8287.701065063477 , Train MSE: 998.109512554905
Epoch 69 , Loss: 8286.729843139648 , Train MSE: 998.518158361371
Epoch 70 , Loss: 8285.772720336914 , Train MSE: 998.9245010922547
Epoch 71 , Loss: 8284.830215454102 , Train MSE: 999.328545700572
Epoch 72 , Loss: 8283.901733398438 , Train MSE: 999.7302905406573
Epoch 73 , Loss: 8282.987319946289 , Train MSE: 1000.1297393963379
Epoch 74 , Loss: 8282.086669921875 , Train MSE: 1000.5268913476897
Epoch 75 , Loss: 8281.199417114258 , Train MSE: 1000.9217643819003
Epoch 76 , Loss: 8280.326080322266 , Train MSE: 1001.3143400292566
Epoch 77 , Loss: 8279.46517944336 , Train MSE: 1001.7046229722924
Epoch 78 , Loss: 8278.61734008789 , Train MSE: 1002.0926271291046
Epoch 79 , Loss: 8277.782608032227 , Train MSE: 1002.4783564947089
Epoch 80 , Loss: 8276.960250854492 , Train MSE: 1002.8617986273756
Epoch 81 , Loss: 8276.150482177734 , Train MSE: 1003.2429583928636
Epoch 82 , Loss: 8275.35269165039 , Train MSE: 1003.6218411120537
Epoch 83 , Loss: 8274.567596435547 , Train MSE: 1003.9984570091648
Epoch 84 , Loss: 8273.79409790039 , Train MSE: 1004.3728014402542
Epoch 85 , Loss: 8273.032333374023 , Train MSE: 1004.7448696661273
Epoch 86 , Loss: 8272.282302856445 , Train MSE: 1005.1146654232067
Epoch 87 , Loss: 8271.543518066406 , Train MSE: 1005.4822044740644
Epoch 88 , Loss: 8270.816268920898 , Train MSE: 1005.8474806703288
Epoch 89 , Loss: 8270.100128173828 , Train MSE: 1006.2104886809691
Epoch 90 , Loss: 8269.395156860352 , Train MSE: 1006.5712364076468
Epoch 91 , Loss: 8268.70053100586 , Train MSE: 1006.9297312077342
Epoch 92 , Loss: 8268.017379760742 , Train MSE: 1007.2859717883115
Epoch 93 , Loss: 8267.34439086914 , Train MSE: 1007.6399571250804
Epoch 94 , Loss: 8266.681579589844 , Train MSE: 1007.9916901202083
Epoch 95 , Loss: 8266.029708862305 , Train MSE: 1008.3411793880373
Epoch 96 , Loss: 8265.387268066406 , Train MSE: 1008.6884230352791
Epoch 97 , Loss: 8264.755081176758 , Train MSE: 1009.0334196983237
Epoch 98 , Loss: 8264.132995605469 , Train MSE: 1009.3761797779092
Epoch 99 , Loss: 8263.520797729492 , Train MSE: 1009.716701377598
Validation MSE: 317.8096015094717
split 4 :
Epoch 0 , Loss: 11252.927673339844 , Train MSE: 965.9300755791759
Epoch 1 , Loss: 10163.03140258789 , Train MSE: 865.193187340249
Epoch 2 , Loss: 10131.755783081055 , Train MSE: 866.1593915027779
Epoch 3 , Loss: 10100.982925415039 , Train MSE: 867.1522969122924
Epoch 4 , Loss: 10070.700637817383 , Train MSE: 868.1710911165814
Epoch 5 , Loss: 10040.897369384766 , Train MSE: 869.2149835022433
Epoch 6 , Loss: 10011.562026977539 , Train MSE: 870.2832087200984
Epoch 7 , Loss: 9982.683197021484 , Train MSE: 871.3750171543008
Epoch 8 , Loss: 9954.25210571289 , Train MSE: 872.4896774522962
Epoch 9 , Loss: 9926.257263183594 , Train MSE: 873.6264720422829
Epoch 10 , Loss: 9898.690399169922 , Train MSE: 874.7847033642755
Epoch 11 , Loss: 9871.542297363281 , Train MSE: 875.9636755983961
Epoch 12 , Loss: 9844.804809570312 , Train MSE: 877.1627138133441
Epoch 13 , Loss: 9818.471237182617 , Train MSE: 878.3811416419163
Epoch 14 , Loss: 9792.533828735352 , Train MSE: 879.6182911447227
Epoch 15 , Loss: 9766.985977172852 , Train MSE: 880.8734904401467
Epoch 16 , Loss: 9741.823654174805 , Train MSE: 882.1460793260217
Epoch 17 , Loss: 9717.040405273438 , Train MSE: 883.4353814062603
Epoch 18 , Loss: 9692.632629394531 , Train MSE: 884.7407184039945
Epoch 19 , Loss: 9668.597152709961 , Train MSE: 886.0614070780032
Epoch 20 , Loss: 9644.930313110352 , Train MSE: 887.396753821022
Epoch 21 , Loss: 9621.631057739258 , Train MSE: 888.7460447309129
Epoch 22 , Loss: 9598.697326660156 , Train MSE: 890.1085599610748
Epoch 23 , Loss: 9576.129470825195 , Train MSE: 891.4835368343098
Epoch 24 , Loss: 9553.92756652832 , Train MSE: 892.870203813363
Epoch 25 , Loss: 9532.092971801758 , Train MSE: 894.2677608319841
Epoch 26 , Loss: 9510.627395629883 , Train MSE: 895.6753706083674
Epoch 27 , Loss: 9489.534042358398 , Train MSE: 897.0921543141224
Epoch 28 , Loss: 9468.817398071289 , Train MSE: 898.5172240907395
Epoch 29 , Loss: 9448.481430053711 , Train MSE: 899.9495965449448
Epoch 30 , Loss: 9428.53189086914 , Train MSE: 901.3882786466849
Epoch 31 , Loss: 9408.975891113281 , Train MSE: 902.8322349749569
Epoch 32 , Loss: 9389.819778442383 , Train MSE: 904.2803515532656
Epoch 33 , Loss: 9371.072769165039 , Train MSE: 905.7314440142362
Epoch 34 , Loss: 9352.742897033691 , Train MSE: 907.1843147391622
Epoch 35 , Loss: 9334.841110229492 , Train MSE: 908.6376649390046
Epoch 36 , Loss: 9317.376831054688 , Train MSE: 910.0901451419468
Epoch 37 , Loss: 9300.361473083496 , Train MSE: 911.5403341832373
Epoch 38 , Loss: 9283.806213378906 , Train MSE: 912.9867608002471
Epoch 39 , Loss: 9267.722785949707 , Train MSE: 914.4278763399765
Epoch 40 , Loss: 9252.122253417969 , Train MSE: 915.8620881570071
Epoch 41 , Loss: 9237.017112731934 , Train MSE: 917.2877319662834
Epoch 42 , Loss: 9222.417442321777 , Train MSE: 918.7031360498437
Epoch 43 , Loss: 9208.33480834961 , Train MSE: 920.1065688248459
Epoch 44 , Loss: 9194.7777633667 , Train MSE: 921.4962673729789
Epoch 45 , Loss: 9181.755088806152 , Train MSE: 922.8705308401078
Epoch 46 , Loss: 9169.272773742676 , Train MSE: 924.2275961116028
Epoch 47 , Loss: 9157.336128234863 , Train MSE: 925.5657760820033
Epoch 48 , Loss: 9145.947486877441 , Train MSE: 926.8834414578339
Epoch 49 , Loss: 9135.107688903809 , Train MSE: 928.1789999585525
Epoch 50 , Loss: 9124.814323425293 , Train MSE: 929.4510073564227
Epoch 51 , Loss: 9115.06290435791 , Train MSE: 930.6980674974359
Epoch 52 , Loss: 9105.846412658691 , Train MSE: 931.9189375386994
Epoch 53 , Loss: 9097.155090332031 , Train MSE: 933.1125183049086
Epoch 54 , Loss: 9088.976585388184 , Train MSE: 934.2778607606168
Epoch 55 , Loss: 9081.298156738281 , Train MSE: 935.4141763014436
Epoch 56 , Loss: 9074.102027893066 , Train MSE: 936.5208060391695
Epoch 57 , Loss: 9067.371368408203 , Train MSE: 937.5972696477858
Epoch 58 , Loss: 9061.08772277832 , Train MSE: 938.6432280568513
Epoch 59 , Loss: 9055.23038482666 , Train MSE: 939.658492157673
Epoch 60 , Loss: 9049.77880859375 , Train MSE: 940.6429889567819
Epoch 61 , Loss: 9044.71231842041 , Train MSE: 941.5967896216338
Epoch 62 , Loss: 9040.009750366211 , Train MSE: 942.520104411738
Epoch 63 , Loss: 9035.650337219238 , Train MSE: 943.4131648432895
Epoch 64 , Loss: 9031.613006591797 , Train MSE: 944.2762938012844
Epoch 65 , Loss: 9027.87833404541 , Train MSE: 945.109992675297
Epoch 66 , Loss: 9024.426551818848 , Train MSE: 945.9147225818605
Epoch 67 , Loss: 9021.238693237305 , Train MSE: 946.6910500771394
Epoch 68 , Loss: 9018.297157287598 , Train MSE: 947.4395185701362
Epoch 69 , Loss: 9015.585052490234 , Train MSE: 948.1607353651009
Epoch 70 , Loss: 9013.086013793945 , Train MSE: 948.8553767785439
Epoch 71 , Loss: 9010.785453796387 , Train MSE: 949.5241062817311
Epoch 72 , Loss: 9008.667495727539 , Train MSE: 950.1676408783803
Epoch 73 , Loss: 9006.720481872559 , Train MSE: 950.7865894742168
Epoch 74 , Loss: 9004.930488586426 , Train MSE: 951.3816918326896
Epoch 75 , Loss: 9003.286125183105 , Train MSE: 951.9536272077748
Epoch 76 , Loss: 9001.776885986328 , Train MSE: 952.5030725212193
Epoch 77 , Loss: 9000.391380310059 , Train MSE: 953.0307443481669
Epoch 78 , Loss: 8999.120964050293 , Train MSE: 953.5373333078502
Epoch 79 , Loss: 8997.956108093262 , Train MSE: 954.0234688624691
Epoch 80 , Loss: 8996.889068603516 , Train MSE: 954.4898705435799
Epoch 81 , Loss: 8995.91170501709 , Train MSE: 954.9371680821729
Epoch 82 , Loss: 8995.017051696777 , Train MSE: 955.3660148481882
Epoch 83 , Loss: 8994.19873046875 , Train MSE: 955.7770211572248
Epoch 84 , Loss: 8993.44985961914 , Train MSE: 956.17086736862
Epoch 85 , Loss: 8992.765968322754 , Train MSE: 956.548124498734
Epoch 86 , Loss: 8992.140914916992 , Train MSE: 956.9093910261491
Epoch 87 , Loss: 8991.570236206055 , Train MSE: 957.2552432817449
Epoch 88 , Loss: 8991.048843383789 , Train MSE: 957.5862512919963
Epoch 89 , Loss: 8990.574104309082 , Train MSE: 957.9029695384739
Epoch 90 , Loss: 8990.140083312988 , Train MSE: 958.2059192833964
Epoch 91 , Loss: 8989.745292663574 , Train MSE: 958.4956750308368
Epoch 92 , Loss: 8989.384994506836 , Train MSE: 958.7727229366184
Epoch 93 , Loss: 8989.0570602417 , Train MSE: 959.0375355257421
Epoch 94 , Loss: 8988.75872039795 , Train MSE: 959.2906153098116
Epoch 95 , Loss: 8988.486915588379 , Train MSE: 959.5324504728333
Epoch 96 , Loss: 8988.23998260498 , Train MSE: 959.7634553393599
Epoch 97 , Loss: 8988.014869689941 , Train MSE: 959.9840800644611
Epoch 98 , Loss: 8987.810989379883 , Train MSE: 960.194767077542
Epoch 99 , Loss: 8987.625129699707 , Train MSE: 960.3958965470945
Validation MSE: 186.46051346620993
split 5 :
Epoch 0 , Loss: 11635.652526855469 , Train MSE: 803.4222733763305
Epoch 1 , Loss: 11610.358322143555 , Train MSE: 804.750488305473
Epoch 2 , Loss: 11500.035415649414 , Train MSE: 803.918244306262
Epoch 3 , Loss: 11424.27865600586 , Train MSE: 804.2962426033877
Epoch 4 , Loss: 11350.572402954102 , Train MSE: 804.7757427030085
Epoch 5 , Loss: 11279.64030456543 , Train MSE: 805.3612854002845
Epoch 6 , Loss: 11207.419952392578 , Train MSE: 806.0043982352606
Epoch 7 , Loss: 11136.167465209961 , Train MSE: 806.7305716970567
Epoch 8 , Loss: 11065.71875 , Train MSE: 807.5380571801225
Epoch 9 , Loss: 10996.261184692383 , Train MSE: 808.436370741753
Epoch 10 , Loss: 10928.333953857422 , Train MSE: 809.4038685251782
Epoch 11 , Loss: 10861.218887329102 , Train MSE: 810.4794033081588
Epoch 12 , Loss: 10794.219436645508 , Train MSE: 811.5734037301384
Epoch 13 , Loss: 10726.708145141602 , Train MSE: 812.7563610752189
Epoch 14 , Loss: 10661.490509033203 , Train MSE: 813.9986969913663
Epoch 15 , Loss: 10595.743591308594 , Train MSE: 815.3104180612303
Epoch 16 , Loss: 10531.850860595703 , Train MSE: 816.6898312895802
Epoch 17 , Loss: 10468.559753417969 , Train MSE: 818.1397178638078
Epoch 18 , Loss: 10408.954208374023 , Train MSE: 819.6569915625254
Epoch 19 , Loss: 10350.026092529297 , Train MSE: 821.2744960520272
Epoch 20 , Loss: 10330.626930236816 , Train MSE: 842.6008571608594
Epoch 21 , Loss: 10268.384353637695 , Train MSE: 825.6745078568155
Epoch 22 , Loss: 10217.506439208984 , Train MSE: 827.358446511811
Epoch 23 , Loss: 10168.018920898438 , Train MSE: 829.0763145001163
Epoch 24 , Loss: 10120.014221191406 , Train MSE: 830.8233711094845
Epoch 25 , Loss: 10073.591331481934 , Train MSE: 832.5944537417705
Epoch 26 , Loss: 10028.854873657227 , Train MSE: 834.3839777513493
Epoch 27 , Loss: 9985.907821655273 , Train MSE: 836.1859748786974
Epoch 28 , Loss: 9944.851287841797 , Train MSE: 837.9942172462205
Epoch 29 , Loss: 9905.775024414062 , Train MSE: 839.8022470917181
Epoch 30 , Loss: 9868.752227783203 , Train MSE: 841.6036031974717
Epoch 31 , Loss: 9833.833633422852 , Train MSE: 843.3920144130626
Epoch 32 , Loss: 9801.04507446289 , Train MSE: 845.1615115747496
Epoch 33 , Loss: 9770.383079528809 , Train MSE: 846.9066900751423
Epoch 34 , Loss: 9741.815490722656 , Train MSE: 848.6227242326669
Epoch 35 , Loss: 9715.288131713867 , Train MSE: 850.3054836568774
Epoch 36 , Loss: 9690.724128723145 , Train MSE: 851.9515270108874
Epoch 37 , Loss: 9668.03450012207 , Train MSE: 853.5580719261316
Epoch 38 , Loss: 9647.119102478027 , Train MSE: 855.1228475290567
Epoch 39 , Loss: 9627.87744140625 , Train MSE: 856.6440828308039
Epoch 40 , Loss: 9610.204902648926 , Train MSE: 858.1203833042097
Epoch 41 , Loss: 9594.004051208496 , Train MSE: 859.5506439084253
Epoch 42 , Loss: 9579.177978515625 , Train MSE: 860.9339409614347
Epoch 43 , Loss: 9565.637283325195 , Train MSE: 862.2695818416033
Epoch 44 , Loss: 9553.295631408691 , Train MSE: 863.5570270947464
Epoch 45 , Loss: 9542.073303222656 , Train MSE: 864.7958820686372
Epoch 46 , Loss: 9531.892921447754 , Train MSE: 865.9859202677507
Epoch 47 , Loss: 9522.68106842041 , Train MSE: 867.1270240027958
Epoch 48 , Loss: 9514.368507385254 , Train MSE: 868.2193337747871
Epoch 49 , Loss: 9506.887042999268 , Train MSE: 869.2630636867472
Epoch 50 , Loss: 9500.172912597656 , Train MSE: 870.258713340124
Epoch 51 , Loss: 9494.163551330566 , Train MSE: 871.2069554026108
Epoch 52 , Loss: 9488.799579620361 , Train MSE: 872.1085829135974
Epoch 53 , Loss: 9484.02430343628 , Train MSE: 872.9646076809515
Epoch 54 , Loss: 9479.782283782959 , Train MSE: 873.7762104560449
Epoch 55 , Loss: 9476.025371551514 , Train MSE: 874.5446928915009
Epoch 56 , Loss: 9472.703411102295 , Train MSE: 875.2714522323188
Epoch 57 , Loss: 9469.773818969727 , Train MSE: 875.957995883609
Epoch 58 , Loss: 9467.1943359375 , Train MSE: 876.6059017389542
Epoch 59 , Loss: 9464.927047729492 , Train MSE: 877.2167852519375
Epoch 60 , Loss: 9462.937412261963 , Train MSE: 877.7923011482096
Epoch 61 , Loss: 9461.195087432861 , Train MSE: 878.3341469832567
Epoch 62 , Loss: 9459.670539855957 , Train MSE: 878.8439245523842
Epoch 63 , Loss: 9458.3388671875 , Train MSE: 879.3232616992459
Epoch 64 , Loss: 9457.176555633545 , Train MSE: 879.7737930233275
Epoch 65 , Loss: 9456.16444015503 , Train MSE: 880.1970638061669
Epoch 66 , Loss: 9455.283203125 , Train MSE: 880.5944960916014
Epoch 67 , Loss: 9454.516384124756 , Train MSE: 880.9675784170813
Epoch 68 , Loss: 9453.851039886475 , Train MSE: 881.3177771378871
Epoch 69 , Loss: 9453.273677825928 , Train MSE: 881.6463511076186
Epoch 70 , Loss: 9452.774112701416 , Train MSE: 881.9545825009204
Epoch 71 , Loss: 9452.340461730957 , Train MSE: 882.2436477279568
Epoch 72 , Loss: 9451.966953277588 , Train MSE: 882.5147295887539
Epoch 73 , Loss: 9451.643882751465 , Train MSE: 882.7688793015553
Epoch 74 , Loss: 9451.365295410156 , Train MSE: 883.0071243667342
Epoch 75 , Loss: 9451.126407623291 , Train MSE: 883.2304657448659
Epoch 76 , Loss: 9450.921096801758 , Train MSE: 883.4397637560947
Epoch 77 , Loss: 9450.744396209717 , Train MSE: 883.6359154988462
Epoch 78 , Loss: 9450.59345626831 , Train MSE: 883.8197606829267
Epoch 79 , Loss: 9450.464748382568 , Train MSE: 883.992009646468
Epoch 80 , Loss: 9450.35489654541 , Train MSE: 884.1533995681297
Epoch 81 , Loss: 9450.262420654297 , Train MSE: 884.3046352209664
Epoch 82 , Loss: 9450.18342590332 , Train MSE: 884.446263738437
Epoch 83 , Loss: 9450.117687225342 , Train MSE: 884.5789781277085
Epoch 84 , Loss: 9450.061779022217 , Train MSE: 884.7032722034813
Epoch 85 , Loss: 9450.015342712402 , Train MSE: 884.8197430521172
Epoch 86 , Loss: 9449.97689819336 , Train MSE: 884.9288188917054
Epoch 87 , Loss: 9449.945213317871 , Train MSE: 885.0309928170437
Epoch 88 , Loss: 9449.91939163208 , Train MSE: 885.1266848373741
Epoch 89 , Loss: 9449.898887634277 , Train MSE: 885.2163330400911
Epoch 90 , Loss: 9449.882061004639 , Train MSE: 885.3002605620306
Epoch 91 , Loss: 9449.869590759277 , Train MSE: 885.3788859740916
Epoch 92 , Loss: 9449.859519958496 , Train MSE: 885.4525067401186
Epoch 93 , Loss: 9449.852348327637 , Train MSE: 885.5214578251671
Epoch 94 , Loss: 9449.846691131592 , Train MSE: 885.5860257990028
Epoch 95 , Loss: 9449.843574523926 , Train MSE: 885.6464988074322
Epoch 96 , Loss: 9449.841789245605 , Train MSE: 885.7031244959869
Epoch 97 , Loss: 9449.841941833496 , Train MSE: 885.7561626256592
Epoch 98 , Loss: 9449.842254638672 , Train MSE: 885.8058049637032
Epoch 99 , Loss: 9449.842948913574 , Train MSE: 885.8523289818614
Validation MSE: 491.40435821431066
Test MSE: 597.4785439708273
Average validation MSE: 716.8174800403372
import matplotlib.pyplot as plt

# The test RMSE was computed once (after the final split); repeat that single
# value so it renders as a flat reference line across all five splits.
test_RMSEs_nn_avg = [test_RMSEs_nn_avg[0]] * 5

splits = range(1, 6)
for series, color, label in (
    (train_RMSEs_nn_avg, 'red', 'Training RMSEs'),
    (valid_RMSEs_nn_avg, 'green', 'Validation RMSEs'),
    (test_RMSEs_nn_avg, 'blue', 'Testing RMSEs'),
):
    plt.plot(splits, series, color=color, label=label)
plt.legend(loc='best')
plt.title('Train/Valid/Test RMSEs Across the 5 Splits')
plt.xlabel('Splits')
plt.ylabel('RMSE')
plt.show()
Train the model with a single county's data
# Cross-validate the LSTM on single-county data.
#
# For each candidate hidden size and each county FIPS code:
#   1. slice that county's rows out of `df` and mean-center every column,
#   2. hold out all rows from 2021-09-01 onward as the test set,
#   3. run `cv` TimeSeriesSplit folds on the remainder, training a fresh
#      LSTM per fold with SGD on a sqrt(MSE) (i.e. RMSE) loss,
#   4. after the final fold, score that fold's model on the test set and
#      plot predictions vs. ground truth, both shifted back by the
#      pre-centering target mean so the figure is in real case counts.
#
# NOTE(review): every metric printed under an "MSE" label is actually an
# RMSE (np.sqrt(mean_squared_error(...))). The label strings are left
# unchanged so new logs stay comparable with previously captured output.
cv = 5             # number of TimeSeriesSplit folds
EPOCHS = 100       # training epochs per fold
lr = 1             # NOTE(review): lr=1 is very aggressive for SGD — confirm intended
HIDDEN_DIMs = [4]  # LSTM hidden sizes to try
counties = ['06037', '06013', '06073', '06075']
county_avg_valid_MSEs = []  # mean validation RMSE per (hidden_dim, county) run

for HIDDEN_DIM in HIDDEN_DIMs:
    for county in counties:
        print("County:", county)
        # .copy() gives an independent frame so the in-place centering below
        # does not write through a view of `df` (fixes the
        # SettingWithCopyWarning observed in earlier runs).
        data = df[df["fips"] == county].copy()
        headers = ['dailyCases_chng_t-1', 'dailyCases_chng_t-2',
                   'outpatient_cli_t-1', 'outpatient_cli_t-2', 'hospitalAdm_claim_t-1',
                   'hospitalAdm_claim_t-2', 'googleSym_sum_t-1', 'googleSym_sum_t-2',
                   'doctorVisits_t-1', 'doctorVisits_t-2', 'dailyCases_t']
        # Target mean before centering; added back when plotting test output.
        mean_dailyCases = data['dailyCases_t'].astype(float).mean()
        # Mean-center every feature column and the target.
        for column_name in headers:
            data[column_name] = data[column_name].astype(float)
            data[column_name] = data[column_name] - data[column_name].mean()

        # --- test split: everything from 2021-09-01 onward ---
        idx = data.index[data["time"] == np.datetime64(date(2021, 9, 1))]
        test_data = data.loc[idx[0]:]
        y_test = test_data["dailyCases_t"]
        y_test = np.array(y_test, dtype=np.float32)
        X_test = test_data.drop(columns=['dailyCases_t', 'time', 'fips'])
        X_test = np.array(X_test, dtype=np.float32)
        # Reshape to (batch, seq_len=1, n_features) for the LSTM.
        X_test = X_test.reshape(X_test.shape[0], 1, X_test.shape[1])
        test_set = TensorDataset(torch.from_numpy(X_test),
                                 torch.from_numpy(y_test))
        test_dataloader = torch.utils.data.DataLoader(test_set, batch_size=16, shuffle=False)

        # --- training pool: everything before the test period ---
        train_data = data.loc[:idx[0] - 1]
        y = train_data["dailyCases_t"]
        y = np.array(y, dtype=np.float32)
        X = train_data.drop(columns=['dailyCases_t', 'time', 'fips'])
        X = np.array(X, dtype=np.float32)
        X = X.reshape(X.shape[0], 1, X.shape[1])

        LEARNING_RATE = lr
        i = 0
        tscv = TimeSeriesSplit(n_splits=cv)
        avg_valid_mse = 0.0
        for train_index, valid_index in tscv.split(X):
            X_train, X_valid = X[train_index], X[valid_index]
            y_train, y_valid = y[train_index], y[valid_index]
            training_set = TensorDataset(torch.from_numpy(X_train),
                                         torch.from_numpy(y_train))
            training_dataloader = torch.utils.data.DataLoader(training_set, batch_size=16, shuffle=False)
            valid_set = TensorDataset(torch.from_numpy(X_valid),
                                      torch.from_numpy(y_valid))
            validation_dataloader = torch.utils.data.DataLoader(valid_set, batch_size=16, shuffle=False)
            print("split", i + 1, ":")

            # Fresh model/optimizer per fold so folds do not share weights.
            model = LSTM(input_dim=10, hidden_dim=HIDDEN_DIM, n_layers=1)
            optimizer = torch.optim.SGD(model.parameters(), lr=LEARNING_RATE)
            loss_fn = nn.MSELoss()

            model.train()
            for epoch in range(EPOCHS):
                epoch_loss = 0.0
                train_prediction = []
                train_ground_truth = []
                for inputs, labels in training_dataloader:
                    optimizer.zero_grad()
                    output = model(inputs)
                    # RMSE loss: sqrt of the batch MSE, with labels reshaped
                    # to match the model output's shape.
                    loss = torch.sqrt(loss_fn(output, torch.reshape(labels, output.shape)))
                    loss.backward()
                    optimizer.step()
                    epoch_loss += loss.item()
                    train_prediction = np.concatenate((train_prediction, torch.flatten(output).detach().numpy()))
                    train_ground_truth = np.concatenate((train_ground_truth, labels.detach().numpy()))
                # Epoch-level train RMSE over all batches (printed as "Train MSE").
                train_mse = np.sqrt(mean_squared_error(train_prediction, train_ground_truth))
                print("Epoch", epoch, ", Loss:", epoch_loss, ", Train MSE:", train_mse)

            # --- validation for this fold ---
            model.eval()
            valid_mse = 0.0
            valid_prediction = []
            valid_ground_truth = []
            with torch.set_grad_enabled(False):
                for inputs, labels in validation_dataloader:
                    output = model(inputs)
                    valid_prediction = np.concatenate((valid_prediction, torch.flatten(output).detach().numpy()))
                    valid_ground_truth = np.concatenate((valid_ground_truth, labels.detach().numpy()))
            valid_mse = np.sqrt(mean_squared_error(valid_prediction, valid_ground_truth))
            avg_valid_mse += valid_mse
            print("Validation MSE: ", valid_mse)

            i += 1
            if i == cv:
                # Final fold: evaluate this fold's model on the held-out test set.
                test_prediction = []
                test_ground_truth = []
                test_mse = 0.0
                with torch.set_grad_enabled(False):
                    for inputs, labels in test_dataloader:
                        output = model(inputs)
                        test_prediction = np.concatenate((test_prediction, torch.flatten(output).detach().numpy()))
                        test_ground_truth = np.concatenate((test_ground_truth, labels.detach().numpy()))
                test_mse = np.sqrt(mean_squared_error(test_prediction, test_ground_truth))
                print("Test MSE: ", test_mse)
                # Undo the mean-centering so the plot shows real daily cases.
                plt.plot(np.arange(len(test_prediction)), test_prediction + mean_dailyCases, label="prediction")
                plt.plot(np.arange(len(test_ground_truth)), test_ground_truth + mean_dailyCases, label="ground truth")
                plt.title("Test data Prediction")
                plt.legend()
                plt.show()
                print()
        avg_valid_mse /= cv
        county_avg_valid_MSEs.append(avg_valid_mse)
        print("Average validation MSE:", avg_valid_mse)
        print()
County: 06037
split 1 :
Epoch 0 , Loss: 6722.134765625 , Train MSE: 1701.1381417878724
Epoch 1 , Loss: 6671.0380859375 , Train MSE: 1688.5329965555404
Epoch 2 , Loss: 6601.95849609375 , Train MSE: 1671.2931944031006
Epoch 3 , Loss: 6531.9130859375 , Train MSE: 1653.8151126653838
Epoch 4 , Loss: 6463.395751953125 , Train MSE: 1636.6161159555909
Epoch 5 , Loss: 6393.43359375 , Train MSE: 1619.1453780952595
Epoch 6 , Loss: 6325.011962890625 , Train MSE: 1601.9813615518303
Epoch 7 , Loss: 6256.0811767578125 , Train MSE: 1584.7272499036603
Epoch 8 , Loss: 6187.3836669921875 , Train MSE: 1567.5064388553603
Epoch 9 , Loss: 6119.1929931640625 , Train MSE: 1550.4084098479796
Epoch 10 , Loss: 6051.36767578125 , Train MSE: 1533.3915751930717
Epoch 11 , Loss: 5983.8521728515625 , Train MSE: 1516.445969980852
Epoch 12 , Loss: 5916.656494140625 , Train MSE: 1499.5737280486683
Epoch 13 , Loss: 5849.7913818359375 , Train MSE: 1482.7772100348877
Epoch 14 , Loss: 5783.2677001953125 , Train MSE: 1466.0588529771576
Epoch 15 , Loss: 5717.0965576171875 , Train MSE: 1449.4211500182923
Epoch 16 , Loss: 5651.2901611328125 , Train MSE: 1432.8666661717887
Epoch 17 , Loss: 5585.8592529296875 , Train MSE: 1416.3980254978442
Epoch 18 , Loss: 5520.81689453125 , Train MSE: 1400.0179107189756
Epoch 19 , Loss: 5456.17431640625 , Train MSE: 1383.729055484174
Epoch 20 , Loss: 5391.945251464844 , Train MSE: 1367.5343004980655
Epoch 21 , Loss: 5328.14208984375 , Train MSE: 1351.4365373175622
Epoch 22 , Loss: 5264.777893066406 , Train MSE: 1335.4387588272518
Epoch 23 , Loss: 5201.8665771484375 , Train MSE: 1319.543987693626
Epoch 24 , Loss: 5139.421203613281 , Train MSE: 1303.7553090619035
Epoch 25 , Loss: 5077.456237792969 , Train MSE: 1288.075927164238
Epoch 26 , Loss: 5015.9854736328125 , Train MSE: 1272.5090909532514
Epoch 27 , Loss: 4955.0238037109375 , Train MSE: 1257.0581126222596
Epoch 28 , Loss: 4894.585388183594 , Train MSE: 1241.7263497494127
Epoch 29 , Loss: 4834.6849365234375 , Train MSE: 1226.517260831453
Epoch 30 , Loss: 4775.337219238281 , Train MSE: 1211.4343249176109
/shared-libs/python3.7/py-core/lib/python3.7/site-packages/ipykernel_launcher.py:21: SettingWithCopyWarning:
A value is trying to be set on a copy of a slice from a DataFrame.
Try using .loc[row_indexer,col_indexer] = value instead
See the caveats in the documentation: https://pandas.pydata.org/pandas-docs/stable/user_guide/indexing.html#returning-a-view-versus-a-copy
/shared-libs/python3.7/py-core/lib/python3.7/site-packages/ipykernel_launcher.py:22: SettingWithCopyWarning:
A value is trying to be set on a copy of a slice from a DataFrame.
Try using .loc[row_indexer,col_indexer] = value instead
See the caveats in the documentation: https://pandas.pydata.org/pandas-docs/stable/user_guide/indexing.html#returning-a-view-versus-a-copy
Epoch 31 , Loss: 4716.557434082031 , Train MSE: 1196.481100822985
Epoch 32 , Loss: 4658.360534667969 , Train MSE: 1181.6611709727458
Epoch 33 , Loss: 4600.761291503906 , Train MSE: 1166.9781580058782
Epoch 34 , Loss: 4543.7747802734375 , Train MSE: 1152.4357261408047
Epoch 35 , Loss: 4487.415832519531 , Train MSE: 1138.0375652690173
Epoch 36 , Loss: 4431.699279785156 , Train MSE: 1123.7873444719976
Epoch 37 , Loss: 4376.6392822265625 , Train MSE: 1109.6887875830284
Epoch 38 , Loss: 4322.2503662109375 , Train MSE: 1095.7456665458085
Epoch 39 , Loss: 4268.546936035156 , Train MSE: 1081.9616610308644
Epoch 40 , Loss: 4215.542419433594 , Train MSE: 1068.3404670481484
Epoch 41 , Loss: 4163.250061035156 , Train MSE: 1054.8857472560176
Epoch 42 , Loss: 4111.6822509765625 , Train MSE: 1041.6011084774384
Epoch 43 , Loss: 4060.85205078125 , Train MSE: 1028.490116325023
Epoch 44 , Loss: 4010.7708129882812 , Train MSE: 1015.5563516769158
Epoch 45 , Loss: 3961.4498901367188 , Train MSE: 1002.8032316789595
Epoch 46 , Loss: 3912.899169921875 , Train MSE: 990.234063072621
Epoch 47 , Loss: 3865.129150390625 , Train MSE: 977.8521959202778
Epoch 48 , Loss: 3818.1482543945312 , Train MSE: 965.6607472364527
Epoch 49 , Loss: 3771.9644775390625 , Train MSE: 953.6627360169314
Epoch 50 , Loss: 3726.5859375 , Train MSE: 941.8611957906743
Epoch 51 , Loss: 3682.0186767578125 , Train MSE: 930.258883883442
Epoch 52 , Loss: 3638.2684936523438 , Train MSE: 918.8584159375355
Epoch 53 , Loss: 3595.340087890625 , Train MSE: 907.6623003112852
Epoch 54 , Loss: 3553.2373046875 , Train MSE: 896.6729041303046
Epoch 55 , Loss: 3511.9627685546875 , Train MSE: 885.8923627772224
Epoch 56 , Loss: 3471.5189208984375 , Train MSE: 875.3226953104908
Epoch 57 , Loss: 3431.9071655273438 , Train MSE: 864.9657629816153
Epoch 58 , Loss: 3393.1273803710938 , Train MSE: 854.8231638898628
Epoch 59 , Loss: 3355.1793212890625 , Train MSE: 844.8964240727439
Epoch 60 , Loss: 3318.0620727539062 , Train MSE: 835.1868281401238
Epoch 61 , Loss: 3281.7732543945312 , Train MSE: 825.6954310520798
Epoch 62 , Loss: 3246.3099975585938 , Train MSE: 816.423128983407
Epoch 63 , Loss: 3211.6688842773438 , Train MSE: 807.3705763379049
Epoch 64 , Loss: 3177.8455810546875 , Train MSE: 798.5382852448494
Epoch 65 , Loss: 3144.8353881835938 , Train MSE: 789.9265315985043
Epoch 66 , Loss: 3112.6329345703125 , Train MSE: 781.5354750446327
Epoch 67 , Loss: 3081.232421875 , Train MSE: 773.3649724477488
Epoch 68 , Loss: 3050.6272583007812 , Train MSE: 765.4147818304607
Epoch 69 , Loss: 3020.8108520507812 , Train MSE: 757.6844096763809
Epoch 70 , Loss: 2991.775390625 , Train MSE: 750.1731018811427
Epoch 71 , Loss: 2963.5131225585938 , Train MSE: 742.879938840395
Epoch 72 , Loss: 2936.0153198242188 , Train MSE: 735.803698369092
Epoch 73 , Loss: 2909.2723999023438 , Train MSE: 728.9428148689474
Epoch 74 , Loss: 2883.2750854492188 , Train MSE: 722.2956965481154
Epoch 75 , Loss: 2858.0172119140625 , Train MSE: 715.8613324178888
Epoch 76 , Loss: 2833.3704223632812 , Train MSE: 709.6091890637654
Epoch 77 , Loss: 2809.5260009765625 , Train MSE: 703.5851380704767
Epoch 78 , Loss: 2786.3861694335938 , Train MSE: 697.7659776042569
Epoch 79 , Loss: 2763.9599609375 , Train MSE: 692.1540451452152
Epoch 80 , Loss: 2742.1539916992188 , Train MSE: 686.725902870012
Epoch 81 , Loss: 2721.0094604492188 , Train MSE: 681.4913991880547
Epoch 82 , Loss: 2700.4384765625 , Train MSE: 676.427536379714
Epoch 83 , Loss: 2680.2371215820312 , Train MSE: 671.48044035732
Epoch 84 , Loss: 2660.0042114257812 , Train MSE: 666.5424533158578
Epoch 85 , Loss: 2642.56103515625 , Train MSE: 662.3570458755429
Epoch 86 , Loss: 2621.921875 , Train MSE: 657.3378169228447
Epoch 87 , Loss: 2609.7763061523438 , Train MSE: 654.5878247987855
Epoch 88 , Loss: 2592.4317626953125 , Train MSE: 650.4898795750715
Epoch 89 , Loss: 2576.3712768554688 , Train MSE: 646.7397477656785
Epoch 90 , Loss: 2560.941650390625 , Train MSE: 643.1692173203926
Epoch 91 , Loss: 2546.0985107421875 , Train MSE: 639.7659510608763
Epoch 92 , Loss: 2531.810791015625 , Train MSE: 636.5207691400208
Epoch 93 , Loss: 2518.0423583984375 , Train MSE: 633.4226194246437
Epoch 94 , Loss: 2504.7183227539062 , Train MSE: 630.4487828160869
Epoch 95 , Loss: 2491.5435180664062 , Train MSE: 627.5118077128038
Epoch 96 , Loss: 2476.0307006835938 , Train MSE: 623.878511623723
Epoch 97 , Loss: 2469.4915771484375 , Train MSE: 622.85473857397
Epoch 98 , Loss: 2446.1803588867188 , Train MSE: 617.0332164289925
Epoch 99 , Loss: 2444.1781005859375 , Train MSE: 617.460519415918
Validation MSE: 8819.122081804027
split 2 :
Epoch 0 , Loss: 35166.81066894531 , Train MSE: 5632.475578136789
Epoch 1 , Loss: 35163.93908691406 , Train MSE: 5633.367463031182
Epoch 2 , Loss: 35157.76574707031 , Train MSE: 5634.596810917998
Epoch 3 , Loss: 35134.389892578125 , Train MSE: 5633.4461446172045
Epoch 4 , Loss: 35123.07629394531 , Train MSE: 5634.03555715442
Epoch 5 , Loss: 35070.69958496094 , Train MSE: 5630.919142359271
Epoch 6 , Loss: 35030.60827636719 , Train MSE: 5629.210629255101
Epoch 7 , Loss: 34989.76403808594 , Train MSE: 5627.870143832739
Epoch 8 , Loss: 34947.073486328125 , Train MSE: 5626.53319063484
Epoch 9 , Loss: 34906.388671875 , Train MSE: 5625.3476017229705
Epoch 10 , Loss: 34923.66027832031 , Train MSE: 5629.174448992664
Epoch 11 , Loss: 34953.04113769531 , Train MSE: 5633.964719738975
Epoch 12 , Loss: 34811.8876953125 , Train MSE: 5624.047917222561
Epoch 13 , Loss: 34772.41174316406 , Train MSE: 5622.978757128889
Epoch 14 , Loss: 34732.986083984375 , Train MSE: 5621.88474557443
Epoch 15 , Loss: 34693.15148925781 , Train MSE: 5620.67205807264
Epoch 16 , Loss: 34653.071533203125 , Train MSE: 5619.392672112995
Epoch 17 , Loss: 34613.002685546875 , Train MSE: 5618.1131962368645
Epoch 18 , Loss: 34572.65173339844 , Train MSE: 5616.766206529663
Epoch 19 , Loss: 34532.394287109375 , Train MSE: 5615.45275118171
Epoch 20 , Loss: 34492.80969238281 , Train MSE: 5614.308849009198
Epoch 21 , Loss: 34453.651611328125 , Train MSE: 5613.258848854634
Epoch 22 , Loss: 34414.7236328125 , Train MSE: 5612.246597553075
Epoch 23 , Loss: 34375.977294921875 , Train MSE: 5611.258953837103
Epoch 24 , Loss: 34337.403564453125 , Train MSE: 5610.29128035729
Epoch 25 , Loss: 34298.992919921875 , Train MSE: 5609.341431325323
Epoch 26 , Loss: 34260.74768066406 , Train MSE: 5608.408238250448
Epoch 27 , Loss: 34222.664306640625 , Train MSE: 5607.4909778693545
Epoch 28 , Loss: 34184.74499511719 , Train MSE: 5606.589159300175
Epoch 29 , Loss: 34146.98815917969 , Train MSE: 5605.702437206581
Epoch 30 , Loss: 34109.3974609375 , Train MSE: 5604.830539447181
Epoch 31 , Loss: 34071.97448730469 , Train MSE: 5603.973241450193
Epoch 32 , Loss: 34034.71691894531 , Train MSE: 5603.130344120989
Epoch 33 , Loss: 33997.630615234375 , Train MSE: 5602.301688655128
Epoch 34 , Loss: 33960.71154785156 , Train MSE: 5601.487093559334
Epoch 35 , Loss: 33923.964111328125 , Train MSE: 5600.686266815313
Epoch 36 , Loss: 33887.4775390625 , Train MSE: 5599.914075015966
Epoch 37 , Loss: 33850.83154296875 , Train MSE: 5599.078906009487
Epoch 38 , Loss: 33814.61047363281 , Train MSE: 5598.3209816055
Epoch 39 , Loss: 33778.56774902344 , Train MSE: 5597.576368949341
Epoch 40 , Loss: 33742.70544433594 , Train MSE: 5596.844969940671
Epoch 41 , Loss: 33707.021484375 , Train MSE: 5596.12667421358
Epoch 42 , Loss: 33671.52197265625 , Train MSE: 5595.421369444715
Epoch 43 , Loss: 33636.20703125 , Train MSE: 5594.728954754646
Epoch 44 , Loss: 33601.07995605469 , Train MSE: 5594.0493083079145
Epoch 45 , Loss: 33566.137939453125 , Train MSE: 5593.382314824419
Epoch 46 , Loss: 33531.3876953125 , Train MSE: 5592.727863275114
Epoch 47 , Loss: 33496.828369140625 , Train MSE: 5592.085831875465
Epoch 48 , Loss: 33462.46203613281 , Train MSE: 5591.456106184977
Epoch 49 , Loss: 33428.29113769531 , Train MSE: 5590.8385643436595
Epoch 50 , Loss: 33394.31677246094 , Train MSE: 5590.233078222904
Epoch 51 , Loss: 33360.54333496094 , Train MSE: 5589.639529434673
Epoch 52 , Loss: 33326.968017578125 , Train MSE: 5589.057785258935
Epoch 53 , Loss: 33293.59729003906 , Train MSE: 5588.487719427303
Epoch 54 , Loss: 33260.42932128906 , Train MSE: 5587.9291982788245
Epoch 55 , Loss: 33227.46813964844 , Train MSE: 5587.382097351974
Epoch 56 , Loss: 33194.7158203125 , Train MSE: 5586.846276554015
Epoch 57 , Loss: 33162.17590332031 , Train MSE: 5586.32159409529
Epoch 58 , Loss: 33129.846923828125 , Train MSE: 5585.807921695444
Epoch 59 , Loss: 33097.732666015625 , Train MSE: 5585.305109510576
Epoch 60 , Loss: 33065.83703613281 , Train MSE: 5584.813022660995
Epoch 61 , Loss: 33034.15637207031 , Train MSE: 5584.331502161079
Epoch 62 , Loss: 33002.701599121094 , Train MSE: 5583.860398386225
Epoch 63 , Loss: 32971.46472167969 , Train MSE: 5583.399517015949
Epoch 64 , Loss: 32940.43933105469 , Train MSE: 5582.948378658938
Epoch 65 , Loss: 32908.79553222656 , Train MSE: 5582.487060790078
Epoch 66 , Loss: 32878.88397216797 , Train MSE: 5582.075765647678
Epoch 67 , Loss: 32847.77764892578 , Train MSE: 5581.6396803876705
Epoch 68 , Loss: 32817.224182128906 , Train MSE: 5581.221201829968
Epoch 69 , Loss: 32786.96405029297 , Train MSE: 5580.813863172067
Epoch 70 , Loss: 32756.963317871094 , Train MSE: 5580.416590361213
Epoch 71 , Loss: 32727.209106445312 , Train MSE: 5580.0288988436205
Epoch 72 , Loss: 32697.700134277344 , Train MSE: 5579.650446954254
Epoch 73 , Loss: 32668.434814453125 , Train MSE: 5579.280956342891
Epoch 74 , Loss: 32639.414123535156 , Train MSE: 5578.920198252161
Epoch 75 , Loss: 32610.64276123047 , Train MSE: 5578.567956391752
Epoch 76 , Loss: 32582.119140625 , Train MSE: 5578.224021384297
Epoch 77 , Loss: 32553.84600830078 , Train MSE: 5577.888209848328
Epoch 78 , Loss: 32525.830505371094 , Train MSE: 5577.560318520648
Epoch 79 , Loss: 32498.069396972656 , Train MSE: 5577.240154471195
Epoch 80 , Loss: 32470.571044921875 , Train MSE: 5576.927520530215
Epoch 81 , Loss: 32443.336364746094 , Train MSE: 5576.622212478501
Epoch 82 , Loss: 32416.368408203125 , Train MSE: 5576.32404037309
Epoch 83 , Loss: 32389.671813964844 , Train MSE: 5576.032804985668
Epoch 84 , Loss: 32363.24786376953 , Train MSE: 5575.7482873842855
Epoch 85 , Loss: 32337.101684570312 , Train MSE: 5575.470286968748
Epoch 86 , Loss: 32311.236755371094 , Train MSE: 5575.198595390657
Epoch 87 , Loss: 32285.657958984375 , Train MSE: 5574.932997207864
Epoch 88 , Loss: 32260.3642578125 , Train MSE: 5574.67326389599
Epoch 89 , Loss: 32235.363891601562 , Train MSE: 5574.419179133298
Epoch 90 , Loss: 32210.6572265625 , Train MSE: 5574.170511380514
Epoch 91 , Loss: 32186.25067138672 , Train MSE: 5573.927039609206
Epoch 92 , Loss: 32162.143310546875 , Train MSE: 5573.688523384783
Epoch 93 , Loss: 32138.343688964844 , Train MSE: 5573.454733712658
Epoch 94 , Loss: 32114.85223388672 , Train MSE: 5573.225417112353
Epoch 95 , Loss: 32091.673278808594 , Train MSE: 5573.000335071154
Epoch 96 , Loss: 32068.809997558594 , Train MSE: 5572.779247797128
Epoch 97 , Loss: 32046.265747070312 , Train MSE: 5572.561887677294
Epoch 98 , Loss: 32024.04229736328 , Train MSE: 5572.348005520813
Epoch 99 , Loss: 32002.145874023438 , Train MSE: 5572.137343739504
Validation MSE: 6116.159422512414
split 3 :
Epoch 0 , Loss: 48646.02722167969 , Train MSE: 5710.282006300173
Epoch 1 , Loss: 48620.83239746094 , Train MSE: 5711.522349832651
Epoch 2 , Loss: 48536.00256347656 , Train MSE: 5708.890096376984
Epoch 3 , Loss: 48428.331298828125 , Train MSE: 5706.319661781275
Epoch 4 , Loss: 48323.24841308594 , Train MSE: 5704.387767449407
Epoch 5 , Loss: 48218.957275390625 , Train MSE: 5702.611881793227
Epoch 6 , Loss: 48112.99304199219 , Train MSE: 5700.786819619084
Epoch 7 , Loss: 48006.35400390625 , Train MSE: 5699.029610428479
Epoch 8 , Loss: 47962.21838378906 , Train MSE: 5701.395766280326
Epoch 9 , Loss: 47846.63293457031 , Train MSE: 5696.4672310969445
Epoch 10 , Loss: 47732.87390136719 , Train MSE: 5695.115616206496
Epoch 11 , Loss: 47736.292236328125 , Train MSE: 5699.963078373251
Epoch 12 , Loss: 47581.40930175781 , Train MSE: 5692.495823951202
Epoch 13 , Loss: 47442.67224121094 , Train MSE: 5690.175242620039
Epoch 14 , Loss: 47330.591796875 , Train MSE: 5688.398794828483
Epoch 15 , Loss: 47526.16809082031 , Train MSE: 5702.193907307089
Epoch 16 , Loss: 47186.80212402344 , Train MSE: 5687.315501486913
Epoch 17 , Loss: 47061.99328613281 , Train MSE: 5685.184056930799
Epoch 18 , Loss: 46961.75134277344 , Train MSE: 5683.43110590791
Epoch 19 , Loss: 46863.74072265625 , Train MSE: 5682.040958665741
Epoch 20 , Loss: 46766.82824707031 , Train MSE: 5680.69318841828
Epoch 21 , Loss: 46670.61279296875 , Train MSE: 5679.401179613185
Epoch 22 , Loss: 46575.04455566406 , Train MSE: 5678.150888136849
Epoch 23 , Loss: 46480.1005859375 , Train MSE: 5676.936933232441
Epoch 24 , Loss: 46385.78527832031 , Train MSE: 5675.7571324522505
Epoch 25 , Loss: 46292.09753417969 , Train MSE: 5674.610062638806
Epoch 26 , Loss: 46199.052490234375 , Train MSE: 5673.494985655321
Epoch 27 , Loss: 46106.650390625 , Train MSE: 5672.4112696189495
Epoch 28 , Loss: 46014.90478515625 , Train MSE: 5671.358623502693
Epoch 29 , Loss: 45923.82373046875 , Train MSE: 5670.336891666638
Epoch 30 , Loss: 45833.42614746094 , Train MSE: 5669.346630252703
Epoch 31 , Loss: 45743.868408203125 , Train MSE: 5668.391023500269
Epoch 32 , Loss: 45654.88800048828 , Train MSE: 5667.457440654021
Epoch 33 , Loss: 45566.54962158203 , Train MSE: 5666.550859076933
Epoch 34 , Loss: 45477.674255371094 , Train MSE: 5665.670322260261
Epoch 35 , Loss: 45474.70861816406 , Train MSE: 5666.667943552683
Epoch 36 , Loss: 45318.49493408203 , Train MSE: 5664.402350435038
Epoch 37 , Loss: 45225.81024169922 , Train MSE: 5663.347721437806
Epoch 38 , Loss: 45140.065185546875 , Train MSE: 5662.537894284004
Epoch 39 , Loss: 45056.097900390625 , Train MSE: 5661.733531864169
Epoch 40 , Loss: 44973.17840576172 , Train MSE: 5660.982734961548
Epoch 41 , Loss: 44891.16534423828 , Train MSE: 5660.265116253645
Epoch 42 , Loss: 44810.044677734375 , Train MSE: 5659.575524934493
Epoch 43 , Loss: 44729.82452392578 , Train MSE: 5658.911564003626
Epoch 44 , Loss: 44650.518615722656 , Train MSE: 5658.271730812074
Epoch 45 , Loss: 44572.14538574219 , Train MSE: 5657.654910307584
Epoch 46 , Loss: 44494.723083496094 , Train MSE: 5657.060150947574
Epoch 47 , Loss: 44418.27209472656 , Train MSE: 5656.486588203755
Epoch 48 , Loss: 44342.81610107422 , Train MSE: 5655.933408324264
Epoch 49 , Loss: 44268.37225341797 , Train MSE: 5655.399810067257
Epoch 50 , Loss: 44194.96533203125 , Train MSE: 5654.884981116123
Epoch 51 , Loss: 44122.61798095703 , Train MSE: 5654.3881128404455
Epoch 52 , Loss: 44051.354919433594 , Train MSE: 5653.9083858100585
Epoch 53 , Loss: 43981.19982910156 , Train MSE: 5653.444972204202
Epoch 54 , Loss: 43912.176330566406 , Train MSE: 5652.99699942771
Epoch 55 , Loss: 43844.308837890625 , Train MSE: 5652.563588743662
Epoch 56 , Loss: 43777.624938964844 , Train MSE: 5652.143827225812
Epoch 57 , Loss: 43712.14636230469 , Train MSE: 5651.736792284843
Epoch 58 , Loss: 43647.897216796875 , Train MSE: 5651.341508374392
Epoch 59 , Loss: 43584.90539550781 , Train MSE: 5650.957000721419
Epoch 60 , Loss: 43523.188232421875 , Train MSE: 5650.58225104754
Epoch 61 , Loss: 43462.77331542969 , Train MSE: 5650.216215317952
Epoch 62 , Loss: 43403.68273925781 , Train MSE: 5649.8578470338225
Epoch 63 , Loss: 43345.935302734375 , Train MSE: 5649.5060515726045
Epoch 64 , Loss: 43289.552734375 , Train MSE: 5649.1597289461615
Epoch 65 , Loss: 43234.55065917969 , Train MSE: 5648.817749981887
Epoch 66 , Loss: 43180.94982910156 , Train MSE: 5648.478981210318
Epoch 67 , Loss: 43128.760803222656 , Train MSE: 5648.142263083787
Epoch 68 , Loss: 43077.99621582031 , Train MSE: 5647.806440229039
Epoch 69 , Loss: 43028.666015625 , Train MSE: 5647.470346735851
Epoch 70 , Loss: 42980.774658203125 , Train MSE: 5647.132823291406
Epoch 71 , Loss: 42934.331298828125 , Train MSE: 5646.792704475715
Epoch 72 , Loss: 42889.332580566406 , Train MSE: 5646.448842356119
Epoch 73 , Loss: 42845.776916503906 , Train MSE: 5646.1000997083365
Epoch 74 , Loss: 42803.65899658203 , Train MSE: 5645.7453616284165
Epoch 75 , Loss: 42762.966735839844 , Train MSE: 5645.383554387107
Epoch 76 , Loss: 42723.69201660156 , Train MSE: 5645.013614588392
Epoch 77 , Loss: 42685.817626953125 , Train MSE: 5644.634531084252
Epoch 78 , Loss: 42649.319091796875 , Train MSE: 5644.245329595344
Epoch 79 , Loss: 42614.179595947266 , Train MSE: 5643.845090862661
Epoch 80 , Loss: 42580.369384765625 , Train MSE: 5643.4329425144315
Epoch 81 , Loss: 42547.858459472656 , Train MSE: 5643.0080767184
Epoch 82 , Loss: 42516.61520385742 , Train MSE: 5642.5697407622565
Epoch 83 , Loss: 42486.60534667969 , Train MSE: 5642.117242786939
Epoch 84 , Loss: 42457.79229736328 , Train MSE: 5641.649955134908
Epoch 85 , Loss: 42430.132720947266 , Train MSE: 5641.16732367399
Epoch 86 , Loss: 42403.584381103516 , Train MSE: 5640.668862392957
Epoch 87 , Loss: 42378.1064453125 , Train MSE: 5640.1541355100835
Epoch 88 , Loss: 42353.64569091797 , Train MSE: 5639.62275927143
Epoch 89 , Loss: 42330.15051269531 , Train MSE: 5639.074407278279
Epoch 90 , Loss: 42307.55383300781 , Train MSE: 5638.508724071513
Epoch 91 , Loss: 42285.749267578125 , Train MSE: 5637.925209110827
Epoch 92 , Loss: 42264.47967529297 , Train MSE: 5637.322536365843
Epoch 93 , Loss: 42243.52996826172 , Train MSE: 5636.701135405468
Epoch 94 , Loss: 42224.108978271484 , Train MSE: 5636.077366416923
Epoch 95 , Loss: 42205.45852661133 , Train MSE: 5635.435155159592
Epoch 96 , Loss: 42188.03726196289 , Train MSE: 5634.78458603664
Epoch 97 , Loss: 42170.9079284668 , Train MSE: 5634.103675035093
Epoch 98 , Loss: 42155.150939941406 , Train MSE: 5633.429670731579
Epoch 99 , Loss: 42137.492095947266 , Train MSE: 5632.697051592046
Validation MSE: 699.0602199319095
split 4 :
Epoch 0 , Loss: 58041.55029296875 , Train MSE: 5082.795701175495
Epoch 1 , Loss: 57847.07946777344 , Train MSE: 5082.972920268925
Epoch 2 , Loss: 57513.34509277344 , Train MSE: 5075.710752959831
Epoch 3 , Loss: 57142.24157714844 , Train MSE: 5066.9367711226205
Epoch 4 , Loss: 56786.54162597656 , Train MSE: 5060.862660022097
Epoch 5 , Loss: 56436.127197265625 , Train MSE: 5054.92137879799
Epoch 6 , Loss: 56080.79138183594 , Train MSE: 5048.917306558129
Epoch 7 , Loss: 55737.255126953125 , Train MSE: 5043.564996910378
Epoch 8 , Loss: 56450.99719238281 , Train MSE: 5142.869769248034
Epoch 9 , Loss: 55362.00598144531 , Train MSE: 5060.429937948283
Epoch 10 , Loss: 54739.96276855469 , Train MSE: 5030.312753467736
Epoch 11 , Loss: 54312.42822265625 , Train MSE: 5022.653485019178
Epoch 12 , Loss: 53962.09069824219 , Train MSE: 5017.83115218298
Epoch 13 , Loss: 53755.24719238281 , Train MSE: 5021.057767491568
Epoch 14 , Loss: 53254.130126953125 , Train MSE: 5008.473228904538
Epoch 15 , Loss: 52898.199768066406 , Train MSE: 5004.041140222237
Epoch 16 , Loss: 52878.67956542969 , Train MSE: 5016.168614275942
Epoch 17 , Loss: 52298.740295410156 , Train MSE: 4997.380796038573
Epoch 18 , Loss: 51980.283264160156 , Train MSE: 4993.914058410966
Epoch 19 , Loss: 51639.560791015625 , Train MSE: 4990.304142810683
Epoch 20 , Loss: 51306.91961669922 , Train MSE: 4986.9037798449235
Epoch 21 , Loss: 50981.63555908203 , Train MSE: 4983.777250312847
Epoch 22 , Loss: 50662.677978515625 , Train MSE: 4980.861759438192
Epoch 23 , Loss: 50348.744689941406 , Train MSE: 4978.138298684566
Epoch 24 , Loss: 50040.07354736328 , Train MSE: 4975.602874825375
Epoch 25 , Loss: 49736.916931152344 , Train MSE: 4973.251423274711
Epoch 26 , Loss: 49439.541076660156 , Train MSE: 4971.07959596366
Epoch 27 , Loss: 49148.22937011719 , Train MSE: 4969.082943215729
Epoch 28 , Loss: 48863.28747558594 , Train MSE: 4967.256646895843
Epoch 29 , Loss: 48585.04724121094 , Train MSE: 4965.595542757992
Epoch 30 , Loss: 48313.86145019531 , Train MSE: 4964.094014007805
Epoch 31 , Loss: 48050.09948730469 , Train MSE: 4962.746046549891
Epoch 32 , Loss: 47794.166259765625 , Train MSE: 4961.545087771907
Epoch 33 , Loss: 47546.47235107422 , Train MSE: 4960.484014006723
Epoch 34 , Loss: 47307.4423828125 , Train MSE: 4959.555188955916
Epoch 35 , Loss: 47077.50372314453 , Train MSE: 4958.750267775142
Epoch 36 , Loss: 46857.06851196289 , Train MSE: 4958.060337164908
Epoch 37 , Loss: 48077.929290771484 , Train MSE: 5047.1197463959925
Epoch 38 , Loss: 47753.75564575195 , Train MSE: 5086.566099083461
Epoch 39 , Loss: 47231.282135009766 , Train MSE: 5068.966833518937
Epoch 40 , Loss: 46207.66125488281 , Train MSE: 4959.851922930204
Epoch 41 , Loss: 45991.277435302734 , Train MSE: 4956.292099147311
Epoch 42 , Loss: 46281.380767822266 , Train MSE: 4972.314081126752
Epoch 43 , Loss: 45662.88800048828 , Train MSE: 4955.625417480312
Epoch 44 , Loss: 45511.318267822266 , Train MSE: 4955.465232932413
Epoch 45 , Loss: 45370.51773071289 , Train MSE: 4955.3275219455445
Epoch 46 , Loss: 45240.248046875 , Train MSE: 4955.22670109073
Epoch 47 , Loss: 45119.92367553711 , Train MSE: 4955.140668079879
Epoch 48 , Loss: 45008.98666381836 , Train MSE: 4955.061755070649
Epoch 49 , Loss: 44906.821685791016 , Train MSE: 4954.98334550622
Epoch 50 , Loss: 44812.78991699219 , Train MSE: 4954.899704607079
Epoch 51 , Loss: 44726.2341003418 , Train MSE: 4954.805974012957
Epoch 52 , Loss: 44646.5153503418 , Train MSE: 4954.698666067055
Epoch 53 , Loss: 44573.023681640625 , Train MSE: 4954.5746682159715
Epoch 54 , Loss: 44505.17010498047 , Train MSE: 4954.431431523994
Epoch 55 , Loss: 44442.41683959961 , Train MSE: 4954.2670823709395
Epoch 56 , Loss: 44384.26937866211 , Train MSE: 4954.080255435835
Epoch 57 , Loss: 44330.27536010742 , Train MSE: 4953.869939370137
Epoch 58 , Loss: 44280.035247802734 , Train MSE: 4953.635441637687
Epoch 59 , Loss: 44233.185119628906 , Train MSE: 4953.376274390676
Epoch 60 , Loss: 44189.39781188965 , Train MSE: 4953.09211009451
Epoch 61 , Loss: 44148.38917541504 , Train MSE: 4952.782718778002
Epoch 62 , Loss: 44109.89813232422 , Train MSE: 4952.4477937112415
Epoch 63 , Loss: 44073.67492675781 , Train MSE: 4952.086779628543
Epoch 64 , Loss: 44039.435775756836 , Train MSE: 4951.6976270696905
Epoch 65 , Loss: 44006.42449951172 , Train MSE: 4951.265410916186
Epoch 66 , Loss: 43952.29670715332 , Train MSE: 4950.551117179917
Epoch 67 , Loss: 43922.67875671387 , Train MSE: 4950.022638735794
Epoch 68 , Loss: 43897.3992767334 , Train MSE: 4949.478088490162
Epoch 69 , Loss: 43871.21327209473 , Train MSE: 4948.851573578892
Epoch 70 , Loss: 43847.042572021484 , Train MSE: 4948.254364483733
Epoch 71 , Loss: 43823.99853515625 , Train MSE: 4947.631380411731
Epoch 72 , Loss: 43801.934799194336 , Train MSE: 4946.9813513516265
Epoch 73 , Loss: 43780.455001831055 , Train MSE: 4946.3011468194945
Epoch 74 , Loss: 43757.49565124512 , Train MSE: 4945.558169913879
Epoch 75 , Loss: 43742.842277526855 , Train MSE: 4944.814500313038
Epoch 76 , Loss: 43724.77233123779 , Train MSE: 4944.077345381563
Epoch 77 , Loss: 43707.24814605713 , Train MSE: 4943.306108664199
Epoch 78 , Loss: 43690.427154541016 , Train MSE: 4942.509333048099
Epoch 79 , Loss: 43674.250411987305 , Train MSE: 4941.687230266217
Epoch 80 , Loss: 43658.654609680176 , Train MSE: 4940.840178889649
Epoch 81 , Loss: 43643.58218383789 , Train MSE: 4939.968626901841
Epoch 82 , Loss: 43628.9782409668 , Train MSE: 4939.073199239256
Epoch 83 , Loss: 43614.78884887695 , Train MSE: 4938.154609467447
Epoch 84 , Loss: 43600.968688964844 , Train MSE: 4937.213704233863
Epoch 85 , Loss: 43587.46936035156 , Train MSE: 4936.251373562753
Epoch 86 , Loss: 43574.24993133545 , Train MSE: 4935.2686446701755
Epoch 87 , Loss: 43561.27298736572 , Train MSE: 4934.266567275293
Epoch 88 , Loss: 43548.50785064697 , Train MSE: 4933.246234741238
Epoch 89 , Loss: 43535.92102813721 , Train MSE: 4932.208770210927
Epoch 90 , Loss: 43523.490661621094 , Train MSE: 4931.155298530778
Epoch 91 , Loss: 43511.1916809082 , Train MSE: 4930.086916230105
Epoch 92 , Loss: 43499.00606536865 , Train MSE: 4929.004710175644
Epoch 93 , Loss: 43486.91594696045 , Train MSE: 4927.909728353735
Epoch 94 , Loss: 43474.91234588623 , Train MSE: 4926.8029943005495
Epoch 95 , Loss: 43462.97880554199 , Train MSE: 4925.685451494412
Epoch 96 , Loss: 43451.10668182373 , Train MSE: 4924.5580462980315
Epoch 97 , Loss: 43439.28844451904 , Train MSE: 4923.421649807321
Epoch 98 , Loss: 43427.51988983154 , Train MSE: 4922.277091077544
Epoch 99 , Loss: 43415.79034423828 , Train MSE: 4921.125095031375
Validation MSE: 388.1347299331429
split 5 :
Epoch 0 , Loss: 65389.32373046875 , Train MSE: 4681.089649910336
Epoch 1 , Loss: 64974.43298339844 , Train MSE: 4678.257219090854
Epoch 2 , Loss: 64418.12634277344 , Train MSE: 4666.3147976396895
Epoch 3 , Loss: 63731.3349609375 , Train MSE: 4648.96562321443
Epoch 4 , Loss: 63066.61926269531 , Train MSE: 4636.211486844144
Epoch 5 , Loss: 62447.42053222656 , Train MSE: 4624.547266605329
Epoch 6 , Loss: 61787.67578125 , Train MSE: 4613.10133145754
Epoch 7 , Loss: 61157.1669921875 , Train MSE: 4602.263967525246
Epoch 8 , Loss: 60520.31530761719 , Train MSE: 4591.610179075961
Epoch 9 , Loss: 59894.07568359375 , Train MSE: 4581.690465799002
Epoch 10 , Loss: 59273.14611816406 , Train MSE: 4572.239895009964
Epoch 11 , Loss: 58655.94055175781 , Train MSE: 4563.2122839856265
Epoch 12 , Loss: 58044.45593261719 , Train MSE: 4554.645072642315
Epoch 13 , Loss: 57717.12854003906 , Train MSE: 4550.6638250472715
Epoch 14 , Loss: 56897.16229248047 , Train MSE: 4539.512849077746
Epoch 15 , Loss: 56431.01025390625 , Train MSE: 4533.745507702379
Epoch 16 , Loss: 55863.18737792969 , Train MSE: 4527.226364779818
Epoch 17 , Loss: 56200.09973144531 , Train MSE: 4562.759125305162
Epoch 18 , Loss: 54763.42352294922 , Train MSE: 4515.826696729782
Epoch 19 , Loss: 54102.54089355469 , Train MSE: 4508.834089564076
Epoch 20 , Loss: 53461.06512451172 , Train MSE: 4502.969446131291
Epoch 21 , Loss: 53212.57580566406 , Train MSE: 4515.6313522895
Epoch 22 , Loss: 53273.42156982422 , Train MSE: 4564.515676589681
Epoch 23 , Loss: 53011.03894042969 , Train MSE: 4585.04135009089
Epoch 24 , Loss: 52504.92135620117 , Train MSE: 4586.129547634436
Epoch 25 , Loss: 52038.77328491211 , Train MSE: 4583.087472750394
Epoch 26 , Loss: 51619.5185546875 , Train MSE: 4583.162258028816
Epoch 27 , Loss: 51216.72286987305 , Train MSE: 4583.513784315083
Epoch 28 , Loss: 50828.17706298828 , Train MSE: 4584.199190430927
Epoch 29 , Loss: 50454.99612426758 , Train MSE: 4584.648084993405
Epoch 30 , Loss: 50105.042877197266 , Train MSE: 4585.520070329251
Epoch 31 , Loss: 49764.74591064453 , Train MSE: 4586.462016764621
Epoch 32 , Loss: 49458.15791320801 , Train MSE: 4587.663387217742
Epoch 33 , Loss: 49163.055908203125 , Train MSE: 4588.799363215583
Epoch 34 , Loss: 48897.49028015137 , Train MSE: 4590.031832446318
Epoch 35 , Loss: 48764.82131958008 , Train MSE: 4591.307008913239
Epoch 36 , Loss: 48637.119537353516 , Train MSE: 4595.486357304411
Epoch 37 , Loss: 48269.199615478516 , Train MSE: 4593.080700940168
Epoch 38 , Loss: 48079.75341796875 , Train MSE: 4594.254921403542
Epoch 39 , Loss: 47904.888244628906 , Train MSE: 4595.408597050498
Epoch 40 , Loss: 47750.12048339844 , Train MSE: 4596.566934218354
Epoch 41 , Loss: 47608.69258117676 , Train MSE: 4597.658226437985
Epoch 42 , Loss: 47485.70985412598 , Train MSE: 4598.705252708351
Epoch 43 , Loss: 48294.6181640625 , Train MSE: 4604.818419416821
Epoch 44 , Loss: 47275.947509765625 , Train MSE: 4600.524625945248
Epoch 45 , Loss: 47176.25244140625 , Train MSE: 4601.252089969276
Epoch 46 , Loss: 47085.564224243164 , Train MSE: 4601.914257415785
Epoch 47 , Loss: 47000.12113952637 , Train MSE: 4602.467993643783
Epoch 48 , Loss: 50492.9723815918 , Train MSE: 4626.676566361614
Epoch 49 , Loss: 50068.634857177734 , Train MSE: 4626.281915754517
Epoch 50 , Loss: 49864.247802734375 , Train MSE: 4627.960317977344
Epoch 51 , Loss: 49676.8454284668 , Train MSE: 4629.659364328726
Epoch 52 , Loss: 49502.03479003906 , Train MSE: 4631.323179311075
Epoch 53 , Loss: 49339.29025268555 , Train MSE: 4632.9402401021425
Epoch 54 , Loss: 49187.96969604492 , Train MSE: 4634.501321280027
Epoch 55 , Loss: 49047.32681274414 , Train MSE: 4635.999411936835
Epoch 56 , Loss: 48916.57229614258 , Train MSE: 4637.429476959606
Epoch 57 , Loss: 48794.90188598633 , Train MSE: 4638.788321624985
Epoch 58 , Loss: 48681.518646240234 , Train MSE: 4640.074107688453
Epoch 59 , Loss: 48575.67349243164 , Train MSE: 4641.286455927517
Epoch 60 , Loss: 48476.664459228516 , Train MSE: 4642.4256719361965
Epoch 61 , Loss: 48383.85043334961 , Train MSE: 4643.492792541412
Epoch 62 , Loss: 48296.65368652344 , Train MSE: 4644.489200646853
Epoch 63 , Loss: 48214.5616607666 , Train MSE: 4645.416514471978
Epoch 64 , Loss: 48137.12040710449 , Train MSE: 4646.276305173721
Epoch 65 , Loss: 48063.93632507324 , Train MSE: 4647.070093177786
Epoch 66 , Loss: 47994.669036865234 , Train MSE: 4647.799224760692
Epoch 67 , Loss: 47929.02258300781 , Train MSE: 4648.464696177787
Epoch 68 , Loss: 47866.75085449219 , Train MSE: 4649.067321029809
Epoch 69 , Loss: 47807.64636230469 , Train MSE: 4649.607304767711
Epoch 70 , Loss: 47751.53643798828 , Train MSE: 4650.084537967912
Epoch 71 , Loss: 47698.27767944336 , Train MSE: 4650.498352552154
Epoch 72 , Loss: 47647.753829956055 , Train MSE: 4650.847670989765
Epoch 73 , Loss: 47599.86836242676 , Train MSE: 4651.130883304245
Epoch 74 , Loss: 47554.53887939453 , Train MSE: 4651.345796885548
Epoch 75 , Loss: 47511.68312072754 , Train MSE: 4651.489967041774
Epoch 76 , Loss: 47471.220626831055 , Train MSE: 4651.560771956862
Epoch 77 , Loss: 47433.05495452881 , Train MSE: 4651.555294878875
Epoch 78 , Loss: 47397.072273254395 , Train MSE: 4651.471132631679
Epoch 79 , Loss: 47363.136375427246 , Train MSE: 4651.306091257717
Epoch 80 , Loss: 47331.07627105713 , Train MSE: 4651.058827684568
Epoch 81 , Loss: 47300.67459106445 , Train MSE: 4650.728607568009
Epoch 82 , Loss: 47271.536460876465 , Train MSE: 4650.314557288987
Epoch 83 , Loss: 47243.02356719971 , Train MSE: 4649.820727736488
Epoch 84 , Loss: 47216.84126281738 , Train MSE: 4649.259723305005
Epoch 85 , Loss: 47192.00089263916 , Train MSE: 4648.636381347263
Epoch 86 , Loss: 47169.211570739746 , Train MSE: 4647.937070465814
Epoch 87 , Loss: 47145.59048461914 , Train MSE: 4647.153587571082
Epoch 88 , Loss: 47121.98513793945 , Train MSE: 4646.303683214854
Epoch 89 , Loss: 47099.97679901123 , Train MSE: 4645.428383990908
Epoch 90 , Loss: 47079.21839141846 , Train MSE: 4644.483257771142
Epoch 91 , Loss: 47057.87651824951 , Train MSE: 4643.477921995579
Epoch 92 , Loss: 47036.849853515625 , Train MSE: 4642.42701696303
Epoch 93 , Loss: 47016.03603363037 , Train MSE: 4641.333594600027
Epoch 94 , Loss: 46995.088150024414 , Train MSE: 4640.198092966008
Epoch 95 , Loss: 46973.231384277344 , Train MSE: 4639.026057506021
Epoch 96 , Loss: 46954.20877075195 , Train MSE: 4637.841729751631
Epoch 97 , Loss: 46935.720779418945 , Train MSE: 4636.655098233869
Epoch 98 , Loss: 46916.09767150879 , Train MSE: 4635.393782682271
Epoch 99 , Loss: 46896.071350097656 , Train MSE: 4634.103166491642
Validation MSE: 1962.8679638289113
Test MSE: 2136.392489288677
Average validation MSE: 3597.068883602081
County: 06013
split 1 :
Epoch 0 , Loss: 459.2612991333008 , Train MSE: 115.33927660749367
Epoch 1 , Loss: 407.7088165283203 , Train MSE: 102.43226942492062
Epoch 2 , Loss: 340.4011459350586 , Train MSE: 85.64835166584123
Epoch 3 , Loss: 278.02635955810547 , Train MSE: 70.08529626212369
Epoch 4 , Loss: 225.01187133789062 , Train MSE: 56.83355372974526
Epoch 5 , Loss: 185.93036270141602 , Train MSE: 47.028096336808964
Epoch 6 , Loss: 162.72516632080078 , Train MSE: 41.20321448282809
Epoch 7 , Loss: 151.83998107910156 , Train MSE: 38.53968843406094
Epoch 8 , Loss: 147.52245140075684 , Train MSE: 37.58039542293629
Epoch 9 , Loss: 145.9028720855713 , Train MSE: 37.29628558503397
Epoch 10 , Loss: 145.16584968566895 , Train MSE: 37.19851110842659
Epoch 11 , Loss: 143.95258331298828 , Train MSE: 36.904615992672014
Epoch 12 , Loss: 142.63530158996582 , Train MSE: 36.650467262669785
Epoch 13 , Loss: 144.49946403503418 , Train MSE: 37.390241398232334
Epoch 14 , Loss: 144.80029296875 , Train MSE: 37.44329641177494
Epoch 15 , Loss: 145.5827121734619 , Train MSE: 37.6944165860137
Epoch 16 , Loss: 144.94673538208008 , Train MSE: 37.46231234933806
Epoch 17 , Loss: 144.87043952941895 , Train MSE: 37.33331703871809
Epoch 18 , Loss: 143.18075942993164 , Train MSE: 36.985742006469636
Epoch 19 , Loss: 142.69583892822266 , Train MSE: 36.64604158611324
Epoch 20 , Loss: 142.71307945251465 , Train MSE: 36.703874919596764
Epoch 21 , Loss: 140.95222663879395 , Train MSE: 36.20903563990049
Epoch 22 , Loss: 137.70489501953125 , Train MSE: 35.55802441372737
Epoch 23 , Loss: 140.4655246734619 , Train MSE: 36.339987700488535
Epoch 24 , Loss: 133.5322666168213 , Train MSE: 34.58331381787583
Epoch 25 , Loss: 131.98407363891602 , Train MSE: 34.40506987139129
Epoch 26 , Loss: 134.62652397155762 , Train MSE: 35.2012873857526
Epoch 27 , Loss: 133.0382957458496 , Train MSE: 34.8276368251448
Epoch 28 , Loss: 141.7662124633789 , Train MSE: 37.60130744109264
Epoch 29 , Loss: 138.36432456970215 , Train MSE: 36.40959181416127
Epoch 30 , Loss: 142.4287986755371 , Train MSE: 37.16126487521277
Epoch 31 , Loss: 131.01737594604492 , Train MSE: 34.133176766603704
Epoch 32 , Loss: 130.92339706420898 , Train MSE: 34.10487073049224
Epoch 33 , Loss: 131.62072563171387 , Train MSE: 34.296518665364076
Epoch 34 , Loss: 129.9957218170166 , Train MSE: 33.69310994007419
Epoch 35 , Loss: 131.17959594726562 , Train MSE: 34.58669890928268
/shared-libs/python3.7/py-core/lib/python3.7/site-packages/ipykernel_launcher.py:21: SettingWithCopyWarning:
A value is trying to be set on a copy of a slice from a DataFrame.
Try using .loc[row_indexer,col_indexer] = value instead
See the caveats in the documentation: https://pandas.pydata.org/pandas-docs/stable/user_guide/indexing.html#returning-a-view-versus-a-copy
/shared-libs/python3.7/py-core/lib/python3.7/site-packages/ipykernel_launcher.py:22: SettingWithCopyWarning:
A value is trying to be set on a copy of a slice from a DataFrame.
Try using .loc[row_indexer,col_indexer] = value instead
See the caveats in the documentation: https://pandas.pydata.org/pandas-docs/stable/user_guide/indexing.html#returning-a-view-versus-a-copy
Epoch 36 , Loss: 131.40043830871582 , Train MSE: 34.914356930505605
Epoch 37 , Loss: 133.2016429901123 , Train MSE: 34.79257787057687
Epoch 38 , Loss: 134.5582389831543 , Train MSE: 35.47935975453271
Epoch 39 , Loss: 129.0920524597168 , Train MSE: 34.17333719997618
Epoch 40 , Loss: 138.80391120910645 , Train MSE: 35.58489847490096
Epoch 41 , Loss: 132.04280471801758 , Train MSE: 34.93055401722647
Epoch 42 , Loss: 129.67583465576172 , Train MSE: 33.98042249393681
Epoch 43 , Loss: 127.32312774658203 , Train MSE: 33.43297286991848
Epoch 44 , Loss: 128.1727237701416 , Train MSE: 33.61054508541002
Epoch 45 , Loss: 132.3396759033203 , Train MSE: 34.26038386908266
Epoch 46 , Loss: 142.39600372314453 , Train MSE: 37.85023007863149
Epoch 47 , Loss: 130.8061866760254 , Train MSE: 33.96430138776319
Epoch 48 , Loss: 140.42937660217285 , Train MSE: 37.44166616262864
Epoch 49 , Loss: 139.08201026916504 , Train MSE: 36.901281480942586
Epoch 50 , Loss: 144.65345191955566 , Train MSE: 37.528928746368116
Epoch 51 , Loss: 139.4617519378662 , Train MSE: 36.82463110173076
Epoch 52 , Loss: 138.87012481689453 , Train MSE: 36.588628809570864
Epoch 53 , Loss: 138.30986976623535 , Train MSE: 36.39513533753115
Epoch 54 , Loss: 138.02636909484863 , Train MSE: 36.29919424900957
Epoch 55 , Loss: 138.00663566589355 , Train MSE: 36.25299612842123
Epoch 56 , Loss: 139.02061462402344 , Train MSE: 36.42546915184099
Epoch 57 , Loss: 138.1912326812744 , Train MSE: 36.2227065659952
Epoch 58 , Loss: 137.90581893920898 , Train MSE: 36.16346433355501
Epoch 59 , Loss: 138.83622932434082 , Train MSE: 36.219754384672584
Epoch 60 , Loss: 137.92456436157227 , Train MSE: 36.201905885781
Epoch 61 , Loss: 137.48981094360352 , Train MSE: 36.13855900681186
Epoch 62 , Loss: 137.43385124206543 , Train MSE: 36.03729256117736
Epoch 63 , Loss: 138.05468559265137 , Train MSE: 36.120274868429796
Epoch 64 , Loss: 130.89994430541992 , Train MSE: 34.33786901965034
Epoch 65 , Loss: 134.8126564025879 , Train MSE: 35.289391358554326
Epoch 66 , Loss: 135.86822700500488 , Train MSE: 35.56999943307377
Epoch 67 , Loss: 140.11168098449707 , Train MSE: 36.398361570599725
Epoch 68 , Loss: 134.66982078552246 , Train MSE: 35.394284776112315
Epoch 69 , Loss: 126.88593101501465 , Train MSE: 33.25591138622313
Epoch 70 , Loss: 126.27878761291504 , Train MSE: 33.14949741872786
Epoch 71 , Loss: 132.89854431152344 , Train MSE: 34.455840122870924
Epoch 72 , Loss: 131.61709785461426 , Train MSE: 34.161735719229576
Epoch 73 , Loss: 129.631498336792 , Train MSE: 33.92888955837745
Epoch 74 , Loss: 127.28200340270996 , Train MSE: 33.20983321557902
Epoch 75 , Loss: 132.07367706298828 , Train MSE: 34.619724725609004
Epoch 76 , Loss: 131.1852149963379 , Train MSE: 34.47200975794671
Epoch 77 , Loss: 127.31128692626953 , Train MSE: 33.2527749764735
Epoch 78 , Loss: 127.28007125854492 , Train MSE: 33.30475187589511
Epoch 79 , Loss: 130.45199394226074 , Train MSE: 33.954492170607026
Epoch 80 , Loss: 132.03939628601074 , Train MSE: 34.789479748146476
Epoch 81 , Loss: 127.07842063903809 , Train MSE: 33.28582799731074
Epoch 82 , Loss: 129.00834846496582 , Train MSE: 33.66967440055336
Epoch 83 , Loss: 128.08979034423828 , Train MSE: 33.510787497245644
Epoch 84 , Loss: 130.98684883117676 , Train MSE: 34.140162114044955
Epoch 85 , Loss: 126.04625701904297 , Train MSE: 33.10187534857538
Epoch 86 , Loss: 126.44639587402344 , Train MSE: 33.13739314949341
Epoch 87 , Loss: 131.0667381286621 , Train MSE: 34.51081251162694
Epoch 88 , Loss: 131.38409996032715 , Train MSE: 34.166602157613355
Epoch 89 , Loss: 126.49263381958008 , Train MSE: 33.10000650915681
Epoch 90 , Loss: 126.81953811645508 , Train MSE: 33.12677999490261
Epoch 91 , Loss: 128.33189010620117 , Train MSE: 33.48945681376544
Epoch 92 , Loss: 126.3679428100586 , Train MSE: 33.15452235141796
Epoch 93 , Loss: 141.5282382965088 , Train MSE: 37.79678762157135
Epoch 94 , Loss: 140.1580753326416 , Train MSE: 37.20561216988552
Epoch 95 , Loss: 139.39650344848633 , Train MSE: 36.834664898893756
Epoch 96 , Loss: 139.36113929748535 , Train MSE: 36.76661670915713
Epoch 97 , Loss: 134.62121772766113 , Train MSE: 35.065768821495126
Epoch 98 , Loss: 140.66724395751953 , Train MSE: 37.22944984828879
Epoch 99 , Loss: 139.89619064331055 , Train MSE: 36.898684262286544
Validation MSE: 337.44099681192074
split 2 :
Epoch 0 , Loss: 1482.8368225097656 , Train MSE: 215.1724887510996
Epoch 1 , Loss: 1472.58638381958 , Train MSE: 216.52021736913005
Epoch 2 , Loss: 1432.105266571045 , Train MSE: 212.80704778141833
Epoch 3 , Loss: 1397.155860900879 , Train MSE: 212.2011601543127
Epoch 4 , Loss: 1359.4651336669922 , Train MSE: 209.01699979749353
Epoch 5 , Loss: 1300.6817169189453 , Train MSE: 205.59204656947276
Epoch 6 , Loss: 1347.573314666748 , Train MSE: 209.2329568968503
Epoch 7 , Loss: 1343.5626983642578 , Train MSE: 215.54607799603465
Epoch 8 , Loss: 1388.0325164794922 , Train MSE: 224.3443676187586
Epoch 9 , Loss: 1308.9835777282715 , Train MSE: 214.50919527201393
Epoch 10 , Loss: 1251.5002174377441 , Train MSE: 204.17975956874068
Epoch 11 , Loss: 1246.3260192871094 , Train MSE: 206.8309452090735
Epoch 12 , Loss: 1273.3926162719727 , Train MSE: 206.01905911020634
Epoch 13 , Loss: 1212.7767791748047 , Train MSE: 202.3153017776247
Epoch 14 , Loss: 1210.5247650146484 , Train MSE: 201.68272986851534
Epoch 15 , Loss: 1183.6953201293945 , Train MSE: 200.3213780324455
Epoch 16 , Loss: 1188.6645317077637 , Train MSE: 199.9692299383645
Epoch 17 , Loss: 1173.2101516723633 , Train MSE: 198.97677480304495
Epoch 18 , Loss: 1161.0383071899414 , Train MSE: 198.04150813328565
Epoch 19 , Loss: 1217.8016204833984 , Train MSE: 199.757239765572
Epoch 20 , Loss: 1174.1678085327148 , Train MSE: 197.2209511194018
Epoch 21 , Loss: 1166.3238372802734 , Train MSE: 196.23202395356708
Epoch 22 , Loss: 1160.6408824920654 , Train MSE: 195.35908716533086
Epoch 23 , Loss: 1153.938720703125 , Train MSE: 194.45170149232584
Epoch 24 , Loss: 1147.4187660217285 , Train MSE: 193.61830693356424
Epoch 25 , Loss: 1131.0753917694092 , Train MSE: 192.47013882762315
Epoch 26 , Loss: 1126.961814880371 , Train MSE: 191.81094955615617
Epoch 27 , Loss: 1121.0307998657227 , Train MSE: 191.07470280566176
Epoch 28 , Loss: 1122.6502075195312 , Train MSE: 190.54091553982266
Epoch 29 , Loss: 1118.7889347076416 , Train MSE: 189.89273328156042
Epoch 30 , Loss: 1114.7222423553467 , Train MSE: 189.1996039395113
Epoch 31 , Loss: 1141.9619617462158 , Train MSE: 189.92100952404778
Epoch 32 , Loss: 1100.9435806274414 , Train MSE: 187.52711464812083
Epoch 33 , Loss: 1099.9036178588867 , Train MSE: 186.95478368308295
Epoch 34 , Loss: 1104.6886940002441 , Train MSE: 186.55639513614824
Epoch 35 , Loss: 1096.5974597930908 , Train MSE: 185.80184115418228
Epoch 36 , Loss: 1092.9829654693604 , Train MSE: 185.17258813577573
Epoch 37 , Loss: 1089.373203277588 , Train MSE: 184.5500790358521
Epoch 38 , Loss: 1086.3375339508057 , Train MSE: 183.93771421700484
Epoch 39 , Loss: 1082.7862033843994 , Train MSE: 183.31198367665644
Epoch 40 , Loss: 1123.1621589660645 , Train MSE: 184.92011862908709
Epoch 41 , Loss: 1120.957202911377 , Train MSE: 183.94659239629868
Epoch 42 , Loss: 1090.3412437438965 , Train MSE: 181.5880660309102
Epoch 43 , Loss: 1116.255844116211 , Train MSE: 181.94875954464015
Epoch 44 , Loss: 1110.5135231018066 , Train MSE: 181.38864995680456
Epoch 45 , Loss: 1106.1122512817383 , Train MSE: 180.83775108929976
Epoch 46 , Loss: 1101.7162590026855 , Train MSE: 180.23524716870412
Epoch 47 , Loss: 1098.6625175476074 , Train MSE: 179.66463500225382
Epoch 48 , Loss: 1095.5886039733887 , Train MSE: 179.10750571645403
Epoch 49 , Loss: 1092.5926055908203 , Train MSE: 178.54886272926692
Epoch 50 , Loss: 1089.6002807617188 , Train MSE: 177.99919873204294
Epoch 51 , Loss: 1086.6557350158691 , Train MSE: 177.45119257527605
Epoch 52 , Loss: 1083.7245025634766 , Train MSE: 176.91023901613977
Epoch 53 , Loss: 1080.8256492614746 , Train MSE: 176.37240716731466
Epoch 54 , Loss: 1077.930576324463 , Train MSE: 175.84011653158515
Epoch 55 , Loss: 1074.9936790466309 , Train MSE: 175.30860941155316
Epoch 56 , Loss: 1071.9215126037598 , Train MSE: 174.7731129434897
Epoch 57 , Loss: 1069.0941009521484 , Train MSE: 174.26320378556076
Epoch 58 , Loss: 1066.520980834961 , Train MSE: 173.75123883450834
Epoch 59 , Loss: 1064.277702331543 , Train MSE: 173.2830574293664
Epoch 60 , Loss: 1061.4068908691406 , Train MSE: 172.75887965794848
Epoch 61 , Loss: 1058.2916069030762 , Train MSE: 172.2528601246922
Epoch 62 , Loss: 1055.6655387878418 , Train MSE: 171.75843617334598
Epoch 63 , Loss: 1053.0827598571777 , Train MSE: 171.28472463917552
Epoch 64 , Loss: 1050.5906105041504 , Train MSE: 170.79739271075607
Epoch 65 , Loss: 1048.3504600524902 , Train MSE: 170.3526738113251
Epoch 66 , Loss: 1045.8234252929688 , Train MSE: 169.8604523205198
Epoch 67 , Loss: 1043.2615089416504 , Train MSE: 169.4136495950144
Epoch 68 , Loss: 1040.973819732666 , Train MSE: 168.93912898151177
Epoch 69 , Loss: 1038.5594635009766 , Train MSE: 168.50817016919763
Epoch 70 , Loss: 1035.9167175292969 , Train MSE: 168.03311628150595
Epoch 71 , Loss: 1033.1580085754395 , Train MSE: 167.5861444068119
Epoch 72 , Loss: 1030.7962493896484 , Train MSE: 167.1450274376453
Epoch 73 , Loss: 1028.4444618225098 , Train MSE: 166.71506470154623
Epoch 74 , Loss: 1026.1533660888672 , Train MSE: 166.28669014176225
Epoch 75 , Loss: 1023.8752593994141 , Train MSE: 165.86522807198978
Epoch 76 , Loss: 1021.6326866149902 , Train MSE: 165.44721929435602
Epoch 77 , Loss: 1019.4061965942383 , Train MSE: 165.03459748775768
Epoch 78 , Loss: 1017.2074279785156 , Train MSE: 164.62613090685758
Epoch 79 , Loss: 1015.0295486450195 , Train MSE: 164.22260890303286
Epoch 80 , Loss: 1012.8783645629883 , Train MSE: 163.82358479479575
Epoch 81 , Loss: 1010.7506446838379 , Train MSE: 163.4293566740601
Epoch 82 , Loss: 1008.6487426757812 , Train MSE: 163.03973487944648
Epoch 83 , Loss: 1006.5709915161133 , Train MSE: 162.6548186615287
Epoch 84 , Loss: 1004.5180473327637 , Train MSE: 162.27451585324914
Epoch 85 , Loss: 1002.4893493652344 , Train MSE: 161.8988824602347
Epoch 86 , Loss: 1000.4854698181152 , Train MSE: 161.52792646106946
Epoch 87 , Loss: 998.5061149597168 , Train MSE: 161.16170728735042
Epoch 88 , Loss: 996.5520172119141 , Train MSE: 160.8001016199108
Epoch 89 , Loss: 994.6224822998047 , Train MSE: 160.44310391587888
Epoch 90 , Loss: 992.7178421020508 , Train MSE: 160.0906009033252
Epoch 91 , Loss: 990.8368988037109 , Train MSE: 159.74267025206936
Epoch 92 , Loss: 988.9809417724609 , Train MSE: 159.3991075314325
Epoch 93 , Loss: 987.147403717041 , Train MSE: 159.0601698563328
Epoch 94 , Loss: 985.340217590332 , Train MSE: 158.72530037291068
Epoch 95 , Loss: 983.5517044067383 , Train MSE: 158.39545621241587
Epoch 96 , Loss: 981.7975540161133 , Train MSE: 158.0687633472473
Epoch 97 , Loss: 980.052906036377 , Train MSE: 157.74903149929918
Epoch 98 , Loss: 978.3685302734375 , Train MSE: 157.42940086577022
Epoch 99 , Loss: 976.678409576416 , Train MSE: 157.1219381133364
Validation MSE: 294.76856075209975
split 3 :
Epoch 0 , Loss: 2181.817653656006 , Train MSE: 231.23890714889987
Epoch 1 , Loss: 2133.531810760498 , Train MSE: 230.3870153997431
Epoch 2 , Loss: 2097.2996559143066 , Train MSE: 234.53128022518868
Epoch 3 , Loss: 1990.6778087615967 , Train MSE: 226.97624546105993
Epoch 4 , Loss: 1914.8621444702148 , Train MSE: 224.58033918082097
Epoch 5 , Loss: 1888.7663650512695 , Train MSE: 223.9206830057061
Epoch 6 , Loss: 1880.0302619934082 , Train MSE: 223.24596710717464
Epoch 7 , Loss: 1845.4232654571533 , Train MSE: 222.45901424421416
Epoch 8 , Loss: 1836.9816703796387 , Train MSE: 222.24918194086615
Epoch 9 , Loss: 1865.7538414001465 , Train MSE: 223.60397473448987
Epoch 10 , Loss: 1827.075397491455 , Train MSE: 221.118367968442
Epoch 11 , Loss: 1841.8517894744873 , Train MSE: 222.04908487675132
Epoch 12 , Loss: 1810.9796543121338 , Train MSE: 219.1414621791561
Epoch 13 , Loss: 1841.0976238250732 , Train MSE: 218.60965458442476
Epoch 14 , Loss: 1791.9196681976318 , Train MSE: 215.9940796486867
Epoch 15 , Loss: 1793.203577041626 , Train MSE: 215.81422111251908
Epoch 16 , Loss: 1785.6576652526855 , Train MSE: 214.61057552724014
Epoch 17 , Loss: 1785.3682174682617 , Train MSE: 216.07017467248914
Epoch 18 , Loss: 1779.7677917480469 , Train MSE: 214.2594900601305
Epoch 19 , Loss: 1762.1811981201172 , Train MSE: 212.6858528389284
Epoch 20 , Loss: 1745.3451805114746 , Train MSE: 210.82240488595576
Epoch 21 , Loss: 1735.489658355713 , Train MSE: 209.54016698653828
Epoch 22 , Loss: 1727.30979347229 , Train MSE: 208.29808613246027
Epoch 23 , Loss: 1711.8713722229004 , Train MSE: 206.6724039613959
Epoch 24 , Loss: 1703.4126605987549 , Train MSE: 205.76877831466228
Epoch 25 , Loss: 1704.8578510284424 , Train MSE: 205.0178629890432
Epoch 26 , Loss: 1692.003345489502 , Train MSE: 203.95658449649588
Epoch 27 , Loss: 1697.4657459259033 , Train MSE: 203.28984615339462
Epoch 28 , Loss: 1679.4327373504639 , Train MSE: 201.8352564680222
Epoch 29 , Loss: 1680.1243782043457 , Train MSE: 201.71339016346678
Epoch 30 , Loss: 1672.7728958129883 , Train MSE: 200.55661408220422
Epoch 31 , Loss: 1698.8835124969482 , Train MSE: 203.41812693013964
Epoch 32 , Loss: 1664.5688362121582 , Train MSE: 199.0280230458102
Epoch 33 , Loss: 1656.3508644104004 , Train MSE: 198.27986088727476
Epoch 34 , Loss: 1647.8715744018555 , Train MSE: 197.17930134654284
Epoch 35 , Loss: 1637.7359828948975 , Train MSE: 195.96658333802642
Epoch 36 , Loss: 1631.286153793335 , Train MSE: 195.08944103109857
Epoch 37 , Loss: 1625.7164402008057 , Train MSE: 195.08437843595107
Epoch 38 , Loss: 1624.4596576690674 , Train MSE: 194.0850288992136
Epoch 39 , Loss: 1611.2721424102783 , Train MSE: 192.81298237331845
Epoch 40 , Loss: 1607.7701416015625 , Train MSE: 193.15841851505184
Epoch 41 , Loss: 1595.1238098144531 , Train MSE: 190.92501596768886
Epoch 42 , Loss: 1608.2597961425781 , Train MSE: 190.5951899236105
Epoch 43 , Loss: 1618.7060413360596 , Train MSE: 191.21992132068436
Epoch 44 , Loss: 1610.3757762908936 , Train MSE: 190.38841048799233
Epoch 45 , Loss: 1595.234224319458 , Train MSE: 188.93033666698886
Epoch 46 , Loss: 1594.4130229949951 , Train MSE: 188.20128486512357
Epoch 47 , Loss: 1591.6391010284424 , Train MSE: 187.65009308013455
Epoch 48 , Loss: 1590.7874164581299 , Train MSE: 187.0276244912581
Epoch 49 , Loss: 1577.7549953460693 , Train MSE: 186.5164944199671
Epoch 50 , Loss: 1579.1222858428955 , Train MSE: 185.9820707360739
Epoch 51 , Loss: 1573.3186569213867 , Train MSE: 184.93711456977738
Epoch 52 , Loss: 1610.5922355651855 , Train MSE: 191.0955112256623
Epoch 53 , Loss: 1563.9026336669922 , Train MSE: 183.27062298300723
Epoch 54 , Loss: 1584.7551765441895 , Train MSE: 182.54224280961594
Epoch 55 , Loss: 1580.4832458496094 , Train MSE: 183.52502274442895
Epoch 56 , Loss: 1575.7026996612549 , Train MSE: 183.65762610585776
Epoch 57 , Loss: 1564.0757808685303 , Train MSE: 181.74511878308385
Epoch 58 , Loss: 1541.7630519866943 , Train MSE: 180.1903406357665
Epoch 59 , Loss: 1565.2672634124756 , Train MSE: 181.87958586049064
Epoch 60 , Loss: 1546.5862770080566 , Train MSE: 179.2721271833518
Epoch 61 , Loss: 1543.9363498687744 , Train MSE: 178.82481278565143
Epoch 62 , Loss: 1541.4435272216797 , Train MSE: 178.37092987824556
Epoch 63 , Loss: 1539.0401611328125 , Train MSE: 177.9191908647545
Epoch 64 , Loss: 1536.7025661468506 , Train MSE: 177.475162976025
Epoch 65 , Loss: 1534.4405555725098 , Train MSE: 177.04531850741898
Epoch 66 , Loss: 1532.2883262634277 , Train MSE: 176.63507836211596
Epoch 67 , Loss: 1530.1770553588867 , Train MSE: 176.22591905456446
Epoch 68 , Loss: 1528.069808959961 , Train MSE: 175.81049076963447
Epoch 69 , Loss: 1526.0014553070068 , Train MSE: 175.4018591108565
Epoch 70 , Loss: 1523.9726905822754 , Train MSE: 174.99946906108426
Epoch 71 , Loss: 1521.9818782806396 , Train MSE: 174.60192371642435
Epoch 72 , Loss: 1520.027244567871 , Train MSE: 174.2076730227657
Epoch 73 , Loss: 1518.1081409454346 , Train MSE: 173.81571148761125
Epoch 74 , Loss: 1516.225793838501 , Train MSE: 173.4261951364567
Epoch 75 , Loss: 1514.3821868896484 , Train MSE: 173.04025245169498
Epoch 76 , Loss: 1512.5784950256348 , Train MSE: 172.6593058182633
Epoch 77 , Loss: 1510.8148040771484 , Train MSE: 172.28444507328123
Epoch 78 , Loss: 1509.0898571014404 , Train MSE: 171.91622077796777
Epoch 79 , Loss: 1507.4019527435303 , Train MSE: 171.55480576798584
Epoch 80 , Loss: 1505.749074935913 , Train MSE: 171.200129856851
Epoch 81 , Loss: 1504.129358291626 , Train MSE: 170.85202183427378
Epoch 82 , Loss: 1502.5411128997803 , Train MSE: 170.51025769914315
Epoch 83 , Loss: 1500.9827060699463 , Train MSE: 170.17459229804308
Epoch 84 , Loss: 1499.4522285461426 , Train MSE: 169.84478490619568
Epoch 85 , Loss: 1497.9469089508057 , Train MSE: 169.52056994251856
Epoch 86 , Loss: 1496.4624042510986 , Train MSE: 169.20168211766614
Epoch 87 , Loss: 1494.9907264709473 , Train MSE: 168.88780998027875
Epoch 88 , Loss: 1493.5178260803223 , Train MSE: 168.57855365920693
Epoch 89 , Loss: 1492.0259189605713 , Train MSE: 168.27376073716292
Epoch 90 , Loss: 1490.5230731964111 , Train MSE: 167.97476071236946
Epoch 91 , Loss: 1489.0716819763184 , Train MSE: 167.6848849154804
Epoch 92 , Loss: 1487.6654090881348 , Train MSE: 167.40195057708286
Epoch 93 , Loss: 1486.219316482544 , Train MSE: 167.11848724313833
Epoch 94 , Loss: 1484.7734813690186 , Train MSE: 166.83859275973072
Epoch 95 , Loss: 1483.4232082366943 , Train MSE: 166.56738735322202
Epoch 96 , Loss: 1482.1560955047607 , Train MSE: 166.3009604070595
Epoch 97 , Loss: 1480.924150466919 , Train MSE: 166.0341003448454
Epoch 98 , Loss: 1479.7367973327637 , Train MSE: 165.77223804653687
Epoch 99 , Loss: 1478.5782318115234 , Train MSE: 165.51461087312396
Validation MSE: 30.80795682130072
split 4 :
Epoch 0 , Loss: 2646.777145385742 , Train MSE: 208.3017044012707
Epoch 1 , Loss: 2375.46244430542 , Train MSE: 203.1779239091257
Epoch 2 , Loss: 2139.540910720825 , Train MSE: 199.12543256205464
Epoch 3 , Loss: 2009.524709701538 , Train MSE: 198.01119468539818
Epoch 4 , Loss: 1967.371259689331 , Train MSE: 197.42527875095948
Epoch 5 , Loss: 1978.9980659484863 , Train MSE: 197.15864982048154
Epoch 6 , Loss: 1969.3906745910645 , Train MSE: 197.40747521197153
Epoch 7 , Loss: 1956.7309665679932 , Train MSE: 195.11049899832588
Epoch 8 , Loss: 1996.6967735290527 , Train MSE: 195.13401322139384
Epoch 9 , Loss: 2004.0551109313965 , Train MSE: 195.81736453112478
Epoch 10 , Loss: 1996.8297100067139 , Train MSE: 197.57055665871064
Epoch 11 , Loss: 1976.673713684082 , Train MSE: 200.0094853159082
Epoch 12 , Loss: 2121.995594024658 , Train MSE: 209.71306838269632
Epoch 13 , Loss: 1994.580316543579 , Train MSE: 203.69590960425273
Epoch 14 , Loss: 1900.1792888641357 , Train MSE: 191.77098439288514
Epoch 15 , Loss: 1875.381971359253 , Train MSE: 190.31001758996447
Epoch 16 , Loss: 1865.587730884552 , Train MSE: 189.79735792564566
Epoch 17 , Loss: 1896.1437072753906 , Train MSE: 191.1070851405453
Epoch 18 , Loss: 1892.349603652954 , Train MSE: 188.65003946724727
Epoch 19 , Loss: 1903.3084411621094 , Train MSE: 189.44067271849923
Epoch 20 , Loss: 1877.9449653625488 , Train MSE: 188.75590022686265
Epoch 21 , Loss: 1878.7059650421143 , Train MSE: 187.41046530957186
Epoch 22 , Loss: 1858.0219917297363 , Train MSE: 186.32625947524065
Epoch 23 , Loss: 1852.5712795257568 , Train MSE: 185.43596492436888
Epoch 24 , Loss: 1848.44020652771 , Train MSE: 184.74556774296843
Epoch 25 , Loss: 1822.0123844146729 , Train MSE: 183.37659501500826
Epoch 26 , Loss: 1812.8175106048584 , Train MSE: 182.39316761333149
Epoch 27 , Loss: 1804.9031085968018 , Train MSE: 181.47776045931744
Epoch 28 , Loss: 1792.2484073638916 , Train MSE: 180.5209338362872
Epoch 29 , Loss: 1782.0170345306396 , Train MSE: 179.60448174345612
Epoch 30 , Loss: 1768.8108730316162 , Train MSE: 178.58182352177946
Epoch 31 , Loss: 1763.672529220581 , Train MSE: 177.91953530500552
Epoch 32 , Loss: 1755.2256870269775 , Train MSE: 177.0504235347647
Epoch 33 , Loss: 1749.0708179473877 , Train MSE: 176.2718759556132
Epoch 34 , Loss: 1742.6642875671387 , Train MSE: 175.49591578681913
Epoch 35 , Loss: 1736.3556938171387 , Train MSE: 174.73831374530695
Epoch 36 , Loss: 1730.7113571166992 , Train MSE: 174.01744538681723
Epoch 37 , Loss: 1834.5837306976318 , Train MSE: 182.03546911462197
Epoch 38 , Loss: 1771.5664653778076 , Train MSE: 177.66702406103246
Epoch 39 , Loss: 1738.3693809509277 , Train MSE: 172.80575621621222
Epoch 40 , Loss: 1725.6138401031494 , Train MSE: 171.9214514015268
Epoch 41 , Loss: 1719.9827861785889 , Train MSE: 171.18687902905492
Epoch 42 , Loss: 1716.0479526519775 , Train MSE: 170.53769829029048
Epoch 43 , Loss: 1703.0495834350586 , Train MSE: 169.69149391691613
Epoch 44 , Loss: 1698.6809406280518 , Train MSE: 169.07430572942388
Epoch 45 , Loss: 1693.1199359893799 , Train MSE: 168.4373212462879
Epoch 46 , Loss: 1682.0912055969238 , Train MSE: 167.58850351671916
Epoch 47 , Loss: 1678.0436420440674 , Train MSE: 166.98173509634182
Epoch 48 , Loss: 1673.3713417053223 , Train MSE: 166.37412380679223
Epoch 49 , Loss: 1669.1154022216797 , Train MSE: 165.78997697518503
Epoch 50 , Loss: 1664.9392719268799 , Train MSE: 165.2124251040964
Epoch 51 , Loss: 1660.9310989379883 , Train MSE: 164.65271510181165
Epoch 52 , Loss: 1657.0997409820557 , Train MSE: 164.11022606172358
Epoch 53 , Loss: 1653.3989868164062 , Train MSE: 163.583444919646
Epoch 54 , Loss: 1649.6385288238525 , Train MSE: 163.06202820468036
Epoch 55 , Loss: 1645.047040939331 , Train MSE: 162.5153962369216
Epoch 56 , Loss: 1642.0065364837646 , Train MSE: 162.0361168257301
Epoch 57 , Loss: 1638.8652057647705 , Train MSE: 161.56565848687094
Epoch 58 , Loss: 1635.8349208831787 , Train MSE: 161.10805477075988
Epoch 59 , Loss: 1632.899211883545 , Train MSE: 160.66364517476754
Epoch 60 , Loss: 1633.2663011550903 , Train MSE: 160.32105312879364
Epoch 61 , Loss: 1627.7589025497437 , Train MSE: 159.81251411990795
Epoch 62 , Loss: 1624.6990404129028 , Train MSE: 159.3957308247121
Epoch 63 , Loss: 1624.4098682403564 , Train MSE: 159.0639947010924
Epoch 64 , Loss: 1621.2083597183228 , Train MSE: 158.67164044666325
Epoch 65 , Loss: 1617.2184371948242 , Train MSE: 158.2604990678109
Epoch 66 , Loss: 1615.1565771102905 , Train MSE: 157.90471004110464
Epoch 67 , Loss: 1613.3680610656738 , Train MSE: 157.5615675247785
Epoch 68 , Loss: 1610.9405431747437 , Train MSE: 157.2138344379495
Epoch 69 , Loss: 1608.7803735733032 , Train MSE: 156.86457628479135
Epoch 70 , Loss: 1606.7800617218018 , Train MSE: 156.52738809988983
Epoch 71 , Loss: 1604.7447547912598 , Train MSE: 156.20249870717754
Epoch 72 , Loss: 1602.865689277649 , Train MSE: 155.88033461431462
Epoch 73 , Loss: 1601.061939239502 , Train MSE: 155.571247276756
Epoch 74 , Loss: 1599.5051555633545 , Train MSE: 155.27483300533484
Epoch 75 , Loss: 1597.627106666565 , Train MSE: 154.97829271122737
Epoch 76 , Loss: 1596.087272644043 , Train MSE: 154.6998817251094
Epoch 77 , Loss: 1594.4492893218994 , Train MSE: 154.41783169096612
Epoch 78 , Loss: 1592.8620958328247 , Train MSE: 154.14334415009296
Epoch 79 , Loss: 1591.3002614974976 , Train MSE: 153.87491959473667
Epoch 80 , Loss: 1589.7146682739258 , Train MSE: 153.6109231439353
Epoch 81 , Loss: 1587.854242324829 , Train MSE: 153.3443698216342
Epoch 82 , Loss: 1581.747817993164 , Train MSE: 152.970065007769
Epoch 83 , Loss: 1579.2597885131836 , Train MSE: 152.81210808477167
Epoch 84 , Loss: 1576.4207792282104 , Train MSE: 152.37981439564356
Epoch 85 , Loss: 1574.7783880233765 , Train MSE: 152.11403345044494
Epoch 86 , Loss: 1573.231029510498 , Train MSE: 151.85548580036732
Epoch 87 , Loss: 1572.2083263397217 , Train MSE: 151.61670666002627
Epoch 88 , Loss: 1570.552381515503 , Train MSE: 151.3697951963267
Epoch 89 , Loss: 1569.2769451141357 , Train MSE: 151.14325870919035
Epoch 90 , Loss: 1568.1061010360718 , Train MSE: 150.91741688898634
Epoch 91 , Loss: 1566.8351259231567 , Train MSE: 150.68890205526645
Epoch 92 , Loss: 1565.6004810333252 , Train MSE: 150.46735056248687
Epoch 93 , Loss: 1564.7244653701782 , Train MSE: 150.25749766261725
Epoch 94 , Loss: 1563.3449840545654 , Train MSE: 150.0416588823275
Epoch 95 , Loss: 1562.148657798767 , Train MSE: 149.8402285567692
Epoch 96 , Loss: 1560.1898078918457 , Train MSE: 149.61841447688053
Epoch 97 , Loss: 1693.9886741638184 , Train MSE: 165.35559680716108
Epoch 98 , Loss: 1614.214913368225 , Train MSE: 154.6151683995463
Epoch 99 , Loss: 1736.691318511963 , Train MSE: 161.25542726148234
Validation MSE: 22.13451688103068
split 5 :
Epoch 0 , Loss: 2968.537582397461 , Train MSE: 192.80261627538766
Epoch 1 , Loss: 2460.599521636963 , Train MSE: 182.56362326276275
Epoch 2 , Loss: 2206.143579483032 , Train MSE: 180.45188435868025
Epoch 3 , Loss: 2076.461513519287 , Train MSE: 179.8108265913638
Epoch 4 , Loss: 2082.9034748077393 , Train MSE: 179.80380780425176
Epoch 5 , Loss: 2078.7438583374023 , Train MSE: 179.1637724653204
Epoch 6 , Loss: 2043.790774345398 , Train MSE: 177.33174083981825
Epoch 7 , Loss: 2063.830328941345 , Train MSE: 177.70975108665522
Epoch 8 , Loss: 2027.3496360778809 , Train MSE: 175.20348729338633
Epoch 9 , Loss: 2011.2000255584717 , Train MSE: 174.3071535995676
Epoch 10 , Loss: 1992.9276123046875 , Train MSE: 173.2518411857331
Epoch 11 , Loss: 1994.8614702224731 , Train MSE: 172.7364702353181
Epoch 12 , Loss: 1985.6975631713867 , Train MSE: 172.02143467726017
Epoch 13 , Loss: 1980.0697021484375 , Train MSE: 171.33432534324243
Epoch 14 , Loss: 1985.033634185791 , Train MSE: 170.89292155235444
Epoch 15 , Loss: 2070.5071907043457 , Train MSE: 171.48191661243803
Epoch 16 , Loss: 1996.4540662765503 , Train MSE: 170.215573595583
Epoch 17 , Loss: 1984.2683210372925 , Train MSE: 169.86436953759917
Epoch 18 , Loss: 1976.7902879714966 , Train MSE: 170.47669483291517
Epoch 19 , Loss: 1916.7267599105835 , Train MSE: 167.24160801467298
Epoch 20 , Loss: 1927.3379468917847 , Train MSE: 166.4814917667243
Epoch 21 , Loss: 1896.0741186141968 , Train MSE: 165.21628605138926
Epoch 22 , Loss: 1872.6025218963623 , Train MSE: 164.0820505639899
Epoch 23 , Loss: 1851.8620891571045 , Train MSE: 163.20643767603235
Epoch 24 , Loss: 1843.4009923934937 , Train MSE: 162.2401028831178
Epoch 25 , Loss: 1829.8754444122314 , Train MSE: 161.44187404707927
Epoch 26 , Loss: 1813.2897596359253 , Train MSE: 160.41510054911637
Epoch 27 , Loss: 1798.8273992538452 , Train MSE: 159.4938384136433
Epoch 28 , Loss: 1896.0806722640991 , Train MSE: 160.07085115461243
Epoch 29 , Loss: 1807.5830478668213 , Train MSE: 158.60064914903475
Epoch 30 , Loss: 1807.0236072540283 , Train MSE: 158.1694571507635
Epoch 31 , Loss: 1789.4502000808716 , Train MSE: 156.93205958929704
Epoch 32 , Loss: 1775.9654207229614 , Train MSE: 156.17106367809285
Epoch 33 , Loss: 1760.799373626709 , Train MSE: 155.285685642171
Epoch 34 , Loss: 1760.145917892456 , Train MSE: 154.7088679576009
Epoch 35 , Loss: 1779.6922435760498 , Train MSE: 154.49387164616067
Epoch 36 , Loss: 1799.0446043014526 , Train MSE: 154.46662544208695
Epoch 37 , Loss: 1791.6835222244263 , Train MSE: 153.93788303795327
Epoch 38 , Loss: 1786.24361038208 , Train MSE: 153.44069833770706
Epoch 39 , Loss: 1781.7650203704834 , Train MSE: 152.97690449037586
Epoch 40 , Loss: 1778.2748889923096 , Train MSE: 152.52364889527988
Epoch 41 , Loss: 1773.8123235702515 , Train MSE: 152.09152107185074
Epoch 42 , Loss: 1773.295781135559 , Train MSE: 151.69370342440936
Epoch 43 , Loss: 1768.3135061264038 , Train MSE: 151.27434877572045
Epoch 44 , Loss: 1765.2195081710815 , Train MSE: 150.8993765530354
Epoch 45 , Loss: 1762.1984395980835 , Train MSE: 150.52889020251766
Epoch 46 , Loss: 1759.3132095336914 , Train MSE: 150.16782041810202
Epoch 47 , Loss: 1756.561692237854 , Train MSE: 149.81639413673773
Epoch 48 , Loss: 1753.9321718215942 , Train MSE: 149.47435712107404
Epoch 49 , Loss: 1751.4046449661255 , Train MSE: 149.14138479441564
Epoch 50 , Loss: 1748.7521800994873 , Train MSE: 148.81527544275013
Epoch 51 , Loss: 1746.117031097412 , Train MSE: 148.4992816409035
Epoch 52 , Loss: 1744.0245532989502 , Train MSE: 148.19854198316304
Epoch 53 , Loss: 1741.8965501785278 , Train MSE: 147.89878837893116
Epoch 54 , Loss: 1739.7941818237305 , Train MSE: 147.60510347701467
Epoch 55 , Loss: 1737.744324684143 , Train MSE: 147.31850463970838
Epoch 56 , Loss: 1735.772006034851 , Train MSE: 147.03902741998638
Epoch 57 , Loss: 1733.8866081237793 , Train MSE: 146.76643857828643
Epoch 58 , Loss: 1732.1078414916992 , Train MSE: 146.50057207754443
Epoch 59 , Loss: 1730.507939338684 , Train MSE: 146.24136867314976
Epoch 60 , Loss: 1728.511435508728 , Train MSE: 145.98432229169418
Epoch 61 , Loss: 1727.013599395752 , Train MSE: 145.74221746492276
Epoch 62 , Loss: 1725.461503982544 , Train MSE: 145.50011868155224
Epoch 63 , Loss: 1723.916090965271 , Train MSE: 145.26194702043725
Epoch 64 , Loss: 1722.3253698349 , Train MSE: 145.02540905501044
Epoch 65 , Loss: 1720.1965656280518 , Train MSE: 144.76543919636168
Epoch 66 , Loss: 1741.30650806427 , Train MSE: 146.3968181072398
Epoch 67 , Loss: 1704.7706537246704 , Train MSE: 143.89905945901626
Epoch 68 , Loss: 1915.5322284698486 , Train MSE: 154.72166452932615
Epoch 69 , Loss: 1767.1253108978271 , Train MSE: 145.00271252699662
Epoch 70 , Loss: 1751.774736404419 , Train MSE: 144.85777592501762
Epoch 71 , Loss: 1733.318510055542 , Train MSE: 144.17511278469325
Epoch 72 , Loss: 1731.5948400497437 , Train MSE: 143.56231828542522
Epoch 73 , Loss: 1727.221326828003 , Train MSE: 143.1583812614688
Epoch 74 , Loss: 1725.111557006836 , Train MSE: 142.84598720822277
Epoch 75 , Loss: 1722.876844406128 , Train MSE: 142.53514873068005
Epoch 76 , Loss: 1720.7056350708008 , Train MSE: 142.24066594063058
Epoch 77 , Loss: 1718.6822328567505 , Train MSE: 141.9650727648544
Epoch 78 , Loss: 1716.2098445892334 , Train MSE: 141.67811367684118
Epoch 79 , Loss: 1714.2622938156128 , Train MSE: 141.40642165031602
Epoch 80 , Loss: 1711.5560932159424 , Train MSE: 141.1191596117035
Epoch 81 , Loss: 1694.7938251495361 , Train MSE: 140.55758863717259
Epoch 82 , Loss: 1656.6286821365356 , Train MSE: 139.79950192929036
Epoch 83 , Loss: 1636.1877136230469 , Train MSE: 139.42198431394763
Epoch 84 , Loss: 1655.5824565887451 , Train MSE: 138.90714602202604
Epoch 85 , Loss: 1652.2688941955566 , Train MSE: 138.50721443838543
Epoch 86 , Loss: 1642.4861183166504 , Train MSE: 137.87692421906323
Epoch 87 , Loss: 1632.4043836593628 , Train MSE: 137.4819743881118
Epoch 88 , Loss: 1615.340085029602 , Train MSE: 136.6764250495944
Epoch 89 , Loss: 1712.3689250946045 , Train MSE: 139.85137967392612
Epoch 90 , Loss: 1895.8901653289795 , Train MSE: 159.60249538193105
Epoch 91 , Loss: 1788.866057395935 , Train MSE: 155.37702519351865
Epoch 92 , Loss: 1805.7005968093872 , Train MSE: 154.3383604882059
Epoch 93 , Loss: 1771.5374546051025 , Train MSE: 153.30906522997893
Epoch 94 , Loss: 1747.8036127090454 , Train MSE: 151.68212198469828
Epoch 95 , Loss: 1727.0162544250488 , Train MSE: 150.35174576698034
Epoch 96 , Loss: 1714.325695991516 , Train MSE: 149.22684817289252
Epoch 97 , Loss: 1703.950605392456 , Train MSE: 148.11016594297146
Epoch 98 , Loss: 1693.741693496704 , Train MSE: 147.03326260959872
Epoch 99 , Loss: 1683.6009941101074 , Train MSE: 146.00126506066343
Validation MSE: 325.98094109951967
Test MSE: 252.5139061288529
Average validation MSE: 202.22659447317432
County: 06073
split 1 :
Epoch 0 , Loss: 1902.8764343261719 , Train MSE: 476.70432588901696
Epoch 1 , Loss: 1853.8186340332031 , Train MSE: 464.57966169357695
Epoch 2 , Loss: 1782.6621704101562 , Train MSE: 446.79266498944264
Epoch 3 , Loss: 1712.0756530761719 , Train MSE: 429.1431884993912
Epoch 4 , Loss: 1642.4028625488281 , Train MSE: 411.7207604325649
Epoch 5 , Loss: 1573.6646118164062 , Train MSE: 394.5296212602806
Epoch 6 , Loss: 1505.9631958007812 , Train MSE: 377.5948160539324
Epoch 7 , Loss: 1439.4242858886719 , Train MSE: 360.94714799681014
Epoch 8 , Loss: 1374.1924438476562 , Train MSE: 344.6220971412237
Epoch 9 , Loss: 1310.43212890625 , Train MSE: 328.66001382673744
Epoch 10 , Loss: 1248.3287658691406 , Train MSE: 313.1064241846039
Epoch 11 , Loss: 1188.0895690917969 , Train MSE: 298.0123070303717
Epoch 12 , Loss: 1129.9433135986328 , Train MSE: 283.4340472308668
Epoch 13 , Loss: 1074.1387939453125 , Train MSE: 269.4331502780821
Epoch 14 , Loss: 1020.9403381347656 , Train MSE: 256.0752993246213
Epoch 15 , Loss: 970.6206512451172 , Train MSE: 243.4287177968092
Epoch 16 , Loss: 923.4497680664062 , Train MSE: 231.56162171239188
Epoch 17 , Loss: 879.6800384521484 , Train MSE: 220.53861669191778
Epoch 18 , Loss: 839.5278930664062 , Train MSE: 210.41625250770204
Epoch 19 , Loss: 803.1540679931641 , Train MSE: 201.23799716518238
Epoch 20 , Loss: 770.6446075439453 , Train MSE: 193.02926004440675
Epoch 21 , Loss: 741.9977569580078 , Train MSE: 185.7935765854537
Epoch 22 , Loss: 717.1174468994141 , Train MSE: 179.51047555469611
Epoch 23 , Loss: 695.8179321289062 , Train MSE: 174.13603730145874
Epoch 24 , Loss: 677.8370361328125 , Train MSE: 169.60594256745082
Epoch 25 , Loss: 662.8569030761719 , Train MSE: 165.84053177000246
Epoch 26 , Loss: 650.5276184082031 , Train MSE: 162.75102780618042
Epoch 27 , Loss: 640.4906311035156 , Train MSE: 160.24568500573343
Epoch 28 , Loss: 632.3982238769531 , Train MSE: 158.23512480323018
Epoch 29 , Loss: 625.9276733398438 , Train MSE: 156.63624428591692
Epoch 30 , Loss: 620.7904663085938 , Train MSE: 155.37470160810193
/shared-libs/python3.7/py-core/lib/python3.7/site-packages/ipykernel_launcher.py:21: SettingWithCopyWarning:
A value is trying to be set on a copy of a slice from a DataFrame.
Try using .loc[row_indexer,col_indexer] = value instead
See the caveats in the documentation: https://pandas.pydata.org/pandas-docs/stable/user_guide/indexing.html#returning-a-view-versus-a-copy
/shared-libs/python3.7/py-core/lib/python3.7/site-packages/ipykernel_launcher.py:22: SettingWithCopyWarning:
A value is trying to be set on a copy of a slice from a DataFrame.
Try using .loc[row_indexer,col_indexer] = value instead
See the caveats in the documentation: https://pandas.pydata.org/pandas-docs/stable/user_guide/indexing.html#returning-a-view-versus-a-copy
Epoch 31 , Loss: 616.7357788085938 , Train MSE: 154.3859758086204
Epoch 32 , Loss: 613.5510406494141 , Train MSE: 153.61549744069683
Epoch 33 , Loss: 611.0593566894531 , Train MSE: 153.0180161374312
Epoch 34 , Loss: 609.1160430908203 , Train MSE: 152.556652276778
Epoch 35 , Loss: 607.6040802001953 , Train MSE: 152.20170211098747
Epoch 36 , Loss: 606.4299621582031 , Train MSE: 151.92953526575545
Epoch 37 , Loss: 605.5194244384766 , Train MSE: 151.72147523398058
Epoch 38 , Loss: 604.8140869140625 , Train MSE: 151.5628918919914
Epoch 39 , Loss: 604.2679290771484 , Train MSE: 151.4423691222413
Epoch 40 , Loss: 603.8452606201172 , Train MSE: 151.35104866401926
Epoch 41 , Loss: 603.5181427001953 , Train MSE: 151.2820719747774
Epoch 42 , Loss: 603.2648773193359 , Train MSE: 151.23014730988936
Epoch 43 , Loss: 603.0687866210938 , Train MSE: 151.19121893981713
Epoch 44 , Loss: 602.9167938232422 , Train MSE: 151.1621675536549
Epoch 45 , Loss: 602.7989807128906 , Train MSE: 151.14059828180044
Epoch 46 , Loss: 602.7074584960938 , Train MSE: 151.12467927577546
Epoch 47 , Loss: 602.6363525390625 , Train MSE: 151.11303664371604
Epoch 48 , Loss: 602.5810089111328 , Train MSE: 151.1045885004062
Epoch 49 , Loss: 602.5378112792969 , Train MSE: 151.09853980464877
Epoch 50 , Loss: 602.5041046142578 , Train MSE: 151.09427520245734
Epoch 51 , Loss: 602.4777069091797 , Train MSE: 151.09133539205715
Epoch 52 , Loss: 602.4569702148438 , Train MSE: 151.08936899975828
Epoch 53 , Loss: 602.4407043457031 , Train MSE: 151.08811549530785
Epoch 54 , Loss: 602.4278259277344 , Train MSE: 151.08737540793933
Epoch 55 , Loss: 602.4176635742188 , Train MSE: 151.08700199958884
Epoch 56 , Loss: 602.4096221923828 , Train MSE: 151.08688541628598
Epoch 57 , Loss: 602.4031677246094 , Train MSE: 151.08694410927166
Epoch 58 , Loss: 602.3980255126953 , Train MSE: 151.0871142539267
Epoch 59 , Loss: 602.3938903808594 , Train MSE: 151.08736262542357
Epoch 60 , Loss: 602.3905487060547 , Train MSE: 151.08764878253345
Epoch 61 , Loss: 602.3878631591797 , Train MSE: 151.08795713206388
Epoch 62 , Loss: 602.3856658935547 , Train MSE: 151.08827350457491
Epoch 63 , Loss: 602.3838958740234 , Train MSE: 151.08857833781852
Epoch 64 , Loss: 602.3824157714844 , Train MSE: 151.0888694788739
Epoch 65 , Loss: 602.3812103271484 , Train MSE: 151.0891471040195
Epoch 66 , Loss: 602.3801879882812 , Train MSE: 151.0894040808073
Epoch 67 , Loss: 602.3793487548828 , Train MSE: 151.08963068126306
Epoch 68 , Loss: 602.3786773681641 , Train MSE: 151.0898520740337
Epoch 69 , Loss: 602.3780670166016 , Train MSE: 151.0900451171894
Epoch 70 , Loss: 602.3776092529297 , Train MSE: 151.09022112666713
Epoch 71 , Loss: 602.3771820068359 , Train MSE: 151.09037798155273
Epoch 72 , Loss: 602.3768310546875 , Train MSE: 151.0905167641383
Epoch 73 , Loss: 602.3765106201172 , Train MSE: 151.0906399326249
Epoch 74 , Loss: 602.3762512207031 , Train MSE: 151.09075345650638
Epoch 75 , Loss: 602.3760375976562 , Train MSE: 151.09084819531836
Epoch 76 , Loss: 602.3758850097656 , Train MSE: 151.09093519538408
Epoch 77 , Loss: 602.3757171630859 , Train MSE: 151.0910127844496
Epoch 78 , Loss: 602.3755645751953 , Train MSE: 151.09108521523174
Epoch 79 , Loss: 602.3754730224609 , Train MSE: 151.09114716041114
Epoch 80 , Loss: 602.3753204345703 , Train MSE: 151.0911925784601
Epoch 81 , Loss: 602.3752746582031 , Train MSE: 151.0912449585388
Epoch 82 , Loss: 602.3751525878906 , Train MSE: 151.0912835708565
Epoch 83 , Loss: 602.3750915527344 , Train MSE: 151.09131449324968
Epoch 84 , Loss: 602.3750457763672 , Train MSE: 151.09134676272214
Epoch 85 , Loss: 602.3749694824219 , Train MSE: 151.0913726800788
Epoch 86 , Loss: 602.3749542236328 , Train MSE: 151.09140038932438
Epoch 87 , Loss: 602.3748779296875 , Train MSE: 151.09142044242412
Epoch 88 , Loss: 602.3748626708984 , Train MSE: 151.0914340304232
Epoch 89 , Loss: 602.3748474121094 , Train MSE: 151.09145672647497
Epoch 90 , Loss: 602.3748016357422 , Train MSE: 151.0914645164201
Epoch 91 , Loss: 602.3747711181641 , Train MSE: 151.09147601924656
Epoch 92 , Loss: 602.3747406005859 , Train MSE: 151.09148587370677
Epoch 93 , Loss: 602.3747253417969 , Train MSE: 151.09149507343838
Epoch 94 , Loss: 602.3746948242188 , Train MSE: 151.09150406406414
Epoch 95 , Loss: 602.3747100830078 , Train MSE: 151.09151488013316
Epoch 96 , Loss: 602.3746948242188 , Train MSE: 151.09151801068023
Epoch 97 , Loss: 602.3746795654297 , Train MSE: 151.09151750690947
Epoch 98 , Loss: 602.3746795654297 , Train MSE: 151.09152852163007
Epoch 99 , Loss: 602.3746337890625 , Train MSE: 151.09152942387774
Validation MSE: 1783.9217187612103
split 2 :
Epoch 0 , Loss: 7472.97900390625 , Train MSE: 1079.6097426919166
Epoch 1 , Loss: 7454.836761474609 , Train MSE: 1078.6197802536337
Epoch 2 , Loss: 7405.733428955078 , Train MSE: 1075.9989877613307
Epoch 3 , Loss: 7344.267364501953 , Train MSE: 1072.9668477989696
Epoch 4 , Loss: 7283.457336425781 , Train MSE: 1070.0955594198658
Epoch 5 , Loss: 7223.533660888672 , Train MSE: 1067.3808464032134
Epoch 6 , Loss: 7164.53515625 , Train MSE: 1064.8304698868872
Epoch 7 , Loss: 7106.715911865234 , Train MSE: 1062.4254170807728
Epoch 8 , Loss: 7049.933074951172 , Train MSE: 1060.1719372510472
Epoch 9 , Loss: 6998.148132324219 , Train MSE: 1058.60184209812
Epoch 10 , Loss: 6941.0040283203125 , Train MSE: 1056.1265806349336
Epoch 11 , Loss: 6887.922882080078 , Train MSE: 1054.2678562089695
Epoch 12 , Loss: 6836.574066162109 , Train MSE: 1052.5581068001034
Epoch 13 , Loss: 6786.871520996094 , Train MSE: 1050.9764335897482
Epoch 14 , Loss: 6738.900390625 , Train MSE: 1049.511938724354
Epoch 15 , Loss: 6692.804000854492 , Train MSE: 1048.1648572941783
Epoch 16 , Loss: 6648.669372558594 , Train MSE: 1046.9230317684605
Epoch 17 , Loss: 6612.082473754883 , Train MSE: 1046.5584742155534
Epoch 18 , Loss: 6667.296829223633 , Train MSE: 1049.7920990878845
Epoch 19 , Loss: 6534.789352416992 , Train MSE: 1043.8954722868318
Epoch 20 , Loss: 6499.623107910156 , Train MSE: 1043.0023975267172
Epoch 21 , Loss: 6466.957473754883 , Train MSE: 1042.1749440628253
Epoch 22 , Loss: 6436.821319580078 , Train MSE: 1041.403023070835
Epoch 23 , Loss: 6409.20231628418 , Train MSE: 1040.676485625985
Epoch 24 , Loss: 6384.050216674805 , Train MSE: 1039.9853714713045
Epoch 25 , Loss: 6361.2735595703125 , Train MSE: 1039.3201651350496
Epoch 26 , Loss: 6340.743789672852 , Train MSE: 1038.6720241974347
Epoch 27 , Loss: 6322.304473876953 , Train MSE: 1038.0329859450223
Epoch 28 , Loss: 6305.775009155273 , Train MSE: 1037.3961018226817
Epoch 29 , Loss: 6290.963943481445 , Train MSE: 1036.755525976037
Epoch 30 , Loss: 6277.673965454102 , Train MSE: 1036.1065337648897
Epoch 31 , Loss: 6265.712478637695 , Train MSE: 1035.4454775877991
Epoch 32 , Loss: 6254.896347045898 , Train MSE: 1034.769717008922
Epoch 33 , Loss: 6245.056427001953 , Train MSE: 1034.0775039074238
Epoch 34 , Loss: 6236.040100097656 , Train MSE: 1033.3678512684305
Epoch 35 , Loss: 6227.713226318359 , Train MSE: 1032.6404078567318
Epoch 36 , Loss: 6219.9583740234375 , Train MSE: 1031.8953208378155
Epoch 37 , Loss: 6212.676605224609 , Train MSE: 1031.1331283801858
Epoch 38 , Loss: 6205.783416748047 , Train MSE: 1030.3546476775437
Epoch 39 , Loss: 6199.209030151367 , Train MSE: 1029.5608902464946
Epoch 40 , Loss: 6192.895401000977 , Train MSE: 1028.752988821021
Epoch 41 , Loss: 6186.794937133789 , Train MSE: 1027.932139872985
Epoch 42 , Loss: 6180.86946105957 , Train MSE: 1027.0995586978763
Epoch 43 , Loss: 6175.086959838867 , Train MSE: 1026.256447902461
Epoch 44 , Loss: 6169.422760009766 , Train MSE: 1025.4039693035531
Epoch 45 , Loss: 6163.856002807617 , Train MSE: 1024.5432303065013
Epoch 46 , Loss: 6158.371246337891 , Train MSE: 1023.6752727979513
Epoch 47 , Loss: 6152.954666137695 , Train MSE: 1022.8010659110106
Epoch 48 , Loss: 6147.596343994141 , Train MSE: 1021.9215046683033
Epoch 49 , Loss: 6142.287689208984 , Train MSE: 1021.0374100476998
Epoch 50 , Loss: 6137.022521972656 , Train MSE: 1020.1495289020676
Epoch 51 , Loss: 6131.795257568359 , Train MSE: 1019.2585407992837
Epoch 52 , Loss: 6126.601989746094 , Train MSE: 1018.3650572246409
Epoch 53 , Loss: 6121.439041137695 , Train MSE: 1017.4696293770937
Epoch 54 , Loss: 6116.304000854492 , Train MSE: 1016.5727508017503
Epoch 55 , Loss: 6111.194641113281 , Train MSE: 1015.6748672188763
Epoch 56 , Loss: 6106.109527587891 , Train MSE: 1014.7763735997846
Epoch 57 , Loss: 6101.046813964844 , Train MSE: 1013.8776229555805
Epoch 58 , Loss: 6096.005950927734 , Train MSE: 1012.9789250590132
Epoch 59 , Loss: 6090.985626220703 , Train MSE: 1012.0805603859083
Epoch 60 , Loss: 6085.9853515625 , Train MSE: 1011.1827776960716
Epoch 61 , Loss: 6081.003753662109 , Train MSE: 1010.2857920600104
Epoch 62 , Loss: 6076.041931152344 , Train MSE: 1009.3897997068414
Epoch 63 , Loss: 6071.097869873047 , Train MSE: 1008.494967344809
Epoch 64 , Loss: 6066.172256469727 , Train MSE: 1007.6014459519589
Epoch 65 , Loss: 6061.264678955078 , Train MSE: 1006.7093759788853
Epoch 66 , Loss: 6056.374328613281 , Train MSE: 1005.8188680793013
Epoch 67 , Loss: 6051.501815795898 , Train MSE: 1004.9300270248091
Epoch 68 , Loss: 6046.646423339844 , Train MSE: 1004.0429442110469
Epoch 69 , Loss: 6041.808059692383 , Train MSE: 1003.1576968413184
Epoch 70 , Loss: 6036.986724853516 , Train MSE: 1002.2743597155637
Epoch 71 , Loss: 6032.182312011719 , Train MSE: 1001.3929939649786
Epoch 72 , Loss: 6027.394744873047 , Train MSE: 1000.5136489128713
Epoch 73 , Loss: 6022.623687744141 , Train MSE: 999.636377941741
Epoch 74 , Loss: 6017.869323730469 , Train MSE: 998.7612148029604
Epoch 75 , Loss: 6013.131805419922 , Train MSE: 997.8881998555353
Epoch 76 , Loss: 6008.410369873047 , Train MSE: 997.0173616610456
Epoch 77 , Loss: 6003.705535888672 , Train MSE: 996.1487339571988
Epoch 78 , Loss: 5999.016952514648 , Train MSE: 995.2823332113104
Epoch 79 , Loss: 5994.345123291016 , Train MSE: 994.418180788311
Epoch 80 , Loss: 5989.688980102539 , Train MSE: 993.5562892796943
Epoch 81 , Loss: 5985.049057006836 , Train MSE: 992.6966845695143
Epoch 82 , Loss: 5980.42561340332 , Train MSE: 991.8393777172074
Epoch 83 , Loss: 5975.818084716797 , Train MSE: 990.9843705820207
Epoch 84 , Loss: 5971.227111816406 , Train MSE: 990.1316786402684
Epoch 85 , Loss: 5966.65153503418 , Train MSE: 989.2813067177457
Epoch 86 , Loss: 5962.091949462891 , Train MSE: 988.4332678292367
Epoch 87 , Loss: 5957.548614501953 , Train MSE: 987.5875623535521
Epoch 88 , Loss: 5953.021133422852 , Train MSE: 986.744200958339
Epoch 89 , Loss: 5948.509689331055 , Train MSE: 985.9031755585531
Epoch 90 , Loss: 5944.013732910156 , Train MSE: 985.0644939657437
Epoch 91 , Loss: 5939.533630371094 , Train MSE: 984.2281576043887
Epoch 92 , Loss: 5935.069290161133 , Train MSE: 983.3941721571941
Epoch 93 , Loss: 5930.62060546875 , Train MSE: 982.5625276301156
Epoch 94 , Loss: 5926.187744140625 , Train MSE: 981.7332343169406
Epoch 95 , Loss: 5921.770568847656 , Train MSE: 980.9062889969684
Epoch 96 , Loss: 5917.368606567383 , Train MSE: 980.081690054863
Epoch 97 , Loss: 5912.982513427734 , Train MSE: 979.2594359196992
Epoch 98 , Loss: 5908.611923217773 , Train MSE: 978.4395208289181
Epoch 99 , Loss: 5904.25651550293 , Train MSE: 977.6219520030712
Validation MSE: 1296.6249902464951
split 3 :
Epoch 0 , Loss: 11036.348358154297 , Train MSE: 1181.770821224594
Epoch 1 , Loss: 11021.76171875 , Train MSE: 1182.3559803596252
Epoch 2 , Loss: 10958.895446777344 , Train MSE: 1180.7600013262206
Epoch 3 , Loss: 10860.37417602539 , Train MSE: 1178.2786270051563
Epoch 4 , Loss: 10770.571350097656 , Train MSE: 1176.74796952206
Epoch 5 , Loss: 10634.60659790039 , Train MSE: 1173.1120231909792
Epoch 6 , Loss: 10572.899963378906 , Train MSE: 1173.0122612685457
Epoch 7 , Loss: 10445.42056274414 , Train MSE: 1169.4079794731974
Epoch 8 , Loss: 10390.876037597656 , Train MSE: 1171.3807889822033
Epoch 9 , Loss: 10276.742156982422 , Train MSE: 1166.6415521418933
Epoch 10 , Loss: 10187.783874511719 , Train MSE: 1165.2809463282335
Epoch 11 , Loss: 10102.032913208008 , Train MSE: 1164.05888202167
Epoch 12 , Loss: 10019.858688354492 , Train MSE: 1162.9677088073474
Epoch 13 , Loss: 9941.698699951172 , Train MSE: 1161.997975994059
Epoch 14 , Loss: 9867.829345703125 , Train MSE: 1161.1302147065194
Epoch 15 , Loss: 9798.626281738281 , Train MSE: 1160.3479013461742
Epoch 16 , Loss: 9731.335311889648 , Train MSE: 1159.5592603873008
Epoch 17 , Loss: 9678.372360229492 , Train MSE: 1159.3405500260253
Epoch 18 , Loss: 9619.500900268555 , Train MSE: 1158.3017557079368
Epoch 19 , Loss: 9560.01596069336 , Train MSE: 1157.5155802051122
Epoch 20 , Loss: 9631.664901733398 , Train MSE: 1169.0104998173144
Epoch 21 , Loss: 9590.319763183594 , Train MSE: 1165.206112409622
Epoch 22 , Loss: 9580.826499938965 , Train MSE: 1169.6445879520227
Epoch 23 , Loss: 9554.537887573242 , Train MSE: 1169.011123319641
Epoch 24 , Loss: 9533.976509094238 , Train MSE: 1168.570256332024
Epoch 25 , Loss: 9515.54867553711 , Train MSE: 1168.0462782199554
Epoch 26 , Loss: 9499.042259216309 , Train MSE: 1167.4622723268378
Epoch 27 , Loss: 9538.302780151367 , Train MSE: 1167.24243624226
Epoch 28 , Loss: 9336.286315917969 , Train MSE: 1151.0596180653151
Epoch 29 , Loss: 9318.423599243164 , Train MSE: 1150.1994868044799
Epoch 30 , Loss: 9303.889373779297 , Train MSE: 1149.3229807357218
Epoch 31 , Loss: 9290.762893676758 , Train MSE: 1148.4021501793761
Epoch 32 , Loss: 9278.740272521973 , Train MSE: 1147.4401537334238
Epoch 33 , Loss: 9267.574317932129 , Train MSE: 1146.4406774419379
Epoch 34 , Loss: 9257.069839477539 , Train MSE: 1145.4076685598604
Epoch 35 , Loss: 9247.072769165039 , Train MSE: 1144.34512728564
Epoch 36 , Loss: 9237.464431762695 , Train MSE: 1143.25695814659
Epoch 37 , Loss: 9228.158073425293 , Train MSE: 1142.1469709313342
Epoch 38 , Loss: 9219.094383239746 , Train MSE: 1141.018829949507
Epoch 39 , Loss: 9210.21965789795 , Train MSE: 1139.875640613458
Epoch 40 , Loss: 9201.49373626709 , Train MSE: 1138.7202226675029
Epoch 41 , Loss: 9192.889442443848 , Train MSE: 1137.5551249902278
Epoch 42 , Loss: 9184.386226654053 , Train MSE: 1136.3825951733804
Epoch 43 , Loss: 9175.9702835083 , Train MSE: 1135.2045933894494
Epoch 44 , Loss: 9167.630760192871 , Train MSE: 1134.022821371468
Epoch 45 , Loss: 9159.360458374023 , Train MSE: 1132.8387400385664
Epoch 46 , Loss: 9151.154026031494 , Train MSE: 1131.6536011289343
Epoch 47 , Loss: 9143.007331848145 , Train MSE: 1130.468470457126
Epoch 48 , Loss: 9134.917339324951 , Train MSE: 1129.2842512001766
Epoch 49 , Loss: 9126.88224029541 , Train MSE: 1128.101707116089
Epoch 50 , Loss: 9118.900562286377 , Train MSE: 1126.9214835867938
Epoch 51 , Loss: 9110.971340179443 , Train MSE: 1125.7441136176437
Epoch 52 , Loss: 9103.09330368042 , Train MSE: 1124.5700624282752
Epoch 53 , Loss: 9095.26598739624 , Train MSE: 1123.3997049698364
Epoch 54 , Loss: 9087.488960266113 , Train MSE: 1122.2333551394308
Epoch 55 , Loss: 9079.76163482666 , Train MSE: 1121.0712751207939
Epoch 56 , Loss: 9072.08369064331 , Train MSE: 1119.9136886990755
Epoch 57 , Loss: 9064.455207824707 , Train MSE: 1118.7607700839267
Epoch 58 , Loss: 9056.875984191895 , Train MSE: 1117.6126720576308
Epoch 59 , Loss: 9049.34465789795 , Train MSE: 1116.469520579596
Epoch 60 , Loss: 9041.862815856934 , Train MSE: 1115.3314088228824
Epoch 61 , Loss: 9034.428924560547 , Train MSE: 1114.1984228181336
Epoch 62 , Loss: 9027.043312072754 , Train MSE: 1113.0706290078501
Epoch 63 , Loss: 9019.705390930176 , Train MSE: 1111.948080637073
Epoch 64 , Loss: 9012.416259765625 , Train MSE: 1110.830820732098
Epoch 65 , Loss: 9005.173721313477 , Train MSE: 1109.7188780764711
Epoch 66 , Loss: 8997.979351043701 , Train MSE: 1108.612286662832
Epoch 67 , Loss: 8990.832420349121 , Train MSE: 1107.5110630168929
Epoch 68 , Loss: 8983.732635498047 , Train MSE: 1106.4152277349153
Epoch 69 , Loss: 8976.679912567139 , Train MSE: 1105.3247779069509
Epoch 70 , Loss: 8969.674293518066 , Train MSE: 1104.2397295014555
Epoch 71 , Loss: 8962.715503692627 , Train MSE: 1103.1600806321658
Epoch 72 , Loss: 8955.80360031128 , Train MSE: 1102.0858303627836
Epoch 73 , Loss: 8948.937866210938 , Train MSE: 1101.016984010872
Epoch 74 , Loss: 8942.118671417236 , Train MSE: 1099.9535321054636
Epoch 75 , Loss: 8935.34561920166 , Train MSE: 1098.8954647185703
Epoch 76 , Loss: 8928.61873626709 , Train MSE: 1097.8427862131482
Epoch 77 , Loss: 8921.9380569458 , Train MSE: 1096.7954888346715
Epoch 78 , Loss: 8915.302822113037 , Train MSE: 1095.7535609222723
Epoch 79 , Loss: 8908.713596343994 , Train MSE: 1094.7169989678312
Epoch 80 , Loss: 8902.169624328613 , Train MSE: 1093.6857837276755
Epoch 81 , Loss: 8895.671348571777 , Train MSE: 1092.6599128784458
Epoch 82 , Loss: 8889.217720031738 , Train MSE: 1091.6393614751387
Epoch 83 , Loss: 8882.809303283691 , Train MSE: 1090.624123675463
Epoch 84 , Loss: 8876.445724487305 , Train MSE: 1089.614193141908
Epoch 85 , Loss: 8870.126796722412 , Train MSE: 1088.6095547759603
Epoch 86 , Loss: 8863.85256576538 , Train MSE: 1087.6101899072194
Epoch 87 , Loss: 8857.621948242188 , Train MSE: 1086.6160865307838
Epoch 88 , Loss: 8851.435775756836 , Train MSE: 1085.627228776577
Epoch 89 , Loss: 8845.293464660645 , Train MSE: 1084.643610929048
Epoch 90 , Loss: 8839.194679260254 , Train MSE: 1083.6652102003156
Epoch 91 , Loss: 8833.13874053955 , Train MSE: 1082.6919785677906
Epoch 92 , Loss: 8827.125881195068 , Train MSE: 1081.7239314360424
Epoch 93 , Loss: 8821.155029296875 , Train MSE: 1080.7610350903283
Epoch 94 , Loss: 8815.22583770752 , Train MSE: 1079.8032404389369
Epoch 95 , Loss: 8809.336437225342 , Train MSE: 1078.850499292059
Epoch 96 , Loss: 8803.485340118408 , Train MSE: 1077.9027463461653
Epoch 97 , Loss: 8797.668067932129 , Train MSE: 1076.9598221678436
Epoch 98 , Loss: 8791.874607086182 , Train MSE: 1076.0213651699908
Epoch 99 , Loss: 8786.076133728027 , Train MSE: 1075.086444024235
Validation MSE: 102.71905776182412
split 4 :
Epoch 0 , Loss: 13263.394622802734 , Train MSE: 1059.6265984668805
Epoch 1 , Loss: 12935.874084472656 , Train MSE: 1052.1192444823878
Epoch 2 , Loss: 12687.228973388672 , Train MSE: 1049.9613675582802
Epoch 3 , Loss: 12296.646453857422 , Train MSE: 1039.578651026462
Epoch 4 , Loss: 12049.440002441406 , Train MSE: 1041.803023579391
Epoch 5 , Loss: 11719.152923583984 , Train MSE: 1033.1746531535591
Epoch 6 , Loss: 11434.954772949219 , Train MSE: 1036.2887382939548
Epoch 7 , Loss: 11191.944030761719 , Train MSE: 1035.0502114610701
Epoch 8 , Loss: 10822.298934936523 , Train MSE: 1021.8901405785737
Epoch 9 , Loss: 10574.592498779297 , Train MSE: 1020.2466635239988
Epoch 10 , Loss: 10352.525268554688 , Train MSE: 1019.1748491493915
Epoch 11 , Loss: 10257.144271850586 , Train MSE: 1021.6426384456571
Epoch 12 , Loss: 10063.291580200195 , Train MSE: 1018.8931751472302
Epoch 13 , Loss: 9991.014297485352 , Train MSE: 1020.3457702728941
Epoch 14 , Loss: 9901.302658081055 , Train MSE: 1019.3564037664556
Epoch 15 , Loss: 9834.526821136475 , Train MSE: 1018.8052740596423
Epoch 16 , Loss: 9778.085865020752 , Train MSE: 1018.257102606337
Epoch 17 , Loss: 9759.297492980957 , Train MSE: 1018.0185350043079
Epoch 18 , Loss: 9735.661388397217 , Train MSE: 1017.0490927835413
Epoch 19 , Loss: 9808.138198852539 , Train MSE: 1021.2967891348098
Epoch 20 , Loss: 9751.824523925781 , Train MSE: 1015.8668316083673
Epoch 21 , Loss: 9743.039741516113 , Train MSE: 1014.8659775611934
Epoch 22 , Loss: 9734.215538024902 , Train MSE: 1013.8053387566298
Epoch 23 , Loss: 9725.303974151611 , Train MSE: 1012.7076616203412
Epoch 24 , Loss: 9716.32428741455 , Train MSE: 1011.5880656421849
Epoch 25 , Loss: 9707.307567596436 , Train MSE: 1010.4563679555056
Epoch 26 , Loss: 9698.28122329712 , Train MSE: 1009.3188780721362
Epoch 27 , Loss: 9689.266384124756 , Train MSE: 1008.1796081277007
Epoch 28 , Loss: 9680.276721954346 , Train MSE: 1007.0411081559495
Epoch 29 , Loss: 9671.321517944336 , Train MSE: 1005.9049813283737
Epoch 30 , Loss: 9662.406707763672 , Train MSE: 1004.7722307576034
Epoch 31 , Loss: 9653.535171508789 , Train MSE: 1003.6434843816318
Epoch 32 , Loss: 9644.709205627441 , Train MSE: 1002.519122967087
Epoch 33 , Loss: 9635.927494049072 , Train MSE: 1001.3993584029522
Epoch 34 , Loss: 9627.189167022705 , Train MSE: 1000.2842868156495
Epoch 35 , Loss: 9618.48592376709 , Train MSE: 999.1738606870013
Epoch 36 , Loss: 9609.79680633545 , Train MSE: 998.0677027173735
Epoch 37 , Loss: 9601.008602142334 , Train MSE: 996.9637276573668
Epoch 38 , Loss: 9589.759662628174 , Train MSE: 995.8160236068866
Epoch 39 , Loss: 9579.375385284424 , Train MSE: 997.3909119353528
Epoch 40 , Loss: 9600.515628814697 , Train MSE: 996.3052182507129
Epoch 41 , Loss: 9567.589660644531 , Train MSE: 992.6097318439068
Epoch 42 , Loss: 9559.305786132812 , Train MSE: 991.5397434347368
Epoch 43 , Loss: 9551.061305999756 , Train MSE: 990.473032701677
Epoch 44 , Loss: 9542.858604431152 , Train MSE: 989.4102173741264
Epoch 45 , Loss: 9534.701824188232 , Train MSE: 988.3516912151631
Epoch 46 , Loss: 9526.591087341309 , Train MSE: 987.2976927794128
Epoch 47 , Loss: 9518.527767181396 , Train MSE: 986.2483717294436
Epoch 48 , Loss: 9510.51258468628 , Train MSE: 985.2038198571486
Epoch 49 , Loss: 9502.545265197754 , Train MSE: 984.1640802485873
Epoch 50 , Loss: 9494.62608718872 , Train MSE: 983.1291898897115
Epoch 51 , Loss: 9486.754936218262 , Train MSE: 982.0991566023888
Epoch 52 , Loss: 9478.931343078613 , Train MSE: 981.0739823074626
Epoch 53 , Loss: 9471.155742645264 , Train MSE: 980.0536585056868
Epoch 54 , Loss: 9463.42663192749 , Train MSE: 979.0381726467976
Epoch 55 , Loss: 9455.743759155273 , Train MSE: 978.0275052355169
Epoch 56 , Loss: 9448.105220794678 , Train MSE: 977.021607122326
Epoch 57 , Loss: 9440.50855255127 , Train MSE: 976.0203955488558
Epoch 58 , Loss: 9432.943710327148 , Train MSE: 975.0236506889275
Epoch 59 , Loss: 9425.376766204834 , Train MSE: 974.0305374501422
Epoch 60 , Loss: 9417.47946548462 , Train MSE: 973.0333498723512
Epoch 61 , Loss: 9349.977153778076 , Train MSE: 970.7638213079819
Epoch 62 , Loss: 9895.918579101562 , Train MSE: 1032.0808407790296
Epoch 63 , Loss: 9877.705219268799 , Train MSE: 1029.706097082703
Epoch 64 , Loss: 9861.09194946289 , Train MSE: 1027.5370915773726
Epoch 65 , Loss: 9845.357852935791 , Train MSE: 1025.500721884494
Epoch 66 , Loss: 9830.154766082764 , Train MSE: 1023.552502076635
Epoch 67 , Loss: 9815.30708694458 , Train MSE: 1021.6648604291128
Epoch 68 , Loss: 9800.719932556152 , Train MSE: 1019.820419560633
Epoch 69 , Loss: 9786.339420318604 , Train MSE: 1018.008053391965
Epoch 70 , Loss: 9772.13338470459 , Train MSE: 1016.2206340684262
Epoch 71 , Loss: 9758.081935882568 , Train MSE: 1014.4534976517671
Epoch 72 , Loss: 9744.172470092773 , Train MSE: 1012.703627213923
Epoch 73 , Loss: 9730.396800994873 , Train MSE: 1010.9690708135624
Epoch 74 , Loss: 9716.74919128418 , Train MSE: 1009.2484727681123
Epoch 75 , Loss: 9703.225887298584 , Train MSE: 1007.5409851274221
Epoch 76 , Loss: 9689.823989868164 , Train MSE: 1005.8460482685032
Epoch 77 , Loss: 9676.541439056396 , Train MSE: 1004.1632448583914
Epoch 78 , Loss: 9663.376392364502 , Train MSE: 1002.4922933288881
Epoch 79 , Loss: 9650.327899932861 , Train MSE: 1000.8330103837677
Epoch 80 , Loss: 9637.394813537598 , Train MSE: 999.1852789605736
Epoch 81 , Loss: 9624.575862884521 , Train MSE: 997.5489552285833
Epoch 82 , Loss: 9611.870666503906 , Train MSE: 995.9239917585297
Epoch 83 , Loss: 9599.278469085693 , Train MSE: 994.3102954833446
Epoch 84 , Loss: 9586.798259735107 , Train MSE: 992.7078137602089
Epoch 85 , Loss: 9574.430053710938 , Train MSE: 991.1165146028181
Epoch 86 , Loss: 9562.17264175415 , Train MSE: 989.5363459104915
Epoch 87 , Loss: 9550.025566101074 , Train MSE: 987.9672655178559
Epoch 88 , Loss: 9537.9885597229 , Train MSE: 986.4092307127007
Epoch 89 , Loss: 9526.060451507568 , Train MSE: 984.8621834554496
Epoch 90 , Loss: 9514.241344451904 , Train MSE: 983.326126470523
Epoch 91 , Loss: 9502.530796051025 , Train MSE: 981.8010210534136
Epoch 92 , Loss: 9490.928134918213 , Train MSE: 980.286801030753
Epoch 93 , Loss: 9479.432693481445 , Train MSE: 978.7834471410021
Epoch 94 , Loss: 9468.044021606445 , Train MSE: 977.2909438810051
Epoch 95 , Loss: 9456.761623382568 , Train MSE: 975.8092285998149
Epoch 96 , Loss: 9445.585182189941 , Train MSE: 974.3382825990665
Epoch 97 , Loss: 9434.514938354492 , Train MSE: 972.8780753048048
Epoch 98 , Loss: 9423.548976898193 , Train MSE: 971.4285730741326
Epoch 99 , Loss: 9412.687717437744 , Train MSE: 969.9897407625187
Validation MSE: 145.75715607913125
split 5 :
Epoch 0 , Loss: 15198.628204345703 , Train MSE: 990.649989514922
Epoch 1 , Loss: 14639.64974975586 , Train MSE: 976.9860564755948
Epoch 2 , Loss: 14002.487243652344 , Train MSE: 962.8000477608316
Epoch 3 , Loss: 13357.574127197266 , Train MSE: 950.4136872790391
Epoch 4 , Loss: 12792.404479980469 , Train MSE: 941.5952413660655
Epoch 5 , Loss: 12206.61752319336 , Train MSE: 932.9762335529558
Epoch 6 , Loss: 11614.149703979492 , Train MSE: 926.434999938202
Epoch 7 , Loss: 11871.597274780273 , Train MSE: 968.5514279019097
Epoch 8 , Loss: 11551.01065826416 , Train MSE: 984.4242177658547
Epoch 9 , Loss: 11212.783058166504 , Train MSE: 964.8266042736051
Epoch 10 , Loss: 10857.18968963623 , Train MSE: 949.6159829719045
Epoch 11 , Loss: 10716.450729370117 , Train MSE: 950.8721830803327
Epoch 12 , Loss: 10647.533435821533 , Train MSE: 952.1635781571775
Epoch 13 , Loss: 10766.244552612305 , Train MSE: 977.6734227051523
Epoch 14 , Loss: 10821.51174545288 , Train MSE: 982.5212159046588
Epoch 15 , Loss: 10579.72798538208 , Train MSE: 958.9585891916416
Epoch 16 , Loss: 10568.217441558838 , Train MSE: 958.0082476441613
Epoch 17 , Loss: 10556.408191680908 , Train MSE: 956.8497926537938
Epoch 18 , Loss: 10544.182304382324 , Train MSE: 955.5616022373738
Epoch 19 , Loss: 10531.600601196289 , Train MSE: 954.191604092297
Epoch 20 , Loss: 10518.775722503662 , Train MSE: 952.7713624027464
Epoch 21 , Loss: 10505.810237884521 , Train MSE: 951.3214297228841
Epoch 22 , Loss: 10492.781715393066 , Train MSE: 949.8551106461375
Epoch 23 , Loss: 10479.747409820557 , Train MSE: 948.3811051921288
Epoch 24 , Loss: 10466.745212554932 , Train MSE: 946.9048914890675
Epoch 25 , Loss: 10453.793426513672 , Train MSE: 945.4297688814038
Epoch 26 , Loss: 10440.9118309021 , Train MSE: 943.9585345382329
Epoch 27 , Loss: 10428.11022567749 , Train MSE: 942.4927117144133
Epoch 28 , Loss: 10415.394672393799 , Train MSE: 941.0331542347762
Epoch 29 , Loss: 10402.765884399414 , Train MSE: 939.5802661293532
Epoch 30 , Loss: 10390.22322845459 , Train MSE: 938.134257243028
Epoch 31 , Loss: 10377.766731262207 , Train MSE: 936.6952997900257
Epoch 32 , Loss: 10365.400478363037 , Train MSE: 935.2638118892883
Epoch 33 , Loss: 10353.121982574463 , Train MSE: 933.8397640864621
Epoch 34 , Loss: 10340.92915725708 , Train MSE: 932.4230154298516
Epoch 35 , Loss: 10328.820556640625 , Train MSE: 931.0135108072687
Epoch 36 , Loss: 10316.794818878174 , Train MSE: 929.6111864006365
Epoch 37 , Loss: 10304.851173400879 , Train MSE: 928.2159957786986
Epoch 38 , Loss: 10292.987190246582 , Train MSE: 926.827864713963
Epoch 39 , Loss: 10281.202602386475 , Train MSE: 925.4467118924167
Epoch 40 , Loss: 10269.495239257812 , Train MSE: 924.0724463161778
Epoch 41 , Loss: 10257.863872528076 , Train MSE: 922.7049573670073
Epoch 42 , Loss: 10246.30683517456 , Train MSE: 921.3440920974317
Epoch 43 , Loss: 10234.821102142334 , Train MSE: 919.9895771430542
Epoch 44 , Loss: 10223.40253829956 , Train MSE: 918.640843217944
Epoch 45 , Loss: 10212.036865234375 , Train MSE: 917.2962508801927
Epoch 46 , Loss: 10200.667423248291 , Train MSE: 915.947747429703
Epoch 47 , Loss: 10188.791622161865 , Train MSE: 913.7547406642935
Epoch 48 , Loss: 10077.344528198242 , Train MSE: 900.4321052826671
Epoch 49 , Loss: 10070.24324798584 , Train MSE: 899.6154128008804
Epoch 50 , Loss: 10062.889549255371 , Train MSE: 898.7663042858604
Epoch 51 , Loss: 10055.319789886475 , Train MSE: 897.8939286180083
Epoch 52 , Loss: 10047.595264434814 , Train MSE: 897.0074962632209
Epoch 53 , Loss: 10039.772220611572 , Train MSE: 896.1138170254162
Epoch 54 , Loss: 10031.950805664062 , Train MSE: 895.2177627453972
Epoch 55 , Loss: 10024.202781677246 , Train MSE: 894.3211400384122
Epoch 56 , Loss: 10016.312404632568 , Train MSE: 893.4224781846747
Epoch 57 , Loss: 10008.547218322754 , Train MSE: 892.5277011954314
Epoch 58 , Loss: 10000.572383880615 , Train MSE: 891.6319676768478
Epoch 59 , Loss: 9992.785270690918 , Train MSE: 890.7441413473437
Epoch 60 , Loss: 9984.635215759277 , Train MSE: 889.8525633702069
Epoch 61 , Loss: 9976.409809112549 , Train MSE: 888.9644216339037
Epoch 62 , Loss: 9968.176517486572 , Train MSE: 888.0725110516962
Epoch 63 , Loss: 9960.067623138428 , Train MSE: 887.1759411594138
Epoch 64 , Loss: 9952.188049316406 , Train MSE: 886.2838365443472
Epoch 65 , Loss: 9944.53048324585 , Train MSE: 885.4022309794173
Epoch 66 , Loss: 9936.994987487793 , Train MSE: 884.528637334534
Epoch 67 , Loss: 9929.527484893799 , Train MSE: 883.6603576928236
Epoch 68 , Loss: 9922.106800079346 , Train MSE: 882.7964258883068
Epoch 69 , Loss: 9914.702522277832 , Train MSE: 881.936121796453
Epoch 70 , Loss: 9907.024478912354 , Train MSE: 881.0734302906503
Epoch 71 , Loss: 9901.897003173828 , Train MSE: 880.4266305115839
Epoch 72 , Loss: 9891.11667251587 , Train MSE: 879.36977973005
Epoch 73 , Loss: 9883.96976852417 , Train MSE: 878.5260404109109
Epoch 74 , Loss: 9876.934700012207 , Train MSE: 877.6872929762432
Epoch 75 , Loss: 9869.776824951172 , Train MSE: 876.8595858870799
Epoch 76 , Loss: 9865.371047973633 , Train MSE: 876.0562599766332
Epoch 77 , Loss: 9857.346996307373 , Train MSE: 875.2027238450186
Epoch 78 , Loss: 9855.548526763916 , Train MSE: 874.5159816445248
Epoch 79 , Loss: 9841.793632507324 , Train MSE: 873.5428198171342
Epoch 80 , Loss: 9834.825607299805 , Train MSE: 872.7272565011709
Epoch 81 , Loss: 9827.979347229004 , Train MSE: 871.91159994148
Epoch 82 , Loss: 9821.189140319824 , Train MSE: 871.10191677828
Epoch 83 , Loss: 9814.43535232544 , Train MSE: 870.29626460282
Epoch 84 , Loss: 9812.491104125977 , Train MSE: 869.7740104002216
Epoch 85 , Loss: 9805.015830993652 , Train MSE: 868.9150676765269
Epoch 86 , Loss: 9811.762580871582 , Train MSE: 868.162869456006
Epoch 87 , Loss: 9791.90242767334 , Train MSE: 867.1780492977023
Epoch 88 , Loss: 9784.707126617432 , Train MSE: 866.3589198591957
Epoch 89 , Loss: 9830.66928100586 , Train MSE: 873.2078772139369
Epoch 90 , Loss: 9813.625061035156 , Train MSE: 866.916949020823
Epoch 91 , Loss: 9728.44010925293 , Train MSE: 858.9851723357572
Epoch 92 , Loss: 9722.084712982178 , Train MSE: 858.2070155666382
Epoch 93 , Loss: 9715.76565170288 , Train MSE: 857.4323649693591
Epoch 94 , Loss: 9709.485607147217 , Train MSE: 856.6615604906049
Epoch 95 , Loss: 9703.243297576904 , Train MSE: 855.8947186170419
Epoch 96 , Loss: 9697.019207000732 , Train MSE: 855.1310000690182
Epoch 97 , Loss: 9690.769512176514 , Train MSE: 854.3686777076069
Epoch 98 , Loss: 9684.392208099365 , Train MSE: 853.6039892223956
Epoch 99 , Loss: 9681.535243988037 , Train MSE: 853.0559987631451
Validation MSE: 728.3191951185022
Test MSE: 1001.953670215076
Average validation MSE: 811.4684235934326
County: 06075
split 1 :
Epoch 0 , Loss: 224.63794708251953 , Train MSE: 57.22213041176537
Epoch 1 , Loss: 186.7775115966797 , Train MSE: 47.7621471157243
Epoch 2 , Loss: 139.97072792053223 , Train MSE: 36.13278835359135
Epoch 3 , Loss: 116.21277809143066 , Train MSE: 30.064342033962955
Epoch 4 , Loss: 109.89566040039062 , Train MSE: 28.146188598721704
Epoch 5 , Loss: 108.60370826721191 , Train MSE: 27.640505122437244
Epoch 6 , Loss: 107.99849891662598 , Train MSE: 27.404230675573388
Epoch 7 , Loss: 107.10196685791016 , Train MSE: 27.09436823324581
Epoch 8 , Loss: 105.36886787414551 , Train MSE: 26.6403053132199
Epoch 9 , Loss: 103.44294357299805 , Train MSE: 26.095602566939718
Epoch 10 , Loss: 101.16484832763672 , Train MSE: 25.514111377455972
Epoch 11 , Loss: 100.2734432220459 , Train MSE: 25.271913269161736
Epoch 12 , Loss: 99.9674301147461 , Train MSE: 25.17502501447527
Epoch 13 , Loss: 99.80944442749023 , Train MSE: 25.128599917847563
Epoch 14 , Loss: 99.26063346862793 , Train MSE: 24.987336926773526
Epoch 15 , Loss: 98.7509536743164 , Train MSE: 24.87420113454609
Epoch 16 , Loss: 99.7960262298584 , Train MSE: 25.105113279808208
Epoch 17 , Loss: 98.99306869506836 , Train MSE: 24.925780886066793
Epoch 18 , Loss: 98.95278358459473 , Train MSE: 24.89527160837852
Epoch 19 , Loss: 98.69743728637695 , Train MSE: 24.85137730762422
Epoch 20 , Loss: 97.35237503051758 , Train MSE: 24.55695441011487
Epoch 21 , Loss: 101.81636428833008 , Train MSE: 25.756922566319776
Epoch 22 , Loss: 99.30188369750977 , Train MSE: 25.024235516960847
Epoch 23 , Loss: 97.98100090026855 , Train MSE: 24.714741911020976
Epoch 24 , Loss: 102.93938827514648 , Train MSE: 25.85251711864616
Epoch 25 , Loss: 98.5356502532959 , Train MSE: 24.805579655692682
Epoch 26 , Loss: 96.68374824523926 , Train MSE: 24.425047136387022
Epoch 27 , Loss: 97.72855758666992 , Train MSE: 24.63735338621214
Epoch 28 , Loss: 96.23735237121582 , Train MSE: 24.313447356638356
Epoch 29 , Loss: 97.14619827270508 , Train MSE: 24.496942492561054
Epoch 30 , Loss: 98.0685977935791 , Train MSE: 24.693258840633824
Epoch 31 , Loss: 97.52686882019043 , Train MSE: 24.568222463091306
Epoch 32 , Loss: 97.20298957824707 , Train MSE: 24.500243370973777
Epoch 33 , Loss: 97.42498016357422 , Train MSE: 24.560663337875983
Epoch 34 , Loss: 97.04927635192871 , Train MSE: 24.469938128993835
/shared-libs/python3.7/py-core/lib/python3.7/site-packages/ipykernel_launcher.py:21: SettingWithCopyWarning:
A value is trying to be set on a copy of a slice from a DataFrame.
Try using .loc[row_indexer,col_indexer] = value instead
See the caveats in the documentation: https://pandas.pydata.org/pandas-docs/stable/user_guide/indexing.html#returning-a-view-versus-a-copy
/shared-libs/python3.7/py-core/lib/python3.7/site-packages/ipykernel_launcher.py:22: SettingWithCopyWarning:
A value is trying to be set on a copy of a slice from a DataFrame.
Try using .loc[row_indexer,col_indexer] = value instead
See the caveats in the documentation: https://pandas.pydata.org/pandas-docs/stable/user_guide/indexing.html#returning-a-view-versus-a-copy
Epoch 35 , Loss: 97.09212875366211 , Train MSE: 24.490980952054336
Epoch 36 , Loss: 98.38743782043457 , Train MSE: 24.79721392391142
Epoch 37 , Loss: 97.09771728515625 , Train MSE: 24.48039595301792
Epoch 38 , Loss: 97.09912109375 , Train MSE: 24.469977974689318
Epoch 39 , Loss: 96.92204856872559 , Train MSE: 24.463035982306113
Epoch 40 , Loss: 96.2254524230957 , Train MSE: 24.308283802176057
Epoch 41 , Loss: 96.35666084289551 , Train MSE: 24.313741688884885
Epoch 42 , Loss: 95.96123695373535 , Train MSE: 24.219733575226638
Epoch 43 , Loss: 98.63472938537598 , Train MSE: 24.912485112949618
Epoch 44 , Loss: 96.43458938598633 , Train MSE: 24.321061071641346
Epoch 45 , Loss: 97.62651824951172 , Train MSE: 24.58462912131631
Epoch 46 , Loss: 97.20516395568848 , Train MSE: 24.488556100931657
Epoch 47 , Loss: 97.53454971313477 , Train MSE: 24.571733166442122
Epoch 48 , Loss: 95.9870719909668 , Train MSE: 24.24053086174403
Epoch 49 , Loss: 95.87102699279785 , Train MSE: 24.229417753082295
Epoch 50 , Loss: 97.20796966552734 , Train MSE: 24.50614233510777
Epoch 51 , Loss: 96.60616302490234 , Train MSE: 24.361362016905478
Epoch 52 , Loss: 95.75242805480957 , Train MSE: 24.188741740796445
Epoch 53 , Loss: 95.75719833374023 , Train MSE: 24.19367254164871
Epoch 54 , Loss: 96.20784759521484 , Train MSE: 24.287346735132793
Epoch 55 , Loss: 97.98249435424805 , Train MSE: 24.649230951945505
Epoch 56 , Loss: 97.24070167541504 , Train MSE: 24.5021390034692
Epoch 57 , Loss: 96.97034072875977 , Train MSE: 24.438700154702527
Epoch 58 , Loss: 96.73286437988281 , Train MSE: 24.398065059802324
Epoch 59 , Loss: 96.75599098205566 , Train MSE: 24.398427145753303
Epoch 60 , Loss: 96.91625785827637 , Train MSE: 24.431458892705006
Epoch 61 , Loss: 96.43182563781738 , Train MSE: 24.354046410445378
Epoch 62 , Loss: 96.5994815826416 , Train MSE: 24.36235513382248
Epoch 63 , Loss: 95.80797004699707 , Train MSE: 24.198926932198987
Epoch 64 , Loss: 94.52980422973633 , Train MSE: 23.932677962975294
Epoch 65 , Loss: 94.59825325012207 , Train MSE: 23.95851070677874
Epoch 66 , Loss: 94.99688339233398 , Train MSE: 24.02207215122922
Epoch 67 , Loss: 95.79696083068848 , Train MSE: 24.20245880430711
Epoch 68 , Loss: 98.91885185241699 , Train MSE: 24.891832572785294
Epoch 69 , Loss: 96.01273536682129 , Train MSE: 24.272974445831235
Epoch 70 , Loss: 95.06729316711426 , Train MSE: 24.014123662569606
Epoch 71 , Loss: 94.72697067260742 , Train MSE: 23.94094519577907
Epoch 72 , Loss: 94.21327018737793 , Train MSE: 23.815148105145163
Epoch 73 , Loss: 95.3868350982666 , Train MSE: 24.06337817896809
Epoch 74 , Loss: 94.26577758789062 , Train MSE: 23.82156727569485
Epoch 75 , Loss: 96.41277313232422 , Train MSE: 24.280913040208855
Epoch 76 , Loss: 95.95154762268066 , Train MSE: 24.174343310124346
Epoch 77 , Loss: 93.4805793762207 , Train MSE: 23.63753370402098
Epoch 78 , Loss: 92.88239860534668 , Train MSE: 23.511962541705934
Epoch 79 , Loss: 94.58394050598145 , Train MSE: 23.876651643923648
Epoch 80 , Loss: 94.21906471252441 , Train MSE: 23.795846564849814
Epoch 81 , Loss: 93.63318824768066 , Train MSE: 23.666735536339424
Epoch 82 , Loss: 93.11590576171875 , Train MSE: 23.558257025551427
Epoch 83 , Loss: 92.42020606994629 , Train MSE: 23.413345091595637
Epoch 84 , Loss: 92.40009689331055 , Train MSE: 23.456388257395442
Epoch 85 , Loss: 91.64426803588867 , Train MSE: 23.3212620692986
Epoch 86 , Loss: 91.69937896728516 , Train MSE: 23.264724101211876
Epoch 87 , Loss: 93.16332244873047 , Train MSE: 23.548709666361994
Epoch 88 , Loss: 96.86990165710449 , Train MSE: 24.38030275146029
Epoch 89 , Loss: 98.16022682189941 , Train MSE: 24.862115397245518
Epoch 90 , Loss: 93.06370162963867 , Train MSE: 23.525652322075192
Epoch 91 , Loss: 94.38237762451172 , Train MSE: 23.84491741273352
Epoch 92 , Loss: 92.90547943115234 , Train MSE: 23.472583166621927
Epoch 93 , Loss: 95.77275657653809 , Train MSE: 24.13392399948577
Epoch 94 , Loss: 94.18169403076172 , Train MSE: 23.766958673875294
Epoch 95 , Loss: 92.39816856384277 , Train MSE: 23.37376958147592
Epoch 96 , Loss: 91.39533805847168 , Train MSE: 23.15786734059138
Epoch 97 , Loss: 94.14446449279785 , Train MSE: 23.722601112340552
Epoch 98 , Loss: 94.60745048522949 , Train MSE: 23.979675290789377
Epoch 99 , Loss: 91.34854698181152 , Train MSE: 23.159264646538347
Validation MSE: 190.03326229154555
split 2 :
Epoch 0 , Loss: 830.5440711975098 , Train MSE: 118.1588087185477
Epoch 1 , Loss: 819.3890724182129 , Train MSE: 117.51855399704816
Epoch 2 , Loss: 803.8888130187988 , Train MSE: 118.68249683791417
Epoch 3 , Loss: 799.6212272644043 , Train MSE: 120.94349254470232
Epoch 4 , Loss: 768.4989223480225 , Train MSE: 115.82548852187173
Epoch 5 , Loss: 747.7509250640869 , Train MSE: 114.82192647437418
Epoch 6 , Loss: 740.5183124542236 , Train MSE: 114.22924670943458
Epoch 7 , Loss: 737.242301940918 , Train MSE: 114.39261485256064
Epoch 8 , Loss: 733.692008972168 , Train MSE: 113.2254264457777
Epoch 9 , Loss: 725.8259620666504 , Train MSE: 114.1347315030727
Epoch 10 , Loss: 721.175048828125 , Train MSE: 111.74651338754235
Epoch 11 , Loss: 714.5618743896484 , Train MSE: 111.54296059622752
Epoch 12 , Loss: 738.1537265777588 , Train MSE: 114.88023113886558
Epoch 13 , Loss: 725.797197341919 , Train MSE: 110.70375833476722
Epoch 14 , Loss: 706.1733417510986 , Train MSE: 109.5373533705048
Epoch 15 , Loss: 697.897159576416 , Train MSE: 108.74520346089005
Epoch 16 , Loss: 692.6832599639893 , Train MSE: 108.02764039057713
Epoch 17 , Loss: 688.4622554779053 , Train MSE: 107.31225133744587
Epoch 18 , Loss: 684.8736667633057 , Train MSE: 106.68080750982652
Epoch 19 , Loss: 681.4054946899414 , Train MSE: 106.06655524134133
Epoch 20 , Loss: 678.060359954834 , Train MSE: 105.47478712184146
Epoch 21 , Loss: 674.8377571105957 , Train MSE: 104.9009444305715
Epoch 22 , Loss: 671.7169246673584 , Train MSE: 104.34446579762651
Epoch 23 , Loss: 668.6959953308105 , Train MSE: 103.80458620271318
Epoch 24 , Loss: 665.7716941833496 , Train MSE: 103.28133390090464
Epoch 25 , Loss: 662.9688148498535 , Train MSE: 102.77734601731063
Epoch 26 , Loss: 660.3728046417236 , Train MSE: 102.30029741988551
Epoch 27 , Loss: 657.3906726837158 , Train MSE: 101.78678306228022
Epoch 28 , Loss: 654.9118423461914 , Train MSE: 101.33303275071401
Epoch 29 , Loss: 655.1846809387207 , Train MSE: 101.13782358653779
Epoch 30 , Loss: 649.8117065429688 , Train MSE: 100.44690224327606
Epoch 31 , Loss: 646.2975311279297 , Train MSE: 99.96890343497672
Epoch 32 , Loss: 644.289571762085 , Train MSE: 99.60098405350257
Epoch 33 , Loss: 641.7337989807129 , Train MSE: 99.1689653034185
Epoch 34 , Loss: 639.5992946624756 , Train MSE: 98.78914998758415
Epoch 35 , Loss: 637.4393253326416 , Train MSE: 98.4034794081073
Epoch 36 , Loss: 635.4237632751465 , Train MSE: 98.0469427185616
Epoch 37 , Loss: 633.513952255249 , Train MSE: 97.68407556884686
Epoch 38 , Loss: 631.9862289428711 , Train MSE: 97.38651278499226
Epoch 39 , Loss: 629.6389045715332 , Train MSE: 97.0086826008852
Epoch 40 , Loss: 627.7292423248291 , Train MSE: 96.69148054046387
Epoch 41 , Loss: 624.8724842071533 , Train MSE: 96.34919971637859
Epoch 42 , Loss: 623.8912315368652 , Train MSE: 96.05529726157282
Epoch 43 , Loss: 622.2205200195312 , Train MSE: 95.76796436387704
Epoch 44 , Loss: 620.8146324157715 , Train MSE: 95.48638858944399
Epoch 45 , Loss: 619.4611797332764 , Train MSE: 95.24211688861611
Epoch 46 , Loss: 617.2938346862793 , Train MSE: 94.93443443756148
Epoch 47 , Loss: 617.1602554321289 , Train MSE: 94.77412046983557
Epoch 48 , Loss: 615.2201633453369 , Train MSE: 94.44879968021198
Epoch 49 , Loss: 614.0515785217285 , Train MSE: 94.23829441765709
Epoch 50 , Loss: 612.2814636230469 , Train MSE: 93.96440443024889
Epoch 51 , Loss: 610.8870105743408 , Train MSE: 93.74575105790645
Epoch 52 , Loss: 609.4237442016602 , Train MSE: 93.51596360011554
Epoch 53 , Loss: 608.1606826782227 , Train MSE: 93.29980222073108
Epoch 54 , Loss: 606.9836769104004 , Train MSE: 93.10105863719747
Epoch 55 , Loss: 605.6817817687988 , Train MSE: 92.89426663491996
Epoch 56 , Loss: 604.5919551849365 , Train MSE: 92.70473873667125
Epoch 57 , Loss: 603.4678020477295 , Train MSE: 92.52167546453506
Epoch 58 , Loss: 602.4286193847656 , Train MSE: 92.34399027879105
Epoch 59 , Loss: 603.635612487793 , Train MSE: 92.25114590591338
Epoch 60 , Loss: 602.2898292541504 , Train MSE: 92.06295289762318
Epoch 61 , Loss: 599.839054107666 , Train MSE: 91.861211329044
Epoch 62 , Loss: 598.663990020752 , Train MSE: 91.71728810122923
Epoch 63 , Loss: 597.3714485168457 , Train MSE: 91.5518987097817
Epoch 64 , Loss: 597.6447257995605 , Train MSE: 91.45291558145556
Epoch 65 , Loss: 595.9302368164062 , Train MSE: 91.29365148188
Epoch 66 , Loss: 594.9194107055664 , Train MSE: 91.15770139291135
Epoch 67 , Loss: 594.1154155731201 , Train MSE: 91.03453851324593
Epoch 68 , Loss: 593.3303985595703 , Train MSE: 90.91798361802478
Epoch 69 , Loss: 593.1731700897217 , Train MSE: 90.86861457869696
Epoch 70 , Loss: 590.4209289550781 , Train MSE: 90.55244930232725
Epoch 71 , Loss: 574.3436832427979 , Train MSE: 88.56135601536218
Epoch 72 , Loss: 639.7874031066895 , Train MSE: 94.12200898534402
Epoch 73 , Loss: 640.5809097290039 , Train MSE: 99.31988347924697
Epoch 74 , Loss: 610.609016418457 , Train MSE: 95.41150783891082
Epoch 75 , Loss: 610.0295677185059 , Train MSE: 95.24056683771427
Epoch 76 , Loss: 607.8966884613037 , Train MSE: 95.05148718109261
Epoch 77 , Loss: 607.2278003692627 , Train MSE: 94.96982305452354
Epoch 78 , Loss: 606.4687404632568 , Train MSE: 94.88068924287595
Epoch 79 , Loss: 605.9390506744385 , Train MSE: 94.82466369998171
Epoch 80 , Loss: 604.8977184295654 , Train MSE: 94.74440777906831
Epoch 81 , Loss: 620.090217590332 , Train MSE: 95.44465252736113
Epoch 82 , Loss: 603.9249801635742 , Train MSE: 94.66578913080926
Epoch 83 , Loss: 603.6577396392822 , Train MSE: 94.60444635258617
Epoch 84 , Loss: 603.1193790435791 , Train MSE: 94.52305257993312
Epoch 85 , Loss: 602.7802581787109 , Train MSE: 94.47035172502478
Epoch 86 , Loss: 602.3249683380127 , Train MSE: 94.40551266219725
Epoch 87 , Loss: 601.9468898773193 , Train MSE: 94.35159884296468
Epoch 88 , Loss: 601.5873050689697 , Train MSE: 94.30110291786896
Epoch 89 , Loss: 601.2049388885498 , Train MSE: 94.24844643171633
Epoch 90 , Loss: 600.9077129364014 , Train MSE: 94.20722855602398
Epoch 91 , Loss: 600.551929473877 , Train MSE: 94.15948113845465
Epoch 92 , Loss: 600.2806816101074 , Train MSE: 94.1230075879304
Epoch 93 , Loss: 599.9641265869141 , Train MSE: 94.08159276715872
Epoch 94 , Loss: 599.6916656494141 , Train MSE: 94.04640503013381
Epoch 95 , Loss: 599.4145374298096 , Train MSE: 94.0112130236322
Epoch 96 , Loss: 599.1296691894531 , Train MSE: 93.9756634852544
Epoch 97 , Loss: 598.8987083435059 , Train MSE: 93.94739949065989
Epoch 98 , Loss: 598.5888137817383 , Train MSE: 93.90952115684009
Epoch 99 , Loss: 598.4281826019287 , Train MSE: 93.89161112621149
Validation MSE: 105.49542781037354
split 3 :
Epoch 0 , Loss: 1161.2375259399414 , Train MSE: 117.74619916701758
Epoch 1 , Loss: 1143.1211891174316 , Train MSE: 117.4616990781727
Epoch 2 , Loss: 1120.7137260437012 , Train MSE: 117.66593867293153
Epoch 3 , Loss: 1079.0474300384521 , Train MSE: 115.92722840118965
Epoch 4 , Loss: 1085.520257949829 , Train MSE: 118.60914628570583
Epoch 5 , Loss: 1063.1169872283936 , Train MSE: 119.4132073804579
Epoch 6 , Loss: 1163.6195259094238 , Train MSE: 131.22919854501833
Epoch 7 , Loss: 1103.2614459991455 , Train MSE: 121.88977928541222
Epoch 8 , Loss: 1044.4482498168945 , Train MSE: 114.24757157332157
Epoch 9 , Loss: 1025.350383758545 , Train MSE: 113.63772708459584
Epoch 10 , Loss: 1004.501955986023 , Train MSE: 113.01422405199732
Epoch 11 , Loss: 996.5382251739502 , Train MSE: 112.41535562695812
Epoch 12 , Loss: 986.783616065979 , Train MSE: 111.63861423866263
Epoch 13 , Loss: 1056.2383651733398 , Train MSE: 115.79349643616828
Epoch 14 , Loss: 1016.6886692047119 , Train MSE: 112.6519297295774
Epoch 15 , Loss: 1033.5564765930176 , Train MSE: 112.34356518999202
Epoch 16 , Loss: 1006.1059684753418 , Train MSE: 110.0478398270175
Epoch 17 , Loss: 1003.6086769104004 , Train MSE: 109.60750514416672
Epoch 18 , Loss: 970.3180809020996 , Train MSE: 108.3640521528601
Epoch 19 , Loss: 964.828616142273 , Train MSE: 108.01974355845009
Epoch 20 , Loss: 961.4693508148193 , Train MSE: 107.76139361261939
Epoch 21 , Loss: 1009.3280029296875 , Train MSE: 108.00688466055215
Epoch 22 , Loss: 990.972936630249 , Train MSE: 107.08161783277208
Epoch 23 , Loss: 978.1934051513672 , Train MSE: 106.37656084471078
Epoch 24 , Loss: 1024.5158500671387 , Train MSE: 112.22523524577005
Epoch 25 , Loss: 972.7869529724121 , Train MSE: 105.54186625769037
Epoch 26 , Loss: 995.3311634063721 , Train MSE: 107.85465589244237
Epoch 27 , Loss: 989.8779792785645 , Train MSE: 105.23433941163965
Epoch 28 , Loss: 985.490894317627 , Train MSE: 105.03097852018784
Epoch 29 , Loss: 981.0098724365234 , Train MSE: 106.06956860773872
Epoch 30 , Loss: 969.7702960968018 , Train MSE: 103.90275803341463
Epoch 31 , Loss: 973.028844833374 , Train MSE: 105.1389459802667
Epoch 32 , Loss: 977.2575798034668 , Train MSE: 107.2900331183336
Epoch 33 , Loss: 938.4165573120117 , Train MSE: 102.84986559915738
Epoch 34 , Loss: 933.5122427940369 , Train MSE: 102.62359616291236
Epoch 35 , Loss: 931.3692398071289 , Train MSE: 102.15087156844467
Epoch 36 , Loss: 929.3916101455688 , Train MSE: 101.84439220893069
Epoch 37 , Loss: 929.4786667823792 , Train MSE: 101.7516554142444
Epoch 38 , Loss: 914.377968788147 , Train MSE: 101.89029330188875
Epoch 39 , Loss: 948.9921722412109 , Train MSE: 105.39395419737299
Epoch 40 , Loss: 939.114128112793 , Train MSE: 101.9219660771632
Epoch 41 , Loss: 933.6888208389282 , Train MSE: 101.75367169996596
Epoch 42 , Loss: 931.8414769172668 , Train MSE: 101.69642736133676
Epoch 43 , Loss: 927.9236960411072 , Train MSE: 100.72685148077731
Epoch 44 , Loss: 923.7635293006897 , Train MSE: 100.31471520149236
Epoch 45 , Loss: 922.6778926849365 , Train MSE: 100.13936398225354
Epoch 46 , Loss: 921.956057548523 , Train MSE: 99.9738966788315
Epoch 47 , Loss: 921.42023229599 , Train MSE: 99.89200563964471
Epoch 48 , Loss: 920.9838514328003 , Train MSE: 99.7365630402253
Epoch 49 , Loss: 920.1415071487427 , Train MSE: 99.58177970338474
Epoch 50 , Loss: 920.6473455429077 , Train MSE: 99.61708915549858
Epoch 51 , Loss: 923.4445524215698 , Train MSE: 99.53450748287996
Epoch 52 , Loss: 922.6414642333984 , Train MSE: 99.36382801845824
Epoch 53 , Loss: 922.261736869812 , Train MSE: 99.23542231247053
Epoch 54 , Loss: 918.8001341819763 , Train MSE: 99.057944597649
Epoch 55 , Loss: 918.6702327728271 , Train MSE: 99.06927674078328
Epoch 56 , Loss: 921.9029717445374 , Train MSE: 99.01762333331864
Epoch 57 , Loss: 921.417140007019 , Train MSE: 98.91312546538929
Epoch 58 , Loss: 921.2377047538757 , Train MSE: 98.84464109328356
Epoch 59 , Loss: 921.3666968345642 , Train MSE: 98.82140914544844
Epoch 60 , Loss: 921.1956844329834 , Train MSE: 98.7304271507261
Epoch 61 , Loss: 920.9765129089355 , Train MSE: 98.65361492522106
Epoch 62 , Loss: 920.7823090553284 , Train MSE: 98.57295818709497
Epoch 63 , Loss: 920.7473955154419 , Train MSE: 98.53146428052322
Epoch 64 , Loss: 920.6014847755432 , Train MSE: 98.48045296295791
Epoch 65 , Loss: 920.5168662071228 , Train MSE: 98.43044580075141
Epoch 66 , Loss: 920.3052344322205 , Train MSE: 98.38243747947425
Epoch 67 , Loss: 920.4172949790955 , Train MSE: 98.33058763062002
Epoch 68 , Loss: 920.1049299240112 , Train MSE: 98.25719545407598
Epoch 69 , Loss: 920.198760509491 , Train MSE: 98.22812170442081
Epoch 70 , Loss: 919.592604637146 , Train MSE: 98.12472768220242
Epoch 71 , Loss: 918.7077078819275 , Train MSE: 97.97868111524083
Epoch 72 , Loss: 916.7560367584229 , Train MSE: 97.8253061159794
Epoch 73 , Loss: 911.0031929016113 , Train MSE: 97.5756730957994
Epoch 74 , Loss: 903.3929986953735 , Train MSE: 98.60599994880258
Epoch 75 , Loss: 916.2913565635681 , Train MSE: 97.66255974448394
Epoch 76 , Loss: 916.8563704490662 , Train MSE: 97.4722395176207
Epoch 77 , Loss: 915.8288702964783 , Train MSE: 97.41572012633252
Epoch 78 , Loss: 915.4865164756775 , Train MSE: 97.42322056471959
Epoch 79 , Loss: 915.4238200187683 , Train MSE: 97.43446202900678
Epoch 80 , Loss: 915.1288933753967 , Train MSE: 97.40789975056619
Epoch 81 , Loss: 914.8468871116638 , Train MSE: 97.3873481438926
Epoch 82 , Loss: 914.5880665779114 , Train MSE: 97.35711095328955
Epoch 83 , Loss: 914.4248795509338 , Train MSE: 97.34690086667982
Epoch 84 , Loss: 914.229907989502 , Train MSE: 97.30860113618756
Epoch 85 , Loss: 913.9213027954102 , Train MSE: 97.2917625852532
Epoch 86 , Loss: 913.9377393722534 , Train MSE: 97.29541087004472
Epoch 87 , Loss: 905.0453014373779 , Train MSE: 96.66122113986384
Epoch 88 , Loss: 910.7954845428467 , Train MSE: 96.79347289694263
Epoch 89 , Loss: 908.2608075141907 , Train MSE: 96.46347430863334
Epoch 90 , Loss: 900.52268409729 , Train MSE: 96.03718252353978
Epoch 91 , Loss: 898.782054901123 , Train MSE: 95.84163934912827
Epoch 92 , Loss: 898.1137099266052 , Train MSE: 95.69285231188293
Epoch 93 , Loss: 897.6446738243103 , Train MSE: 95.55506129268775
Epoch 94 , Loss: 896.8826303482056 , Train MSE: 95.41006674962053
Epoch 95 , Loss: 896.5428256988525 , Train MSE: 95.28772918097793
Epoch 96 , Loss: 895.8029837608337 , Train MSE: 95.14908168314477
Epoch 97 , Loss: 895.6551127433777 , Train MSE: 95.04537619748346
Epoch 98 , Loss: 895.0595722198486 , Train MSE: 94.91545646162436
Epoch 99 , Loss: 894.5219683647156 , Train MSE: 94.80616454312641
Validation MSE: 70.6960943227673
split 4 :
Epoch 0 , Loss: 1421.83687210083 , Train MSE: 107.10705046821398
Epoch 1 , Loss: 1334.8170585632324 , Train MSE: 110.93146021204967
Epoch 2 , Loss: 1235.3602504730225 , Train MSE: 109.1625031089617
Epoch 3 , Loss: 1200.958065032959 , Train MSE: 110.14219805529669
Epoch 4 , Loss: 1138.4034481048584 , Train MSE: 103.64735825557396
Epoch 5 , Loss: 1091.7444763183594 , Train MSE: 102.03549559704098
Epoch 6 , Loss: 1074.954041481018 , Train MSE: 101.17703627987098
Epoch 7 , Loss: 1063.9913139343262 , Train MSE: 100.52074591876726
Epoch 8 , Loss: 1060.8417210578918 , Train MSE: 99.81897718346521
Epoch 9 , Loss: 1071.4437189102173 , Train MSE: 99.08529489734293
Epoch 10 , Loss: 1054.7159509658813 , Train MSE: 98.3713297113819
Epoch 11 , Loss: 1037.5660471916199 , Train MSE: 97.24747559843318
Epoch 12 , Loss: 1037.5136108398438 , Train MSE: 96.85161059571307
Epoch 13 , Loss: 1056.25270986557 , Train MSE: 96.87516294937004
Epoch 14 , Loss: 1029.124210357666 , Train MSE: 95.62410126394157
Epoch 15 , Loss: 1032.1979312896729 , Train MSE: 95.2906008865598
Epoch 16 , Loss: 1022.6144313812256 , Train MSE: 94.64803683398723
Epoch 17 , Loss: 1145.8056154251099 , Train MSE: 109.2085516920553
Epoch 18 , Loss: 1053.0853805541992 , Train MSE: 100.0075862607768
Epoch 19 , Loss: 1042.5374426841736 , Train MSE: 98.98208236610508
Epoch 20 , Loss: 1040.2485389709473 , Train MSE: 97.8703218096059
Epoch 21 , Loss: 1031.4131636619568 , Train MSE: 96.77275300143461
Epoch 22 , Loss: 1017.2578263282776 , Train MSE: 95.34461551902548
Epoch 23 , Loss: 1014.5449180603027 , Train MSE: 94.76667497469641
Epoch 24 , Loss: 1051.6963214874268 , Train MSE: 99.68244127063328
Epoch 25 , Loss: 1049.02343416214 , Train MSE: 95.99679898937175
Epoch 26 , Loss: 1008.3730139732361 , Train MSE: 92.6522784947579
Epoch 27 , Loss: 987.9180788993835 , Train MSE: 90.46563929209367
Epoch 28 , Loss: 1036.7725095748901 , Train MSE: 92.29686087351385
Epoch 29 , Loss: 1023.2447643280029 , Train MSE: 90.37325851780103
Epoch 30 , Loss: 1019.5363359451294 , Train MSE: 90.13915549966865
Epoch 31 , Loss: 1024.1165218353271 , Train MSE: 90.1246488268184
Epoch 32 , Loss: 1016.8716459274292 , Train MSE: 90.07593435748981
Epoch 33 , Loss: 1034.3339176177979 , Train MSE: 92.83559007646163
Epoch 34 , Loss: 1006.0676622390747 , Train MSE: 90.23005932855928
Epoch 35 , Loss: 1041.8296556472778 , Train MSE: 96.38239671645489
Epoch 36 , Loss: 1060.2153844833374 , Train MSE: 96.93232395560314
Epoch 37 , Loss: 989.1465044021606 , Train MSE: 88.46265129530737
Epoch 38 , Loss: 985.3262796401978 , Train MSE: 88.53665016032369
Epoch 39 , Loss: 969.8266592025757 , Train MSE: 87.27513323872674
Epoch 40 , Loss: 981.993688583374 , Train MSE: 89.05291609144255
Epoch 41 , Loss: 968.7209024429321 , Train MSE: 86.87519494002865
Epoch 42 , Loss: 985.6716661453247 , Train MSE: 87.19482238882291
Epoch 43 , Loss: 995.7725582122803 , Train MSE: 89.56063250750812
Epoch 44 , Loss: 976.204029083252 , Train MSE: 86.48967429361332
Epoch 45 , Loss: 962.6922707557678 , Train MSE: 85.80594903431768
Epoch 46 , Loss: 959.9836583137512 , Train MSE: 85.47246059721964
Epoch 47 , Loss: 957.4261250495911 , Train MSE: 85.18957167056345
Epoch 48 , Loss: 955.5044550895691 , Train MSE: 84.93667351413858
Epoch 49 , Loss: 953.8833236694336 , Train MSE: 84.69201701068482
Epoch 50 , Loss: 952.5384492874146 , Train MSE: 84.47304936242794
Epoch 51 , Loss: 963.6543707847595 , Train MSE: 85.132025571345
Epoch 52 , Loss: 944.0382785797119 , Train MSE: 84.29583245431978
Epoch 53 , Loss: 949.568564414978 , Train MSE: 83.91574504503352
Epoch 54 , Loss: 948.1345047950745 , Train MSE: 83.74297536874076
Epoch 55 , Loss: 947.0868859291077 , Train MSE: 83.58747564519757
Epoch 56 , Loss: 1198.6706256866455 , Train MSE: 108.6448634772628
Epoch 57 , Loss: 1040.2800731658936 , Train MSE: 89.81511987079847
Epoch 58 , Loss: 999.5719032287598 , Train MSE: 88.13101734767474
Epoch 59 , Loss: 1004.1978521347046 , Train MSE: 87.81012524021617
Epoch 60 , Loss: 985.8116664886475 , Train MSE: 87.24917722958944
Epoch 61 , Loss: 990.4840927124023 , Train MSE: 88.40677629036013
Epoch 62 , Loss: 979.7474880218506 , Train MSE: 86.8638408261019
Epoch 63 , Loss: 966.2327880859375 , Train MSE: 85.48204736714482
Epoch 64 , Loss: 964.4309539794922 , Train MSE: 85.16431798701471
Epoch 65 , Loss: 963.325758934021 , Train MSE: 84.89850224689266
Epoch 66 , Loss: 961.2799997329712 , Train MSE: 84.58849033808187
Epoch 67 , Loss: 969.7667856216431 , Train MSE: 86.4578244043674
Epoch 68 , Loss: 967.828314781189 , Train MSE: 85.30625009777684
Epoch 69 , Loss: 956.3565979003906 , Train MSE: 83.74654660651609
Epoch 70 , Loss: 949.2717833518982 , Train MSE: 83.4904991368247
Epoch 71 , Loss: 1125.2101078033447 , Train MSE: 104.9244371775352
Epoch 72 , Loss: 952.3742508888245 , Train MSE: 83.49668045667576
Epoch 73 , Loss: 944.3326444625854 , Train MSE: 82.91120741725366
Epoch 74 , Loss: 949.866693019867 , Train MSE: 82.98778316466822
Epoch 75 , Loss: 1032.3731079101562 , Train MSE: 94.41747967881007
Epoch 76 , Loss: 1009.5922327041626 , Train MSE: 92.8655819396126
Epoch 77 , Loss: 1125.5919151306152 , Train MSE: 106.72528634087871
Epoch 78 , Loss: 1109.951678276062 , Train MSE: 105.80460090131764
Epoch 79 , Loss: 1102.8310384750366 , Train MSE: 104.95642288470212
Epoch 80 , Loss: 1098.8354272842407 , Train MSE: 104.3674336889156
Epoch 81 , Loss: 1094.5063304901123 , Train MSE: 103.74958783534282
Epoch 82 , Loss: 1091.455132484436 , Train MSE: 103.28979698450848
Epoch 83 , Loss: 1090.9105157852173 , Train MSE: 102.72609562759243
Epoch 84 , Loss: 1085.053867340088 , Train MSE: 102.18228290842279
Epoch 85 , Loss: 1081.6123700141907 , Train MSE: 101.7052248514473
Epoch 86 , Loss: 1078.530746459961 , Train MSE: 101.29577396191777
Epoch 87 , Loss: 1074.060977935791 , Train MSE: 100.6329427789588
Epoch 88 , Loss: 1071.662594795227 , Train MSE: 100.19663826797718
Epoch 89 , Loss: 1071.3586039543152 , Train MSE: 99.9329962387725
Epoch 90 , Loss: 1067.3369979858398 , Train MSE: 99.3223300654653
Epoch 91 , Loss: 1065.3410387039185 , Train MSE: 98.92162992428266
Epoch 92 , Loss: 1063.5507125854492 , Train MSE: 98.55861598504605
Epoch 93 , Loss: 1062.0852408409119 , Train MSE: 98.24821938341182
Epoch 94 , Loss: 1063.5075988769531 , Train MSE: 98.13107328165839
Epoch 95 , Loss: 1059.288076877594 , Train MSE: 97.56195447883918
Epoch 96 , Loss: 1057.962275981903 , Train MSE: 97.2503867452865
Epoch 97 , Loss: 1056.7887768745422 , Train MSE: 96.96434287227942
Epoch 98 , Loss: 1055.7112159729004 , Train MSE: 96.69384405627436
Epoch 99 , Loss: 1054.7163333892822 , Train MSE: 96.43772273877255
Validation MSE: 13.41901227752466
split 5 :
Epoch 0 , Loss: 1625.6767082214355 , Train MSE: 100.49302845254333
Epoch 1 , Loss: 1334.7042770385742 , Train MSE: 96.76425991893026
Epoch 2 , Loss: 1219.2668447494507 , Train MSE: 96.83450477255927
Epoch 3 , Loss: 1168.6881189346313 , Train MSE: 93.58043192185511
Epoch 4 , Loss: 1156.873059272766 , Train MSE: 93.97823557453388
Epoch 5 , Loss: 1132.5711631774902 , Train MSE: 91.75852650161478
Epoch 6 , Loss: 1110.895408630371 , Train MSE: 90.624006874798
Epoch 7 , Loss: 1108.6487927436829 , Train MSE: 90.1277512906761
Epoch 8 , Loss: 1099.6320161819458 , Train MSE: 89.4640953150565
Epoch 9 , Loss: 1093.7533917427063 , Train MSE: 88.88150664742486
Epoch 10 , Loss: 1092.2511911392212 , Train MSE: 88.37482154635302
Epoch 11 , Loss: 1096.0020179748535 , Train MSE: 87.98633083273086
Epoch 12 , Loss: 1109.1902980804443 , Train MSE: 87.85583868832572
Epoch 13 , Loss: 1108.160873413086 , Train MSE: 87.48810769506655
Epoch 14 , Loss: 1085.1124215126038 , Train MSE: 86.73996299745629
Epoch 15 , Loss: 1086.6589279174805 , Train MSE: 86.51111145682778
Epoch 16 , Loss: 1046.2182612419128 , Train MSE: 85.13712194712326
Epoch 17 , Loss: 1047.8761539459229 , Train MSE: 85.14377292566232
Epoch 18 , Loss: 1028.7046608924866 , Train MSE: 83.86079252808423
Epoch 19 , Loss: 1028.3792271614075 , Train MSE: 83.34889228591715
Epoch 20 , Loss: 1087.4906997680664 , Train MSE: 86.37824437119296
Epoch 21 , Loss: 1051.0474104881287 , Train MSE: 83.06007280507708
Epoch 22 , Loss: 1037.2867980003357 , Train MSE: 82.28444227686352
Epoch 23 , Loss: 1027.8945007324219 , Train MSE: 81.8920353996831
Epoch 24 , Loss: 1030.4624462127686 , Train MSE: 81.67670340735742
Epoch 25 , Loss: 1040.135642528534 , Train MSE: 81.18790902515327
Epoch 26 , Loss: 1037.210880279541 , Train MSE: 80.92379412449553
Epoch 27 , Loss: 1034.2986054420471 , Train MSE: 80.69292666970807
Epoch 28 , Loss: 1030.237732410431 , Train MSE: 80.36000423909526
Epoch 29 , Loss: 1030.7646389007568 , Train MSE: 80.45699177245908
Epoch 30 , Loss: 1030.0810632705688 , Train MSE: 80.18792172099633
Epoch 31 , Loss: 1227.7251091003418 , Train MSE: 103.37319546509613
Epoch 32 , Loss: 1224.9158163070679 , Train MSE: 101.95746984841837
Epoch 33 , Loss: 1144.595510482788 , Train MSE: 94.41427496272173
Epoch 34 , Loss: 1137.3672342300415 , Train MSE: 93.38028327724582
Epoch 35 , Loss: 1126.1207494735718 , Train MSE: 92.25080002530459
Epoch 36 , Loss: 1111.793258190155 , Train MSE: 91.33773015020189
Epoch 37 , Loss: 1111.6610851287842 , Train MSE: 91.09523464307433
Epoch 38 , Loss: 1131.8478384017944 , Train MSE: 90.3039927075687
Epoch 39 , Loss: 1102.4126257896423 , Train MSE: 89.18065779330622
Epoch 40 , Loss: 1096.5835781097412 , Train MSE: 88.78236429689761
Epoch 41 , Loss: 1100.1418523788452 , Train MSE: 88.46844781280984
Epoch 42 , Loss: 1107.1367177963257 , Train MSE: 87.84976504332373
Epoch 43 , Loss: 1228.2564973831177 , Train MSE: 101.32020650453477
Epoch 44 , Loss: 1214.0755338668823 , Train MSE: 100.20969292453225
Epoch 45 , Loss: 1196.638180732727 , Train MSE: 98.82091245892578
Epoch 46 , Loss: 1186.9246444702148 , Train MSE: 97.7459534196414
Epoch 47 , Loss: 1172.9202365875244 , Train MSE: 96.23012333139887
Epoch 48 , Loss: 1163.6731843948364 , Train MSE: 95.20504015273727
Epoch 49 , Loss: 1158.276816368103 , Train MSE: 94.52168450404602
Epoch 50 , Loss: 1152.884132385254 , Train MSE: 93.84302724995412
Epoch 51 , Loss: 1148.9217977523804 , Train MSE: 93.26104868808427
Epoch 52 , Loss: 1144.0402936935425 , Train MSE: 92.64654160558261
Epoch 53 , Loss: 1139.6720237731934 , Train MSE: 92.14310344329249
Epoch 54 , Loss: 1130.905879020691 , Train MSE: 91.78488356494242
Epoch 55 , Loss: 1130.216646194458 , Train MSE: 91.48134145969618
Epoch 56 , Loss: 1126.9734659194946 , Train MSE: 90.73081168656273
Epoch 57 , Loss: 1112.3125438690186 , Train MSE: 90.07904876442365
Epoch 58 , Loss: 1097.096703529358 , Train MSE: 89.17730816374358
Epoch 59 , Loss: 1092.3152089118958 , Train MSE: 88.74878331598076
Epoch 60 , Loss: 1088.1101598739624 , Train MSE: 88.21563692169812
Epoch 61 , Loss: 1086.7436079978943 , Train MSE: 87.78086983186705
Epoch 62 , Loss: 1080.2193765640259 , Train MSE: 87.10107980548172
Epoch 63 , Loss: 1096.3619050979614 , Train MSE: 87.40168853261586
Epoch 64 , Loss: 1074.178466796875 , Train MSE: 86.29651273699744
Epoch 65 , Loss: 1076.6825890541077 , Train MSE: 86.13400128472745
Epoch 66 , Loss: 1069.6663331985474 , Train MSE: 85.51612994566713
Epoch 67 , Loss: 1070.2726912498474 , Train MSE: 85.32650519313376
Epoch 68 , Loss: 1065.7719812393188 , Train MSE: 84.83964704804728
Epoch 69 , Loss: 1273.5828170776367 , Train MSE: 107.34549041093172
Epoch 70 , Loss: 1071.2201671600342 , Train MSE: 84.98449285341938
Epoch 71 , Loss: 1063.3857760429382 , Train MSE: 84.35740455303382
Epoch 72 , Loss: 1062.419551372528 , Train MSE: 84.1226359269284
Epoch 73 , Loss: 1060.6343731880188 , Train MSE: 83.84069972831655
Epoch 74 , Loss: 1059.213487625122 , Train MSE: 83.58089786181267
Epoch 75 , Loss: 1058.1652536392212 , Train MSE: 83.35150565424152
Epoch 76 , Loss: 1057.0104179382324 , Train MSE: 83.12315555565239
Epoch 77 , Loss: 1056.1318011283875 , Train MSE: 82.91925988504725
Epoch 78 , Loss: 1055.1039552688599 , Train MSE: 82.71554106628265
Epoch 79 , Loss: 1053.9640040397644 , Train MSE: 82.52763108170593
Epoch 80 , Loss: 1052.0890884399414 , Train MSE: 82.3312417862946
Epoch 81 , Loss: 1071.3843574523926 , Train MSE: 82.67740224512652
Epoch 82 , Loss: 1055.1468906402588 , Train MSE: 82.12798673916573
Epoch 83 , Loss: 1051.527081489563 , Train MSE: 81.84419671115786
Epoch 84 , Loss: 1047.925965309143 , Train MSE: 81.7576423621165
Epoch 85 , Loss: 1053.2563457489014 , Train MSE: 81.82663968930531
Epoch 86 , Loss: 1053.2955384254456 , Train MSE: 81.57419612828322
Epoch 87 , Loss: 1051.929117679596 , Train MSE: 81.40832960487067
Epoch 88 , Loss: 1048.6114163398743 , Train MSE: 81.20877454955863
Epoch 89 , Loss: 1055.8891105651855 , Train MSE: 81.54863535274042
Epoch 90 , Loss: 1049.7016372680664 , Train MSE: 81.04268652201911
Epoch 91 , Loss: 1049.5060567855835 , Train MSE: 80.97491752707187
Epoch 92 , Loss: 1048.0897016525269 , Train MSE: 80.93008821089018
Epoch 93 , Loss: 1046.3478302955627 , Train MSE: 80.78591664558733
Epoch 94 , Loss: 1053.4614601135254 , Train MSE: 81.13795369215393
Epoch 95 , Loss: 1046.360026359558 , Train MSE: 80.66699241424725
Epoch 96 , Loss: 1079.4521579742432 , Train MSE: 82.48743764011556
Epoch 97 , Loss: 1040.2891764640808 , Train MSE: 80.48955831871803
Epoch 98 , Loss: 1040.8163075447083 , Train MSE: 80.61954455369776
Epoch 99 , Loss: 1056.2420253753662 , Train MSE: 81.13670904387584
Validation MSE: 190.63113488862837
Test MSE: 155.0205560528684
Average validation MSE: 114.05498631816788
Decision Tree Regression Model
# Load the full dataset with every column as a string; the time filters below
# therefore compare timestamps lexicographically, which is order-correct for
# the zero-padded "YYYY-MM-DD HH:MM:SS" format.
df = pd.read_csv("data.csv",dtype=object)
# Train: all rows strictly before 2021-09-04 00:00:00.
train = df[df['time'] < '2021-09-04 00:00:00']
# Test: all rows strictly after 2021-09-03 00:00:00.
# NOTE(review): any row timestamped between 2021-09-03 00:00:01 and
# 2021-09-03 23:59:59 would satisfy BOTH filters and land in train AND test.
# This split is only leak-free if every timestamp is exactly at midnight
# (daily data) — confirm against data.csv.
test = df[df['time'] > '2021-09-03 00:00:00' ]
from sklearn.model_selection import TimeSeriesSplit
from sklearn.tree import DecisionTreeRegressor
from sklearn.model_selection import cross_validate
from sklearn.pipeline import make_pipeline
from sklearn import preprocessing
from sklearn.metrics import r2_score
from sklearn.metrics import mean_squared_error
# Expanding-window splitter: 5 chronological folds for time-ordered data.
tscv = TimeSeriesSplit(n_splits=5)
all_counties = df.fips.unique()
best_model_per_county = []
# Candidate tree depths swept during hyperparameter search.
max_depths = range(2,20)
# RMSE histories (one list of per-split values per county) of each county's
# winning model, kept for plotting later.
train_RMSEs_per_county, valid_RMSEs_per_county, test_RMSEs_per_county = [], [], []
# R-squared scores of each county's winning model, kept for plotting later.
train_R2_per_county, valid_R2_per_county, test_R2_per_county = [], [], []
# For each county: sweep tree depths, score each depth with 5-fold
# time-series cross-validation, keep the depth whose mean validation RMSE is
# lowest, and record that winner's metrics for later plotting.
for county in all_counties:
    print("\n ************* CURRENT COUNTY FIPS: ", county, "*************")
    # Best-so-far trackers for this county's hyperparameter search.
    min_rmse = float('inf')
    best_model = 0
    best_train_RMSEs = 0
    best_valid_RMSEs = 0
    best_test_RMSEs = 0
    best_train_R2s = 0
    best_valid_R2s = 0
    best_test_R2s = 0
    data = getCounty(train, county)  # county data from 9/3/20-9/3/21
    data_test = getCounty(test, county)  # county data from 9/4/21-10/2/21
    # All our training data
    y = np.array(data["dailyCases_t"], dtype=np.float32)  # class labels
    X = data.drop(columns=['dailyCases_t','time','fips'])
    X = np.array(X,dtype=np.float32)  # training data
    # Our testing data
    y_test = np.array(data_test["dailyCases_t"], dtype=np.float32)  # class labels for testing
    X_test = data_test.drop(columns=['dailyCases_t','time','fips'])
    X_test = np.array(X_test,dtype=np.float32)  # testing data
    # Hyperparameter tuning over max_depth.
    for depth in max_depths:
        print("\n------ CURRENT DEPTH : ", depth)
        i = 0
        train_error = []
        valid_error = []
        test_error = []
        for train_index, valid_index in tscv.split(X):
            X_train, X_valid = X[train_index], X[valid_index]
            y_train, y_valid = y[train_index], y[valid_index]
            print("split", i)
            i += 1
            dtree = DecisionTreeRegressor(max_depth=depth)
            dtree.fit(X_train, y_train)
            # train
            pred_train = dtree.predict(X_train)
            train_rmse = np.sqrt(mean_squared_error(y_train, pred_train))
            train_error.append(train_rmse)
            train_R2 = r2_score(y_train, pred_train)
            print("The Train R-squared value is:", train_R2)
            print("The Train RMSE is:", train_rmse)
            # validation
            pred_valid = dtree.predict(X_valid)
            valid_rmse = np.sqrt(mean_squared_error(y_valid, pred_valid))
            valid_error.append(valid_rmse)
            valid_R2 = r2_score(y_valid, pred_valid)
            print("The Validation R-squared value is:", valid_R2)
            print("The Validation RMSE is:", valid_rmse)
            # test
            pred_test = dtree.predict(X_test)
            test_rmse = np.sqrt(mean_squared_error(y_test, pred_test))
            test_error.append(test_rmse)
            test_R2 = r2_score(y_test, pred_test)
            print("The Test R-squared value is:", test_R2)
            print("The Test RMSE is:", test_rmse)
        # BUG FIX: the original selected the depth that minimized mean *test*
        # RMSE, leaking the held-out test set into model selection and making
        # the reported test score an overestimate of generalization.  Select on
        # mean *validation* RMSE instead; the test set stays untouched until
        # final evaluation.
        avg_valid_rmse = np.mean(valid_error)
        if avg_valid_rmse < min_rmse:
            min_rmse = avg_valid_rmse
            best_model = dtree  # model fit on the last (largest) CV split
            best_train_RMSEs = train_error
            best_valid_RMSEs = valid_error
            best_test_RMSEs = test_error
            # NOTE(review): only the R^2 of the final CV split survives here,
            # while the RMSEs above keep all five splits — confirm whether the
            # downstream plots expect a scalar or a per-split list.
            best_train_R2s = train_R2
            best_valid_R2s = valid_R2
            best_test_R2s = test_R2
    best_model_per_county.append(best_model)
    train_RMSEs_per_county.append(best_train_RMSEs)
    valid_RMSEs_per_county.append(best_valid_RMSEs)
    test_RMSEs_per_county.append(best_test_RMSEs)
    train_R2_per_county.append(best_train_R2s)
    valid_R2_per_county.append(best_valid_R2s)
    # BUG FIX: original appended best_valid_R2s here (copy-paste error),
    # so the test-R^2 list silently duplicated the validation R^2s.
    test_R2_per_county.append(best_test_R2s)
split 0
The Train R-squared value is: 0.9946250446173814
The Train RMSE is: 4.768263708921121
The Validation R-squared value is: -1.9059067431990142
The Validation RMSE is: 778.5048090441818
The Test R-squared value is: -0.10964728989653416
The Test RMSE is: 331.04672031978015
split 1
The Train R-squared value is: 0.9995663955101207
The Train RMSE is: 9.331889037352752
The Validation R-squared value is: -0.6778963705266889
The Validation RMSE is: 542.8250480466844
The Test R-squared value is: -0.09399432925802698
The Test RMSE is: 328.703513823628
split 2
The Train R-squared value is: 0.9993522448547404
The Train RMSE is: 11.26018664493127
The Validation R-squared value is: -11.06797197556558
The Validation RMSE is: 95.08705387279413
The Test R-squared value is: -0.10069027142267029
The Test RMSE is: 329.707916715428
split 3
The Train R-squared value is: 0.9991836886990565
The Train RMSE is: 11.849101443465075
The Validation R-squared value is: -108.40323289346732
The Validation RMSE is: 133.59129472153865
The Test R-squared value is: -0.12245644779591203
The Test RMSE is: 332.9519484850629
split 4
The Train R-squared value is: 0.9991033989584869
The Train RMSE is: 11.854872062821281
The Validation R-squared value is: -0.09591314728883571
The Validation RMSE is: 242.34269868629707
The Test R-squared value is: -0.10474055180023556
The Test RMSE is: 330.3139833343525
------ CURRENT DEPTH : 12
split 0
The Train R-squared value is: 0.9996802736388396
The Train RMSE is: 1.1629527145991116
The Validation R-squared value is: -1.0546040633832665
The Validation RMSE is: 654.6128626906135
The Test R-squared value is: -0.11506465555648848
The Test RMSE is: 331.8538315542573
split 1
The Train R-squared value is: 0.999780838978166
The Train RMSE is: 6.634441043714931
The Validation R-squared value is: 0.349856220034233
The Validation RMSE is: 337.8951966726166
The Test R-squared value is: -0.14118504481612426
The Test RMSE is: 335.71816998067357
split 2
The Train R-squared value is: 0.9997266424097266
The Train RMSE is: 7.314855010534154
The Validation R-squared value is: -6.090804392991616
The Validation RMSE is: 72.8873226513273
The Test R-squared value is: -0.12199487539307241
The Test RMSE is: 332.88348380943063
split 3
The Train R-squared value is: 0.9996871157675203
The Train RMSE is: 7.335831581189737
The Validation R-squared value is: -116.51770461065097
The Validation RMSE is: 138.4569406223798
The Test R-squared value is: -0.1281793171264296
The Test RMSE is: 333.79965083347787
split 4
The Train R-squared value is: 0.999581092464327
The Train RMSE is: 8.103195900228087
The Validation R-squared value is: -0.1768041897239092
The Validation RMSE is: 251.1273271195899
The Test R-squared value is: -0.16258906840960385
The Test RMSE is: 338.85190285575425
------ CURRENT DEPTH : 13
split 0
The Train R-squared value is: 0.9999825603803003
The Train RMSE is: 0.2716072381275556
The Validation R-squared value is: -1.9104957963143896
The Validation RMSE is: 779.1192799944155
The Test R-squared value is: -0.17116734961873226
The Test RMSE is: 340.0997318231917
split 1
The Train R-squared value is: 0.9999426616260976
The Train RMSE is: 3.3934805705224926
The Validation R-squared value is: 0.31442036727236056
The Validation RMSE is: 346.9814564894513
The Test R-squared value is: -0.1383936141348059
The Test RMSE is: 335.3073217214321
split 2
The Train R-squared value is: 0.9998535773774545
The Train RMSE is: 5.3535793508390155
The Validation R-squared value is: -10.223847706349131
The Validation RMSE is: 91.70122299470687
The Test R-squared value is: -0.12268618655617569
The Test RMSE is: 332.9860202098167
split 3
The Train R-squared value is: 0.9998723000002208
The Train RMSE is: 4.686546834149878
The Validation R-squared value is: -123.71580561065471
The Validation RMSE is: 142.63425210657084
The Test R-squared value is: -0.12405833447081993
The Test RMSE is: 333.1894459561283
split 4
The Train R-squared value is: 0.9998399021864608
The Train RMSE is: 5.009448917229574
The Validation R-squared value is: -0.1502161601197507
The Validation RMSE is: 248.27420721293848
The Test R-squared value is: -0.15309305417410335
The Test RMSE is: 337.4651961620482
------ CURRENT DEPTH : 14
split 0
The Train R-squared value is: 1.0
The Train RMSE is: 0.0
The Validation R-squared value is: -0.9860777982223821
The Validation RMSE is: 643.6037881069174
The Test R-squared value is: -0.0044258902546499
The Test RMSE is: 314.9602602190336
split 1
The Train R-squared value is: 0.9999966329562748
The Train RMSE is: 0.8223317506923354
The Validation R-squared value is: -0.8857607055700638
The Validation RMSE is: 575.4672371070368
The Test R-squared value is: -0.13206078490431783
The Test RMSE is: 334.3733717330642
split 2
The Train R-squared value is: 0.9999612814602106
The Train RMSE is: 2.7529583359127328
The Validation R-squared value is: -8.551704291980599
The Validation RMSE is: 84.5949970423202
The Test R-squared value is: -0.11759841571337759
The Test RMSE is: 332.23065315613445
split 3
The Train R-squared value is: 0.9999731022114229
The Train RMSE is: 2.1508781164610267
The Validation R-squared value is: -113.47062197037356
The Validation RMSE is: 136.6501467202441
The Test R-squared value is: -0.10483534522487314
The Test RMSE is: 330.32815449969206
split 4
The Train R-squared value is: 0.9999473024982759
The Train RMSE is: 2.8740364072017837
The Validation R-squared value is: -0.14449696225653397
The Validation RMSE is: 247.65619346976652
The Test R-squared value is: -0.13009403955999432
The Test RMSE is: 334.082789656737
------ CURRENT DEPTH : 15
split 0
The Train R-squared value is: 1.0
The Train RMSE is: 0.0
The Validation R-squared value is: -0.9821063934247067
The Validation RMSE is: 642.9599839675923
The Test R-squared value is: -0.0044258902546499
The Test RMSE is: 314.9602602190336
split 1
The Train R-squared value is: 0.9999998163430696
The Train RMSE is: 0.19205531989934396
The Validation R-squared value is: -0.8859170672587215
The Validation RMSE is: 575.4910946313591
The Test R-squared value is: -0.08908287767652556
The Test RMSE is: 327.96483200868164
split 2
The Train R-squared value is: 0.999997696848261
The Train RMSE is: 0.6714310628286031
The Validation R-squared value is: -14.710452340771687
The Validation RMSE is: 108.49221850165033
The Test R-squared value is: -0.1471132126132606
The Test RMSE is: 336.58902598538936
split 3
The Train R-squared value is: 0.9999897220105175
The Train RMSE is: 1.3295711951007099
The Validation R-squared value is: -104.91705871671005
The Validation RMSE is: 131.44559513682577
The Test R-squared value is: -0.12873236605691218
The Test RMSE is: 333.88145738601133
split 4
The Train R-squared value is: 0.9999785834722164
The Train RMSE is: 1.8321953746012958
The Validation R-squared value is: -0.12875723784189486
The Validation RMSE is: 245.94734875711862
The Test R-squared value is: -0.14503927912395076
The Test RMSE is: 336.28461877936996
------ CURRENT DEPTH : 16
split 0
The Train R-squared value is: 1.0
The Train RMSE is: 0.0
The Validation R-squared value is: -0.980066377588922
The Validation RMSE is: 642.6290264015239
The Test R-squared value is: -0.0030502508694847386
The Test RMSE is: 314.7445050326945
split 1
The Train R-squared value is: 1.0
The Train RMSE is: 0.0
The Validation R-squared value is: -1.0428954277463953
The Validation RMSE is: 598.9635310061956
The Test R-squared value is: -0.09582665298730264
The Test RMSE is: 328.9786703074249
split 2
The Train R-squared value is: 0.9999998743735415
The Train RMSE is: 0.156812512046795
The Validation R-squared value is: -10.485476122949814
The Validation RMSE is: 92.76384612305046
The Test R-squared value is: -0.14364722982733835
The Test RMSE is: 336.0801423306136
split 3
The Train R-squared value is: 0.9999970055625697
The Train RMSE is: 0.7176540408890425
The Validation R-squared value is: -126.62982998515324
The Validation RMSE is: 144.2909777861797
The Test R-squared value is: -0.11238809425530194
The Test RMSE is: 331.4553072280714
split 4
The Train R-squared value is: 0.9999873395044361
The Train RMSE is: 1.4087132667493734
The Validation R-squared value is: -0.10397955338934728
The Validation RMSE is: 243.2329383432309
The Test R-squared value is: -0.13784440582189617
The Test RMSE is: 335.226428881569
------ CURRENT DEPTH : 17
split 0
The Train R-squared value is: 1.0
The Train RMSE is: 0.0
The Validation R-squared value is: -1.910633269832799
The Validation RMSE is: 779.1376801247415
The Test R-squared value is: -0.10900171699725236
The Test RMSE is: 330.95040778611093
split 1
The Train R-squared value is: 1.0
The Train RMSE is: 0.0
The Validation R-squared value is: -0.8720597809834394
The Validation RMSE is: 573.3729081525205
The Test R-squared value is: -0.06249218733774842
The Test RMSE is: 323.9363496440152
split 2
The Train R-squared value is: 1.0
The Train RMSE is: 0.0
The Validation R-squared value is: -15.326586158713162
The Validation RMSE is: 110.59918773602658
The Test R-squared value is: -0.08956435146133335
The Test RMSE is: 328.0373191553374
split 3
The Train R-squared value is: 0.9999994837176844
The Train RMSE is: 0.2979896209730303
The Validation R-squared value is: -125.19829625018549
The Validation RMSE is: 143.479490864461
The Test R-squared value is: -0.1413889466945346
The Test RMSE is: 335.7481609572968
split 4
The Train R-squared value is: 0.9999972047512734
The Train RMSE is: 0.6619230140301304
The Validation R-squared value is: -0.1524526307860954
The Validation RMSE is: 248.5154611412125
The Test R-squared value is: -0.14040190797832608
The Test RMSE is: 335.60295711984753
------ CURRENT DEPTH : 18
split 0
The Train R-squared value is: 1.0
The Train RMSE is: 0.0
The Validation R-squared value is: -1.9088215119143723
The Validation RMSE is: 778.8951506702972
The Test R-squared value is: -0.11268102863199059
The Test RMSE is: 331.49894679189845
split 1
The Train R-squared value is: 1.0
The Train RMSE is: 0.0
The Validation R-squared value is: 0.1709577244517313
The Validation RMSE is: 381.56246937138206
The Test R-squared value is: -0.1315971176191657
The Test RMSE is: 334.3048887179032
split 2
The Train R-squared value is: 1.0
The Train RMSE is: 0.0
The Validation R-squared value is: -7.630763792843885
The Validation RMSE is: 80.41348064132319
The Test R-squared value is: -0.10196570560389073
The Test RMSE is: 329.89888733075315
split 3
The Train R-squared value is: 0.9999999523431709
The Train RMSE is: 0.09053574604251853
The Validation R-squared value is: -120.63932298414406
The Validation RMSE is: 140.86402000208406
The Test R-squared value is: -0.1617323488305813
The Test RMSE is: 338.7270287414336
split 4
The Train R-squared value is: 0.9999995049571974
The Train RMSE is: 0.27855991889591825
The Validation R-squared value is: -0.1841483543741076
The Validation RMSE is: 251.9097223148033
The Test R-squared value is: -0.16978018838084874
The Test RMSE is: 339.89826064016114
------ CURRENT DEPTH : 19
split 0
The Train R-squared value is: 1.0
The Train RMSE is: 0.0
The Validation R-squared value is: -1.9077891636063198
The Validation RMSE is: 778.7569224490368
The Test R-squared value is: -0.11129561312936764
The Test RMSE is: 331.2925054056174
split 1
The Train R-squared value is: 1.0
The Train RMSE is: 0.0
The Validation R-squared value is: -0.6597777381466106
The Validation RMSE is: 539.8862661041387
The Test R-squared value is: -0.03399655068286456
The Test RMSE is: 319.5629019080001
split 2
The Train R-squared value is: 1.0
The Train RMSE is: 0.0
The Validation R-squared value is: -16.18890546497049
The Validation RMSE is: 113.48235942929344
The Test R-squared value is: -0.14232954885281734
The Test RMSE is: 335.88647507443795
split 3
The Train R-squared value is: 1.0
The Train RMSE is: 0.0
The Validation R-squared value is: -113.91129022607467
The Validation RMSE is: 136.91291955467025
The Test R-squared value is: -0.17352863113189732
The Test RMSE is: 340.4424099355993
split 4
The Train R-squared value is: 1.0
The Train RMSE is: 0.0
The Validation R-squared value is: -0.22361742717597322
The Validation RMSE is: 256.07353580115404
The Test R-squared value is: -0.1527124838873446
The Test RMSE is: 337.40950255408205
************* CURRENT COUNTY FIPS: 06111 *************
------ CURRENT DEPTH : 2
split 0
The Train R-squared value is: 0.40814027211655035
The Train RMSE is: 50.600433513801235
The Validation R-squared value is: -0.4877192076248609
The Validation RMSE is: 650.848916362917
The Test R-squared value is: -0.2378399098133679
The Test RMSE is: 226.8373638021525
split 1
The Train R-squared value is: 0.6677942266586558
The Train RMSE is: 243.91297409256705
The Validation R-squared value is: 0.08980151845501827
The Validation RMSE is: 414.1736126137834
The Test R-squared value is: -0.21077963904554764
The Test RMSE is: 224.34423069890434
split 2
The Train R-squared value is: 0.6934241212813624
The Train RMSE is: 247.20947425171843
The Validation R-squared value is: -9.232898048279862
The Validation RMSE is: 58.73587417289765
The Test R-squared value is: -0.8512685173377592
The Test RMSE is: 277.40677202994544
split 3
The Train R-squared value is: 0.7209988298393724
The Train RMSE is: 215.41979835818842
The Validation R-squared value is: -32.24623419946364
The Validation RMSE is: 58.84213933436499
The Test R-squared value is: -0.4219564487403269
The Test RMSE is: 243.12269353533776
split 4
The Train R-squared value is: 0.6729288937854438
The Train RMSE is: 216.14865870548002
The Validation R-squared value is: -5.0636574303570425
The Validation RMSE is: 275.80680603551497
The Test R-squared value is: -1.838301788670483
The Test RMSE is: 343.48805315447504
------ CURRENT DEPTH : 3
split 0
The Train R-squared value is: 0.6214615037385112
The Train RMSE is: 40.466893725777126
The Validation R-squared value is: -0.5677579628931573
The Validation RMSE is: 668.1272862985925
The Test R-squared value is: -0.314987302517594
The Test RMSE is: 233.7992591646156
split 1
The Train R-squared value is: 0.7854723759989677
The Train RMSE is: 196.0076064110004
The Validation R-squared value is: 0.10893466344287595
The Validation RMSE is: 409.79735176767343
The Test R-squared value is: -0.2550010914508718
The Test RMSE is: 228.4043667895593
split 2
The Train R-squared value is: 0.7929393654928646
The Train RMSE is: 203.1631277904575
The Validation R-squared value is: -0.77026318115398
The Validation RMSE is: 24.429975507674236
The Test R-squared value is: -0.780966849831563
The Test RMSE is: 272.08855167996666
split 3
The Train R-squared value is: 0.8280912428016661
The Train RMSE is: 169.0953258591593
The Validation R-squared value is: -9.203221199338428
The Validation RMSE is: 32.597618676040774
The Test R-squared value is: -0.29579774425927874
The Test RMSE is: 232.08707803928576
split 4
The Train R-squared value is: 0.8020295606475414
The Train RMSE is: 168.16345766155945
The Validation R-squared value is: -11.302748988547815
The Validation RMSE is: 392.8608317604439
The Test R-squared value is: -27.66393187199887
The Test RMSE is: 1091.566584764577
------ CURRENT DEPTH : 4
split 0
The Train R-squared value is: 0.741089332225395
The Train RMSE is: 33.46723359243059
The Validation R-squared value is: -0.6211773655430262
The Validation RMSE is: 679.4147433215023
The Test R-squared value is: -0.35594912294614045
The Test RMSE is: 237.41275531430279
split 1
The Train R-squared value is: 0.8870026432160405
The Train RMSE is: 142.25431744046207
The Validation R-squared value is: -0.3273783606319587
The Validation RMSE is: 500.1632052475166
The Test R-squared value is: -0.22370039424113508
The Test RMSE is: 225.53809144265193
split 2
The Train R-squared value is: 0.8580685171621317
The Train RMSE is: 168.20367884183315
The Validation R-squared value is: -4.183670240889496
The Validation RMSE is: 41.80447716629975
The Test R-squared value is: -1.0941137724645604
The Test RMSE is: 295.0410789420172
split 3
The Train R-squared value is: 0.9027401365455905
The Train RMSE is: 127.1889641657708
The Validation R-squared value is: -7.868325333595205
The Validation RMSE is: 30.390513095253045
The Test R-squared value is: -1.6520319461431479
The Test RMSE is: 332.02571962437327
split 4
The Train R-squared value is: 0.8924893765090922
The Train RMSE is: 123.92449141289623
The Validation R-squared value is: -10.628464219847302
The Validation RMSE is: 381.94324242760604
The Test R-squared value is: -27.590440948213406
The Test RMSE is: 1090.1663630179164
------ CURRENT DEPTH : 5
split 0
The Train R-squared value is: 0.7973242184987728
The Train RMSE is: 29.610502032236685
The Validation R-squared value is: -0.5417236995284285
The Validation RMSE is: 662.5565858914971
The Test R-squared value is: -0.3613611999940638
The Test RMSE is: 237.88608301323413
split 1
The Train R-squared value is: 0.9380838089887967
The Train RMSE is: 105.30122646406859
The Validation R-squared value is: -0.17420672908578028
The Validation RMSE is: 470.4209506791625
The Test R-squared value is: -0.30607936865200025
The Test RMSE is: 233.00601674557092
split 2
The Train R-squared value is: 0.9274789602770481
The Train RMSE is: 120.2342312803932
The Validation R-squared value is: -2.3204028224502276
The Validation RMSE is: 33.45796329323676
The Test R-squared value is: -0.8164233780204166
The Test RMSE is: 274.78365277258837
split 3
The Train R-squared value is: 0.9441657473301869
The Train RMSE is: 96.36802454559346
The Validation R-squared value is: -29.49108880070926
The Validation RMSE is: 56.35126769503647
The Test R-squared value is: -3.8780629902446773
The Test RMSE is: 450.30425743655707
split 4
The Train R-squared value is: 0.9373645824664513
The Train RMSE is: 94.58917626276732
The Validation R-squared value is: -9.420614837901383
The Validation RMSE is: 361.5632838589565
The Test R-squared value is: -27.250293633254195
The Test RMSE is: 1083.6619740665976
------ CURRENT DEPTH : 6
split 0
The Train R-squared value is: 0.8333200415470238
The Train RMSE is: 26.852611855572672
The Validation R-squared value is: -0.6597046034318392
The Validation RMSE is: 687.4404773040831
The Test R-squared value is: -0.26033482458108126
The Test RMSE is: 228.88920951462703
split 1
The Train R-squared value is: 0.9740114340273639
The Train RMSE is: 68.2216897322905
The Validation R-squared value is: -0.58230223575709
The Validation RMSE is: 546.083549310789
The Test R-squared value is: -0.3784380519661432
The Test RMSE is: 239.37344920165268
split 2
The Train R-squared value is: 0.953526702822567
The Train RMSE is: 96.24936279795736
The Validation R-squared value is: -3.9892680196878105
The Validation RMSE is: 41.01309372121288
The Test R-squared value is: -0.5953813267184624
The Test RMSE is: 257.522158157216
split 3
The Train R-squared value is: 0.9622569354289253
The Train RMSE is: 79.23210289976505
The Validation R-squared value is: -33.163992737300866
The Validation RMSE is: 59.64877575703827
The Test R-squared value is: -1.9781969073837598
The Test RMSE is: 351.85121510735496
split 4
The Train R-squared value is: 0.9600471377187083
The Train RMSE is: 75.5449440895655
The Validation R-squared value is: -8.729475731196075
The Validation RMSE is: 349.3673934596339
The Test R-squared value is: -27.18042118919082
The Test RMSE is: 1082.3210147706436
------ CURRENT DEPTH : 7
split 0
The Train R-squared value is: 0.8932609158606973
The Train RMSE is: 21.488523645232828
The Validation R-squared value is: -0.6596779464022571
The Validation RMSE is: 687.434956684726
The Test R-squared value is: -0.3386392829495757
The Test RMSE is: 235.89250035585687
split 1
The Train R-squared value is: 0.9891018058222858
The Train RMSE is: 44.17825774861995
The Validation R-squared value is: -0.5764468965438727
The Validation RMSE is: 545.072216602053
The Test R-squared value is: -0.22643517285623593
The Test RMSE is: 225.78997193798753
split 2
The Train R-squared value is: 0.9730210083606946
The Train RMSE is: 73.33460834224924
The Validation R-squared value is: -13.524555000370231
The Validation RMSE is: 69.97703401541757
The Test R-squared value is: -0.37591159046764977
The Test RMSE is: 239.15398153396788
split 3
The Train R-squared value is: 0.9786728747437621
The Train RMSE is: 59.55918721772248
The Validation R-squared value is: -23.310479304925373
The Validation RMSE is: 50.316912543593794
The Test R-squared value is: -1.5277289161212821
The Test RMSE is: 324.15117435963026
split 4
The Train R-squared value is: 0.9815818430460402
The Train RMSE is: 51.292579443790295
The Validation R-squared value is: -8.366491436241837
The Validation RMSE is: 342.7884027164356
The Test R-squared value is: -27.133296636542738
The Test RMSE is: 1081.4156834789007
------ CURRENT DEPTH : 8
split 0
The Train R-squared value is: 0.9324377008895822
The Train RMSE is: 17.09610012091244
The Validation R-squared value is: -0.6578764954333669
The Validation RMSE is: 687.0617768773792
The Test R-squared value is: -0.42738957926093746
The Test RMSE is: 243.5867225020903
split 1
The Train R-squared value is: 0.9943657983591023
The Train RMSE is: 31.76490104391056
The Validation R-squared value is: -0.5817929800953814
The Validation RMSE is: 545.9956651782527
The Test R-squared value is: -0.2255295048723014
The Test RMSE is: 225.70658860106641
split 2
The Train R-squared value is: 0.9853222912408547
The Train RMSE is: 54.09103022129478
The Validation R-squared value is: -13.516481132560392
The Validation RMSE is: 69.95758199572457
The Test R-squared value is: -0.25436011749981513
The Test RMSE is: 228.3460321999398
split 3
The Train R-squared value is: 0.9877125304027949
The Train RMSE is: 45.20784765733618
The Validation R-squared value is: -27.45739117803822
The Validation RMSE is: 54.43958000382173
The Test R-squared value is: -1.5206919135293466
The Test RMSE is: 323.69965394104497
split 4
The Train R-squared value is: 0.9916659133611694
The Train RMSE is: 34.50328618933916
The Validation R-squared value is: -8.421492886769203
The Validation RMSE is: 343.7933821312556
The Test R-squared value is: -27.051257565901455
The Test RMSE is: 1079.8377825522166
------ CURRENT DEPTH : 9
split 0
The Train R-squared value is: 0.9774384740826667
The Train RMSE is: 9.87936604931206
The Validation R-squared value is: -0.49148884394103565
The Validation RMSE is: 651.6729668612202
The Test R-squared value is: -0.3610037792316123
The Test RMSE is: 237.8548528714156
split 1
The Train R-squared value is: 0.9969929364199608
The Train RMSE is: 23.20612457058897
The Validation R-squared value is: -0.5699760393397404
The Validation RMSE is: 543.9523846085349
The Test R-squared value is: -0.19100533013071685
The Test RMSE is: 222.50470742792433
split 2
The Train R-squared value is: 0.9920624065863959
The Train RMSE is: 39.77778030359188
The Validation R-squared value is: -13.32791918090471
The Validation RMSE is: 69.50173957016247
The Test R-squared value is: -0.26937914592903045
The Test RMSE is: 229.7090102850349
split 3
The Train R-squared value is: 0.9927934203495753
The Train RMSE is: 34.621605079118396
The Validation R-squared value is: -19.16766118642423
The Validation RMSE is: 45.82948663270833
The Test R-squared value is: -3.784674136283222
The Test RMSE is: 445.9729661894084
split 4
The Train R-squared value is: 0.9968578577918228
The Train RMSE is: 21.18577586197679
The Validation R-squared value is: -8.58889314331247
The Validation RMSE is: 346.83417972690023
The Test R-squared value is: -27.00821820843096
The Test RMSE is: 1079.009060971869
------ CURRENT DEPTH : 10
split 0
The Train R-squared value is: 0.9920078445274585
The Train RMSE is: 5.879992937061295
The Validation R-squared value is: -0.6560831624799652
The Validation RMSE is: 686.6900773238841
The Test R-squared value is: -0.33784053553376125
The Test RMSE is: 235.82211296974938
split 1
The Train R-squared value is: 0.9980257459890064
The Train RMSE is: 18.80324602003414
The Validation R-squared value is: -0.629748159159804
The Validation RMSE is: 554.2103376522814
The Test R-squared value is: -0.1821624343881716
The Test RMSE is: 221.67714952310635
split 2
The Train R-squared value is: 0.9969724384354061
The Train RMSE is: 24.56646181398061
The Validation R-squared value is: -13.4699049407915
The Validation RMSE is: 69.84526222828364
The Test R-squared value is: -0.25556303152095805
The Test RMSE is: 228.4554963074941
split 3
The Train R-squared value is: 0.9970997693410337
The Train RMSE is: 21.963364660929784
The Validation R-squared value is: -31.440679797509233
The Validation RMSE is: 58.124897129582465
The Test R-squared value is: -3.97556875955764
The Test RMSE is: 454.7824711798295
split 4
The Train R-squared value is: 0.9985729362614317
The Train RMSE is: 14.277531942171295
The Validation R-squared value is: -8.580148531556448
The Validation RMSE is: 346.6759955659453
The Test R-squared value is: -27.054524325542356
The Test RMSE is: 1079.9006579470552
------ CURRENT DEPTH : 11
split 0
The Train R-squared value is: 0.9994353663611724
The Train RMSE is: 1.562889295765913
The Validation R-squared value is: -0.5835923931966356
The Validation RMSE is: 671.492868265291
The Test R-squared value is: -0.30584849827231664
The Test RMSE is: 232.98542206874603
split 1
The Train R-squared value is: 0.9986517126730259
The Train RMSE is: 15.538981157713382
The Validation R-squared value is: -0.5775211540007028
The Validation RMSE is: 545.257902581768
The Test R-squared value is: -0.2162507190737637
The Test RMSE is: 224.85052506207242
split 2
The Train R-squared value is: 0.9987082852225764
The Train RMSE is: 16.04646725148703
The Validation R-squared value is: -12.691998027548733
The Validation RMSE is: 67.94187486856495
The Test R-squared value is: -0.20635347806960724
The Test RMSE is: 223.93379563519707
split 3
The Train R-squared value is: 0.9991966193062829
The Train RMSE is: 11.559613089054253
The Validation R-squared value is: -30.74276779588719
The Validation RMSE is: 57.49626323817202
The Test R-squared value is: -3.739130522914456
The Test RMSE is: 443.845361988805
split 4
The Train R-squared value is: 0.9994255905318241
The Train RMSE is: 9.05820718554623
The Validation R-squared value is: -8.65745105389036
The Validation RMSE is: 348.071855215906
The Test R-squared value is: -27.005398970394747
The Test RMSE is: 1078.9547544123272
------ CURRENT DEPTH : 12
split 0
The Train R-squared value is: 0.9999595787999049
The Train RMSE is: 0.4181666987914534
The Validation R-squared value is: -0.6564154829437843
The Validation RMSE is: 686.7589717258171
The Test R-squared value is: -0.341391796009731
The Test RMSE is: 236.1348971566304
split 1
The Train R-squared value is: 0.9992656972471131
The Train RMSE is: 11.46750378187204
The Validation R-squared value is: -0.5864110476319151
The Validation RMSE is: 546.7921054270782
The Test R-squared value is: -0.2488615931291991
The Test RMSE is: 227.84500176125482
split 2
The Train R-squared value is: 0.9994658636589278
The Train RMSE is: 10.31863341276268
The Validation R-squared value is: -12.942126366198615
The Validation RMSE is: 68.55965453171564
The Test R-squared value is: -0.2802878841527061
The Test RMSE is: 230.69393068559998
split 3
The Train R-squared value is: 0.9998251035309041
The Train RMSE is: 5.393530308502471
The Validation R-squared value is: -24.13540568683358
The Validation RMSE is: 51.16349147020411
The Test R-squared value is: -3.83394086742757
The Test RMSE is: 448.2631284526273
split 4
The Train R-squared value is: 0.9996777745470743
The Train RMSE is: 6.7843989207849065
The Validation R-squared value is: -8.587870159193054
The Validation RMSE is: 346.81567835681585
The Test R-squared value is: -27.05849491845715
The Test RMSE is: 1079.977075107903
------ CURRENT DEPTH : 13
split 0
The Train R-squared value is: 1.0
The Train RMSE is: 0.0
The Validation R-squared value is: -0.5835923931966356
The Validation RMSE is: 671.492868265291
The Test R-squared value is: -0.3238511740138472
The Test RMSE is: 234.585915442803
split 1
The Train R-squared value is: 0.9995884565179697
The Train RMSE is: 8.584977825381676
The Validation R-squared value is: -0.579763003185713
The Validation RMSE is: 545.6452038867291
The Test R-squared value is: -0.23997556130986086
The Test RMSE is: 227.03296130130835
split 2
The Train R-squared value is: 0.9997406366356941
The Train RMSE is: 7.190356702431162
The Validation R-squared value is: -13.160356534134202
The Validation RMSE is: 69.0941386804988
The Test R-squared value is: -0.25882353824854487
The Test RMSE is: 228.75193611390938
split 3
The Train R-squared value is: 0.999955430516061
The Train RMSE is: 2.7227101189838603
The Validation R-squared value is: -20.495144058341914
The Validation RMSE is: 47.31375363464103
The Test R-squared value is: -3.955443610770633
The Test RMSE is: 453.86178862424185
split 4
The Train R-squared value is: 0.999892731666773
The Train RMSE is: 3.9144181929409596
The Validation R-squared value is: -8.56795779467679
The Validation RMSE is: 346.45535278793113
The Test R-squared value is: -27.067124622435546
The Test RMSE is: 1080.1431418550744
------ CURRENT DEPTH : 14
split 0
The Train R-squared value is: 1.0
The Train RMSE is: 0.0
The Validation R-squared value is: -0.6596039630696153
The Validation RMSE is: 687.419634647106
The Test R-squared value is: -0.35821652503884005
The Test RMSE is: 237.61117180283682
split 1
The Train R-squared value is: 0.9999170479676468
The Train RMSE is: 3.8542939141810466
The Validation R-squared value is: -0.629748159159804
The Validation RMSE is: 554.2103376522814
The Test R-squared value is: -0.23997556130986086
The Test RMSE is: 227.03296130130835
split 2
The Train R-squared value is: 0.9999456912442031
The Train RMSE is: 3.2902662808354917
The Validation R-squared value is: -13.351745558419958
The Validation RMSE is: 69.55950395283116
The Test R-squared value is: -0.22593725693358557
The Test RMSE is: 225.74413346331605
split 3
The Train R-squared value is: 0.9999861281037762
The Train RMSE is: 1.5189746227702325
The Validation R-squared value is: -32.59262031412403
The Validation RMSE is: 59.14787750281203
The Test R-squared value is: -1.7203835266489689
The Test RMSE is: 336.27719733617954
split 4
The Train R-squared value is: 0.9999852940875992
The Train RMSE is: 1.4493639079627028
The Validation R-squared value is: -8.655170762770734
The Validation RMSE is: 348.0307598992732
The Test R-squared value is: -27.02687202646826
The Test RMSE is: 1079.3683178216977
------ CURRENT DEPTH : 15
split 0
The Train R-squared value is: 1.0
The Train RMSE is: 0.0
The Validation R-squared value is: -0.6560831624799652
The Validation RMSE is: 686.6900773238841
The Test R-squared value is: -0.32902750322899577
The Test RMSE is: 235.04408977451664
split 1
The Train R-squared value is: 0.9999807675952734
The Train RMSE is: 1.8558723811410007
The Validation R-squared value is: -0.6236168890902032
The Validation RMSE is: 553.1668588866219
The Test R-squared value is: -0.22142704828891224
The Test RMSE is: 225.3284958332673
split 2
The Train R-squared value is: 0.9999874804799034
The Train RMSE is: 1.5797558087567036
The Validation R-squared value is: -12.654264825606216
The Validation RMSE is: 67.84819120050533
The Test R-squared value is: -0.22616703949650163
The Test RMSE is: 225.7652885570352
split 3
The Train R-squared value is: 0.9999940302169316
The Train RMSE is: 0.9964646065987529
The Validation R-squared value is: -22.134950242534597
The Validation RMSE is: 49.085306707279145
The Test R-squared value is: -3.7553496991745012
The Test RMSE is: 444.60422032277285
split 4
The Train R-squared value is: 0.9999978049392498
The Train RMSE is: 0.5599570631470827
The Validation R-squared value is: -8.708293051936977
The Validation RMSE is: 348.9868709071643
The Test R-squared value is: -27.097192127040078
The Test RMSE is: 1080.7215501531914
------ CURRENT DEPTH : 16
split 0
The Train R-squared value is: 1.0
The Train RMSE is: 0.0
The Validation R-squared value is: -0.5835825479438095
The Validation RMSE is: 671.4907809139743
The Test R-squared value is: -0.441700594891937
The Test RMSE is: 244.8047779565277
split 1
The Train R-squared value is: 0.9999985353713678
The Train RMSE is: 0.5121475197315839
The Validation R-squared value is: -0.5786795253814803
The Validation RMSE is: 545.4580568689756
The Test R-squared value is: -0.2536928337300042
The Test RMSE is: 228.28528733537144
split 2
The Train R-squared value is: 0.99999912278379
The Train RMSE is: 0.41816669879145335
The Validation R-squared value is: -12.94134836203485
The Validation RMSE is: 68.55774160826937
The Test R-squared value is: -0.25165631318141624
The Test RMSE is: 228.09979667106086
split 3
The Train R-squared value is: 0.9999960524092342
The Train RMSE is: 0.8103052726431709
The Validation R-squared value is: -30.28709573554768
The Validation RMSE is: 57.082087856860674
The Test R-squared value is: -1.548502721169676
The Test RMSE is: 325.4804455684257
split 4
The Train R-squared value is: 0.999999583022618
The Train RMSE is: 0.24405499693727775
The Validation R-squared value is: -8.655332800649722
The Validation RMSE is: 348.03368029969573
The Test R-squared value is: -27.109291796580486
The Test RMSE is: 1080.9542240459393
------ CURRENT DEPTH : 17
split 0
The Train R-squared value is: 1.0
The Train RMSE is: 0.0
The Validation R-squared value is: -0.4909025044393842
The Validation RMSE is: 651.5448602498778
The Test R-squared value is: -0.2694889332355903
The Test RMSE is: 229.71894371916366
split 1
The Train R-squared value is: 0.999999511790456
The Train RMSE is: 0.29568850838182914
The Validation R-squared value is: -0.5323762454910086
The Validation RMSE is: 537.3992776386635
The Test R-squared value is: -0.3363996246608907
The Test RMSE is: 235.6950835588072
split 2
The Train R-squared value is: 0.9999997075945967
The Train RMSE is: 0.24142865611338277
The Validation R-squared value is: -12.69214390332944
The Validation RMSE is: 67.94223679702934
The Test R-squared value is: -0.2576489096958765
The Test RMSE is: 228.6451851442649
split 3
The Train R-squared value is: 0.999999100631168
The Train RMSE is: 0.3867688766361976
The Validation R-squared value is: -29.110092604445644
The Validation RMSE is: 55.998097157366885
The Test R-squared value is: -3.7649155645763814
The Test RMSE is: 445.0511787695185
split 4
The Train R-squared value is: 1.0
The Train RMSE is: 0.0
The Validation R-squared value is: -8.459324464386324
The Validation RMSE is: 344.4829339797237
The Test R-squared value is: -26.993493827969875
The Test RMSE is: 1078.7253972900705
------ CURRENT DEPTH : 18
split 0
The Train R-squared value is: 1.0
The Train RMSE is: 0.0
The Validation R-squared value is: -0.5415957112416874
The Validation RMSE is: 662.5290838033044
The Test R-squared value is: -0.26471327181065285
The Test RMSE is: 229.2864493642591
split 1
The Train R-squared value is: 1.0
The Train RMSE is: 0.0
The Validation R-squared value is: -0.5771718246123116
The Validation RMSE is: 545.1975277477834
The Test R-squared value is: -0.1447203197319311
The Test RMSE is: 218.13836165971864
split 2
The Train R-squared value is: 1.0
The Train RMSE is: 0.0
The Validation R-squared value is: -14.326682026135764
The Validation RMSE is: 71.8833298808152
The Test R-squared value is: -0.28554384920193376
The Test RMSE is: 231.16697950570946
split 3
The Train R-squared value is: 0.9999994817335772
The Train RMSE is: 0.2936020875136104
The Validation R-squared value is: -35.172628776335564
The Validation RMSE is: 61.37722651298468
The Test R-squared value is: -3.772455760016891
The Test RMSE is: 445.4031730803434
split 4
The Train R-squared value is: 1.0
The Train RMSE is: 0.0
The Validation R-squared value is: -8.661308600815943
The Validation RMSE is: 348.14136472999814
The Test R-squared value is: -27.093731286417384
The Test RMSE is: 1080.6549897615573
------ CURRENT DEPTH : 19
split 0
The Train R-squared value is: 1.0
The Train RMSE is: 0.0
The Validation R-squared value is: -0.6560831624799652
The Validation RMSE is: 686.6900773238841
The Test R-squared value is: -0.23315840209670546
The Test RMSE is: 226.40800826582205
split 1
The Train R-squared value is: 1.0
The Train RMSE is: 0.0
The Validation R-squared value is: -0.5751389989632336
The Validation RMSE is: 544.8460603768708
The Test R-squared value is: -0.15870968125184515
The Test RMSE is: 219.46722322243363
split 2
The Train R-squared value is: 1.0
The Train RMSE is: 0.0
The Validation R-squared value is: -13.62132400116392
The Validation RMSE is: 70.2097559892183
The Test R-squared value is: -0.27150637436559677
The Test RMSE is: 229.9014031545127
split 3
The Train R-squared value is: 0.9999998850122042
The Train RMSE is: 0.13829563644637993
The Validation R-squared value is: -20.579348321862625
The Validation RMSE is: 47.406335607571535
The Test R-squared value is: -2.025019418514792
The Test RMSE is: 354.60628969274524
split 4
The Train R-squared value is: 1.0
The Train RMSE is: 0.0
The Validation R-squared value is: -8.605502232589693
The Validation RMSE is: 347.1344285365683
The Test R-squared value is: -27.04728700435044
The Test RMSE is: 1079.7613561370385
# Select, out of the per-county best models, the single tree whose mean
# test RMSE across ALL counties is lowest ("best general model").
best_index = 0
best_general_model = 0
min_test_rmse = float("inf")
# The per-county test matrices do not depend on the candidate model, so
# build them once instead of re-extracting them for every model in the
# outer loop (the original rebuilt them len(best_model_per_county) times).
county_test_data = []
for county in all_counties:
    data_test = getCounty(test, county)
    # Target: the county's daily case counts.
    y_test = np.array(data_test["dailyCases_t"], dtype=np.float32)
    X_test = data_test.drop(columns=['dailyCases_t','time','fips'])
    X_test = np.array(X_test, dtype=np.float32)  # feature matrix
    county_test_data.append((X_test, y_test))
# enumerate() replaces the hand-maintained `i` counter.
for i, model in enumerate(best_model_per_county):
    curr_test_errors = []
    for X_test, y_test in county_test_data:
        pred_test = model.predict(X_test)
        # squared=False -> RMSE rather than MSE.
        test_rmse = mean_squared_error(y_test, pred_test, squared = False)
        curr_test_errors.append(test_rmse)
    mean_rmse = np.mean(curr_test_errors)  # compute the mean once, not twice
    if mean_rmse < min_test_rmse:
        min_test_rmse = mean_rmse
        best_general_model = model
        best_index = i
import matplotlib.pyplot as plt

# Evaluate the chosen model on Los Angeles County (FIPS 06037) and plot its
# predictions against the observed daily case counts over the test period.
t = getCounty(test, '06037')
y_t = np.array(t["dailyCases_t"], dtype=np.float32)
feature_frame = t.drop(columns=['dailyCases_t','time','fips'])
X_t = np.array(feature_frame)
model_t = best_general_model
pred_t = model_t.predict(X_t)
la_rmse = mean_squared_error(y_t, pred_t, squared = False)
print("Test RMSE: ", la_rmse)

days = range(1, len(y_t) + 1)
for series, colour, lbl in ((y_t, 'red', 'Ground Truth'),
                            (pred_t, 'blue', 'Predictions by Best DT Model')):
    plt.plot(days, series, color=colour, label=lbl)
plt.legend(loc='best')
plt.title('Best DT Model Predictions VS Actual Number of Cases in LA')
plt.xlabel('Days')
plt.ylabel('Number of Cases')
plt.show()
Test RMSE: 2608.2265064540024
min_test_rmse
# Test-set RMSEs of the NN model, one entry per county (15 counties).
NN_test_RMSEs = np.array([
    422.96855915421315, 389.79688977191597, 548.7047392266221,
    519.2485649450427, 2165.628641457177, 577.7467416936556,
    847.4010750524387, 625.0115243391487, 813.5223898441664,
    988.6604761160371, 336.09873845254344, 470.7531034449218,
    310.65657127066, 452.3952963502877, 351.9262260762237,
])
# Average NN test RMSE across all counties (bare expression: notebook output).
np.mean(NN_test_RMSEs)
# Feature names: columns 2..11 of the data frame (target and key columns excluded).
feature_names = data.columns[2:12]
# Bare expression — notebook-style display of the selected names.
feature_names
# Report the importance the chosen tree assigned to each feature, then chart
# the four largest as a horizontal bar plot.
print("Feature importances are:", best_general_model.feature_importances_)

importance_series = pd.Series(best_general_model.feature_importances_,
                              index=data.columns[2:12])
top_importances = importance_series.nlargest(4)
top_importances.plot(kind='barh')
Feature importances are: [0.09814735 0. 0. 0. 0. 0.
0.03412387 0.86772877 0. 0. ]
from sklearn import tree

# Render the selected decision tree with feature labels and filled node colours.
fig = plt.figure(figsize=(25, 20))
_ = tree.plot_tree(
    best_general_model,
    feature_names=data.columns[2:12],
    filled=True,
)
import matplotlib.pyplot as plt

# Compare train / validation / test RMSEs of the winning model across the
# five temporal splits.
split_axis = range(1, 6)
rmse_curves = (
    (train_RMSEs_per_county[best_index], 'red', 'Training RMSEs'),
    (valid_RMSEs_per_county[best_index], 'green', 'Validation RMSEs'),
    (test_RMSEs_per_county[best_index], 'blue', 'Testing RMSEs'),
)
for curve, colour, lbl in rmse_curves:
    plt.plot(split_axis, curve, color=colour, label=lbl)
plt.legend(loc='best')
plt.title('Train/Valid/Test RMSEs Across the 5 Splits')
plt.xlabel('Splits')
plt.ylabel('RMSE')
plt.show()