pip install split-folders
^C
Traceback (most recent call last):
File "/usr/local/lib/python3.7/runpy.py", line 193, in _run_module_as_main
"__main__", mod_spec)
File "/usr/local/lib/python3.7/runpy.py", line 85, in _run_code
exec(code, run_globals)
File "/root/venv/lib/python3.7/site-packages/pip/__main__.py", line 29, in <module>
from pip._internal.cli.main import main as _main
File "/root/venv/lib/python3.7/site-packages/pip/_internal/cli/main.py", line 9, in <module>
from pip._internal.cli.autocompletion import autocomplete
File "/root/venv/lib/python3.7/site-packages/pip/_internal/cli/autocompletion.py", line 10, in <module>
from pip._internal.cli.main_parser import create_main_parser
File "/root/venv/lib/python3.7/site-packages/pip/_internal/cli/main_parser.py", line 8, in <module>
from pip._internal.cli import cmdoptions
File "/root/venv/lib/python3.7/site-packages/pip/_internal/cli/cmdoptions.py", line 23, in <module>
from pip._internal.cli.parser import ConfigOptionParser
File "/root/venv/lib/python3.7/site-packages/pip/_internal/cli/parser.py", line 12, in <module>
from pip._internal.configuration import Configuration, ConfigurationError
File "/root/venv/lib/python3.7/site-packages/pip/_internal/configuration.py", line 21, in <module>
from pip._internal.exceptions import (
File "/root/venv/lib/python3.7/site-packages/pip/_internal/exceptions.py", line 7, in <module>
from pip._vendor.pkg_resources import Distribution
File "/root/venv/lib/python3.7/site-packages/pip/_vendor/pkg_resources/__init__.py", line 57, in <module>
from pip._vendor import six
File "<frozen importlib._bootstrap>", line 983, in _find_and_load
File "<frozen importlib._bootstrap>", line 967, in _find_and_load_unlocked
File "<frozen importlib._bootstrap>", line 677, in _load_unlocked
File "<frozen importlib._bootstrap_external>", line 724, in exec_module
File "<frozen importlib._bootstrap_external>", line 857, in get_code
File "<frozen importlib._bootstrap_external>", line 525, in _compile_bytecode
KeyboardInterrupt
Note: you may need to restart the kernel to use updated packages.
pip install scikit-image
Requirement already satisfied: scikit-image in /usr/local/lib/python3.7/site-packages (0.18.3)
Requirement already satisfied: tifffile>=2019.7.26 in /usr/local/lib/python3.7/site-packages (from scikit-image) (2021.10.12)
Requirement already satisfied: PyWavelets>=1.1.1 in /usr/local/lib/python3.7/site-packages (from scikit-image) (1.1.1)
Requirement already satisfied: imageio>=2.3.0 in /usr/local/lib/python3.7/site-packages (from scikit-image) (2.9.0)
Requirement already satisfied: networkx>=2.0 in /usr/local/lib/python3.7/site-packages (from scikit-image) (2.6.3)
Requirement already satisfied: pillow!=7.1.0,!=7.1.1,>=4.3.0 in /shared-libs/python3.7/py/lib/python3.7/site-packages (from scikit-image) (8.3.2)
Requirement already satisfied: matplotlib!=3.0.0,>=2.0.0 in /shared-libs/python3.7/py/lib/python3.7/site-packages (from scikit-image) (3.4.3)
Requirement already satisfied: numpy>=1.16.5 in /shared-libs/python3.7/py/lib/python3.7/site-packages (from scikit-image) (1.19.5)
Requirement already satisfied: scipy>=1.0.1 in /shared-libs/python3.7/py/lib/python3.7/site-packages (from scikit-image) (1.7.1)
Requirement already satisfied: kiwisolver>=1.0.1 in /shared-libs/python3.7/py/lib/python3.7/site-packages (from matplotlib!=3.0.0,>=2.0.0->scikit-image) (1.3.2)
Requirement already satisfied: cycler>=0.10 in /shared-libs/python3.7/py/lib/python3.7/site-packages (from matplotlib!=3.0.0,>=2.0.0->scikit-image) (0.10.0)
Requirement already satisfied: python-dateutil>=2.7 in /shared-libs/python3.7/py-core/lib/python3.7/site-packages (from matplotlib!=3.0.0,>=2.0.0->scikit-image) (2.8.2)
Requirement already satisfied: pyparsing>=2.2.1 in /shared-libs/python3.7/py-core/lib/python3.7/site-packages (from matplotlib!=3.0.0,>=2.0.0->scikit-image) (2.4.7)
Requirement already satisfied: six in /shared-libs/python3.7/py-core/lib/python3.7/site-packages (from cycler>=0.10->matplotlib!=3.0.0,>=2.0.0->scikit-image) (1.16.0)
WARNING: Running pip as the 'root' user can result in broken permissions and conflicting behaviour with the system package manager. It is recommended to use a virtual environment instead: https://pip.pypa.io/warnings/venv
WARNING: You are using pip version 21.2.4; however, version 21.3 is available.
You should consider upgrading via the '/usr/local/bin/python -m pip install --upgrade pip' command.
Note: you may need to restart the kernel to use updated packages.
pip install torchmetrics
Requirement already satisfied: torchmetrics in /usr/local/lib/python3.7/site-packages (0.5.1)
Requirement already satisfied: numpy>=1.17.2 in /shared-libs/python3.7/py/lib/python3.7/site-packages (from torchmetrics) (1.19.5)
Requirement already satisfied: packaging in /shared-libs/python3.7/py-core/lib/python3.7/site-packages (from torchmetrics) (21.0)
Requirement already satisfied: torch>=1.3.1 in /shared-libs/python3.7/py/lib/python3.7/site-packages (from torchmetrics) (1.9.1)
Requirement already satisfied: typing-extensions in /shared-libs/python3.7/py-core/lib/python3.7/site-packages (from torch>=1.3.1->torchmetrics) (3.10.0.2)
Requirement already satisfied: pyparsing>=2.0.2 in /shared-libs/python3.7/py-core/lib/python3.7/site-packages (from packaging->torchmetrics) (2.4.7)
WARNING: Running pip as the 'root' user can result in broken permissions and conflicting behaviour with the system package manager. It is recommended to use a virtual environment instead: https://pip.pypa.io/warnings/venv
WARNING: You are using pip version 21.2.4; however, version 21.3 is available.
You should consider upgrading via the '/usr/local/bin/python -m pip install --upgrade pip' command.
Note: you may need to restart the kernel to use updated packages.
import splitfolders
from plotly.subplots import make_subplots
import plotly.graph_objects as go
import numpy as np
from pathlib import Path
from skimage import io
from IPython.display import display
import torchvision.models as models
import os
import pandas as pd
import matplotlib.pyplot as plt
import seaborn as sns
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.autograd import Variable
import torch.utils.data as data
import torchvision
from torchvision import transforms
from torchmetrics import MetricCollection, Accuracy, Precision, Recall
mv kaggle.json /root/.kaggle/
%%bash
kaggle datasets download -d andrewmvd/medical-mnist
unzip -q medical-mnist.zip -d data/
Warning: Your Kaggle API key is readable by other users on this system! To fix this, you can run 'chmod 600 /root/.kaggle/kaggle.json'
medical-mnist.zip: Skipping, found more recently modified local copy (use --force to force download)
# Train test split. Do not override this cell!
# ratio=(.05, .95): 5% of the images go to the train split, 95% to the
# val split; seed=42 fixes the shuffle so the split is reproducible.
NEW_DATA_DIR = 'splitted_data/'
splitfolders.ratio("data", output=NEW_DATA_DIR, seed=42, ratio=(.05, .95) , group_prefix=None)
# split-folders names the second split "val"; rename it to "test"
# since it is used as the held-out test set below.
!mv splitted_data/val splitted_data/test
Copying files: 58954 files [08:18, 118.29 files/s]
# Show one randomly chosen sample image per class folder in a 2x3 grid.
data_path = Path('data/')
labels_path = [label_folder for label_folder in data_path.iterdir()]
fig = make_subplots(
    rows=2, cols=3,
    # Fix: use Path.name for the folder's base name instead of
    # str(path).split('/')[1], which breaks on non-'/' separators
    # (e.g. Windows) and on any change of the parent directory depth.
    subplot_titles=[lab_path.name for lab_path in labels_path]
)
tmp = []  # keep the raw images around for later inspection
for i, label_path in enumerate(labels_path):
    label_images_path = list(label_path.iterdir())
    random_image_path = np.random.choice(label_images_path)
    img = io.imread(random_image_path)
    tmp.append(img)
    fig.add_trace(
        # Reverse the rows because Heatmap draws y upward,
        # which would render the image upside-down otherwise.
        go.Heatmap(z=img[::-1], coloraxis="coloraxis"),  # for black and white images
        row=(i // 3) + 1, col=(i % 3) + 1
    )
fig.update_layout(
    height=600,
    width=800,
    title_text="Examples of images",
    coloraxis={"colorscale": "greys"},
)
fig.show()
# Per-image loading pipeline: collapse to a single grayscale channel,
# then convert the PIL image into a float tensor.
skinchanger = transforms.Compose([
    transforms.Grayscale(num_output_channels=1),
    transforms.ToTensor(),
])

def _make_split(split_root):
    # Build an ImageFolder dataset for one split plus its shuffling loader.
    dataset = torchvision.datasets.ImageFolder(root=split_root, transform=skinchanger)
    loader = data.DataLoader(dataset, batch_size=10, shuffle=True, num_workers=4)
    return dataset, loader

train_data, train_data_loader = _make_split("./splitted_data/train/")
test_data, test_data_loader = _make_split("./splitted_data/test/")
/shared-libs/python3.7/py/lib/python3.7/site-packages/torch/utils/data/dataloader.py:481: UserWarning:
This DataLoader will create 4 worker processes in total. Our suggested max number of worker in current system is 2, which is smaller than what this DataLoader is going to create. Please be aware that excessive worker creation might get DataLoader running slow or even freeze, lower the worker number to avoid potential slowness/freeze if necessary.
# Sanity check: counts should reflect the 5% / 95% split performed above.
print("Number of train samples: ", len(train_data))
print("Number of test samples: ", len(test_data))
Number of train samples: 2947
Number of test samples: 56007
class nono(nn.Module):
    """Small CNN classifier for single-channel (grayscale) images.

    Two conv/conv/dropout/maxpool/batchnorm stages followed by one
    linear layer.

    Args:
        num_classes: number of output classes. Defaults to
            len(labels_path) (the class folders discovered earlier in
            this script), preserving the original behaviour of nono().
    """

    def __init__(self, num_classes=None):
        super(nono, self).__init__()
        if num_classes is None:
            # Backward-compatible default: the module-level label list.
            num_classes = len(labels_path)
        self.cnn_layers = nn.Sequential(
            nn.Conv2d(1, 4, kernel_size=5, stride=1, padding=1),
            # Bug fix: the original used nn.Relu (three occurrences),
            # which does not exist and raises AttributeError; the
            # correct class is nn.ReLU.
            nn.ReLU(inplace=True),
            nn.Conv2d(4, 4, kernel_size=5, stride=1, padding=1),
            nn.ReLU(inplace=True),
            nn.Dropout(),
            nn.MaxPool2d(kernel_size=2, stride=2),
            nn.BatchNorm2d(4),
            nn.Conv2d(4, 16, kernel_size=5, stride=1, padding=1),
            nn.ReLU(inplace=True),
            nn.Conv2d(16, 16, kernel_size=5, stride=1, padding=1),
            nn.ReLU(inplace=True),
            nn.Dropout(),
            nn.MaxPool2d(kernel_size=2, stride=2),
            nn.BatchNorm2d(16),
        )
        # 784 = 16 channels * 7 * 7 spatial positions; this assumes a
        # fixed input resolution (40x40 with this layer stack) —
        # TODO confirm against the actual dataset image size.
        self.linear_layers = nn.Sequential(nn.Linear(784, num_classes))

    def forward(self, x):
        """Apply the conv stack, flatten per sample, and classify."""
        x = self.cnn_layers(x)
        x = x.view(x.size(0), -1)
        x = self.linear_layers(x)
        return x
# Train the CNN for 100 epochs with Adam + cross-entropy loss.
model = nono()
# Bug fix: the optimizer was bound to `opti` but stepped as
# `optimizer` inside the loop, raising NameError; use one name
# throughout (matching the later training cell).
optimizer = torch.optim.Adam(model.parameters(), lr=0.003)
entropy = nn.CrossEntropyLoss()
for epoch in range(100):
    losses = []
    model.train()
    for i, (x, y) in enumerate(train_data_loader):
        optimizer.zero_grad()
        outputs = model(x)
        loss = entropy(outputs, y)
        loss.backward()
        optimizer.step()
        losses.append(loss.item())
        if i % 100 == 0:
            # Running average of the loss over the epoch so far.
            print("Epoch: %d, i: %4d, loss=%.3f" % (epoch + 1, i + 1, np.average(losses)))
    train_loss = np.average(losses)
Epoch: 1, i: 1, loss=2.233
Epoch: 1, i: 101, loss=0.392
Epoch: 1, i: 201, loss=0.262
Epoch: 2, i: 1, loss=0.000
Epoch: 2, i: 101, loss=0.080
Epoch: 2, i: 201, loss=0.095
Epoch: 3, i: 1, loss=0.058
Epoch: 3, i: 101, loss=0.025
Epoch: 3, i: 201, loss=0.044
Epoch: 4, i: 1, loss=0.061
Epoch: 4, i: 101, loss=0.075
Epoch: 4, i: 201, loss=0.066
Epoch: 5, i: 1, loss=0.004
Epoch: 5, i: 101, loss=0.042
Epoch: 5, i: 201, loss=0.040
Epoch: 6, i: 1, loss=0.000
Epoch: 6, i: 101, loss=0.016
Epoch: 6, i: 201, loss=0.030
Epoch: 7, i: 1, loss=0.000
Epoch: 7, i: 101, loss=0.004
Epoch: 7, i: 201, loss=0.022
Epoch: 8, i: 1, loss=0.001
Epoch: 8, i: 101, loss=0.032
Epoch: 8, i: 201, loss=0.039
Epoch: 9, i: 1, loss=0.401
Epoch: 9, i: 101, loss=0.016
Epoch: 9, i: 201, loss=0.014
Epoch: 10, i: 1, loss=0.000
Epoch: 10, i: 101, loss=0.003
Epoch: 10, i: 201, loss=0.008
Epoch: 11, i: 1, loss=0.062
Epoch: 11, i: 101, loss=0.008
Epoch: 11, i: 201, loss=0.014
Epoch: 12, i: 1, loss=0.019
Epoch: 12, i: 101, loss=0.002
Epoch: 12, i: 201, loss=0.004
Epoch: 13, i: 1, loss=0.000
Epoch: 13, i: 101, loss=0.018
Epoch: 13, i: 201, loss=0.025
Epoch: 14, i: 1, loss=0.001
Epoch: 14, i: 101, loss=0.023
Epoch: 14, i: 201, loss=0.014
Epoch: 15, i: 1, loss=0.000
Epoch: 15, i: 101, loss=0.003
Epoch: 15, i: 201, loss=0.020
Epoch: 16, i: 1, loss=0.000
Epoch: 16, i: 101, loss=0.014
Epoch: 16, i: 201, loss=0.008
Epoch: 17, i: 1, loss=0.002
Epoch: 17, i: 101, loss=0.021
Epoch: 17, i: 201, loss=0.020
Epoch: 18, i: 1, loss=0.000
Epoch: 18, i: 101, loss=0.011
Epoch: 18, i: 201, loss=0.014
Epoch: 19, i: 1, loss=0.000
Epoch: 19, i: 101, loss=0.005
Epoch: 19, i: 201, loss=0.004
Epoch: 20, i: 1, loss=0.000
Epoch: 20, i: 101, loss=0.007
Epoch: 20, i: 201, loss=0.009
Epoch: 21, i: 1, loss=0.000
Epoch: 21, i: 101, loss=0.013
Epoch: 21, i: 201, loss=0.018
Epoch: 22, i: 1, loss=0.001
Epoch: 22, i: 101, loss=0.002
Epoch: 22, i: 201, loss=0.002
Epoch: 23, i: 1, loss=0.000
Epoch: 23, i: 101, loss=0.007
Epoch: 23, i: 201, loss=0.007
Epoch: 24, i: 1, loss=0.000
Epoch: 24, i: 101, loss=0.015
Epoch: 24, i: 201, loss=0.017
Epoch: 25, i: 1, loss=0.006
Epoch: 25, i: 101, loss=0.019
Epoch: 25, i: 201, loss=0.028
Epoch: 26, i: 1, loss=0.000
Epoch: 26, i: 101, loss=0.012
Epoch: 26, i: 201, loss=0.015
Epoch: 27, i: 1, loss=0.003
Epoch: 27, i: 101, loss=0.010
Epoch: 27, i: 201, loss=0.007
Epoch: 28, i: 1, loss=0.000
Epoch: 28, i: 101, loss=0.001
Epoch: 28, i: 201, loss=0.004
Epoch: 29, i: 1, loss=0.000
Epoch: 29, i: 101, loss=0.001
Epoch: 29, i: 201, loss=0.005
Epoch: 30, i: 1, loss=0.000
Epoch: 30, i: 101, loss=0.001
Epoch: 30, i: 201, loss=0.003
Epoch: 31, i: 1, loss=0.000
Epoch: 31, i: 101, loss=0.003
Epoch: 31, i: 201, loss=0.004
Epoch: 32, i: 1, loss=0.000
Epoch: 32, i: 101, loss=0.003
Epoch: 32, i: 201, loss=0.012
Epoch: 33, i: 1, loss=0.000
Epoch: 33, i: 101, loss=0.001
Epoch: 33, i: 201, loss=0.003
Epoch: 34, i: 1, loss=0.000
Epoch: 34, i: 101, loss=0.024
Epoch: 34, i: 201, loss=0.018
Epoch: 35, i: 1, loss=0.000
Epoch: 35, i: 101, loss=0.008
Epoch: 35, i: 201, loss=0.025
Epoch: 36, i: 1, loss=0.000
Epoch: 36, i: 101, loss=0.010
Epoch: 36, i: 201, loss=0.009
Epoch: 37, i: 1, loss=0.001
Epoch: 37, i: 101, loss=0.001
Epoch: 37, i: 201, loss=0.001
Epoch: 38, i: 1, loss=0.000
Epoch: 38, i: 101, loss=0.007
Epoch: 38, i: 201, loss=0.004
Epoch: 39, i: 1, loss=0.000
Epoch: 39, i: 101, loss=0.004
Epoch: 39, i: 201, loss=0.003
Epoch: 40, i: 1, loss=0.000
Epoch: 40, i: 101, loss=0.001
Epoch: 40, i: 201, loss=0.001
Epoch: 41, i: 1, loss=0.000
Epoch: 41, i: 101, loss=0.002
Epoch: 41, i: 201, loss=0.003
Epoch: 42, i: 1, loss=0.002
Epoch: 42, i: 101, loss=0.024
Epoch: 42, i: 201, loss=0.012
Epoch: 43, i: 1, loss=0.023
Epoch: 43, i: 101, loss=0.002
Epoch: 43, i: 201, loss=0.002
Epoch: 44, i: 1, loss=0.001
Epoch: 44, i: 101, loss=0.007
Epoch: 44, i: 201, loss=0.013
Epoch: 45, i: 1, loss=0.000
Epoch: 45, i: 101, loss=0.007
Epoch: 45, i: 201, loss=0.008
Epoch: 46, i: 1, loss=0.000
Epoch: 46, i: 101, loss=0.000
Epoch: 46, i: 201, loss=0.012
Epoch: 47, i: 1, loss=0.000
Epoch: 47, i: 101, loss=0.027
Epoch: 47, i: 201, loss=0.015
Epoch: 48, i: 1, loss=0.000
Epoch: 48, i: 101, loss=0.008
Epoch: 48, i: 201, loss=0.016
Epoch: 49, i: 1, loss=0.000
Epoch: 49, i: 101, loss=0.001
Epoch: 49, i: 201, loss=0.005
Epoch: 50, i: 1, loss=0.000
Epoch: 50, i: 101, loss=0.002
Epoch: 50, i: 201, loss=0.004
Epoch: 51, i: 1, loss=0.000
Epoch: 51, i: 101, loss=0.005
Epoch: 51, i: 201, loss=0.007
Epoch: 52, i: 1, loss=0.000
Epoch: 52, i: 101, loss=0.003
Epoch: 52, i: 201, loss=0.004
Epoch: 53, i: 1, loss=0.000
Epoch: 53, i: 101, loss=0.001
Epoch: 53, i: 201, loss=0.001
Epoch: 54, i: 1, loss=0.000
Epoch: 54, i: 101, loss=0.001
Epoch: 54, i: 201, loss=0.003
Epoch: 55, i: 1, loss=0.000
Epoch: 55, i: 101, loss=0.010
Epoch: 55, i: 201, loss=0.005
Epoch: 56, i: 1, loss=0.005
Epoch: 56, i: 101, loss=0.006
Epoch: 56, i: 201, loss=0.005
Epoch: 57, i: 1, loss=0.000
Epoch: 57, i: 101, loss=0.004
Epoch: 57, i: 201, loss=0.004
Epoch: 58, i: 1, loss=0.000
Epoch: 58, i: 101, loss=0.000
Epoch: 58, i: 201, loss=0.001
Epoch: 59, i: 1, loss=0.000
Epoch: 59, i: 101, loss=0.000
Epoch: 59, i: 201, loss=0.001
Epoch: 60, i: 1, loss=0.000
Epoch: 60, i: 101, loss=0.000
Epoch: 60, i: 201, loss=0.002
Epoch: 61, i: 1, loss=0.000
Epoch: 61, i: 101, loss=0.004
Epoch: 61, i: 201, loss=0.009
Epoch: 62, i: 1, loss=0.000
Epoch: 62, i: 101, loss=0.000
Epoch: 62, i: 201, loss=0.000
Epoch: 63, i: 1, loss=0.017
Epoch: 63, i: 101, loss=0.010
Epoch: 63, i: 201, loss=0.008
Epoch: 64, i: 1, loss=0.019
Epoch: 64, i: 101, loss=0.007
Epoch: 64, i: 201, loss=0.004
Epoch: 65, i: 1, loss=0.000
Epoch: 65, i: 101, loss=0.011
Epoch: 65, i: 201, loss=0.019
Epoch: 66, i: 1, loss=0.000
Epoch: 66, i: 101, loss=0.002
Epoch: 66, i: 201, loss=0.001
Epoch: 67, i: 1, loss=0.000
Epoch: 67, i: 101, loss=0.006
Epoch: 67, i: 201, loss=0.004
Epoch: 68, i: 1, loss=0.000
Epoch: 68, i: 101, loss=0.000
Epoch: 68, i: 201, loss=0.005
Epoch: 69, i: 1, loss=0.000
Epoch: 69, i: 101, loss=0.003
Epoch: 69, i: 201, loss=0.002
Epoch: 70, i: 1, loss=0.000
Epoch: 70, i: 101, loss=0.000
Epoch: 70, i: 201, loss=0.000
Epoch: 71, i: 1, loss=0.000
Epoch: 71, i: 101, loss=0.000
Epoch: 71, i: 201, loss=0.000
Epoch: 72, i: 1, loss=0.000
Epoch: 72, i: 101, loss=0.001
Epoch: 72, i: 201, loss=0.002
Epoch: 73, i: 1, loss=0.000
Epoch: 73, i: 101, loss=0.000
Epoch: 73, i: 201, loss=0.000
Epoch: 74, i: 1, loss=0.001
Epoch: 74, i: 101, loss=0.043
Epoch: 74, i: 201, loss=0.022
Epoch: 75, i: 1, loss=0.000
Epoch: 75, i: 101, loss=0.015
Epoch: 75, i: 201, loss=0.008
Epoch: 76, i: 1, loss=0.000
Epoch: 76, i: 101, loss=0.016
Epoch: 76, i: 201, loss=0.011
Epoch: 77, i: 1, loss=0.000
Epoch: 77, i: 101, loss=0.005
Epoch: 77, i: 201, loss=0.005
Epoch: 78, i: 1, loss=0.000
Epoch: 78, i: 101, loss=0.001
Epoch: 78, i: 201, loss=0.001
Epoch: 79, i: 1, loss=0.000
Epoch: 79, i: 101, loss=0.001
Epoch: 79, i: 201, loss=0.002
Epoch: 80, i: 1, loss=0.000
Epoch: 80, i: 101, loss=0.001
Epoch: 80, i: 201, loss=0.001
Epoch: 81, i: 1, loss=0.000
Epoch: 81, i: 101, loss=0.003
Epoch: 81, i: 201, loss=0.002
Epoch: 82, i: 1, loss=0.000
Epoch: 82, i: 101, loss=0.007
Epoch: 82, i: 201, loss=0.005
Epoch: 83, i: 1, loss=0.000
Epoch: 83, i: 101, loss=0.000
Epoch: 83, i: 201, loss=0.002
Epoch: 84, i: 1, loss=0.000
Epoch: 84, i: 101, loss=0.026
Epoch: 84, i: 201, loss=0.020
Epoch: 85, i: 1, loss=0.002
Epoch: 85, i: 101, loss=0.010
Epoch: 85, i: 201, loss=0.006
Epoch: 86, i: 1, loss=0.000
Epoch: 86, i: 101, loss=0.000
Epoch: 86, i: 201, loss=0.000
Epoch: 87, i: 1, loss=0.000
Epoch: 87, i: 101, loss=0.017
Epoch: 87, i: 201, loss=0.011
Epoch: 88, i: 1, loss=0.000
Epoch: 88, i: 101, loss=0.004
Epoch: 88, i: 201, loss=0.002
Epoch: 89, i: 1, loss=0.000
Epoch: 89, i: 101, loss=0.006
Epoch: 89, i: 201, loss=0.003
Epoch: 90, i: 1, loss=0.000
Epoch: 90, i: 101, loss=0.006
Epoch: 90, i: 201, loss=0.006
Epoch: 91, i: 1, loss=0.000
Epoch: 91, i: 101, loss=0.001
Epoch: 91, i: 201, loss=0.005
Epoch: 92, i: 1, loss=0.000
Epoch: 92, i: 101, loss=0.004
Epoch: 92, i: 201, loss=0.003
Epoch: 93, i: 1, loss=0.000
Epoch: 93, i: 101, loss=0.001
Epoch: 93, i: 201, loss=0.005
Epoch: 94, i: 1, loss=0.000
Epoch: 94, i: 101, loss=0.001
Epoch: 94, i: 201, loss=0.000
Epoch: 95, i: 1, loss=0.000
Epoch: 95, i: 101, loss=0.000
Epoch: 95, i: 201, loss=0.000
Epoch: 96, i: 1, loss=0.000
Epoch: 96, i: 101, loss=0.000
Epoch: 96, i: 201, loss=0.000
Epoch: 97, i: 1, loss=0.006
Epoch: 97, i: 101, loss=0.007
Epoch: 97, i: 201, loss=0.015
Epoch: 98, i: 1, loss=0.000
Epoch: 98, i: 101, loss=0.002
Epoch: 98, i: 201, loss=0.001
Epoch: 99, i: 1, loss=0.000
Epoch: 99, i: 101, loss=0.000
Epoch: 99, i: 201, loss=0.001
Epoch: 100, i: 1, loss=0.000
Epoch: 100, i: 101, loss=0.002
Epoch: 100, i: 201, loss=0.002
Execution error
FileNotFoundError: [Errno 2] No such file or directory: 'checkpoint.pt'
# Evaluate macro-averaged recall and precision on the test split.
metrics = MetricCollection([
    Recall(num_classes=len(labels_path), average='macro'),
    Precision(num_classes=len(labels_path), average='macro'),
])
with torch.no_grad():
    for x, y in test_data_loader:
        logits = model(x)
        # argmax over the class dimension gives the hard prediction
        _, predicted = torch.max(logits.data, 1)
        metrics(predicted, y)  # accumulate batch statistics
results = metrics.compute()
print("Recall: {}".format(results["Recall"]))
print("Precision: {}".format(results["Precision"]))
print(results)
Execution error
NameError: name 'MetricCollection' is not defined
# Augmented loading pipeline: same grayscale + tensor conversion as
# before, but every image is horizontally flipped (p=1 makes the flip
# deterministic rather than random).
augmentation = transforms.Compose([
    transforms.Grayscale(num_output_channels=1),
    transforms.RandomHorizontalFlip(p=1),
    transforms.ToTensor(),
])
aug = torchvision.datasets.ImageFolder(root="./splitted_data/train/", transform=augmentation)
# Serve original and flipped images together as one training set.
image_dataset = torch.utils.data.ConcatDataset([train_data, aug])
train_data_loader = data.DataLoader(image_dataset, batch_size=10, shuffle=True, num_workers=4)
/shared-libs/python3.7/py/lib/python3.7/site-packages/torch/utils/data/dataloader.py:481: UserWarning:
This DataLoader will create 4 worker processes in total. Our suggested max number of worker in current system is 2, which is smaller than what this DataLoader is going to create. Please be aware that excessive worker creation might get DataLoader running slow or even freeze, lower the worker number to avoid potential slowness/freeze if necessary.
# Size of the concatenated (original + flipped) training dataset.
print("Number of train samples: ", len(image_dataset))
Number of train samples: 11788
# Retrain a fresh model from scratch on the augmented dataset,
# same hyperparameters as the first run (Adam, lr=0.003, 100 epochs).
model = nono()
optimizer = torch.optim.Adam(model.parameters(), lr=0.003)
entropy = nn.CrossEntropyLoss()
for epoch in range(100):
    losses = []
    model.train()
    for i, (x, y) in enumerate(train_data_loader):
        optimizer.zero_grad()
        # forward + loss in one expression; backprop and step as usual
        loss = entropy(model(x), y)
        loss.backward()
        optimizer.step()
        losses.append(loss.item())
        if i % 100 == 0:
            print("Epoch: %d, i: %4d, loss=%.3f" % (epoch + 1, i + 1, np.average(losses)))
    train_loss = np.average(losses)
Epoch: 17, i: 901, loss=0.014
Epoch: 17, i: 1001, loss=0.014
Epoch: 17, i: 1101, loss=0.014
Epoch: 18, i: 1, loss=0.000
Epoch: 18, i: 101, loss=0.010
Epoch: 18, i: 201, loss=0.016
Epoch: 18, i: 301, loss=0.012
Epoch: 18, i: 401, loss=0.010
Epoch: 18, i: 501, loss=0.012
Epoch: 18, i: 601, loss=0.012
Epoch: 18, i: 701, loss=0.010
Epoch: 18, i: 801, loss=0.010
Epoch: 18, i: 901, loss=0.011
Epoch: 18, i: 1001, loss=0.011
Epoch: 18, i: 1101, loss=0.012
Epoch: 19, i: 1, loss=0.000
Epoch: 19, i: 101, loss=0.024
Epoch: 19, i: 201, loss=0.023
Epoch: 19, i: 301, loss=0.019
Epoch: 19, i: 401, loss=0.026
Epoch: 19, i: 501, loss=0.027
Epoch: 19, i: 601, loss=0.025
Epoch: 19, i: 701, loss=0.022
Epoch: 19, i: 801, loss=0.020
Epoch: 19, i: 901, loss=0.018
Epoch: 19, i: 1001, loss=0.017
Epoch: 19, i: 1101, loss=0.016
Epoch: 20, i: 1, loss=0.000
Epoch: 20, i: 101, loss=0.012
Epoch: 20, i: 201, loss=0.008
Epoch: 20, i: 301, loss=0.011
Epoch: 20, i: 401, loss=0.010
Epoch: 20, i: 501, loss=0.009
Epoch: 20, i: 601, loss=0.008
Epoch: 20, i: 701, loss=0.008
Epoch: 20, i: 801, loss=0.007
Epoch: 20, i: 901, loss=0.008
Epoch: 20, i: 1001, loss=0.009
Epoch: 20, i: 1101, loss=0.011
Epoch: 21, i: 1, loss=0.002
Epoch: 21, i: 101, loss=0.003
Epoch: 21, i: 201, loss=0.010
Epoch: 21, i: 301, loss=0.014
Epoch: 21, i: 401, loss=0.016
Epoch: 21, i: 501, loss=0.016
Epoch: 21, i: 601, loss=0.015
Epoch: 21, i: 701, loss=0.014
Epoch: 21, i: 801, loss=0.014
Epoch: 21, i: 901, loss=0.014
Epoch: 21, i: 1001, loss=0.014
Epoch: 21, i: 1101, loss=0.013
Epoch: 22, i: 1, loss=0.000
Epoch: 22, i: 101, loss=0.011
Epoch: 22, i: 201, loss=0.016
Epoch: 22, i: 301, loss=0.012
Epoch: 22, i: 401, loss=0.013
Epoch: 22, i: 501, loss=0.015
Epoch: 22, i: 601, loss=0.015
Epoch: 22, i: 701, loss=0.014
Epoch: 22, i: 801, loss=0.014
Epoch: 22, i: 901, loss=0.014
Epoch: 22, i: 1001, loss=0.014
Epoch: 22, i: 1101, loss=0.014
Epoch: 23, i: 1, loss=0.001
Epoch: 23, i: 101, loss=0.004
Epoch: 23, i: 201, loss=0.005
Epoch: 23, i: 301, loss=0.004
Epoch: 23, i: 401, loss=0.007
Epoch: 23, i: 501, loss=0.007
Epoch: 23, i: 601, loss=0.008
Epoch: 23, i: 701, loss=0.009
Epoch: 23, i: 801, loss=0.008
Epoch: 23, i: 901, loss=0.010
Epoch: 23, i: 1001, loss=0.009
Epoch: 23, i: 1101, loss=0.009
Epoch: 24, i: 1, loss=0.001
Epoch: 24, i: 101, loss=0.032
Epoch: 24, i: 201, loss=0.025
Epoch: 24, i: 301, loss=0.019
Epoch: 24, i: 401, loss=0.020
Epoch: 24, i: 501, loss=0.017
Epoch: 24, i: 601, loss=0.015
Epoch: 24, i: 701, loss=0.016
Epoch: 24, i: 801, loss=0.015
Epoch: 24, i: 901, loss=0.014
Epoch: 24, i: 1001, loss=0.015
Epoch: 24, i: 1101, loss=0.015
Epoch: 25, i: 1, loss=0.000
Epoch: 25, i: 101, loss=0.006
Epoch: 25, i: 201, loss=0.007
Epoch: 25, i: 301, loss=0.006
Epoch: 25, i: 401, loss=0.006
Epoch: 25, i: 501, loss=0.009
Epoch: 25, i: 601, loss=0.011
Epoch: 25, i: 701, loss=0.010
Epoch: 25, i: 801, loss=0.010
Epoch: 25, i: 901, loss=0.011
Epoch: 25, i: 1001, loss=0.012
Epoch: 25, i: 1101, loss=0.011
Epoch: 26, i: 1, loss=0.001
Epoch: 26, i: 101, loss=0.014
Epoch: 26, i: 201, loss=0.010
Epoch: 26, i: 301, loss=0.009
Epoch: 26, i: 401, loss=0.009
Epoch: 26, i: 501, loss=0.009
Epoch: 26, i: 601, loss=0.009
Epoch: 26, i: 701, loss=0.010
Epoch: 26, i: 801, loss=0.009
Epoch: 26, i: 901, loss=0.012
Epoch: 26, i: 1001, loss=0.011
Epoch: 26, i: 1101, loss=0.010
Epoch: 27, i: 1, loss=0.000
Epoch: 27, i: 101, loss=0.025
Epoch: 27, i: 201, loss=0.027
Epoch: 27, i: 301, loss=0.019
Epoch: 27, i: 401, loss=0.015
Epoch: 27, i: 501, loss=0.014
Epoch: 27, i: 601, loss=0.012
Epoch: 27, i: 701, loss=0.014
Epoch: 27, i: 801, loss=0.013
Epoch: 27, i: 901, loss=0.014
Epoch: 27, i: 1001, loss=0.014
Epoch: 27, i: 1101, loss=0.014
Epoch: 28, i: 1, loss=0.000
Epoch: 28, i: 101, loss=0.004
Epoch: 28, i: 201, loss=0.010
Epoch: 28, i: 301, loss=0.010
Epoch: 28, i: 401, loss=0.008
Epoch: 28, i: 501, loss=0.007
Epoch: 28, i: 601, loss=0.006
Epoch: 28, i: 701, loss=0.009
Epoch: 28, i: 801, loss=0.008
Epoch: 28, i: 901, loss=0.009
Epoch: 28, i: 1001, loss=0.009
Epoch: 28, i: 1101, loss=0.010
Epoch: 29, i: 1, loss=0.000
Epoch: 29, i: 101, loss=0.021
Epoch: 29, i: 201, loss=0.014
Epoch: 29, i: 301, loss=0.015
Epoch: 29, i: 401, loss=0.011
Epoch: 29, i: 501, loss=0.009
Epoch: 29, i: 601, loss=0.009
Epoch: 29, i: 701, loss=0.008
Epoch: 29, i: 801, loss=0.008
Epoch: 29, i: 901, loss=0.007
Epoch: 29, i: 1001, loss=0.007
Epoch: 29, i: 1101, loss=0.008
Epoch: 30, i: 1, loss=0.000
Epoch: 30, i: 101, loss=0.014
Epoch: 30, i: 201, loss=0.011
Epoch: 30, i: 301, loss=0.013
Epoch: 30, i: 401, loss=0.018
Epoch: 30, i: 501, loss=0.020
Epoch: 30, i: 601, loss=0.019
Epoch: 30, i: 701, loss=0.018
Epoch: 30, i: 801, loss=0.018
Epoch: 30, i: 901, loss=0.017
Epoch: 30, i: 1001, loss=0.016
Epoch: 30, i: 1101, loss=0.015
Epoch: 31, i: 1, loss=0.000
Epoch: 31, i: 101, loss=0.017
Epoch: 31, i: 201, loss=0.014
Epoch: 31, i: 301, loss=0.010
Epoch: 31, i: 401, loss=0.009
Epoch: 31, i: 501, loss=0.008
Epoch: 31, i: 601, loss=0.007
Epoch: 31, i: 701, loss=0.007
Epoch: 31, i: 801, loss=0.006
Epoch: 31, i: 901, loss=0.006
Epoch: 31, i: 1001, loss=0.006
Epoch: 31, i: 1101, loss=0.006
Epoch: 32, i: 1, loss=0.000
Epoch: 32, i: 101, loss=0.002
Epoch: 32, i: 201, loss=0.010
Epoch: 32, i: 301, loss=0.018
Epoch: 32, i: 401, loss=0.015
Epoch: 32, i: 501, loss=0.013
Epoch: 32, i: 601, loss=0.011
Epoch: 32, i: 701, loss=0.011
Epoch: 32, i: 801, loss=0.010
Epoch: 32, i: 901, loss=0.010
Epoch: 32, i: 1001, loss=0.009
Epoch: 32, i: 1101, loss=0.009
Epoch: 33, i: 1, loss=0.022
Epoch: 33, i: 101, loss=0.006
Epoch: 33, i: 201, loss=0.008
Epoch: 33, i: 301, loss=0.014
Epoch: 33, i: 401, loss=0.017
Epoch: 33, i: 501, loss=0.020
Epoch: 33, i: 601, loss=0.019
Epoch: 33, i: 701, loss=0.016
Epoch: 33, i: 801, loss=0.015
Epoch: 33, i: 901, loss=0.013
Epoch: 33, i: 1001, loss=0.014
Epoch: 33, i: 1101, loss=0.014
Epoch: 34, i: 1, loss=0.000
Epoch: 34, i: 101, loss=0.001
Epoch: 34, i: 201, loss=0.004
Epoch: 34, i: 301, loss=0.007
Epoch: 34, i: 401, loss=0.008
Epoch: 34, i: 501, loss=0.007
Epoch: 34, i: 601, loss=0.008
Epoch: 34, i: 701, loss=0.008
Epoch: 34, i: 801, loss=0.008
Epoch: 34, i: 901, loss=0.008
Epoch: 34, i: 1001, loss=0.008
Epoch: 34, i: 1101, loss=0.008
Epoch: 35, i: 1, loss=0.000
Epoch: 35, i: 101, loss=0.001
Epoch: 35, i: 201, loss=0.007
Epoch: 35, i: 301, loss=0.008
Epoch: 35, i: 401, loss=0.007
Epoch: 35, i: 501, loss=0.006
Epoch: 35, i: 601, loss=0.006
Epoch: 35, i: 701, loss=0.006
Epoch: 35, i: 801, loss=0.009
Epoch: 35, i: 901, loss=0.011
Epoch: 35, i: 1001, loss=0.011
Epoch: 35, i: 1101, loss=0.011
Epoch: 36, i: 1, loss=0.000
Epoch: 36, i: 101, loss=0.003
Epoch: 36, i: 201, loss=0.005
Epoch: 36, i: 301, loss=0.012
Epoch: 36, i: 401, loss=0.015
Epoch: 36, i: 501, loss=0.013
Epoch: 36, i: 601, loss=0.012
Epoch: 36, i: 701, loss=0.010
Epoch: 36, i: 801, loss=0.009
Epoch: 36, i: 901, loss=0.008
Epoch: 36, i: 1001, loss=0.009
Epoch: 36, i: 1101, loss=0.009
Epoch: 37, i: 1, loss=0.000
Epoch: 37, i: 101, loss=0.002
Epoch: 37, i: 201, loss=0.004
Epoch: 37, i: 301, loss=0.004
Epoch: 37, i: 401, loss=0.007
Epoch: 37, i: 501, loss=0.007
Epoch: 37, i: 601, loss=0.006
Epoch: 37, i: 701, loss=0.006
Epoch: 37, i: 801, loss=0.006
Epoch: 37, i: 901, loss=0.007
Epoch: 37, i: 1001, loss=0.008
Epoch: 37, i: 1101, loss=0.008
Epoch: 38, i: 1, loss=0.000
Epoch: 38, i: 101, loss=0.002
Epoch: 38, i: 201, loss=0.002
Epoch: 38, i: 301, loss=0.002
Epoch: 38, i: 401, loss=0.002
Epoch: 38, i: 501, loss=0.002
Epoch: 38, i: 601, loss=0.002
Epoch: 38, i: 701, loss=0.002
Epoch: 38, i: 801, loss=0.005
Epoch: 38, i: 901, loss=0.006
Epoch: 38, i: 1001, loss=0.006
Epoch: 38, i: 1101, loss=0.006
Epoch: 39, i: 1, loss=0.000
Epoch: 39, i: 101, loss=0.009
Epoch: 39, i: 201, loss=0.006
Epoch: 39, i: 301, loss=0.010
Epoch: 39, i: 401, loss=0.010
Epoch: 39, i: 501, loss=0.009
Epoch: 39, i: 601, loss=0.009
Epoch: 39, i: 701, loss=0.009
Epoch: 39, i: 801, loss=0.008
Epoch: 39, i: 901, loss=0.008
Epoch: 39, i: 1001, loss=0.007
Epoch: 39, i: 1101, loss=0.011
Epoch: 40, i: 1, loss=0.000
Epoch: 40, i: 101, loss=0.003
Epoch: 40, i: 201, loss=0.004
Epoch: 40, i: 301, loss=0.009
Epoch: 40, i: 401, loss=0.007
Epoch: 40, i: 501, loss=0.009
Epoch: 40, i: 601, loss=0.012
Epoch: 40, i: 701, loss=0.012
Epoch: 40, i: 801, loss=0.011
Epoch: 40, i: 901, loss=0.010
Epoch: 40, i: 1001, loss=0.009
Epoch: 40, i: 1101, loss=0.009
Epoch: 41, i: 1, loss=0.000
Epoch: 41, i: 101, loss=0.028
Epoch: 41, i: 201, loss=0.022
Epoch: 41, i: 301, loss=0.019
Epoch: 41, i: 401, loss=0.018
Epoch: 41, i: 501, loss=0.015
Epoch: 41, i: 601, loss=0.014
Epoch: 41, i: 701, loss=0.012
Epoch: 41, i: 801, loss=0.011
Epoch: 41, i: 901, loss=0.011
Epoch: 41, i: 1001, loss=0.010
Epoch: 41, i: 1101, loss=0.010
Epoch: 42, i: 1, loss=0.000
Epoch: 42, i: 101, loss=0.003
Epoch: 42, i: 201, loss=0.011
Epoch: 42, i: 301, loss=0.018
Epoch: 42, i: 401, loss=0.016
Epoch: 42, i: 501, loss=0.014
Epoch: 42, i: 601, loss=0.014
Epoch: 42, i: 701, loss=0.012
Epoch: 42, i: 801, loss=0.011
Epoch: 42, i: 901, loss=0.013
Epoch: 42, i: 1001, loss=0.012
Epoch: 42, i: 1101, loss=0.014
Epoch: 43, i: 1, loss=0.000
Epoch: 43, i: 101, loss=0.002
Epoch: 43, i: 201, loss=0.003
Epoch: 43, i: 301, loss=0.004
Epoch: 43, i: 401, loss=0.005
Epoch: 43, i: 501, loss=0.008
Epoch: 43, i: 601, loss=0.007
Epoch: 43, i: 701, loss=0.010
Epoch: 43, i: 801, loss=0.010
Epoch: 43, i: 901, loss=0.010
Epoch: 43, i: 1001, loss=0.010
Epoch: 43, i: 1101, loss=0.009
Epoch: 44, i: 1, loss=0.000
Epoch: 44, i: 101, loss=0.001
Epoch: 44, i: 201, loss=0.004
Epoch: 44, i: 301, loss=0.006
Epoch: 44, i: 401, loss=0.010
Epoch: 44, i: 501, loss=0.010
Epoch: 44, i: 601, loss=0.009
Epoch: 44, i: 701, loss=0.009
Epoch: 44, i: 801, loss=0.009
Epoch: 44, i: 901, loss=0.009
Epoch: 44, i: 1001, loss=0.008
Epoch: 44, i: 1101, loss=0.008
Epoch: 45, i: 1, loss=0.000
Epoch: 45, i: 101, loss=0.008
Epoch: 45, i: 201, loss=0.008
Epoch: 45, i: 301, loss=0.010
Epoch: 45, i: 401, loss=0.010
Epoch: 45, i: 501, loss=0.009
Epoch: 45, i: 601, loss=0.008
Epoch: 45, i: 701, loss=0.008
Epoch: 45, i: 801, loss=0.007
Epoch: 45, i: 901, loss=0.008
Epoch: 45, i: 1001, loss=0.008
Epoch: 45, i: 1101, loss=0.008
Epoch: 46, i: 1, loss=0.000
Epoch: 46, i: 101, loss=0.008
Epoch: 46, i: 201, loss=0.007
Epoch: 46, i: 301, loss=0.007
Epoch: 46, i: 401, loss=0.008
Epoch: 46, i: 501, loss=0.009
Epoch: 46, i: 601, loss=0.010
Epoch: 46, i: 701, loss=0.010
Epoch: 46, i: 801, loss=0.010
Epoch: 46, i: 901, loss=0.010
Epoch: 46, i: 1001, loss=0.009
Epoch: 46, i: 1101, loss=0.010
Epoch: 47, i: 1, loss=0.000
Epoch: 47, i: 101, loss=0.005
Epoch: 47, i: 201, loss=0.004
Epoch: 47, i: 301, loss=0.004
Epoch: 47, i: 401, loss=0.006
Epoch: 47, i: 501, loss=0.005
Epoch: 47, i: 601, loss=0.005
Epoch: 47, i: 701, loss=0.005
Epoch: 47, i: 801, loss=0.004
Epoch: 47, i: 901, loss=0.006
Epoch: 47, i: 1001, loss=0.007
Epoch: 47, i: 1101, loss=0.007
Epoch: 48, i: 1, loss=0.000
Epoch: 48, i: 101, loss=0.003
Epoch: 48, i: 201, loss=0.009
Epoch: 48, i: 301, loss=0.013
Epoch: 48, i: 401, loss=0.011
Epoch: 48, i: 501, loss=0.013
Epoch: 48, i: 601, loss=0.012
Epoch: 48, i: 701, loss=0.012
Epoch: 48, i: 801, loss=0.011
Epoch: 48, i: 901, loss=0.010
Epoch: 48, i: 1001, loss=0.010
Epoch: 48, i: 1101, loss=0.010
Epoch: 49, i: 1, loss=0.000
Epoch: 49, i: 101, loss=0.025
Epoch: 49, i: 201, loss=0.013
Epoch: 49, i: 301, loss=0.010
Epoch: 49, i: 401, loss=0.010
Epoch: 49, i: 501, loss=0.012
Epoch: 49, i: 601, loss=0.010
Epoch: 49, i: 701, loss=0.009
Epoch: 49, i: 801, loss=0.011
Epoch: 49, i: 901, loss=0.011
Epoch: 49, i: 1001, loss=0.011
Epoch: 49, i: 1101, loss=0.010
Epoch: 50, i: 1, loss=0.001
Epoch: 50, i: 101, loss=0.020
Epoch: 50, i: 201, loss=0.013
Epoch: 50, i: 301, loss=0.009
Epoch: 50, i: 401, loss=0.008
Epoch: 50, i: 501, loss=0.007
Epoch: 50, i: 601, loss=0.007
Epoch: 50, i: 701, loss=0.006
Epoch: 50, i: 801, loss=0.006
Epoch: 50, i: 901, loss=0.007
Epoch: 50, i: 1001, loss=0.008
Epoch: 50, i: 1101, loss=0.009
Epoch: 51, i: 1, loss=0.000
Epoch: 51, i: 101, loss=0.004
Epoch: 51, i: 201, loss=0.007
Epoch: 51, i: 301, loss=0.009
Epoch: 51, i: 401, loss=0.008
Epoch: 51, i: 501, loss=0.007
Epoch: 51, i: 601, loss=0.007
Epoch: 51, i: 701, loss=0.010
Epoch: 51, i: 801, loss=0.011
Epoch: 51, i: 901, loss=0.010
Epoch: 51, i: 1001, loss=0.010
Epoch: 51, i: 1101, loss=0.009
Epoch: 52, i: 1, loss=0.000
Epoch: 52, i: 101, loss=0.001
Epoch: 52, i: 201, loss=0.001
Epoch: 52, i: 301, loss=0.002
Epoch: 52, i: 401, loss=0.003
Epoch: 52, i: 501, loss=0.002
Epoch: 52, i: 601, loss=0.004
Epoch: 52, i: 701, loss=0.005
Epoch: 52, i: 801, loss=0.008
Epoch: 52, i: 901, loss=0.007
Epoch: 52, i: 1001, loss=0.007
Epoch: 52, i: 1101, loss=0.006
Epoch: 53, i: 1, loss=0.000
Epoch: 53, i: 101, loss=0.004
Epoch: 53, i: 201, loss=0.003
Epoch: 53, i: 301, loss=0.004
Epoch: 53, i: 401, loss=0.003
Epoch: 53, i: 501, loss=0.002
Epoch: 53, i: 601, loss=0.003
Epoch: 53, i: 701, loss=0.006
Epoch: 53, i: 801, loss=0.006
Epoch: 53, i: 901, loss=0.007
Epoch: 53, i: 1001, loss=0.007
Epoch: 53, i: 1101, loss=0.007
Epoch: 54, i: 1, loss=0.058
Epoch: 54, i: 101, loss=0.001
Epoch: 54, i: 201, loss=0.004
Epoch: 54, i: 301, loss=0.004
Epoch: 54, i: 401, loss=0.004
Epoch: 54, i: 501, loss=0.005
Epoch: 54, i: 601, loss=0.007
Epoch: 54, i: 701, loss=0.007
Epoch: 54, i: 801, loss=0.006
Epoch: 54, i: 901, loss=0.006
Epoch: 54, i: 1001, loss=0.006
Epoch: 54, i: 1101, loss=0.006
Epoch: 55, i: 1, loss=0.000
Epoch: 55, i: 101, loss=0.015
Epoch: 55, i: 201, loss=0.011
Epoch: 55, i: 301, loss=0.012
Epoch: 55, i: 401, loss=0.013
Epoch: 55, i: 501, loss=0.011
Epoch: 55, i: 601, loss=0.009
Epoch: 55, i: 701, loss=0.008
Epoch: 55, i: 801, loss=0.008
Epoch: 55, i: 901, loss=0.007
Epoch: 55, i: 1001, loss=0.007
Epoch: 55, i: 1101, loss=0.007
Epoch: 56, i: 1, loss=0.000
Epoch: 56, i: 101, loss=0.002
Epoch: 56, i: 201, loss=0.003
Epoch: 56, i: 301, loss=0.006
Epoch: 56, i: 401, loss=0.005
Epoch: 56, i: 501, loss=0.005
Epoch: 56, i: 601, loss=0.005
Epoch: 56, i: 701, loss=0.006
Epoch: 56, i: 801, loss=0.005
Epoch: 56, i: 901, loss=0.005
Epoch: 56, i: 1001, loss=0.006
Epoch: 56, i: 1101, loss=0.006
Epoch: 57, i: 1, loss=0.000
Epoch: 57, i: 101, loss=0.004
Epoch: 57, i: 201, loss=0.002
Epoch: 57, i: 301, loss=0.006
Epoch: 57, i: 401, loss=0.010
Epoch: 57, i: 501, loss=0.012
Epoch: 57, i: 601, loss=0.011
Epoch: 57, i: 701, loss=0.011
Epoch: 57, i: 801, loss=0.010
Epoch: 57, i: 901, loss=0.010
Epoch: 57, i: 1001, loss=0.009
Epoch: 57, i: 1101, loss=0.010
Epoch: 58, i: 1, loss=0.000
Epoch: 58, i: 101, loss=0.006
Epoch: 58, i: 201, loss=0.009
Epoch: 58, i: 301, loss=0.007
Epoch: 58, i: 401, loss=0.006
Epoch: 58, i: 501, loss=0.009
Epoch: 58, i: 601, loss=0.009
Epoch: 58, i: 701, loss=0.009
Epoch: 58, i: 801, loss=0.008
Epoch: 58, i: 901, loss=0.007
Epoch: 58, i: 1001, loss=0.007
Epoch: 58, i: 1101, loss=0.007
Epoch: 59, i: 1, loss=0.000
Epoch: 59, i: 101, loss=0.018
Epoch: 59, i: 201, loss=0.010
Epoch: 59, i: 301, loss=0.009
Epoch: 59, i: 401, loss=0.010
Epoch: 59, i: 501, loss=0.009
Epoch: 59, i: 601, loss=0.011
Epoch: 59, i: 701, loss=0.009
Epoch: 59, i: 801, loss=0.009
Epoch: 59, i: 901, loss=0.009
Epoch: 59, i: 1001, loss=0.010
Epoch: 59, i: 1101, loss=0.009
Epoch: 60, i: 1, loss=0.001
Epoch: 60, i: 101, loss=0.005
Epoch: 60, i: 201, loss=0.004
Epoch: 60, i: 301, loss=0.007
Epoch: 60, i: 401, loss=0.007
Epoch: 60, i: 501, loss=0.006
Epoch: 60, i: 601, loss=0.006
Epoch: 60, i: 701, loss=0.005
Epoch: 60, i: 801, loss=0.005
Epoch: 60, i: 901, loss=0.005
Epoch: 60, i: 1001, loss=0.006
Epoch: 60, i: 1101, loss=0.007
Epoch: 61, i: 1, loss=0.000
Epoch: 61, i: 101, loss=0.020
Epoch: 61, i: 201, loss=0.017
Epoch: 61, i: 301, loss=0.013
Epoch: 61, i: 401, loss=0.020
Epoch: 61, i: 501, loss=0.018
Epoch: 61, i: 601, loss=0.016
Epoch: 61, i: 701, loss=0.015
Epoch: 61, i: 801, loss=0.014
Epoch: 61, i: 901, loss=0.013
Epoch: 61, i: 1001, loss=0.012
Epoch: 61, i: 1101, loss=0.011
Epoch: 62, i: 1, loss=0.000
Epoch: 62, i: 101, loss=0.003
Epoch: 62, i: 201, loss=0.004
Epoch: 62, i: 301, loss=0.011
Epoch: 62, i: 401, loss=0.010
Epoch: 62, i: 501, loss=0.011
Epoch: 62, i: 601, loss=0.010
Epoch: 62, i: 701, loss=0.015
Epoch: 62, i: 801, loss=0.015
Epoch: 62, i: 901, loss=0.013
Epoch: 62, i: 1001, loss=0.012
Epoch: 62, i: 1101, loss=0.012
Epoch: 63, i: 1, loss=0.042
Epoch: 63, i: 101, loss=0.001
Epoch: 63, i: 201, loss=0.008
Epoch: 63, i: 301, loss=0.008
Epoch: 63, i: 401, loss=0.008
Epoch: 63, i: 501, loss=0.006
Epoch: 63, i: 601, loss=0.006
Epoch: 63, i: 701, loss=0.005
Epoch: 63, i: 801, loss=0.007
Epoch: 63, i: 901, loss=0.007
Epoch: 63, i: 1001, loss=0.008
Epoch: 63, i: 1101, loss=0.008
Epoch: 64, i: 1, loss=0.002
Epoch: 64, i: 101, loss=0.006
Epoch: 64, i: 201, loss=0.006
Epoch: 64, i: 301, loss=0.008
Epoch: 64, i: 401, loss=0.008
Epoch: 64, i: 501, loss=0.007
Epoch: 64, i: 601, loss=0.006
Epoch: 64, i: 701, loss=0.006
Epoch: 64, i: 801, loss=0.006
Epoch: 64, i: 901, loss=0.007
Epoch: 64, i: 1001, loss=0.008
Epoch: 64, i: 1101, loss=0.008
Epoch: 65, i: 1, loss=0.001
Epoch: 65, i: 101, loss=0.003
Epoch: 65, i: 201, loss=0.002
Epoch: 65, i: 301, loss=0.002
Epoch: 65, i: 401, loss=0.002
Epoch: 65, i: 501, loss=0.001
Epoch: 65, i: 601, loss=0.001
Epoch: 65, i: 701, loss=0.002
Epoch: 65, i: 801, loss=0.002
Epoch: 65, i: 901, loss=0.002
Epoch: 65, i: 1001, loss=0.002
Epoch: 65, i: 1101, loss=0.002
Epoch: 66, i: 1, loss=0.000
Epoch: 66, i: 101, loss=0.001
Epoch: 66, i: 201, loss=0.001
Epoch: 66, i: 301, loss=0.001
Epoch: 66, i: 401, loss=0.001
Epoch: 66, i: 501, loss=0.002
Epoch: 66, i: 601, loss=0.001
Epoch: 66, i: 701, loss=0.002
Epoch: 66, i: 801, loss=0.002
Epoch: 66, i: 901, loss=0.002
Epoch: 66, i: 1001, loss=0.003
Epoch: 66, i: 1101, loss=0.004
Epoch: 67, i: 1, loss=0.000
Epoch: 67, i: 101, loss=0.003
Epoch: 67, i: 201, loss=0.002
Epoch: 67, i: 301, loss=0.002
Epoch: 67, i: 401, loss=0.002
Epoch: 67, i: 501, loss=0.007
Epoch: 67, i: 601, loss=0.006
Epoch: 67, i: 701, loss=0.007
Epoch: 67, i: 801, loss=0.008
Epoch: 67, i: 901, loss=0.011
Epoch: 67, i: 1001, loss=0.011
Epoch: 67, i: 1101, loss=0.011
Epoch: 68, i: 1, loss=0.000
Epoch: 68, i: 101, loss=0.001
Epoch: 68, i: 201, loss=0.005
Epoch: 68, i: 301, loss=0.005
Epoch: 68, i: 401, loss=0.004
Epoch: 68, i: 501, loss=0.003
Epoch: 68, i: 601, loss=0.004
Epoch: 68, i: 701, loss=0.005
Epoch: 68, i: 801, loss=0.006
Epoch: 68, i: 901, loss=0.007
Epoch: 68, i: 1001, loss=0.007
Epoch: 68, i: 1101, loss=0.007
Epoch: 69, i: 1, loss=0.000
Epoch: 69, i: 101, loss=0.006
Epoch: 69, i: 201, loss=0.003
Epoch: 69, i: 301, loss=0.002
Epoch: 69, i: 401, loss=0.002
Epoch: 69, i: 501, loss=0.008
Epoch: 69, i: 601, loss=0.007
Epoch: 69, i: 701, loss=0.007
Epoch: 69, i: 801, loss=0.007
Epoch: 69, i: 901, loss=0.007
Epoch: 69, i: 1001, loss=0.006
Epoch: 69, i: 1101, loss=0.007
Epoch: 70, i: 1, loss=0.000
Epoch: 70, i: 101, loss=0.003
Epoch: 70, i: 201, loss=0.004
Epoch: 70, i: 301, loss=0.005
Epoch: 70, i: 401, loss=0.004
Epoch: 70, i: 501, loss=0.005
Epoch: 70, i: 601, loss=0.010
Epoch: 70, i: 701, loss=0.011
Epoch: 70, i: 801, loss=0.010
Epoch: 70, i: 901, loss=0.009
Epoch: 70, i: 1001, loss=0.009
Epoch: 70, i: 1101, loss=0.010
Epoch: 71, i: 1, loss=0.000
Epoch: 71, i: 101, loss=0.007
Epoch: 71, i: 201, loss=0.006
Epoch: 71, i: 301, loss=0.006
Epoch: 71, i: 401, loss=0.009
Epoch: 71, i: 501, loss=0.009
Epoch: 71, i: 601, loss=0.009
Epoch: 71, i: 701, loss=0.010
Epoch: 71, i: 801, loss=0.010
Epoch: 71, i: 901, loss=0.009
Epoch: 71, i: 1001, loss=0.008
Epoch: 71, i: 1101, loss=0.008
Epoch: 72, i: 1, loss=0.000
Epoch: 72, i: 101, loss=0.012
Epoch: 72, i: 201, loss=0.012
Epoch: 72, i: 301, loss=0.009
Epoch: 72, i: 401, loss=0.008
Epoch: 72, i: 501, loss=0.008
Epoch: 72, i: 601, loss=0.008
Epoch: 72, i: 701, loss=0.008
Epoch: 72, i: 801, loss=0.008
Epoch: 72, i: 901, loss=0.009
Epoch: 72, i: 1001, loss=0.008
Epoch: 72, i: 1101, loss=0.008
Epoch: 73, i: 1, loss=0.000
Epoch: 73, i: 101, loss=0.002
Epoch: 73, i: 201, loss=0.002
Epoch: 73, i: 301, loss=0.002
Epoch: 73, i: 401, loss=0.011
Epoch: 73, i: 501, loss=0.010
Epoch: 73, i: 601, loss=0.010
Epoch: 73, i: 701, loss=0.009
Epoch: 73, i: 801, loss=0.009
Epoch: 73, i: 901, loss=0.008
Epoch: 73, i: 1001, loss=0.008
Epoch: 73, i: 1101, loss=0.008
Epoch: 74, i: 1, loss=0.000
Epoch: 74, i: 101, loss=0.001
Epoch: 74, i: 201, loss=0.006
Epoch: 74, i: 301, loss=0.007
Epoch: 74, i: 401, loss=0.007
Epoch: 74, i: 501, loss=0.006
Epoch: 74, i: 601, loss=0.006
Epoch: 74, i: 701, loss=0.007
Epoch: 74, i: 801, loss=0.006
Epoch: 74, i: 901, loss=0.006
Epoch: 74, i: 1001, loss=0.006
Epoch: 74, i: 1101, loss=0.006
Epoch: 75, i: 1, loss=0.000
Epoch: 75, i: 101, loss=0.001
Epoch: 75, i: 201, loss=0.002
Epoch: 75, i: 301, loss=0.002
Epoch: 75, i: 401, loss=0.003
Epoch: 75, i: 501, loss=0.005
Epoch: 75, i: 601, loss=0.004
Epoch: 75, i: 701, loss=0.005
Epoch: 75, i: 801, loss=0.005
Epoch: 75, i: 901, loss=0.004
Epoch: 75, i: 1001, loss=0.004
Epoch: 75, i: 1101, loss=0.005
Epoch: 76, i: 1, loss=0.001
Epoch: 76, i: 101, loss=0.027
Epoch: 76, i: 201, loss=0.022
Epoch: 76, i: 301, loss=0.016
Epoch: 76, i: 401, loss=0.012
Epoch: 76, i: 501, loss=0.010
Epoch: 76, i: 601, loss=0.009
Epoch: 76, i: 701, loss=0.009
Epoch: 76, i: 801, loss=0.009
Epoch: 76, i: 901, loss=0.008
Epoch: 76, i: 1001, loss=0.007
Epoch: 76, i: 1101, loss=0.008
Epoch: 77, i: 1, loss=0.000
Epoch: 77, i: 101, loss=0.022
Epoch: 77, i: 201, loss=0.014
Epoch: 77, i: 301, loss=0.012
Epoch: 77, i: 401, loss=0.011
Epoch: 77, i: 501, loss=0.011
Epoch: 77, i: 601, loss=0.010
Epoch: 77, i: 701, loss=0.009
Epoch: 77, i: 801, loss=0.008
Epoch: 77, i: 901, loss=0.008
Epoch: 77, i: 1001, loss=0.008
Epoch: 77, i: 1101, loss=0.007
Epoch: 78, i: 1, loss=0.000
Epoch: 78, i: 101, loss=0.000
Epoch: 78, i: 201, loss=0.003
Epoch: 78, i: 301, loss=0.002
Epoch: 78, i: 401, loss=0.006
Epoch: 78, i: 501, loss=0.011
Epoch: 78, i: 601, loss=0.009
Epoch: 78, i: 701, loss=0.008
Epoch: 78, i: 801, loss=0.007
Epoch: 78, i: 901, loss=0.011
Epoch: 78, i: 1001, loss=0.010
Epoch: 78, i: 1101, loss=0.010
Epoch: 79, i: 1, loss=0.000
Epoch: 79, i: 101, loss=0.006
Epoch: 79, i: 201, loss=0.003
Epoch: 79, i: 301, loss=0.003
Epoch: 79, i: 401, loss=0.002
Epoch: 79, i: 501, loss=0.003
Epoch: 79, i: 601, loss=0.004
Epoch: 79, i: 701, loss=0.004
Epoch: 79, i: 801, loss=0.006
Epoch: 79, i: 901, loss=0.007
Epoch: 79, i: 1001, loss=0.009
Epoch: 79, i: 1101, loss=0.009
Epoch: 80, i: 1, loss=0.000
Epoch: 80, i: 101, loss=0.005
Epoch: 80, i: 201, loss=0.007
Epoch: 80, i: 301, loss=0.006
Epoch: 80, i: 401, loss=0.006
Epoch: 80, i: 501, loss=0.005
Epoch: 80, i: 601, loss=0.006
Epoch: 80, i: 701, loss=0.006
Epoch: 80, i: 801, loss=0.006
Epoch: 80, i: 901, loss=0.007
Epoch: 80, i: 1001, loss=0.009
Epoch: 80, i: 1101, loss=0.009
Epoch: 81, i: 1, loss=0.000
Epoch: 81, i: 101, loss=0.005
Epoch: 81, i: 201, loss=0.004
Epoch: 81, i: 301, loss=0.003
Epoch: 81, i: 401, loss=0.007
Epoch: 81, i: 501, loss=0.006
Epoch: 81, i: 601, loss=0.007
Epoch: 81, i: 701, loss=0.007
Epoch: 81, i: 801, loss=0.008
Epoch: 81, i: 901, loss=0.007
Epoch: 81, i: 1001, loss=0.006
Epoch: 81, i: 1101, loss=0.006
Epoch: 82, i: 1, loss=0.000
Epoch: 82, i: 101, loss=0.001
Epoch: 82, i: 201, loss=0.002
Epoch: 82, i: 301, loss=0.002
Epoch: 82, i: 401, loss=0.002
Epoch: 82, i: 501, loss=0.002
Epoch: 82, i: 601, loss=0.003
Epoch: 82, i: 701, loss=0.003
Epoch: 82, i: 801, loss=0.004
Epoch: 82, i: 901, loss=0.003
Epoch: 82, i: 1001, loss=0.003
Epoch: 82, i: 1101, loss=0.003
Epoch: 83, i: 1, loss=0.000
Epoch: 83, i: 101, loss=0.007
Epoch: 83, i: 201, loss=0.006
Epoch: 83, i: 301, loss=0.009
Epoch: 83, i: 401, loss=0.008
Epoch: 83, i: 501, loss=0.007
Epoch: 83, i: 601, loss=0.006
Epoch: 83, i: 701, loss=0.007
Epoch: 83, i: 801, loss=0.006
Epoch: 83, i: 901, loss=0.006
Epoch: 83, i: 1001, loss=0.005
Epoch: 83, i: 1101, loss=0.005
Epoch: 84, i: 1, loss=0.000
Epoch: 84, i: 101, loss=0.001
Epoch: 84, i: 201, loss=0.001
Epoch: 84, i: 301, loss=0.001
Epoch: 84, i: 401, loss=0.001
Epoch: 84, i: 501, loss=0.001
Epoch: 84, i: 601, loss=0.001
Epoch: 84, i: 701, loss=0.001
Epoch: 84, i: 801, loss=0.001
Epoch: 84, i: 901, loss=0.001
Epoch: 84, i: 1001, loss=0.001
Epoch: 84, i: 1101, loss=0.002
Epoch: 85, i: 1, loss=0.000
Epoch: 85, i: 101, loss=0.013
Epoch: 85, i: 201, loss=0.008
Epoch: 85, i: 301, loss=0.005
Epoch: 85, i: 401, loss=0.004
Epoch: 85, i: 501, loss=0.007
Epoch: 85, i: 601, loss=0.011
Epoch: 85, i: 701, loss=0.010
Epoch: 85, i: 801, loss=0.009
Epoch: 85, i: 901, loss=0.009
Epoch: 85, i: 1001, loss=0.009
Epoch: 85, i: 1101, loss=0.008
Epoch: 86, i: 1, loss=0.000
Epoch: 86, i: 101, loss=0.019
Epoch: 86, i: 201, loss=0.029
Epoch: 86, i: 301, loss=0.029
Epoch: 86, i: 401, loss=0.024
Epoch: 86, i: 501, loss=0.020
Epoch: 86, i: 601, loss=0.018
Epoch: 86, i: 701, loss=0.016
Epoch: 86, i: 801, loss=0.015
Epoch: 86, i: 901, loss=0.013
Epoch: 86, i: 1001, loss=0.012
Epoch: 86, i: 1101, loss=0.013
Epoch: 87, i: 1, loss=0.000
Epoch: 87, i: 101, loss=0.011
Epoch: 87, i: 201, loss=0.010
Epoch: 87, i: 301, loss=0.011
Epoch: 87, i: 401, loss=0.008
Epoch: 87, i: 501, loss=0.009
Epoch: 87, i: 601, loss=0.010
Epoch: 87, i: 701, loss=0.008
Epoch: 87, i: 801, loss=0.008
Epoch: 87, i: 901, loss=0.007
Epoch: 87, i: 1001, loss=0.007
Epoch: 87, i: 1101, loss=0.008
Epoch: 88, i: 1, loss=0.000
Epoch: 88, i: 101, loss=0.001
Epoch: 88, i: 201, loss=0.001
Epoch: 88, i: 301, loss=0.001
Epoch: 88, i: 401, loss=0.006
Epoch: 88, i: 501, loss=0.011
Epoch: 88, i: 601, loss=0.011
Epoch: 88, i: 701, loss=0.009
Epoch: 88, i: 801, loss=0.008
Epoch: 88, i: 901, loss=0.008
Epoch: 88, i: 1001, loss=0.008
Epoch: 88, i: 1101, loss=0.007
Epoch: 89, i: 1, loss=0.000
Epoch: 89, i: 101, loss=0.001
Epoch: 89, i: 201, loss=0.002
Epoch: 89, i: 301, loss=0.002
Epoch: 89, i: 401, loss=0.002
Epoch: 89, i: 501, loss=0.001
Epoch: 89, i: 601, loss=0.002
Epoch: 89, i: 701, loss=0.002
Epoch: 89, i: 801, loss=0.004
Epoch: 89, i: 901, loss=0.003
Epoch: 89, i: 1001, loss=0.003
Epoch: 89, i: 1101, loss=0.003
Epoch: 90, i: 1, loss=0.005
Epoch: 90, i: 101, loss=0.012
Epoch: 90, i: 201, loss=0.011
Epoch: 90, i: 301, loss=0.009
Epoch: 90, i: 401, loss=0.007
Epoch: 90, i: 501, loss=0.007
Epoch: 90, i: 601, loss=0.008
Epoch: 90, i: 701, loss=0.008
Epoch: 90, i: 801, loss=0.007
Epoch: 90, i: 901, loss=0.008
Epoch: 90, i: 1001, loss=0.007
Epoch: 90, i: 1101, loss=0.009
Epoch: 91, i: 1, loss=0.000
Epoch: 91, i: 101, loss=0.001
Epoch: 91, i: 201, loss=0.005
Epoch: 91, i: 301, loss=0.003
Epoch: 91, i: 401, loss=0.006
Epoch: 91, i: 501, loss=0.005
Epoch: 91, i: 601, loss=0.005
Epoch: 91, i: 701, loss=0.005
Epoch: 91, i: 801, loss=0.005
Epoch: 91, i: 901, loss=0.005
Epoch: 91, i: 1001, loss=0.008
Epoch: 91, i: 1101, loss=0.008
Epoch: 92, i: 1, loss=0.000
Epoch: 92, i: 101, loss=0.008
Epoch: 92, i: 201, loss=0.006
Epoch: 92, i: 301, loss=0.004
Epoch: 92, i: 401, loss=0.006
Epoch: 92, i: 501, loss=0.005
Epoch: 92, i: 601, loss=0.013
Epoch: 92, i: 701, loss=0.016
Epoch: 92, i: 801, loss=0.015
Epoch: 92, i: 901, loss=0.014
Epoch: 92, i: 1001, loss=0.013
Epoch: 92, i: 1101, loss=0.013
Epoch: 93, i: 1, loss=0.000
Epoch: 93, i: 101, loss=0.007
Epoch: 93, i: 201, loss=0.006
Epoch: 93, i: 301, loss=0.005
Epoch: 93, i: 401, loss=0.004
Epoch: 93, i: 501, loss=0.003
Epoch: 93, i: 601, loss=0.003
Epoch: 93, i: 701, loss=0.003
Epoch: 93, i: 801, loss=0.005
Epoch: 93, i: 901, loss=0.008
Epoch: 93, i: 1001, loss=0.008
Epoch: 93, i: 1101, loss=0.008
Epoch: 94, i: 1, loss=0.000
Epoch: 94, i: 101, loss=0.005
Epoch: 94, i: 201, loss=0.008
Epoch: 94, i: 301, loss=0.006
Epoch: 94, i: 401, loss=0.005
Epoch: 94, i: 501, loss=0.005
Epoch: 94, i: 601, loss=0.005
Epoch: 94, i: 701, loss=0.004
Epoch: 94, i: 801, loss=0.005
Epoch: 94, i: 901, loss=0.006
Epoch: 94, i: 1001, loss=0.005
Epoch: 94, i: 1101, loss=0.005
Epoch: 95, i: 1, loss=0.000
Epoch: 95, i: 101, loss=0.014
Epoch: 95, i: 201, loss=0.012
Epoch: 95, i: 301, loss=0.010
Epoch: 95, i: 401, loss=0.010
Epoch: 95, i: 501, loss=0.008
Epoch: 95, i: 601, loss=0.007
Epoch: 95, i: 701, loss=0.007
Epoch: 95, i: 801, loss=0.007
Epoch: 95, i: 901, loss=0.006
Epoch: 95, i: 1001, loss=0.006
Epoch: 95, i: 1101, loss=0.006
Epoch: 96, i: 1, loss=0.000
Epoch: 96, i: 101, loss=0.003
Epoch: 96, i: 201, loss=0.002
Epoch: 96, i: 301, loss=0.002
Epoch: 96, i: 401, loss=0.002
Epoch: 96, i: 501, loss=0.003
Epoch: 96, i: 601, loss=0.005
Epoch: 96, i: 701, loss=0.005
Epoch: 96, i: 801, loss=0.005
Epoch: 96, i: 901, loss=0.004
Epoch: 96, i: 1001, loss=0.004
Epoch: 96, i: 1101, loss=0.004
Epoch: 97, i: 1, loss=0.000
Epoch: 97, i: 101, loss=0.002
Epoch: 97, i: 201, loss=0.002
Epoch: 97, i: 301, loss=0.002
Epoch: 97, i: 401, loss=0.005
Epoch: 97, i: 501, loss=0.005
Epoch: 97, i: 601, loss=0.006
Epoch: 97, i: 701, loss=0.005
Epoch: 97, i: 801, loss=0.006
Epoch: 97, i: 901, loss=0.009
Epoch: 97, i: 1001, loss=0.009
Epoch: 97, i: 1101, loss=0.008
Epoch: 98, i: 1, loss=0.000
Epoch: 98, i: 101, loss=0.003
Epoch: 98, i: 201, loss=0.002
Epoch: 98, i: 301, loss=0.012
Epoch: 98, i: 401, loss=0.011
Epoch: 98, i: 501, loss=0.010
Epoch: 98, i: 601, loss=0.012
Epoch: 98, i: 701, loss=0.011
Epoch: 98, i: 801, loss=0.010
Epoch: 98, i: 901, loss=0.010
Epoch: 98, i: 1001, loss=0.009
Epoch: 98, i: 1101, loss=0.009
Epoch: 99, i: 1, loss=0.000
Epoch: 99, i: 101, loss=0.002
Epoch: 99, i: 201, loss=0.003
Epoch: 99, i: 301, loss=0.005
Epoch: 99, i: 401, loss=0.006
Epoch: 99, i: 501, loss=0.005
Epoch: 99, i: 601, loss=0.005
Epoch: 99, i: 701, loss=0.007
Epoch: 99, i: 801, loss=0.007
Epoch: 99, i: 901, loss=0.006
Epoch: 99, i: 1001, loss=0.005
Epoch: 99, i: 1101, loss=0.006
Epoch: 100, i: 1, loss=0.000
Epoch: 100, i: 101, loss=0.001
Epoch: 100, i: 201, loss=0.001
Epoch: 100, i: 301, loss=0.008
Epoch: 100, i: 401, loss=0.008
Epoch: 100, i: 501, loss=0.006
Epoch: 100, i: 601, loss=0.007
Epoch: 100, i: 701, loss=0.006
Epoch: 100, i: 801, loss=0.005
Epoch: 100, i: 901, loss=0.005
Epoch: 100, i: 1001, loss=0.005
Epoch: 100, i: 1101, loss=0.005
# Evaluate the trained model on the held-out test set with macro-averaged
# recall and precision (each class scored separately, then averaged equally).
metrics = MetricCollection([
    Recall(num_classes=len(labels_path), average='macro'),
    Precision(num_classes=len(labels_path), average='macro')
])
# Switch to inference mode so dropout / batch-norm layers behave
# deterministically; evaluating in training mode skews the statistics.
model.eval()
with torch.no_grad():
    for step, (x, y) in enumerate(test_data_loader):
        outputs = model(x)
        # Predicted class = argmax over the logits; `.data` is unnecessary
        # (and discouraged) inside a no_grad block.
        _, predicted = torch.max(outputs, 1)
        # Accumulate batch statistics into the metric collection.
        metrics(predicted, y)
results = metrics.compute()
print("Recall: {}".format(results["Recall"]))
print("Precision: {}".format(results["Precision"]))
print(results)
Accuracy: 0.9899476766586304
Precision: 0.9901649951934814
{'Accuracy': tensor(0.9899), 'Precision': tensor(0.9902)}
class shefine(nn.Module):
    """Adapt a pretrained RGB backbone to single-channel input.

    A trainable 1->3 channel convolution expands grayscale images to the
    three channels the backbone expects; the backbone's final layer is
    dropped and replaced with a fresh linear head sized to the label set.
    Backbone weights are frozen, so only the adapter conv and the new
    classifier receive gradients.
    """

    def __init__(self, original_model):
        super().__init__()
        # Channel adapter: 1-channel in, 3-channel out, spatial size preserved.
        self.conv1 = nn.Conv2d(1, 3, kernel_size=3, stride=1, padding=1, bias=False)
        # Every backbone layer except its original classification head.
        self.features = nn.Sequential(*list(original_model.children())[:-1])
        # New head: 2048-dim feature vector -> one logit per label.
        self.classifier = nn.Sequential(nn.Linear(2048, len(labels_path)))
        # Freeze the backbone so it acts as a fixed feature extractor.
        for frozen_param in self.features.parameters():
            frozen_param.requires_grad = False

    def forward(self, x):
        adapted = self.conv1(x)
        feats = self.features(adapted)
        flattened = feats.view(feats.size(0), -1)
        return self.classifier(flattened)
pip install ipywidgets
Collecting ipywidgets
Downloading ipywidgets-7.6.5-py2.py3-none-any.whl (121 kB)
|████████████████████████████████| 121 kB 20.4 MB/s
Requirement already satisfied: ipykernel>=4.5.1 in /shared-libs/python3.7/py-core/lib/python3.7/site-packages (from ipywidgets) (5.5.5)
Requirement already satisfied: ipython-genutils~=0.2.0 in /shared-libs/python3.7/py-core/lib/python3.7/site-packages (from ipywidgets) (0.2.0)
Requirement already satisfied: ipython>=4.0.0 in /shared-libs/python3.7/py-core/lib/python3.7/site-packages (from ipywidgets) (7.28.0)
Collecting widgetsnbextension~=3.5.0
Downloading widgetsnbextension-3.5.1-py2.py3-none-any.whl (2.2 MB)
|████████████████████████████████| 2.2 MB 38.8 MB/s
Requirement already satisfied: nbformat>=4.2.0 in /shared-libs/python3.7/py-core/lib/python3.7/site-packages (from ipywidgets) (5.1.3)
Requirement already satisfied: traitlets>=4.3.1 in /shared-libs/python3.7/py-core/lib/python3.7/site-packages (from ipywidgets) (4.3.3)
Collecting jupyterlab-widgets>=1.0.0
Downloading jupyterlab_widgets-1.0.2-py3-none-any.whl (243 kB)
|████████████████████████████████| 243 kB 31.1 MB/s
Requirement already satisfied: tornado>=4.2 in /shared-libs/python3.7/py-core/lib/python3.7/site-packages (from ipykernel>=4.5.1->ipywidgets) (6.1)
Requirement already satisfied: jupyter-client in /shared-libs/python3.7/py-core/lib/python3.7/site-packages (from ipykernel>=4.5.1->ipywidgets) (6.1.12)
Requirement already satisfied: setuptools>=18.5 in /root/venv/lib/python3.7/site-packages (from ipython>=4.0.0->ipywidgets) (58.1.0)
Requirement already satisfied: pygments in /shared-libs/python3.7/py-core/lib/python3.7/site-packages (from ipython>=4.0.0->ipywidgets) (2.10.0)
Requirement already satisfied: decorator in /shared-libs/python3.7/py-core/lib/python3.7/site-packages (from ipython>=4.0.0->ipywidgets) (5.1.0)
Requirement already satisfied: backcall in /shared-libs/python3.7/py-core/lib/python3.7/site-packages (from ipython>=4.0.0->ipywidgets) (0.2.0)
Requirement already satisfied: matplotlib-inline in /shared-libs/python3.7/py-core/lib/python3.7/site-packages (from ipython>=4.0.0->ipywidgets) (0.1.3)
Requirement already satisfied: pexpect>4.3 in /shared-libs/python3.7/py-core/lib/python3.7/site-packages (from ipython>=4.0.0->ipywidgets) (4.8.0)
Requirement already satisfied: jedi>=0.16 in /shared-libs/python3.7/py-core/lib/python3.7/site-packages (from ipython>=4.0.0->ipywidgets) (0.17.2)
Requirement already satisfied: prompt-toolkit!=3.0.0,!=3.0.1,<3.1.0,>=2.0.0 in /shared-libs/python3.7/py-core/lib/python3.7/site-packages (from ipython>=4.0.0->ipywidgets) (3.0.20)
Requirement already satisfied: pickleshare in /shared-libs/python3.7/py-core/lib/python3.7/site-packages (from ipython>=4.0.0->ipywidgets) (0.7.5)
Requirement already satisfied: parso<0.8.0,>=0.7.0 in /shared-libs/python3.7/py-core/lib/python3.7/site-packages (from jedi>=0.16->ipython>=4.0.0->ipywidgets) (0.7.1)
Requirement already satisfied: jsonschema!=2.5.0,>=2.4 in /shared-libs/python3.7/py-core/lib/python3.7/site-packages (from nbformat>=4.2.0->ipywidgets) (3.2.0)
Requirement already satisfied: jupyter-core in /shared-libs/python3.7/py-core/lib/python3.7/site-packages (from nbformat>=4.2.0->ipywidgets) (4.7.1)
Requirement already satisfied: pyrsistent>=0.14.0 in /shared-libs/python3.7/py-core/lib/python3.7/site-packages (from jsonschema!=2.5.0,>=2.4->nbformat>=4.2.0->ipywidgets) (0.18.0)
Requirement already satisfied: six>=1.11.0 in /shared-libs/python3.7/py-core/lib/python3.7/site-packages (from jsonschema!=2.5.0,>=2.4->nbformat>=4.2.0->ipywidgets) (1.16.0)
Requirement already satisfied: attrs>=17.4.0 in /shared-libs/python3.7/py-core/lib/python3.7/site-packages (from jsonschema!=2.5.0,>=2.4->nbformat>=4.2.0->ipywidgets) (21.2.0)
Requirement already satisfied: importlib-metadata in /shared-libs/python3.7/py-core/lib/python3.7/site-packages (from jsonschema!=2.5.0,>=2.4->nbformat>=4.2.0->ipywidgets) (4.8.1)
Requirement already satisfied: ptyprocess>=0.5 in /shared-libs/python3.7/py-core/lib/python3.7/site-packages (from pexpect>4.3->ipython>=4.0.0->ipywidgets) (0.7.0)
Requirement already satisfied: wcwidth in /shared-libs/python3.7/py-core/lib/python3.7/site-packages (from prompt-toolkit!=3.0.0,!=3.0.1,<3.1.0,>=2.0.0->ipython>=4.0.0->ipywidgets) (0.2.5)
Requirement already satisfied: notebook>=4.4.1 in /shared-libs/python3.7/py-core/lib/python3.7/site-packages (from widgetsnbextension~=3.5.0->ipywidgets) (6.3.0)
Requirement already satisfied: argon2-cffi in /shared-libs/python3.7/py-core/lib/python3.7/site-packages (from notebook>=4.4.1->widgetsnbextension~=3.5.0->ipywidgets) (21.1.0)
Requirement already satisfied: Send2Trash>=1.5.0 in /shared-libs/python3.7/py-core/lib/python3.7/site-packages (from notebook>=4.4.1->widgetsnbextension~=3.5.0->ipywidgets) (1.8.0)
Requirement already satisfied: jinja2 in /shared-libs/python3.7/py-core/lib/python3.7/site-packages (from notebook>=4.4.1->widgetsnbextension~=3.5.0->ipywidgets) (3.0.2)
Requirement already satisfied: nbconvert==6.0.7 in /shared-libs/python3.7/py-core/lib/python3.7/site-packages (from notebook>=4.4.1->widgetsnbextension~=3.5.0->ipywidgets) (6.0.7)
Requirement already satisfied: terminado>=0.8.3 in /shared-libs/python3.7/py-core/lib/python3.7/site-packages (from notebook>=4.4.1->widgetsnbextension~=3.5.0->ipywidgets) (0.12.1)
Requirement already satisfied: prometheus-client in /shared-libs/python3.7/py-core/lib/python3.7/site-packages (from notebook>=4.4.1->widgetsnbextension~=3.5.0->ipywidgets) (0.11.0)
Requirement already satisfied: pyzmq>=17 in /shared-libs/python3.7/py-core/lib/python3.7/site-packages (from notebook>=4.4.1->widgetsnbextension~=3.5.0->ipywidgets) (22.3.0)
Requirement already satisfied: python-dateutil>=2.1 in /shared-libs/python3.7/py-core/lib/python3.7/site-packages (from jupyter-client->ipykernel>=4.5.1->ipywidgets) (2.8.2)
Requirement already satisfied: nbclient<0.6.0,>=0.5.0 in /shared-libs/python3.7/py-core/lib/python3.7/site-packages (from nbconvert==6.0.7->notebook>=4.4.1->widgetsnbextension~=3.5.0->ipywidgets) (0.5.4)
Requirement already satisfied: mistune<2,>=0.8.1 in /shared-libs/python3.7/py-core/lib/python3.7/site-packages (from nbconvert==6.0.7->notebook>=4.4.1->widgetsnbextension~=3.5.0->ipywidgets) (0.8.4)
Requirement already satisfied: entrypoints>=0.2.2 in /shared-libs/python3.7/py-core/lib/python3.7/site-packages (from nbconvert==6.0.7->notebook>=4.4.1->widgetsnbextension~=3.5.0->ipywidgets) (0.3)
Requirement already satisfied: pandocfilters>=1.4.1 in /shared-libs/python3.7/py-core/lib/python3.7/site-packages (from nbconvert==6.0.7->notebook>=4.4.1->widgetsnbextension~=3.5.0->ipywidgets) (1.5.0)
Requirement already satisfied: testpath in /shared-libs/python3.7/py-core/lib/python3.7/site-packages (from nbconvert==6.0.7->notebook>=4.4.1->widgetsnbextension~=3.5.0->ipywidgets) (0.5.0)
Requirement already satisfied: bleach in /shared-libs/python3.7/py-core/lib/python3.7/site-packages (from nbconvert==6.0.7->notebook>=4.4.1->widgetsnbextension~=3.5.0->ipywidgets) (4.1.0)
Requirement already satisfied: jupyterlab-pygments in /shared-libs/python3.7/py-core/lib/python3.7/site-packages (from nbconvert==6.0.7->notebook>=4.4.1->widgetsnbextension~=3.5.0->ipywidgets) (0.1.2)
Requirement already satisfied: defusedxml in /shared-libs/python3.7/py-core/lib/python3.7/site-packages (from nbconvert==6.0.7->notebook>=4.4.1->widgetsnbextension~=3.5.0->ipywidgets) (0.7.1)
Requirement already satisfied: MarkupSafe>=2.0 in /shared-libs/python3.7/py-core/lib/python3.7/site-packages (from jinja2->notebook>=4.4.1->widgetsnbextension~=3.5.0->ipywidgets) (2.0.1)
Requirement already satisfied: nest-asyncio in /shared-libs/python3.7/py-core/lib/python3.7/site-packages (from nbclient<0.6.0,>=0.5.0->nbconvert==6.0.7->notebook>=4.4.1->widgetsnbextension~=3.5.0->ipywidgets) (1.5.1)
Requirement already satisfied: cffi>=1.0.0 in /shared-libs/python3.7/py-core/lib/python3.7/site-packages (from argon2-cffi->notebook>=4.4.1->widgetsnbextension~=3.5.0->ipywidgets) (1.14.6)
Requirement already satisfied: pycparser in /shared-libs/python3.7/py-core/lib/python3.7/site-packages (from cffi>=1.0.0->argon2-cffi->notebook>=4.4.1->widgetsnbextension~=3.5.0->ipywidgets) (2.20)
Requirement already satisfied: packaging in /shared-libs/python3.7/py-core/lib/python3.7/site-packages (from bleach->nbconvert==6.0.7->notebook>=4.4.1->widgetsnbextension~=3.5.0->ipywidgets) (21.0)
Requirement already satisfied: webencodings in /shared-libs/python3.7/py-core/lib/python3.7/site-packages (from bleach->nbconvert==6.0.7->notebook>=4.4.1->widgetsnbextension~=3.5.0->ipywidgets) (0.5.1)
Requirement already satisfied: typing-extensions>=3.6.4 in /shared-libs/python3.7/py-core/lib/python3.7/site-packages (from importlib-metadata->jsonschema!=2.5.0,>=2.4->nbformat>=4.2.0->ipywidgets) (3.10.0.2)
Requirement already satisfied: zipp>=0.5 in /shared-libs/python3.7/py-core/lib/python3.7/site-packages (from importlib-metadata->jsonschema!=2.5.0,>=2.4->nbformat>=4.2.0->ipywidgets) (3.6.0)
Requirement already satisfied: pyparsing>=2.0.2 in /shared-libs/python3.7/py-core/lib/python3.7/site-packages (from packaging->bleach->nbconvert==6.0.7->notebook>=4.4.1->widgetsnbextension~=3.5.0->ipywidgets) (2.4.7)
Installing collected packages: widgetsnbextension, jupyterlab-widgets, ipywidgets
Successfully installed ipywidgets-7.6.5 jupyterlab-widgets-1.0.2 widgetsnbextension-3.5.1
WARNING: Running pip as the 'root' user can result in broken permissions and conflicting behaviour with the system package manager. It is recommended to use a virtual environment instead: https://pip.pypa.io/warnings/venv
WARNING: You are using pip version 21.2.4; however, version 21.3 is available.
You should consider upgrading via the '/usr/local/bin/python -m pip install --upgrade pip' command.
Note: you may need to restart the kernel to use updated packages.
# Download an ImageNet-pretrained ResNet-50 to serve as the frozen backbone.
original = torchvision.models.resnet50(pretrained=True)
# Wrap it with the grayscale-input adapter and fresh classification head.
model = shefine(original)
Downloading: "https://download.pytorch.org/models/resnet50-0676ba61.pth" to /root/.cache/torch/hub/checkpoints/resnet50-0676ba61.pth
Execution error
ImportError: IProgress not found. Please update jupyter and ipywidgets. See https://ipywidgets.readthedocs.io/en/stable/user_install.html
# Train the model with Adam; only unfrozen parameters (conv1 + classifier)
# actually receive meaningful updates since the backbone requires no grad.
optimizer = torch.optim.Adam(model.parameters(), lr=0.003)
entropy = nn.CrossEntropyLoss()  # multi-class classification loss
for epoch in range(100):
    losses = []  # per-batch losses, reset each epoch for the running average
    model.train()  # enable training-mode behavior (e.g. BatchNorm batch stats)
    for i, (x, y) in enumerate(train_data_loader):
        optimizer.zero_grad()  # clear gradients from the previous step
        outputs = model(x)
        loss = entropy(outputs, y)
        loss.backward()
        optimizer.step()
        losses.append(loss.item())
        if i % 100 == 0:
            # Every 100 batches, report the running mean loss so far this epoch.
            print("Epoch: %d, i: %4d, loss=%.3f" % (epoch + 1, i + 1, np.average(losses)))
    # Mean training loss of the finished epoch (overwritten every epoch;
    # after the loop it holds the final epoch's value).
    train_loss = np.average(losses)
Epoch: 17, i: 901, loss=0.003
Epoch: 17, i: 1001, loss=0.004
Epoch: 17, i: 1101, loss=0.004
Epoch: 18, i: 1, loss=0.018
Epoch: 18, i: 101, loss=0.007
Epoch: 18, i: 201, loss=0.006
Epoch: 18, i: 301, loss=0.004
Epoch: 18, i: 401, loss=0.003
Epoch: 18, i: 501, loss=0.004
Epoch: 18, i: 601, loss=0.003
Epoch: 18, i: 701, loss=0.003
Epoch: 18, i: 801, loss=0.003
Epoch: 18, i: 901, loss=0.002
Epoch: 18, i: 1001, loss=0.002
Epoch: 18, i: 1101, loss=0.005
Epoch: 19, i: 1, loss=0.000
Epoch: 19, i: 101, loss=0.004
Epoch: 19, i: 201, loss=0.004
Epoch: 19, i: 301, loss=0.009
Epoch: 19, i: 401, loss=0.007
Epoch: 19, i: 501, loss=0.007
Epoch: 19, i: 601, loss=0.006
Epoch: 19, i: 701, loss=0.006
Epoch: 19, i: 801, loss=0.006
Epoch: 19, i: 901, loss=0.005
Epoch: 19, i: 1001, loss=0.006
Epoch: 19, i: 1101, loss=0.006
Epoch: 20, i: 1, loss=0.000
Epoch: 20, i: 101, loss=0.004
Epoch: 20, i: 201, loss=0.005
Epoch: 20, i: 301, loss=0.004
Epoch: 20, i: 401, loss=0.003
Epoch: 20, i: 501, loss=0.009
Epoch: 20, i: 601, loss=0.008
Epoch: 20, i: 701, loss=0.008
Epoch: 20, i: 801, loss=0.007
Epoch: 20, i: 901, loss=0.007
Epoch: 20, i: 1001, loss=0.007
Epoch: 20, i: 1101, loss=0.007
Epoch: 21, i: 1, loss=0.000
Epoch: 21, i: 101, loss=0.000
Epoch: 21, i: 201, loss=0.001
Epoch: 21, i: 301, loss=0.002
Epoch: 21, i: 401, loss=0.003
Epoch: 21, i: 501, loss=0.002
Epoch: 21, i: 601, loss=0.002
Epoch: 21, i: 701, loss=0.002
Epoch: 21, i: 801, loss=0.002
Epoch: 21, i: 901, loss=0.002
Epoch: 21, i: 1001, loss=0.001
Epoch: 21, i: 1101, loss=0.002
Epoch: 22, i: 1, loss=0.000
Epoch: 22, i: 101, loss=0.018
Epoch: 22, i: 201, loss=0.013
Epoch: 22, i: 301, loss=0.010
Epoch: 22, i: 401, loss=0.008
Epoch: 22, i: 501, loss=0.006
Epoch: 22, i: 601, loss=0.008
Epoch: 22, i: 701, loss=0.008
Epoch: 22, i: 801, loss=0.007
Epoch: 22, i: 901, loss=0.007
Epoch: 22, i: 1001, loss=0.006
Epoch: 22, i: 1101, loss=0.005
Epoch: 23, i: 1, loss=0.000
Epoch: 23, i: 101, loss=0.039
Epoch: 23, i: 201, loss=0.021
Epoch: 23, i: 301, loss=0.015
Epoch: 23, i: 401, loss=0.011
Epoch: 23, i: 501, loss=0.011
Epoch: 23, i: 601, loss=0.011
Epoch: 23, i: 701, loss=0.010
Epoch: 23, i: 801, loss=0.008
Epoch: 23, i: 901, loss=0.008
Epoch: 23, i: 1001, loss=0.008
Epoch: 23, i: 1101, loss=0.009
Epoch: 24, i: 1, loss=0.000
Epoch: 24, i: 101, loss=0.016
Epoch: 24, i: 201, loss=0.010
Epoch: 24, i: 301, loss=0.009
Epoch: 24, i: 401, loss=0.008
Epoch: 24, i: 501, loss=0.008
Epoch: 24, i: 601, loss=0.007
Epoch: 24, i: 701, loss=0.006
Epoch: 24, i: 801, loss=0.005
Epoch: 24, i: 901, loss=0.005
Epoch: 24, i: 1001, loss=0.004
Epoch: 24, i: 1101, loss=0.004
Epoch: 25, i: 1, loss=0.000
Epoch: 25, i: 101, loss=0.012
Epoch: 25, i: 201, loss=0.006
Epoch: 25, i: 301, loss=0.008
Epoch: 25, i: 401, loss=0.007
Epoch: 25, i: 501, loss=0.009
Epoch: 25, i: 601, loss=0.009
Epoch: 25, i: 701, loss=0.008
Epoch: 25, i: 801, loss=0.007
Epoch: 25, i: 901, loss=0.007
Epoch: 25, i: 1001, loss=0.006
Epoch: 25, i: 1101, loss=0.006
Epoch: 26, i: 1, loss=0.000
Epoch: 26, i: 101, loss=0.001
Epoch: 26, i: 201, loss=0.004
Epoch: 26, i: 301, loss=0.003
Epoch: 26, i: 401, loss=0.002
Epoch: 26, i: 501, loss=0.003
Epoch: 26, i: 601, loss=0.004
Epoch: 26, i: 701, loss=0.004
Epoch: 26, i: 801, loss=0.004
Epoch: 26, i: 901, loss=0.005
Epoch: 26, i: 1001, loss=0.006
Epoch: 26, i: 1101, loss=0.007
Epoch: 27, i: 1, loss=0.000
Epoch: 27, i: 101, loss=0.005
Epoch: 27, i: 201, loss=0.003
Epoch: 27, i: 301, loss=0.002
Epoch: 27, i: 401, loss=0.002
Epoch: 27, i: 501, loss=0.004
Epoch: 27, i: 601, loss=0.004
Epoch: 27, i: 701, loss=0.006
Epoch: 27, i: 801, loss=0.006
Epoch: 27, i: 901, loss=0.010
Epoch: 27, i: 1001, loss=0.010
Epoch: 27, i: 1101, loss=0.009
Epoch: 28, i: 1, loss=0.001
Epoch: 28, i: 101, loss=0.001
Epoch: 28, i: 201, loss=0.008
Epoch: 28, i: 301, loss=0.008
Epoch: 28, i: 401, loss=0.006
Epoch: 28, i: 501, loss=0.006
Epoch: 28, i: 601, loss=0.005
Epoch: 28, i: 701, loss=0.005
Epoch: 28, i: 801, loss=0.004
Epoch: 28, i: 901, loss=0.005
Epoch: 28, i: 1001, loss=0.006
Epoch: 28, i: 1101, loss=0.006
Epoch: 29, i: 1, loss=0.000
Epoch: 29, i: 101, loss=0.013
Epoch: 29, i: 201, loss=0.010
Epoch: 29, i: 301, loss=0.007
Epoch: 29, i: 401, loss=0.005
Epoch: 29, i: 501, loss=0.004
Epoch: 29, i: 601, loss=0.004
Epoch: 29, i: 701, loss=0.004
Epoch: 29, i: 801, loss=0.003
Epoch: 29, i: 901, loss=0.003
Epoch: 29, i: 1001, loss=0.003
Epoch: 29, i: 1101, loss=0.003
Epoch: 30, i: 1, loss=0.020
Epoch: 30, i: 101, loss=0.002
Epoch: 30, i: 201, loss=0.003
Epoch: 30, i: 301, loss=0.003
Epoch: 30, i: 401, loss=0.003
Epoch: 30, i: 501, loss=0.007
Epoch: 30, i: 601, loss=0.007
Epoch: 30, i: 701, loss=0.006
Epoch: 30, i: 801, loss=0.006
Epoch: 30, i: 901, loss=0.005
Epoch: 30, i: 1001, loss=0.005
Epoch: 30, i: 1101, loss=0.005
Epoch: 31, i: 1, loss=0.000
Epoch: 31, i: 101, loss=0.018
Epoch: 31, i: 201, loss=0.010
Epoch: 31, i: 301, loss=0.011
Epoch: 31, i: 401, loss=0.013
Epoch: 31, i: 501, loss=0.011
Epoch: 31, i: 601, loss=0.013
Epoch: 31, i: 701, loss=0.012
Epoch: 31, i: 801, loss=0.012
Epoch: 31, i: 901, loss=0.010
Epoch: 31, i: 1001, loss=0.010
Epoch: 31, i: 1101, loss=0.009
Epoch: 32, i: 1, loss=0.000
Epoch: 32, i: 101, loss=0.008
Epoch: 32, i: 201, loss=0.006
Epoch: 32, i: 301, loss=0.004
Epoch: 32, i: 401, loss=0.004
Epoch: 32, i: 501, loss=0.003
Epoch: 32, i: 601, loss=0.003
Epoch: 32, i: 701, loss=0.003
Epoch: 32, i: 801, loss=0.002
Epoch: 32, i: 901, loss=0.002
Epoch: 32, i: 1001, loss=0.002
Epoch: 32, i: 1101, loss=0.002
Epoch: 33, i: 1, loss=0.000
Epoch: 33, i: 101, loss=0.001
Epoch: 33, i: 201, loss=0.003
Epoch: 33, i: 301, loss=0.004
Epoch: 33, i: 401, loss=0.003
Epoch: 33, i: 501, loss=0.003
Epoch: 33, i: 601, loss=0.002
Epoch: 33, i: 701, loss=0.002
Epoch: 33, i: 801, loss=0.003
Epoch: 33, i: 901, loss=0.003
Epoch: 33, i: 1001, loss=0.005
Epoch: 33, i: 1101, loss=0.005
Epoch: 34, i: 1, loss=0.000
Epoch: 34, i: 101, loss=0.011
Epoch: 34, i: 201, loss=0.007
Epoch: 34, i: 301, loss=0.005
Epoch: 34, i: 401, loss=0.006
Epoch: 34, i: 501, loss=0.008
Epoch: 34, i: 601, loss=0.007
Epoch: 34, i: 701, loss=0.007
Epoch: 34, i: 801, loss=0.007
Epoch: 34, i: 901, loss=0.009
Epoch: 34, i: 1001, loss=0.009
Epoch: 34, i: 1101, loss=0.009
Epoch: 35, i: 1, loss=0.000
Epoch: 35, i: 101, loss=0.008
Epoch: 35, i: 201, loss=0.010
Epoch: 35, i: 301, loss=0.009
Epoch: 35, i: 401, loss=0.007
Epoch: 35, i: 501, loss=0.006
Epoch: 35, i: 601, loss=0.005
Epoch: 35, i: 701, loss=0.005
Epoch: 35, i: 801, loss=0.005
Epoch: 35, i: 901, loss=0.005
Epoch: 35, i: 1001, loss=0.005
Epoch: 35, i: 1101, loss=0.004
Epoch: 36, i: 1, loss=0.000
Epoch: 36, i: 101, loss=0.000
Epoch: 36, i: 201, loss=0.000
Epoch: 36, i: 301, loss=0.000
Epoch: 36, i: 401, loss=0.001
Epoch: 36, i: 501, loss=0.004
Epoch: 36, i: 601, loss=0.004
Epoch: 36, i: 701, loss=0.008
Epoch: 36, i: 801, loss=0.007
Epoch: 36, i: 901, loss=0.006
Epoch: 36, i: 1001, loss=0.006
Epoch: 36, i: 1101, loss=0.005
Epoch: 37, i: 1, loss=0.000
Epoch: 37, i: 101, loss=0.015
Epoch: 37, i: 201, loss=0.010
Epoch: 37, i: 301, loss=0.007
Epoch: 37, i: 401, loss=0.006
Epoch: 37, i: 501, loss=0.005
Epoch: 37, i: 601, loss=0.004
Epoch: 37, i: 701, loss=0.004
Epoch: 37, i: 801, loss=0.003
Epoch: 37, i: 901, loss=0.006
Epoch: 37, i: 1001, loss=0.006
Epoch: 37, i: 1101, loss=0.006
Epoch: 38, i: 1, loss=0.000
Epoch: 38, i: 101, loss=0.002
Epoch: 38, i: 201, loss=0.002
Epoch: 38, i: 301, loss=0.002
Epoch: 38, i: 401, loss=0.003
Epoch: 38, i: 501, loss=0.007
Epoch: 38, i: 601, loss=0.006
Epoch: 38, i: 701, loss=0.005
Epoch: 38, i: 801, loss=0.006
Epoch: 38, i: 901, loss=0.007
Epoch: 38, i: 1001, loss=0.007
Epoch: 38, i: 1101, loss=0.006
Epoch: 39, i: 1, loss=0.000
Epoch: 39, i: 101, loss=0.008
Epoch: 39, i: 201, loss=0.007
Epoch: 39, i: 301, loss=0.006
Epoch: 39, i: 401, loss=0.005
Epoch: 39, i: 501, loss=0.004
Epoch: 39, i: 601, loss=0.005
Epoch: 39, i: 701, loss=0.005
Epoch: 39, i: 801, loss=0.004
Epoch: 39, i: 901, loss=0.004
Epoch: 39, i: 1001, loss=0.004
Epoch: 39, i: 1101, loss=0.003
Epoch: 40, i: 1, loss=0.000
Epoch: 40, i: 101, loss=0.002
Epoch: 40, i: 201, loss=0.002
Epoch: 40, i: 301, loss=0.002
Epoch: 40, i: 401, loss=0.002
Epoch: 40, i: 501, loss=0.002
Epoch: 40, i: 601, loss=0.002
Epoch: 40, i: 701, loss=0.002
Epoch: 40, i: 801, loss=0.002
Epoch: 40, i: 901, loss=0.002
Epoch: 40, i: 1001, loss=0.002
Epoch: 40, i: 1101, loss=0.002
Epoch: 41, i: 1, loss=0.000
Epoch: 41, i: 101, loss=0.011
Epoch: 41, i: 201, loss=0.017
Epoch: 41, i: 301, loss=0.024
Epoch: 41, i: 401, loss=0.018
Epoch: 41, i: 501, loss=0.015
Epoch: 41, i: 601, loss=0.013
Epoch: 41, i: 701, loss=0.012
Epoch: 41, i: 801, loss=0.010
Epoch: 41, i: 901, loss=0.010
Epoch: 41, i: 1001, loss=0.011
Epoch: 41, i: 1101, loss=0.010
Epoch: 42, i: 1, loss=0.000
Epoch: 42, i: 101, loss=0.000
Epoch: 42, i: 201, loss=0.001
Epoch: 42, i: 301, loss=0.001
Epoch: 42, i: 401, loss=0.001
Epoch: 42, i: 501, loss=0.001
Epoch: 42, i: 601, loss=0.001
Epoch: 42, i: 701, loss=0.000
Epoch: 42, i: 801, loss=0.000
Epoch: 42, i: 901, loss=0.000
Epoch: 42, i: 1001, loss=0.000
Epoch: 42, i: 1101, loss=0.000
Epoch: 43, i: 1, loss=0.000
Epoch: 43, i: 101, loss=0.001
Epoch: 43, i: 201, loss=0.004
Epoch: 43, i: 301, loss=0.003
Epoch: 43, i: 401, loss=0.011
Epoch: 43, i: 501, loss=0.012
Epoch: 43, i: 601, loss=0.010
Epoch: 43, i: 701, loss=0.009
Epoch: 43, i: 801, loss=0.008
Epoch: 43, i: 901, loss=0.007
Epoch: 43, i: 1001, loss=0.007
Epoch: 43, i: 1101, loss=0.006
Epoch: 44, i: 1, loss=0.000
Epoch: 44, i: 101, loss=0.000
Epoch: 44, i: 201, loss=0.000
Epoch: 44, i: 301, loss=0.005
Epoch: 44, i: 401, loss=0.006
Epoch: 44, i: 501, loss=0.008
Epoch: 44, i: 601, loss=0.007
Epoch: 44, i: 701, loss=0.007
Epoch: 44, i: 801, loss=0.006
Epoch: 44, i: 901, loss=0.006
Epoch: 44, i: 1001, loss=0.005
Epoch: 44, i: 1101, loss=0.005
Epoch: 45, i: 1, loss=0.000
Epoch: 45, i: 101, loss=0.022
Epoch: 45, i: 201, loss=0.017
Epoch: 45, i: 301, loss=0.015
Epoch: 45, i: 401, loss=0.013
Epoch: 45, i: 501, loss=0.010
Epoch: 45, i: 601, loss=0.010
Epoch: 45, i: 701, loss=0.010
Epoch: 45, i: 801, loss=0.010
Epoch: 45, i: 901, loss=0.010
Epoch: 45, i: 1001, loss=0.010
Epoch: 45, i: 1101, loss=0.009
Epoch: 46, i: 1, loss=0.000
Epoch: 46, i: 101, loss=0.002
Epoch: 46, i: 201, loss=0.001
Epoch: 46, i: 301, loss=0.001
Epoch: 46, i: 401, loss=0.002
Epoch: 46, i: 501, loss=0.002
Epoch: 46, i: 601, loss=0.003
Epoch: 46, i: 701, loss=0.004
Epoch: 46, i: 801, loss=0.004
Epoch: 46, i: 901, loss=0.005
Epoch: 46, i: 1001, loss=0.006
Epoch: 46, i: 1101, loss=0.005
Epoch: 47, i: 1, loss=0.000
Epoch: 47, i: 101, loss=0.018
Epoch: 47, i: 201, loss=0.009
Epoch: 47, i: 301, loss=0.007
Epoch: 47, i: 401, loss=0.006
Epoch: 47, i: 501, loss=0.005
Epoch: 47, i: 601, loss=0.004
Epoch: 47, i: 701, loss=0.005
Epoch: 47, i: 801, loss=0.005
Epoch: 47, i: 901, loss=0.008
Epoch: 47, i: 1001, loss=0.009
Epoch: 47, i: 1101, loss=0.009
Epoch: 48, i: 1, loss=0.000
Epoch: 48, i: 101, loss=0.012
Epoch: 48, i: 201, loss=0.009
Epoch: 48, i: 301, loss=0.006
Epoch: 48, i: 401, loss=0.005
Epoch: 48, i: 501, loss=0.004
Epoch: 48, i: 601, loss=0.006
Epoch: 48, i: 701, loss=0.009
Epoch: 48, i: 801, loss=0.008
Epoch: 48, i: 901, loss=0.008
Epoch: 48, i: 1001, loss=0.008
Epoch: 48, i: 1101, loss=0.007
Epoch: 49, i: 1, loss=0.000
Epoch: 49, i: 101, loss=0.010
Epoch: 49, i: 201, loss=0.006
Epoch: 49, i: 301, loss=0.004
Epoch: 49, i: 401, loss=0.003
Epoch: 49, i: 501, loss=0.003
Epoch: 49, i: 601, loss=0.002
Epoch: 49, i: 701, loss=0.002
Epoch: 49, i: 801, loss=0.002
Epoch: 49, i: 901, loss=0.002
Epoch: 49, i: 1001, loss=0.002
Epoch: 49, i: 1101, loss=0.002
Epoch: 50, i: 1, loss=0.000
Epoch: 50, i: 101, loss=0.006
Epoch: 50, i: 201, loss=0.003
Epoch: 50, i: 301, loss=0.004
Epoch: 50, i: 401, loss=0.010
Epoch: 50, i: 501, loss=0.008
Epoch: 50, i: 601, loss=0.007
Epoch: 50, i: 701, loss=0.006
Epoch: 50, i: 801, loss=0.005
Epoch: 50, i: 901, loss=0.006
Epoch: 50, i: 1001, loss=0.006
Epoch: 50, i: 1101, loss=0.006
Epoch: 51, i: 1, loss=0.000
Epoch: 51, i: 101, loss=0.001
Epoch: 51, i: 201, loss=0.001
Epoch: 51, i: 301, loss=0.001
Epoch: 51, i: 401, loss=0.001
Epoch: 51, i: 501, loss=0.001
Epoch: 51, i: 601, loss=0.001
Epoch: 51, i: 701, loss=0.002
Epoch: 51, i: 801, loss=0.001
Epoch: 51, i: 901, loss=0.001
Epoch: 51, i: 1001, loss=0.001
Epoch: 51, i: 1101, loss=0.001
Epoch: 52, i: 1, loss=0.000
Epoch: 52, i: 101, loss=0.015
Epoch: 52, i: 201, loss=0.009
Epoch: 52, i: 301, loss=0.006
Epoch: 52, i: 401, loss=0.004
Epoch: 52, i: 501, loss=0.004
Epoch: 52, i: 601, loss=0.010
Epoch: 52, i: 701, loss=0.009
Epoch: 52, i: 801, loss=0.009
Epoch: 52, i: 901, loss=0.010
Epoch: 52, i: 1001, loss=0.009
Epoch: 52, i: 1101, loss=0.008
Epoch: 53, i: 1, loss=0.000
Epoch: 53, i: 101, loss=0.009
Epoch: 53, i: 201, loss=0.007
Epoch: 53, i: 301, loss=0.008
Epoch: 53, i: 401, loss=0.012
Epoch: 53, i: 501, loss=0.010
Epoch: 53, i: 601, loss=0.008
Epoch: 53, i: 701, loss=0.007
Epoch: 53, i: 801, loss=0.006
Epoch: 53, i: 901, loss=0.006
Epoch: 53, i: 1001, loss=0.005
Epoch: 53, i: 1101, loss=0.005
Epoch: 54, i: 1, loss=0.000
Epoch: 54, i: 101, loss=0.001
Epoch: 54, i: 201, loss=0.015
Epoch: 54, i: 301, loss=0.015
Epoch: 54, i: 401, loss=0.013
Epoch: 54, i: 501, loss=0.011
Epoch: 54, i: 601, loss=0.009
Epoch: 54, i: 701, loss=0.010
Epoch: 54, i: 801, loss=0.009
Epoch: 54, i: 901, loss=0.008
Epoch: 54, i: 1001, loss=0.008
Epoch: 54, i: 1101, loss=0.008
Epoch: 55, i: 1, loss=0.000
Epoch: 55, i: 101, loss=0.003
Epoch: 55, i: 201, loss=0.006
Epoch: 55, i: 301, loss=0.005
Epoch: 55, i: 401, loss=0.003
Epoch: 55, i: 501, loss=0.004
Epoch: 55, i: 601, loss=0.003
Epoch: 55, i: 701, loss=0.003
Epoch: 55, i: 801, loss=0.003
Epoch: 55, i: 901, loss=0.003
Epoch: 55, i: 1001, loss=0.003
Epoch: 55, i: 1101, loss=0.004
Epoch: 56, i: 1, loss=0.275
Epoch: 56, i: 101, loss=0.023
Epoch: 56, i: 201, loss=0.012
Epoch: 56, i: 301, loss=0.008
Epoch: 56, i: 401, loss=0.006
Epoch: 56, i: 501, loss=0.006
Epoch: 56, i: 601, loss=0.006
Epoch: 56, i: 701, loss=0.006
Epoch: 56, i: 801, loss=0.007
Epoch: 56, i: 901, loss=0.006
Epoch: 56, i: 1001, loss=0.006
Epoch: 56, i: 1101, loss=0.006
Epoch: 57, i: 1, loss=0.000
Epoch: 57, i: 101, loss=0.008
Epoch: 57, i: 201, loss=0.010
Epoch: 57, i: 301, loss=0.007
Epoch: 57, i: 401, loss=0.011
Epoch: 57, i: 501, loss=0.009
Epoch: 57, i: 601, loss=0.010
Epoch: 57, i: 701, loss=0.009
Epoch: 57, i: 801, loss=0.008
Epoch: 57, i: 901, loss=0.008
Epoch: 57, i: 1001, loss=0.009
Epoch: 57, i: 1101, loss=0.008
Epoch: 58, i: 1, loss=0.000
Epoch: 58, i: 101, loss=0.000
Epoch: 58, i: 201, loss=0.000
Epoch: 58, i: 301, loss=0.007
Epoch: 58, i: 401, loss=0.006
Epoch: 58, i: 501, loss=0.005
Epoch: 58, i: 601, loss=0.009
Epoch: 58, i: 701, loss=0.008
Epoch: 58, i: 801, loss=0.007
Epoch: 58, i: 901, loss=0.006
Epoch: 58, i: 1001, loss=0.007
Epoch: 58, i: 1101, loss=0.007
Epoch: 59, i: 1, loss=0.000
Epoch: 59, i: 101, loss=0.000
Epoch: 59, i: 201, loss=0.004
Epoch: 59, i: 301, loss=0.003
Epoch: 59, i: 401, loss=0.003
Epoch: 59, i: 501, loss=0.002
Epoch: 59, i: 601, loss=0.003
Epoch: 59, i: 701, loss=0.003
Epoch: 59, i: 801, loss=0.004
Epoch: 59, i: 901, loss=0.004
Epoch: 59, i: 1001, loss=0.004
Epoch: 59, i: 1101, loss=0.003
Epoch: 60, i: 1, loss=0.000
Epoch: 60, i: 101, loss=0.001
Epoch: 60, i: 201, loss=0.001
Epoch: 60, i: 301, loss=0.001
Epoch: 60, i: 401, loss=0.008
Epoch: 60, i: 501, loss=0.009
Epoch: 60, i: 601, loss=0.013
Epoch: 60, i: 701, loss=0.012
Epoch: 60, i: 801, loss=0.010
Epoch: 60, i: 901, loss=0.009
Epoch: 60, i: 1001, loss=0.009
Epoch: 60, i: 1101, loss=0.010
Epoch: 61, i: 1, loss=0.000
Epoch: 61, i: 101, loss=0.005
Epoch: 61, i: 201, loss=0.006
Epoch: 61, i: 301, loss=0.004
Epoch: 61, i: 401, loss=0.003
Epoch: 61, i: 501, loss=0.003
Epoch: 61, i: 601, loss=0.004
Epoch: 61, i: 701, loss=0.004
Epoch: 61, i: 801, loss=0.003
Epoch: 61, i: 901, loss=0.003
Epoch: 61, i: 1001, loss=0.003
Epoch: 61, i: 1101, loss=0.003
Epoch: 62, i: 1, loss=0.000
Epoch: 62, i: 101, loss=0.002
Epoch: 62, i: 201, loss=0.002
Epoch: 62, i: 301, loss=0.006
Epoch: 62, i: 401, loss=0.005
Epoch: 62, i: 501, loss=0.004
Epoch: 62, i: 601, loss=0.004
Epoch: 62, i: 701, loss=0.004
Epoch: 62, i: 801, loss=0.003
Epoch: 62, i: 901, loss=0.003
Epoch: 62, i: 1001, loss=0.008
Epoch: 62, i: 1101, loss=0.007
Epoch: 63, i: 1, loss=0.000
Epoch: 63, i: 101, loss=0.005
Epoch: 63, i: 201, loss=0.007
Epoch: 63, i: 301, loss=0.007
Epoch: 63, i: 401, loss=0.008
Epoch: 63, i: 501, loss=0.007
Epoch: 63, i: 601, loss=0.005
Epoch: 63, i: 701, loss=0.005
Epoch: 63, i: 801, loss=0.004
Epoch: 63, i: 901, loss=0.004
Epoch: 63, i: 1001, loss=0.004
Epoch: 63, i: 1101, loss=0.003
Epoch: 64, i: 1, loss=0.000
Epoch: 64, i: 101, loss=0.008
Epoch: 64, i: 201, loss=0.008
Epoch: 64, i: 301, loss=0.006
Epoch: 64, i: 401, loss=0.011
Epoch: 64, i: 501, loss=0.009
Epoch: 64, i: 601, loss=0.008
Epoch: 64, i: 701, loss=0.007
Epoch: 64, i: 801, loss=0.007
Epoch: 64, i: 901, loss=0.009
Epoch: 64, i: 1001, loss=0.008
Epoch: 64, i: 1101, loss=0.009
Epoch: 65, i: 1, loss=0.000
Epoch: 65, i: 101, loss=0.001
Epoch: 65, i: 201, loss=0.001
Epoch: 65, i: 301, loss=0.001
Epoch: 65, i: 401, loss=0.001
Epoch: 65, i: 501, loss=0.001
Epoch: 65, i: 601, loss=0.004
Epoch: 65, i: 701, loss=0.004
Epoch: 65, i: 801, loss=0.004
Epoch: 65, i: 901, loss=0.003
Epoch: 65, i: 1001, loss=0.003
Epoch: 65, i: 1101, loss=0.003
Epoch: 66, i: 1, loss=0.000
Epoch: 66, i: 101, loss=0.024
Epoch: 66, i: 201, loss=0.012
Epoch: 66, i: 301, loss=0.008
Epoch: 66, i: 401, loss=0.006
Epoch: 66, i: 501, loss=0.005
Epoch: 66, i: 601, loss=0.005
Epoch: 66, i: 701, loss=0.004
Epoch: 66, i: 801, loss=0.005
Epoch: 66, i: 901, loss=0.008
Epoch: 66, i: 1001, loss=0.007
Epoch: 66, i: 1101, loss=0.008
Epoch: 67, i: 1, loss=0.000
Epoch: 67, i: 101, loss=0.027
Epoch: 67, i: 201, loss=0.014
Epoch: 67, i: 301, loss=0.018
Epoch: 67, i: 401, loss=0.015
Epoch: 67, i: 501, loss=0.013
Epoch: 67, i: 601, loss=0.012
Epoch: 67, i: 701, loss=0.010
Epoch: 67, i: 801, loss=0.009
Epoch: 67, i: 901, loss=0.009
Epoch: 67, i: 1001, loss=0.008
Epoch: 67, i: 1101, loss=0.009
Epoch: 68, i: 1, loss=0.000
Epoch: 68, i: 101, loss=0.007
Epoch: 68, i: 201, loss=0.004
Epoch: 68, i: 301, loss=0.005
Epoch: 68, i: 401, loss=0.007
Epoch: 68, i: 501, loss=0.006
Epoch: 68, i: 601, loss=0.007
Epoch: 68, i: 701, loss=0.006
Epoch: 68, i: 801, loss=0.005
Epoch: 68, i: 901, loss=0.004
Epoch: 68, i: 1001, loss=0.005
Epoch: 68, i: 1101, loss=0.004
Epoch: 69, i: 1, loss=0.000
Epoch: 69, i: 101, loss=0.009
Epoch: 69, i: 201, loss=0.009
Epoch: 69, i: 301, loss=0.006
Epoch: 69, i: 401, loss=0.005
Epoch: 69, i: 501, loss=0.005
Epoch: 69, i: 601, loss=0.005
Epoch: 69, i: 701, loss=0.005
Epoch: 69, i: 801, loss=0.004
Epoch: 69, i: 901, loss=0.005
Epoch: 69, i: 1001, loss=0.004
Epoch: 69, i: 1101, loss=0.004
Epoch: 70, i: 1, loss=0.000
Epoch: 70, i: 101, loss=0.007
Epoch: 70, i: 201, loss=0.004
Epoch: 70, i: 301, loss=0.003
Epoch: 70, i: 401, loss=0.002
Epoch: 70, i: 501, loss=0.002
Epoch: 70, i: 601, loss=0.002
Epoch: 70, i: 701, loss=0.001
Epoch: 70, i: 801, loss=0.001
Epoch: 70, i: 901, loss=0.002
Epoch: 70, i: 1001, loss=0.002
Epoch: 70, i: 1101, loss=0.004
Epoch: 71, i: 1, loss=0.000
Epoch: 71, i: 101, loss=0.000
Epoch: 71, i: 201, loss=0.001
Epoch: 71, i: 301, loss=0.003
Epoch: 71, i: 401, loss=0.002
Epoch: 71, i: 501, loss=0.002
Epoch: 71, i: 601, loss=0.005
Epoch: 71, i: 701, loss=0.006
Epoch: 71, i: 801, loss=0.005
Epoch: 71, i: 901, loss=0.007
Epoch: 71, i: 1001, loss=0.007
Epoch: 71, i: 1101, loss=0.007
Epoch: 72, i: 1, loss=0.000
Epoch: 72, i: 101, loss=0.002
Epoch: 72, i: 201, loss=0.001
Epoch: 72, i: 301, loss=0.002
Epoch: 72, i: 401, loss=0.002
Epoch: 72, i: 501, loss=0.002
Epoch: 72, i: 601, loss=0.002
Epoch: 72, i: 701, loss=0.002
Epoch: 72, i: 801, loss=0.002
Epoch: 72, i: 901, loss=0.003
Epoch: 72, i: 1001, loss=0.004
Epoch: 72, i: 1101, loss=0.003
Epoch: 73, i: 1, loss=0.000
Epoch: 73, i: 101, loss=0.000
Epoch: 73, i: 201, loss=0.000
Epoch: 73, i: 301, loss=0.007
Epoch: 73, i: 401, loss=0.005
Epoch: 73, i: 501, loss=0.004
Epoch: 73, i: 601, loss=0.004
Epoch: 73, i: 701, loss=0.003
Epoch: 73, i: 801, loss=0.004
Epoch: 73, i: 901, loss=0.003
Epoch: 73, i: 1001, loss=0.003
Epoch: 73, i: 1101, loss=0.003
Epoch: 74, i: 1, loss=0.000
Epoch: 74, i: 101, loss=0.000
Epoch: 74, i: 201, loss=0.000
Epoch: 74, i: 301, loss=0.001
Epoch: 74, i: 401, loss=0.001
Epoch: 74, i: 501, loss=0.001
Epoch: 74, i: 601, loss=0.003
Epoch: 74, i: 701, loss=0.003
Epoch: 74, i: 801, loss=0.007
Epoch: 74, i: 901, loss=0.006
Epoch: 74, i: 1001, loss=0.009
Epoch: 74, i: 1101, loss=0.008
Epoch: 75, i: 1, loss=0.000
Epoch: 75, i: 101, loss=0.030
Epoch: 75, i: 201, loss=0.016
Epoch: 75, i: 301, loss=0.012
Epoch: 75, i: 401, loss=0.009
Epoch: 75, i: 501, loss=0.008
Epoch: 75, i: 601, loss=0.007
Epoch: 75, i: 701, loss=0.006
Epoch: 75, i: 801, loss=0.007
Epoch: 75, i: 901, loss=0.007
Epoch: 75, i: 1001, loss=0.007
Epoch: 75, i: 1101, loss=0.006
Epoch: 76, i: 1, loss=0.000
Epoch: 76, i: 101, loss=0.000
Epoch: 76, i: 201, loss=0.000
Epoch: 76, i: 301, loss=0.001
Epoch: 76, i: 401, loss=0.000
Epoch: 76, i: 501, loss=0.000
Epoch: 76, i: 601, loss=0.000
Epoch: 76, i: 701, loss=0.003
Epoch: 76, i: 801, loss=0.003
Epoch: 76, i: 901, loss=0.004
Epoch: 76, i: 1001, loss=0.005
Epoch: 76, i: 1101, loss=0.005
Epoch: 77, i: 1, loss=0.000
Epoch: 77, i: 101, loss=0.001
Epoch: 77, i: 201, loss=0.001
Epoch: 77, i: 301, loss=0.000
Epoch: 77, i: 401, loss=0.002
Epoch: 77, i: 501, loss=0.002
Epoch: 77, i: 601, loss=0.003
Epoch: 77, i: 701, loss=0.007
Epoch: 77, i: 801, loss=0.007
Epoch: 77, i: 901, loss=0.007
Epoch: 77, i: 1001, loss=0.007
Epoch: 77, i: 1101, loss=0.007
Epoch: 78, i: 1, loss=0.000
Epoch: 78, i: 101, loss=0.002
Epoch: 78, i: 201, loss=0.003
Epoch: 78, i: 301, loss=0.013
Epoch: 78, i: 401, loss=0.010
Epoch: 78, i: 501, loss=0.008
Epoch: 78, i: 601, loss=0.007
Epoch: 78, i: 701, loss=0.008
Epoch: 78, i: 801, loss=0.009
Epoch: 78, i: 901, loss=0.008
Epoch: 78, i: 1001, loss=0.007
Epoch: 78, i: 1101, loss=0.007
Epoch: 79, i: 1, loss=0.000
Epoch: 79, i: 101, loss=0.019
Epoch: 79, i: 201, loss=0.010
Epoch: 79, i: 301, loss=0.007
Epoch: 79, i: 401, loss=0.006
Epoch: 79, i: 501, loss=0.004
Epoch: 79, i: 601, loss=0.007
Epoch: 79, i: 701, loss=0.006
Epoch: 79, i: 801, loss=0.006
Epoch: 79, i: 901, loss=0.005
Epoch: 79, i: 1001, loss=0.005
Epoch: 79, i: 1101, loss=0.006
Epoch: 80, i: 1, loss=0.000
Epoch: 80, i: 101, loss=0.001
Epoch: 80, i: 201, loss=0.001
Epoch: 80, i: 301, loss=0.006
Epoch: 80, i: 401, loss=0.005
Epoch: 80, i: 501, loss=0.004
Epoch: 80, i: 601, loss=0.003
Epoch: 80, i: 701, loss=0.003
Epoch: 80, i: 801, loss=0.004
Epoch: 80, i: 901, loss=0.004
Epoch: 80, i: 1001, loss=0.005
Epoch: 80, i: 1101, loss=0.006
Epoch: 81, i: 1, loss=0.000
Epoch: 81, i: 101, loss=0.001
Epoch: 81, i: 201, loss=0.001
Epoch: 81, i: 301, loss=0.002
Epoch: 81, i: 401, loss=0.001
Epoch: 81, i: 501, loss=0.002
Epoch: 81, i: 601, loss=0.003
Epoch: 81, i: 701, loss=0.007
Epoch: 81, i: 801, loss=0.006
Epoch: 81, i: 901, loss=0.006
Epoch: 81, i: 1001, loss=0.005
Epoch: 81, i: 1101, loss=0.008
Epoch: 82, i: 1, loss=0.000
Epoch: 82, i: 101, loss=0.002
Epoch: 82, i: 201, loss=0.001
Epoch: 82, i: 301, loss=0.006
Epoch: 82, i: 401, loss=0.008
Epoch: 82, i: 501, loss=0.007
Epoch: 82, i: 601, loss=0.006
Epoch: 82, i: 701, loss=0.006
Epoch: 82, i: 801, loss=0.007
Epoch: 82, i: 901, loss=0.008
Epoch: 82, i: 1001, loss=0.009
Epoch: 82, i: 1101, loss=0.009
Epoch: 83, i: 1, loss=0.000
Epoch: 83, i: 101, loss=0.002
Epoch: 83, i: 201, loss=0.001
Epoch: 83, i: 301, loss=0.001
Epoch: 83, i: 401, loss=0.001
Epoch: 83, i: 501, loss=0.003
Epoch: 83, i: 601, loss=0.005
Epoch: 83, i: 701, loss=0.005
Epoch: 83, i: 801, loss=0.009
Epoch: 83, i: 901, loss=0.010
Epoch: 83, i: 1001, loss=0.010
Epoch: 83, i: 1101, loss=0.009
Epoch: 84, i: 1, loss=0.000
Epoch: 84, i: 101, loss=0.007
Epoch: 84, i: 201, loss=0.004
Epoch: 84, i: 301, loss=0.003
Epoch: 84, i: 401, loss=0.004
Epoch: 84, i: 501, loss=0.003
Epoch: 84, i: 601, loss=0.003
Epoch: 84, i: 701, loss=0.004
Epoch: 84, i: 801, loss=0.003
Epoch: 84, i: 901, loss=0.003
Epoch: 84, i: 1001, loss=0.003
Epoch: 84, i: 1101, loss=0.003
Epoch: 85, i: 1, loss=0.000
Epoch: 85, i: 101, loss=0.003
Epoch: 85, i: 201, loss=0.006
Epoch: 85, i: 301, loss=0.005
Epoch: 85, i: 401, loss=0.006
Epoch: 85, i: 501, loss=0.005
Epoch: 85, i: 601, loss=0.006
Epoch: 85, i: 701, loss=0.006
Epoch: 85, i: 801, loss=0.007
Epoch: 85, i: 901, loss=0.006
Epoch: 85, i: 1001, loss=0.006
Epoch: 85, i: 1101, loss=0.006
Epoch: 86, i: 1, loss=0.000
Epoch: 86, i: 101, loss=0.004
Epoch: 86, i: 201, loss=0.002
Epoch: 86, i: 301, loss=0.002
Epoch: 86, i: 401, loss=0.002
Epoch: 86, i: 501, loss=0.002
Epoch: 86, i: 601, loss=0.002
Epoch: 86, i: 701, loss=0.002
Epoch: 86, i: 801, loss=0.001
Epoch: 86, i: 901, loss=0.002
Epoch: 86, i: 1001, loss=0.003
Epoch: 86, i: 1101, loss=0.003
Epoch: 87, i: 1, loss=0.000
Epoch: 87, i: 101, loss=0.001
Epoch: 87, i: 201, loss=0.011
Epoch: 87, i: 301, loss=0.008
Epoch: 87, i: 401, loss=0.008
Epoch: 87, i: 501, loss=0.008
Epoch: 87, i: 601, loss=0.010
Epoch: 87, i: 701, loss=0.010
Epoch: 87, i: 801, loss=0.009
Epoch: 87, i: 901, loss=0.008
Epoch: 87, i: 1001, loss=0.008
Epoch: 87, i: 1101, loss=0.007
Epoch: 88, i: 1, loss=0.000
Epoch: 88, i: 101, loss=0.006
Epoch: 88, i: 201, loss=0.003
Epoch: 88, i: 301, loss=0.002
Epoch: 88, i: 401, loss=0.005
Epoch: 88, i: 501, loss=0.006
Epoch: 88, i: 601, loss=0.007
Epoch: 88, i: 701, loss=0.007
Epoch: 88, i: 801, loss=0.007
Epoch: 88, i: 901, loss=0.008
Epoch: 88, i: 1001, loss=0.007
Epoch: 88, i: 1101, loss=0.007
Epoch: 89, i: 1, loss=0.000
Epoch: 89, i: 101, loss=0.000
Epoch: 89, i: 201, loss=0.000
Epoch: 89, i: 301, loss=0.000
Epoch: 89, i: 401, loss=0.000
Epoch: 89, i: 501, loss=0.001
Epoch: 89, i: 601, loss=0.001
Epoch: 89, i: 701, loss=0.002
Epoch: 89, i: 801, loss=0.002
Epoch: 89, i: 901, loss=0.003
Epoch: 89, i: 1001, loss=0.003
Epoch: 89, i: 1101, loss=0.003
Epoch: 90, i: 1, loss=0.000
Epoch: 90, i: 101, loss=0.000
Epoch: 90, i: 201, loss=0.000
Epoch: 90, i: 301, loss=0.003
Epoch: 90, i: 401, loss=0.002
Epoch: 90, i: 501, loss=0.002
Epoch: 90, i: 601, loss=0.002
Epoch: 90, i: 701, loss=0.002
Epoch: 90, i: 801, loss=0.003
Epoch: 90, i: 901, loss=0.003
Epoch: 90, i: 1001, loss=0.003
Epoch: 90, i: 1101, loss=0.003
Epoch: 91, i: 1, loss=0.005
Epoch: 91, i: 101, loss=0.001
Epoch: 91, i: 201, loss=0.001
Epoch: 91, i: 301, loss=0.002
Epoch: 91, i: 401, loss=0.008
Epoch: 91, i: 501, loss=0.007
Epoch: 91, i: 601, loss=0.006
Epoch: 91, i: 701, loss=0.005
Epoch: 91, i: 801, loss=0.004
Epoch: 91, i: 901, loss=0.004
Epoch: 91, i: 1001, loss=0.004
Epoch: 91, i: 1101, loss=0.004
Epoch: 92, i: 1, loss=0.000
Epoch: 92, i: 101, loss=0.002
Epoch: 92, i: 201, loss=0.002
Epoch: 92, i: 301, loss=0.002
Epoch: 92, i: 401, loss=0.002
Epoch: 92, i: 501, loss=0.002
Epoch: 92, i: 601, loss=0.002
Epoch: 92, i: 701, loss=0.001
Epoch: 92, i: 801, loss=0.001
Epoch: 92, i: 901, loss=0.006
Epoch: 92, i: 1001, loss=0.007
Epoch: 92, i: 1101, loss=0.007
Epoch: 93, i: 1, loss=0.000
Epoch: 93, i: 101, loss=0.017
Epoch: 93, i: 201, loss=0.017
Epoch: 93, i: 301, loss=0.011
Epoch: 93, i: 401, loss=0.009
Epoch: 93, i: 501, loss=0.007
Epoch: 93, i: 601, loss=0.007
Epoch: 93, i: 701, loss=0.006
Epoch: 93, i: 801, loss=0.006
Epoch: 93, i: 901, loss=0.006
Epoch: 93, i: 1001, loss=0.006
Epoch: 93, i: 1101, loss=0.005
Epoch: 94, i: 1, loss=0.000
Epoch: 94, i: 101, loss=0.000
Epoch: 94, i: 201, loss=0.005
Epoch: 94, i: 301, loss=0.008
Epoch: 94, i: 401, loss=0.009
Epoch: 94, i: 501, loss=0.008
Epoch: 94, i: 601, loss=0.008
Epoch: 94, i: 701, loss=0.007
Epoch: 94, i: 801, loss=0.006
Epoch: 94, i: 901, loss=0.006
Epoch: 94, i: 1001, loss=0.007
Epoch: 94, i: 1101, loss=0.006
Epoch: 95, i: 1, loss=0.000
Epoch: 95, i: 101, loss=0.040
Epoch: 95, i: 201, loss=0.023
Epoch: 95, i: 301, loss=0.018
Epoch: 95, i: 401, loss=0.021
Epoch: 95, i: 501, loss=0.017
Epoch: 95, i: 601, loss=0.015
Epoch: 95, i: 701, loss=0.013
Epoch: 95, i: 801, loss=0.011
Epoch: 95, i: 901, loss=0.011
Epoch: 95, i: 1001, loss=0.010
Epoch: 95, i: 1101, loss=0.009
Epoch: 96, i: 1, loss=0.000
Epoch: 96, i: 101, loss=0.006
Epoch: 96, i: 201, loss=0.005
Epoch: 96, i: 301, loss=0.006
Epoch: 96, i: 401, loss=0.005
Epoch: 96, i: 501, loss=0.004
Epoch: 96, i: 601, loss=0.005
Epoch: 96, i: 701, loss=0.006
Epoch: 96, i: 801, loss=0.007
Epoch: 96, i: 901, loss=0.006
Epoch: 96, i: 1001, loss=0.006
Epoch: 96, i: 1101, loss=0.006
Epoch: 97, i: 1, loss=0.000
Epoch: 97, i: 101, loss=0.006
Epoch: 97, i: 201, loss=0.003
Epoch: 97, i: 301, loss=0.005
Epoch: 97, i: 401, loss=0.008
Epoch: 97, i: 501, loss=0.007
Epoch: 97, i: 601, loss=0.007
Epoch: 97, i: 701, loss=0.006
Epoch: 97, i: 801, loss=0.006
Epoch: 97, i: 901, loss=0.007
Epoch: 97, i: 1001, loss=0.006
Epoch: 97, i: 1101, loss=0.007
Epoch: 98, i: 1, loss=0.000
Epoch: 98, i: 101, loss=0.000
Epoch: 98, i: 201, loss=0.003
Epoch: 98, i: 301, loss=0.003
Epoch: 98, i: 401, loss=0.005
Epoch: 98, i: 501, loss=0.006
Epoch: 98, i: 601, loss=0.006
Epoch: 98, i: 701, loss=0.008
Epoch: 98, i: 801, loss=0.008
Epoch: 98, i: 901, loss=0.009
Epoch: 98, i: 1001, loss=0.008
Epoch: 98, i: 1101, loss=0.007
Epoch: 99, i: 1, loss=0.000
Epoch: 99, i: 101, loss=0.014
Epoch: 99, i: 201, loss=0.008
Epoch: 99, i: 301, loss=0.006
Epoch: 99, i: 401, loss=0.004
Epoch: 99, i: 501, loss=0.003
Epoch: 99, i: 601, loss=0.003
Epoch: 99, i: 701, loss=0.004
Epoch: 99, i: 801, loss=0.003
Epoch: 99, i: 901, loss=0.003
Epoch: 99, i: 1001, loss=0.003
Epoch: 99, i: 1101, loss=0.002
Epoch: 100, i: 1, loss=0.000
Epoch: 100, i: 101, loss=0.028
Epoch: 100, i: 201, loss=0.014
Epoch: 100, i: 301, loss=0.021
Epoch: 100, i: 401, loss=0.022
Epoch: 100, i: 501, loss=0.019
Epoch: 100, i: 601, loss=0.016
Epoch: 100, i: 701, loss=0.014
Epoch: 100, i: 801, loss=0.014
Epoch: 100, i: 901, loss=0.013
Epoch: 100, i: 1001, loss=0.012
Epoch: 100, i: 1101, loss=0.011
# Evaluate the trained classifier on the held-out test set using torchmetrics.
# Fix: both metrics must agree on the label space — the original passed
# num_classes=len(labels_path) to Precision but num_classes=CLASSES to Recall.
metrics = MetricCollection([
    Recall(num_classes=CLASSES, average='macro'),
    Precision(num_classes=CLASSES, average='macro'),
])

# Switch to inference mode: torch.no_grad() alone only disables autograd;
# model.eval() is also needed so dropout / batch-norm behave deterministically.
model.eval()
with torch.no_grad():
    for step, (x, y) in enumerate(test_data_loader):
        outputs = model(x)
        # argmax over the class dimension -> hard predicted labels
        _, predicted = torch.max(outputs.data, 1)
        # Accumulate batch statistics inside the collection.
        metrics(predicted, y)

# compute() aggregates over all batches seen since the last reset.
results = metrics.compute()
print("Recall: {}".format(results["Recall"]))
print("Precision: {}".format(results["Precision"]))
print(results)
Exception ignored in: <function _MultiProcessingDataLoaderIter.__del__ at 0x7fce79d5eb90>
Traceback (most recent call last):
File "/shared-libs/python3.7/py/lib/python3.7/site-packages/torch/utils/data/dataloader.py", line 1328, in __del__
Exception ignored in: self._shutdown_workers()
File "/shared-libs/python3.7/py/lib/python3.7/site-packages/torch/utils/data/dataloader.py", line 1320, in _shutdown_workers
<function _MultiProcessingDataLoaderIter.__del__ at 0x7fce79d5eb90>if w.is_alive():
Traceback (most recent call last):
File "/shared-libs/python3.7/py/lib/python3.7/site-packages/torch/utils/data/dataloader.py", line 1328, in __del__
File "/usr/local/lib/python3.7/multiprocessing/process.py", line 151, in is_alive
self._shutdown_workers()
File "/shared-libs/python3.7/py/lib/python3.7/site-packages/torch/utils/data/dataloader.py", line 1320, in _shutdown_workers
assert self._parent_pid == os.getpid(), 'can only test a child process'
AssertionErrorif w.is_alive():: Exception ignored in: can only test a child process
File "/usr/local/lib/python3.7/multiprocessing/process.py", line 151, in is_alive
assert self._parent_pid == os.getpid(), 'can only test a child process'
<function _MultiProcessingDataLoaderIter.__del__ at 0x7fce79d5eb90>AssertionError
Exception ignored in: : Traceback (most recent call last):
<function tqdm.__del__ at 0x7fcede1ace60>can only test a child process File "/shared-libs/python3.7/py/lib/python3.7/site-packages/torch/utils/data/dataloader.py", line 1328, in __del__
Exception ignored in:
Traceback (most recent call last):
<function _MultiProcessingDataLoaderIter.__del__ at 0x7fce79d5eb90>Exception ignored in: self._shutdown_workers() File "/shared-libs/python3.7/py/lib/python3.7/site-packages/tqdm/std.py", line 1147, in __del__
<function tqdm.__del__ at 0x7fcede1ace60>
Traceback (most recent call last):
Traceback (most recent call last):
File "/shared-libs/python3.7/py/lib/python3.7/site-packages/torch/utils/data/dataloader.py", line 1328, in __del__
self.close() File "/shared-libs/python3.7/py/lib/python3.7/site-packages/tqdm/std.py", line 1147, in __del__
File "/shared-libs/python3.7/py/lib/python3.7/site-packages/torch/utils/data/dataloader.py", line 1320, in _shutdown_workers
File "/shared-libs/python3.7/py/lib/python3.7/site-packages/tqdm/notebook.py", line 286, in close
if w.is_alive():self._shutdown_workers()
self.close() File "/shared-libs/python3.7/py/lib/python3.7/site-packages/torch/utils/data/dataloader.py", line 1320, in _shutdown_workers
self.disp(bar_style='danger', check_delay=False) File "/usr/local/lib/python3.7/multiprocessing/process.py", line 151, in is_alive
if w.is_alive(): File "/shared-libs/python3.7/py/lib/python3.7/site-packages/tqdm/notebook.py", line 286, in close
AttributeError File "/usr/local/lib/python3.7/multiprocessing/process.py", line 151, in is_alive
assert self._parent_pid == os.getpid(), 'can only test a child process' self.disp(bar_style='danger', check_delay=False)assert self._parent_pid == os.getpid(), 'can only test a child process':
AssertionError
'tqdm' object has no attribute 'disp': AttributeError: can only test a child processAssertionError:
can only test a child processException ignored in: <function tqdm.__del__ at 0x7fcede1ace60>
Traceback (most recent call last):
'tqdm' object has no attribute 'disp' File "/shared-libs/python3.7/py/lib/python3.7/site-packages/tqdm/std.py", line 1147, in __del__
Exception ignored in: Exception ignored in: <function tqdm.__del__ at 0x7fcede1ace60><function tqdm.__del__ at 0x7fcede1ace60> Exception ignored in:
self.close()
Traceback (most recent call last):
<function tqdm.__del__ at 0x7fcede1ace60>
File "/shared-libs/python3.7/py/lib/python3.7/site-packages/tqdm/std.py", line 1147, in __del__
Traceback (most recent call last):
Traceback (most recent call last):
File "/shared-libs/python3.7/py/lib/python3.7/site-packages/tqdm/std.py", line 1147, in __del__
File "/shared-libs/python3.7/py/lib/python3.7/site-packages/tqdm/notebook.py", line 286, in close
File "/shared-libs/python3.7/py/lib/python3.7/site-packages/tqdm/std.py", line 1147, in __del__
self.close()
self.close()
self.disp(bar_style='danger', check_delay=False)self.close() File "/shared-libs/python3.7/py/lib/python3.7/site-packages/tqdm/notebook.py", line 286, in close
File "/shared-libs/python3.7/py/lib/python3.7/site-packages/tqdm/notebook.py", line 286, in close
IOPub message rate exceeded.
The notebook server will temporarily stop sending output
to the client in order to avoid crashing it.
To change this limit, set the config variable
`--NotebookApp.iopub_msg_rate_limit`.
Current values:
NotebookApp.iopub_msg_rate_limit=50.0 (msgs/sec)
NotebookApp.rate_limit_window=3.0 (secs)
Accuracy: 0.9938936233520508
Precision: 0.9939824342727661
{'Accuracy': tensor(0.9939), 'Precision': tensor(0.9940)}