4.3. Train & evaluate (CNN)

Train and evaluate the CNN on multiple train/test splits, each generated with a different random seed.
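
Each seed drives a fresh train/test split, so the reported performance reflects variability across partitions rather than a single lucky split. A minimal, hypothetical sketch of the idea (scikit-learn's train_test_split stands in for the splitting that mirzai's Learners performs internally; all names below are illustrative):

# Hypothetical sketch: one seed, one fresh train/test split
import numpy as np
from sklearn.model_selection import train_test_split

X_demo, y_demo = np.random.rand(100, 10), np.random.rand(100)
for seed in range(3):
    X_tr, X_te, y_tr, y_te = train_test_split(
        X_demo, y_demo, test_size=0.2, random_state=seed)
    print(f'Seed {seed}: train={len(X_tr)}, test={len(X_te)}')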

if 'google.colab' in str(get_ipython()):
    from google.colab import drive
    drive.mount('/content/drive', force_remount=False)
    !pip install mirzai
else:
    %load_ext autoreload
    %autoreload 2

The autoreload extension is already loaded. To reload it, use:
  %reload_ext autoreload
# Python utilities
from pathlib import Path
import pickle

# Data science stack
import pandas as pd

from mirzai.data.loading import load_kssl
from mirzai.data.selection import (select_y, select_tax_order, select_X)
from mirzai.data.transform import log_transform_y
from mirzai.data.torch import DataLoaders, SNV_transform
from mirzai.training.cnn import (Model, weights_init)
from mirzai.training.cnn import Learner, Learners
from mirzai.training.core import load_dumps

# Deep Learning stack
import torch
from torch.optim import Adam
from torch.nn import MSELoss
from torch.optim.lr_scheduler import CyclicLR

from fastcore.transform import compose

import warnings
warnings.filterwarnings('ignore')
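
Among the imports, SNV_transform refers to the standard normal variate, a common per-spectrum normalization in infrared spectroscopy: each spectrum is centered on its own mean and scaled by its own standard deviation. A minimal NumPy sketch of the usual definition (mirzai's implementation may differ in details):

import numpy as np

def snv(spectra):
    # Center and scale each spectrum (row) individually
    mean = spectra.mean(axis=1, keepdims=True)
    std = spectra.std(axis=1, keepdims=True)
    return (spectra - mean) / std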

1. Load and transform

Load data

# For testing purposes, use the sample data
#src_dir = 'test'
#fnames = ['spectra-features-smp.npy', 'spectra-wavenumbers-smp.npy', 
#          'depth-order-smp.npy', 'target-smp.npy', 
#          'tax-order-lu-smp.pkl', 'spectra-id-smp.npy']


# ... or use the full dataset
src_dir = '/content/drive/MyDrive/research/predict-k-mirs-dl/data/potassium'
fnames = ['spectra-features.npy', 'spectra-wavenumbers.npy', 
          'depth-order.npy', 'target.npy', 
          'tax-order-lu.pkl', 'spectra-id.npy']

X, X_names, depth_order, y, tax_lookup, X_id = load_kssl(src_dir, fnames=fnames)
data = X, y, X_id, depth_order
transforms = [select_y, select_tax_order, select_X, log_transform_y]
X, y, X_id, depth_order = compose(*transforms)(data)
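
fastcore's compose chains the transforms left to right: the (X, y, X_id, depth_order) tuple flows through select_y, then select_tax_order, then select_X, and finally log_transform_y, each function consuming and returning the tuple. A toy sketch of the same pattern:

from fastcore.transform import compose

double = lambda x: x * 2
add_one = lambda x: x + 1

pipeline = compose(double, add_one)  # applies double first, then add_one
print(pipeline(3))  # (3 * 2) + 1 = 7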

Experiment

Setup

# Is a GPU available?
use_cuda = torch.cuda.is_available()
device = torch.device('cuda:0' if use_cuda else 'cpu')
print(f'Runtime is: {device}')

params_scheduler = {
    'base_lr': 3e-5,
    'max_lr': 1e-3,
    'step_size_up': 5,
    'mode': 'triangular',
    'cycle_momentum': False
}
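
With step_size_up=5 and the scheduler stepped once per epoch, the learning rate ramps from base_lr to max_lr over 5 epochs and back down over the next 5, so each triangular cycle spans 10 epochs; this matches the "ends of cycles" validation losses logged every 10 epochs below. A minimal sketch of the schedule in isolation (the toy model and loop are illustrative; Learner wires this up internally):

# Illustrative only: trace the cyclical learning rate over two cycles
import torch
from torch.optim import Adam
from torch.optim.lr_scheduler import CyclicLR

toy_model = torch.nn.Linear(1, 1)  # stand-in for the CNN
opt = Adam(toy_model.parameters())
scheduler = CyclicLR(opt, **params_scheduler)
for epoch in range(20):
    # ... one training epoch would run here ...
    opt.step()
    scheduler.step()  # one scheduler step per epoch
    print(epoch, scheduler.get_last_lr()[0])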

n_epochs = 201
seeds = range(15, 20)  # resume the full run (seeds = range(20)) at seed 15
Runtime is: cuda:0

Train on all Soil Taxonomic Orders

# Replace the following paths with yours
dest_dir_loss = Path('/content/drive/MyDrive/research/predict-k-mirs-dl/dumps/cnn/train_eval/all/losses')
dest_dir_model = Path('/content/drive/MyDrive/research/predict-k-mirs-dl/dumps/cnn/train_eval/all/models')

learners = Learners(Model, tax_lookup, seeds=seeds, device=device)
learners.train((X, y, depth_order[:, -1]), 
               dest_dir_loss=dest_dir_loss,
               dest_dir_model=dest_dir_model,
               n_epochs=n_epochs,
               sc_kwargs=params_scheduler)
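
In the log below, "Validation loss (ends of cycles)" accumulates the validation loss recorded at the end of each 10-epoch learning-rate cycle. The per-seed loss curves are dumped to dest_dir_loss (load_dumps, imported above, is the packaged way to read them back); a hypothetical sketch for a quick manual inspection, assuming one pickled object per file:

# Hypothetical sketch: peek at dumped loss files after training
# (the exact layout and filenames are defined by mirzai)
import pickle
from pathlib import Path

for fname in sorted(Path(dest_dir_loss).glob('*')):
    with open(fname, 'rb') as f:
        print(fname.name, type(pickle.load(f)))
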
--------------------------------------------------------------------------------
Seed: 15
--------------------------------------------------------------------------------
------------------------------
Epoch: 0
Training loss: 0.21006273476802928 | Validation loss: 0.16685934806555774
Validation loss (ends of cycles): [0.16685935]
------------------------------
Epoch: 1
Training loss: 0.10366790779631614 | Validation loss: 0.12193218717532875
Validation loss (ends of cycles): [0.16685935]
------------------------------
Epoch: 2
Training loss: 0.08515885396031883 | Validation loss: 0.07348759699843627
Validation loss (ends of cycles): [0.16685935]
------------------------------
Epoch: 3
Training loss: 0.07681338573754655 | Validation loss: 0.0775093322174739
Validation loss (ends of cycles): [0.16685935]
------------------------------
Epoch: 4
Training loss: 0.07012089212630385 | Validation loss: 0.06804058407392122
Validation loss (ends of cycles): [0.16685935]
------------------------------
Epoch: 5
Training loss: 0.06663906943111673 | Validation loss: 0.06383055649631847
Validation loss (ends of cycles): [0.16685935]
------------------------------
Epoch: 6
Training loss: 0.06138754077439057 | Validation loss: 0.05653325315004429
Validation loss (ends of cycles): [0.16685935]
------------------------------
Epoch: 7
Training loss: 0.057238537138034624 | Validation loss: 0.05207979377458053
Validation loss (ends of cycles): [0.16685935]
------------------------------
Epoch: 8
Training loss: 0.053663239335846595 | Validation loss: 0.04987848757774429
Validation loss (ends of cycles): [0.16685935]
------------------------------
Epoch: 9
Training loss: 0.05063598560660667 | Validation loss: 0.04656141813415869
Validation loss (ends of cycles): [0.16685935]
------------------------------
Epoch: 10
Training loss: 0.048641224010429515 | Validation loss: 0.045020436414772956
Validation loss (ends of cycles): [0.16685935 0.04502044]
------------------------------
Epoch: 11
Training loss: 0.04944960257005474 | Validation loss: 0.04620409672832595
Validation loss (ends of cycles): [0.16685935 0.04502044]
------------------------------
Epoch: 12
Training loss: 0.05068350452599739 | Validation loss: 0.0466461240322189
Validation loss (ends of cycles): [0.16685935 0.04502044]
------------------------------
Epoch: 13
Training loss: 0.05165666111700941 | Validation loss: 0.048602522936015004
Validation loss (ends of cycles): [0.16685935 0.04502044]
------------------------------
Epoch: 14
Training loss: 0.05240861789184058 | Validation loss: 0.04709551842734877
Validation loss (ends of cycles): [0.16685935 0.04502044]
------------------------------
Epoch: 15
Training loss: 0.053400266255567395 | Validation loss: 0.05373546291571275
Validation loss (ends of cycles): [0.16685935 0.04502044]
------------------------------
Epoch: 16
Training loss: 0.050950179472345654 | Validation loss: 0.046860707412778806
Validation loss (ends of cycles): [0.16685935 0.04502044]
------------------------------
Epoch: 17
Training loss: 0.04810693795888091 | Validation loss: 0.049740820958287316
Validation loss (ends of cycles): [0.16685935 0.04502044]
------------------------------
Epoch: 18
Training loss: 0.045981014085068245 | Validation loss: 0.04217774676472212
Validation loss (ends of cycles): [0.16685935 0.04502044]
------------------------------
Epoch: 19
Training loss: 0.04369535342254859 | Validation loss: 0.04023268423249236
Validation loss (ends of cycles): [0.16685935 0.04502044]
------------------------------
Epoch: 20
Training loss: 0.04157303913503768 | Validation loss: 0.03929079252303438
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079]
------------------------------
Epoch: 21
Training loss: 0.04292749061604859 | Validation loss: 0.03935050310781308
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079]
------------------------------
Epoch: 22
Training loss: 0.04409183335387859 | Validation loss: 0.04125337862302508
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079]
------------------------------
Epoch: 23
Training loss: 0.04542167893222233 | Validation loss: 0.04289548397393881
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079]
------------------------------
Epoch: 24
Training loss: 0.04644744235868236 | Validation loss: 0.04298555405101681
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079]
------------------------------
Epoch: 25
Training loss: 0.04782074668232029 | Validation loss: 0.04539380502779927
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079]
------------------------------
Epoch: 26
Training loss: 0.04569561580951348 | Validation loss: 0.04396933422679395
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079]
------------------------------
Epoch: 27
Training loss: 0.04414120492503399 | Validation loss: 0.04126848547463923
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079]
------------------------------
Epoch: 28
Training loss: 0.041882850408488076 | Validation loss: 0.03920497682637873
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079]
------------------------------
Epoch: 29
Training loss: 0.03984382633563483 | Validation loss: 0.03720222598156043
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079]
------------------------------
Epoch: 30
Training loss: 0.03824256880274849 | Validation loss: 0.03624860502252009
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861]
------------------------------
Epoch: 31
Training loss: 0.039079163438002544 | Validation loss: 0.036830263261773945
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861]
------------------------------
Epoch: 32
Training loss: 0.04015510225774881 | Validation loss: 0.037671556770471876
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861]
------------------------------
Epoch: 33
Training loss: 0.04150084875373712 | Validation loss: 0.040750166566102905
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861]
------------------------------
Epoch: 34
Training loss: 0.0429485932956853 | Validation loss: 0.04076049813127096
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861]
------------------------------
Epoch: 35
Training loss: 0.044326993446122474 | Validation loss: 0.04463050882043564
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861]
------------------------------
Epoch: 36
Training loss: 0.042592402374349886 | Validation loss: 0.04191957023489264
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861]
------------------------------
Epoch: 37
Training loss: 0.04073864440012019 | Validation loss: 0.039296620451243575
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861]
------------------------------
Epoch: 38
Training loss: 0.0388192206421339 | Validation loss: 0.040280445091492305
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861]
------------------------------
Epoch: 39
Training loss: 0.03695870364381836 | Validation loss: 0.03545188218915621
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861]
------------------------------
Epoch: 40
Training loss: 0.03567687908414839 | Validation loss: 0.03444945969643582
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946]
------------------------------
Epoch: 41
Training loss: 0.03621109440926404 | Validation loss: 0.03518754746600063
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946]
------------------------------
Epoch: 42
Training loss: 0.03740448744116923 | Validation loss: 0.035378930587486354
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946]
------------------------------
Epoch: 43
Training loss: 0.038526631041987267 | Validation loss: 0.037030073467938775
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946]
------------------------------
Epoch: 44
Training loss: 0.04034058869666031 | Validation loss: 0.03999876431230687
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946]
------------------------------
Epoch: 45
Training loss: 0.041215690155330255 | Validation loss: 0.041170561809785074
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946]
------------------------------
Epoch: 46
Training loss: 0.040143450241770566 | Validation loss: 0.03838985987765863
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946]
------------------------------
Epoch: 47
Training loss: 0.03800150607052574 | Validation loss: 0.04004549053786075
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946]
------------------------------
Epoch: 48
Training loss: 0.03671635882598971 | Validation loss: 0.03537545478449459
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946]
------------------------------
Epoch: 49
Training loss: 0.0347628424257612 | Validation loss: 0.034168427686446005
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946]
------------------------------
Epoch: 50
Training loss: 0.03355114189009586 | Validation loss: 0.03320952699379583
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953]
------------------------------
Epoch: 51
Training loss: 0.034235434844085255 | Validation loss: 0.033966143739170736
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953]
------------------------------
Epoch: 52
Training loss: 0.03527097588709343 | Validation loss: 0.03463689532889202
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953]
------------------------------
Epoch: 53
Training loss: 0.036384562783925906 | Validation loss: 0.043450314378514224
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953]
------------------------------
Epoch: 54
Training loss: 0.037796340584864946 | Validation loss: 0.037529862032527415
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953]
------------------------------
Epoch: 55
Training loss: 0.040052222718638696 | Validation loss: 0.037125598194193
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953]
------------------------------
Epoch: 56
Training loss: 0.037771799428366476 | Validation loss: 0.03799022023722661
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953]
------------------------------
Epoch: 57
Training loss: 0.03619785493654089 | Validation loss: 0.037243562148867454
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953]
------------------------------
Epoch: 58
Training loss: 0.03466686871229106 | Validation loss: 0.036433838901266585
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953]
------------------------------
Epoch: 59
Training loss: 0.03298729962181652 | Validation loss: 0.03261748232375995
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953]
------------------------------
Epoch: 60
Training loss: 0.0318513594633775 | Validation loss: 0.03203852672492508
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853]
------------------------------
Epoch: 61
Training loss: 0.03245821607750144 | Validation loss: 0.03227031828456484
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853]
------------------------------
Epoch: 62
Training loss: 0.033735571010221586 | Validation loss: 0.036995998938131124
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853]
------------------------------
Epoch: 63
Training loss: 0.03462631959838455 | Validation loss: 0.03523477534475052
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853]
------------------------------
Epoch: 64
Training loss: 0.03623737948744991 | Validation loss: 0.03887613358355201
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853]
------------------------------
Epoch: 65
Training loss: 0.037692032123761855 | Validation loss: 0.0393562819314214
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853]
------------------------------
Epoch: 66
Training loss: 0.03629782138663659 | Validation loss: 0.03982825133850617
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853]
------------------------------
Epoch: 67
Training loss: 0.034753578830525045 | Validation loss: 0.035794618621930084
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853]
------------------------------
Epoch: 68
Training loss: 0.03335795502130705 | Validation loss: 0.03471514085653873
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853]
------------------------------
Epoch: 69
Training loss: 0.031571629029097346 | Validation loss: 0.03225022736541201
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853]
------------------------------
Epoch: 70
Training loss: 0.03022792704007405 | Validation loss: 0.03138879530824128
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853 0.0313888 ]
------------------------------
Epoch: 71
Training loss: 0.03081579728044687 | Validation loss: 0.0321810659581581
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853 0.0313888 ]
------------------------------
Epoch: 72
Training loss: 0.031975348947540394 | Validation loss: 0.03330272281196265
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853 0.0313888 ]
------------------------------
Epoch: 73
Training loss: 0.03338813357143186 | Validation loss: 0.03498803695614359
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853 0.0313888 ]
------------------------------
Epoch: 74
Training loss: 0.03510964766335417 | Validation loss: 0.03675926781663325
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853 0.0313888 ]
------------------------------
Epoch: 75
Training loss: 0.03621827504931267 | Validation loss: 0.04031967806868848
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853 0.0313888 ]
------------------------------
Epoch: 76
Training loss: 0.03502747318872405 | Validation loss: 0.03644606344138099
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853 0.0313888 ]
------------------------------
Epoch: 77
Training loss: 0.033388999948923394 | Validation loss: 0.03493262910638499
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853 0.0313888 ]
------------------------------
Epoch: 78
Training loss: 0.031588033860087336 | Validation loss: 0.03346493154500438
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853 0.0313888 ]
------------------------------
Epoch: 79
Training loss: 0.030269897492477802 | Validation loss: 0.031828868301766636
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853 0.0313888 ]
------------------------------
Epoch: 80
Training loss: 0.029260817702536978 | Validation loss: 0.030966343737281528
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853 0.0313888  0.03096634]
------------------------------
Epoch: 81
Training loss: 0.02962339904496637 | Validation loss: 0.03175661460686047
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853 0.0313888  0.03096634]
------------------------------
Epoch: 82
Training loss: 0.030601339510964654 | Validation loss: 0.03278937654784032
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853 0.0313888  0.03096634]
------------------------------
Epoch: 83
Training loss: 0.03195925210107587 | Validation loss: 0.034539599556772584
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853 0.0313888  0.03096634]
------------------------------
Epoch: 84
Training loss: 0.03342047210885432 | Validation loss: 0.03554835073254277
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853 0.0313888  0.03096634]
------------------------------
Epoch: 85
Training loss: 0.03485352619034128 | Validation loss: 0.03597379817866382
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853 0.0313888  0.03096634]
------------------------------
Epoch: 86
Training loss: 0.03383251438632343 | Validation loss: 0.03613145378278156
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853 0.0313888  0.03096634]
------------------------------
Epoch: 87
Training loss: 0.032122112861027806 | Validation loss: 0.03380987030841344
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853 0.0313888  0.03096634]
------------------------------
Epoch: 88
Training loss: 0.030471733455018208 | Validation loss: 0.03271121671835406
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853 0.0313888  0.03096634]
------------------------------
Epoch: 89
Training loss: 0.029372684216662125 | Validation loss: 0.031361686634476735
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853 0.0313888  0.03096634]
------------------------------
Epoch: 90
Training loss: 0.028345677224317873 | Validation loss: 0.03078324074931113
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853 0.0313888  0.03096634 0.03078324]
------------------------------
Epoch: 91
Training loss: 0.02855107771859717 | Validation loss: 0.03187606106223786
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853 0.0313888  0.03096634 0.03078324]
------------------------------
Epoch: 92
Training loss: 0.029404923101652443 | Validation loss: 0.03323598527473159
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853 0.0313888  0.03096634 0.03078324]
------------------------------
Epoch: 93
Training loss: 0.03089467578340234 | Validation loss: 0.03677618085711667
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853 0.0313888  0.03096634 0.03078324]
------------------------------
Epoch: 94
Training loss: 0.032298622542889566 | Validation loss: 0.03654344159846021
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853 0.0313888  0.03096634 0.03078324]
------------------------------
Epoch: 95
Training loss: 0.033855747049271065 | Validation loss: 0.03389050771144375
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853 0.0313888  0.03096634 0.03078324]
------------------------------
Epoch: 96
Training loss: 0.032686932765013416 | Validation loss: 0.03563051058424521
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853 0.0313888  0.03096634 0.03078324]
------------------------------
Epoch: 97
Training loss: 0.03117338846085637 | Validation loss: 0.036191234661810165
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853 0.0313888  0.03096634 0.03078324]
------------------------------
Epoch: 98
Training loss: 0.029448336268973162 | Validation loss: 0.03330194152298227
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853 0.0313888  0.03096634 0.03078324]
------------------------------
Epoch: 99
Training loss: 0.028292122928565002 | Validation loss: 0.03124773993560698
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853 0.0313888  0.03096634 0.03078324]
------------------------------
Epoch: 100
Training loss: 0.027255275193235184 | Validation loss: 0.030370980245679354
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853 0.0313888  0.03096634 0.03078324 0.03037098]
------------------------------
Epoch: 101
Training loss: 0.027862145766197874 | Validation loss: 0.031165265132038468
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853 0.0313888  0.03096634 0.03078324 0.03037098]
------------------------------
Epoch: 102
Training loss: 0.028608309474451043 | Validation loss: 0.03325484803490407
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853 0.0313888  0.03096634 0.03078324 0.03037098]
------------------------------
Epoch: 103
Training loss: 0.02942115707344955 | Validation loss: 0.03360139277755423
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853 0.0313888  0.03096634 0.03078324 0.03037098]
------------------------------
Epoch: 104
Training loss: 0.031027071798535606 | Validation loss: 0.035984445478668255
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853 0.0313888  0.03096634 0.03078324 0.03037098]
------------------------------
Epoch: 105
Training loss: 0.03268752752080941 | Validation loss: 0.04319226613218805
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853 0.0313888  0.03096634 0.03078324 0.03037098]
------------------------------
Epoch: 106
Training loss: 0.03163052153018281 | Validation loss: 0.033679972509894754
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853 0.0313888  0.03096634 0.03078324 0.03037098]
------------------------------
Epoch: 107
Training loss: 0.030197662804605747 | Validation loss: 0.03366826350215526
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853 0.0313888  0.03096634 0.03078324 0.03037098]
------------------------------
Epoch: 108
Training loss: 0.028440817642414313 | Validation loss: 0.03247853203684883
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853 0.0313888  0.03096634 0.03078324 0.03037098]
------------------------------
Epoch: 109
Training loss: 0.02726938468074118 | Validation loss: 0.031231511301830807
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853 0.0313888  0.03096634 0.03078324 0.03037098]
------------------------------
Epoch: 110
Training loss: 0.026660842424089923 | Validation loss: 0.030472978850645302
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853 0.0313888  0.03096634 0.03078324 0.03037098 0.03047298]
------------------------------
Epoch: 111
Training loss: 0.026930124313739225 | Validation loss: 0.030931837073799255
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853 0.0313888  0.03096634 0.03078324 0.03037098 0.03047298]
------------------------------
Epoch: 112
Training loss: 0.027782215082313953 | Validation loss: 0.03232833159576475
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853 0.0313888  0.03096634 0.03078324 0.03037098 0.03047298]
------------------------------
Epoch: 113
Training loss: 0.02897368960804856 | Validation loss: 0.03295310706196897
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853 0.0313888  0.03096634 0.03078324 0.03037098 0.03047298]
------------------------------
Epoch: 114
Training loss: 0.030728193830449398 | Validation loss: 0.034547940753729994
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853 0.0313888  0.03096634 0.03078324 0.03037098 0.03047298]
------------------------------
Epoch: 115
Training loss: 0.03180026859100028 | Validation loss: 0.03586612556096727
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853 0.0313888  0.03096634 0.03078324 0.03037098 0.03047298]
------------------------------
Epoch: 116
Training loss: 0.030403358757305217 | Validation loss: 0.036287696990885035
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853 0.0313888  0.03096634 0.03078324 0.03037098 0.03047298]
------------------------------
Epoch: 117
Training loss: 0.029338854772962747 | Validation loss: 0.033448813400701084
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853 0.0313888  0.03096634 0.03078324 0.03037098 0.03047298]
------------------------------
Epoch: 118
Training loss: 0.027786362097657277 | Validation loss: 0.03349754812640954
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853 0.0313888  0.03096634 0.03078324 0.03037098 0.03047298]
------------------------------
Epoch: 119
Training loss: 0.02651610360645843 | Validation loss: 0.03075202837982009
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853 0.0313888  0.03096634 0.03078324 0.03037098 0.03047298]
------------------------------
Epoch: 120
Training loss: 0.025863564623644444 | Validation loss: 0.030081888875075145
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853 0.0313888  0.03096634 0.03078324 0.03037098 0.03047298
 0.03008189]
------------------------------
Epoch: 121
Training loss: 0.025841036838054013 | Validation loss: 0.030718148438737984
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853 0.0313888  0.03096634 0.03078324 0.03037098 0.03047298
 0.03008189]
------------------------------
Epoch: 122
Training loss: 0.026887093637666597 | Validation loss: 0.03189418640449247
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853 0.0313888  0.03096634 0.03078324 0.03037098 0.03047298
 0.03008189]
------------------------------
Epoch: 123
Training loss: 0.0282036214269419 | Validation loss: 0.033052585845961505
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853 0.0313888  0.03096634 0.03078324 0.03037098 0.03047298
 0.03008189]
------------------------------
Epoch: 124
Training loss: 0.02943545867190293 | Validation loss: 0.03484091803068872
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853 0.0313888  0.03096634 0.03078324 0.03037098 0.03047298
 0.03008189]
------------------------------
Epoch: 125
Training loss: 0.031106999646285622 | Validation loss: 0.03587077220127118
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853 0.0313888  0.03096634 0.03078324 0.03037098 0.03047298
 0.03008189]
------------------------------
Epoch: 126
Training loss: 0.029798714455041127 | Validation loss: 0.03478194975945274
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853 0.0313888  0.03096634 0.03078324 0.03037098 0.03047298
 0.03008189]
------------------------------
Epoch: 127
Training loss: 0.028319378263375713 | Validation loss: 0.03545742583320995
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853 0.0313888  0.03096634 0.03078324 0.03037098 0.03047298
 0.03008189]
------------------------------
Epoch: 128
Training loss: 0.02707512585783568 | Validation loss: 0.031950104305833844
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853 0.0313888  0.03096634 0.03078324 0.03037098 0.03047298
 0.03008189]
------------------------------
Epoch: 129
Training loss: 0.025894583061733468 | Validation loss: 0.03058983964960923
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853 0.0313888  0.03096634 0.03078324 0.03037098 0.03047298
 0.03008189]
------------------------------
Epoch: 130
Training loss: 0.02535024630070818 | Validation loss: 0.030067410971145188
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853 0.0313888  0.03096634 0.03078324 0.03037098 0.03047298
 0.03008189 0.03006741]
------------------------------
Epoch: 131
Training loss: 0.025085657799715454 | Validation loss: 0.03055939608278264
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853 0.0313888  0.03096634 0.03078324 0.03037098 0.03047298
 0.03008189 0.03006741]
------------------------------
Epoch: 132
Training loss: 0.026115530444580447 | Validation loss: 0.03141266045745759
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853 0.0313888  0.03096634 0.03078324 0.03037098 0.03047298
 0.03008189 0.03006741]
------------------------------
Epoch: 133
Training loss: 0.027371244338216392 | Validation loss: 0.03283754703217903
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853 0.0313888  0.03096634 0.03078324 0.03037098 0.03047298
 0.03008189 0.03006741]
------------------------------
Epoch: 134
Training loss: 0.028676321570061176 | Validation loss: 0.0335505915624378
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853 0.0313888  0.03096634 0.03078324 0.03037098 0.03047298
 0.03008189 0.03006741]
------------------------------
Epoch: 135
Training loss: 0.030171366977710716 | Validation loss: 0.036135951370264575
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853 0.0313888  0.03096634 0.03078324 0.03037098 0.03047298
 0.03008189 0.03006741]
------------------------------
Epoch: 136
Training loss: 0.028916694708791834 | Validation loss: 0.03220553534616411
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853 0.0313888  0.03096634 0.03078324 0.03037098 0.03047298
 0.03008189 0.03006741]
------------------------------
Epoch: 137
Training loss: 0.02758560871511082 | Validation loss: 0.034407987646692624
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853 0.0313888  0.03096634 0.03078324 0.03037098 0.03047298
 0.03008189 0.03006741]
------------------------------
Epoch: 138
Training loss: 0.026476486200580417 | Validation loss: 0.032068598825913085
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853 0.0313888  0.03096634 0.03078324 0.03037098 0.03047298
 0.03008189 0.03006741]
------------------------------
Epoch: 139
Training loss: 0.025373654041299962 | Validation loss: 0.03088633716930594
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853 0.0313888  0.03096634 0.03078324 0.03037098 0.03047298
 0.03008189 0.03006741]
------------------------------
Epoch: 140
Training loss: 0.02446029111412977 | Validation loss: 0.029658473261623783
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853 0.0313888  0.03096634 0.03078324 0.03037098 0.03047298
 0.03008189 0.03006741 0.02965847]
------------------------------
Epoch: 141
Training loss: 0.024613056748951454 | Validation loss: 0.030765600189302876
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853 0.0313888  0.03096634 0.03078324 0.03037098 0.03047298
 0.03008189 0.03006741 0.02965847]
------------------------------
Epoch: 142
Training loss: 0.02563061149273889 | Validation loss: 0.031055306257531706
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853 0.0313888  0.03096634 0.03078324 0.03037098 0.03047298
 0.03008189 0.03006741 0.02965847]
------------------------------
Epoch: 143
Training loss: 0.026793520601546963 | Validation loss: 0.03368187746016589
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853 0.0313888  0.03096634 0.03078324 0.03037098 0.03047298
 0.03008189 0.03006741 0.02965847]
------------------------------
Epoch: 144
Training loss: 0.028121095472455435 | Validation loss: 0.034668188031136464
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853 0.0313888  0.03096634 0.03078324 0.03037098 0.03047298
 0.03008189 0.03006741 0.02965847]
------------------------------
Epoch: 145
Training loss: 0.02954825781392095 | Validation loss: 0.036907955427217275
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853 0.0313888  0.03096634 0.03078324 0.03037098 0.03047298
 0.03008189 0.03006741 0.02965847]
------------------------------
Epoch: 146
Training loss: 0.02867951834992337 | Validation loss: 0.03384090841343972
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853 0.0313888  0.03096634 0.03078324 0.03037098 0.03047298
 0.03008189 0.03006741 0.02965847]
------------------------------
Epoch: 147
Training loss: 0.027033594139030306 | Validation loss: 0.03186083252055455
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853 0.0313888  0.03096634 0.03078324 0.03037098 0.03047298
 0.03008189 0.03006741 0.02965847]
------------------------------
Epoch: 148
Training loss: 0.02586826024298358 | Validation loss: 0.03187882715621881
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853 0.0313888  0.03096634 0.03078324 0.03037098 0.03047298
 0.03008189 0.03006741 0.02965847]
------------------------------
Epoch: 149
Training loss: 0.02469652157055038 | Validation loss: 0.030002151368663903
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853 0.0313888  0.03096634 0.03078324 0.03037098 0.03047298
 0.03008189 0.03006741 0.02965847]
------------------------------
Epoch: 150
Training loss: 0.024060468244417682 | Validation loss: 0.02966953616226669
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853 0.0313888  0.03096634 0.03078324 0.03037098 0.03047298
 0.03008189 0.03006741 0.02965847 0.02966954]
------------------------------
Epoch: 151
Training loss: 0.02407615681684862 | Validation loss: 0.030171342457817718
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853 0.0313888  0.03096634 0.03078324 0.03037098 0.03047298
 0.03008189 0.03006741 0.02965847 0.02966954]
------------------------------
Epoch: 152
Training loss: 0.025213620124942087 | Validation loss: 0.03115276776386046
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853 0.0313888  0.03096634 0.03078324 0.03037098 0.03047298
 0.03008189 0.03006741 0.02965847 0.02966954]
------------------------------
Epoch: 153
Training loss: 0.02618761574271993 | Validation loss: 0.03224841546498041
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853 0.0313888  0.03096634 0.03078324 0.03037098 0.03047298
 0.03008189 0.03006741 0.02965847 0.02966954]
------------------------------
Epoch: 154
Training loss: 0.02742909563532994 | Validation loss: 0.033071257239949386
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853 0.0313888  0.03096634 0.03078324 0.03037098 0.03047298
 0.03008189 0.03006741 0.02965847 0.02966954]
------------------------------
Epoch: 155
Training loss: 0.028960566331566438 | Validation loss: 0.03520545495294892
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853 0.0313888  0.03096634 0.03078324 0.03037098 0.03047298
 0.03008189 0.03006741 0.02965847 0.02966954]
------------------------------
Epoch: 156
Training loss: 0.027878794489578734 | Validation loss: 0.03331690206629249
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853 0.0313888  0.03096634 0.03078324 0.03037098 0.03047298
 0.03008189 0.03006741 0.02965847 0.02966954]
------------------------------
Epoch: 157
Training loss: 0.026770222055777088 | Validation loss: 0.03253524844427552
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853 0.0313888  0.03096634 0.03078324 0.03037098 0.03047298
 0.03008189 0.03006741 0.02965847 0.02966954]
------------------------------
Epoch: 158
Training loss: 0.025255439297926768 | Validation loss: 0.031415996751624396
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853 0.0313888  0.03096634 0.03078324 0.03037098 0.03047298
 0.03008189 0.03006741 0.02965847 0.02966954]
------------------------------
Epoch: 159
Training loss: 0.023955525235478274 | Validation loss: 0.030671831624236253
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853 0.0313888  0.03096634 0.03078324 0.03037098 0.03047298
 0.03008189 0.03006741 0.02965847 0.02966954]
------------------------------
Epoch: 160
Training loss: 0.023950381831936977 | Validation loss: 0.02982574759001753
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853 0.0313888  0.03096634 0.03078324 0.03037098 0.03047298
 0.03008189 0.03006741 0.02965847 0.02966954 0.02982575]
------------------------------
Epoch: 161
Training loss: 0.023566104945594694 | Validation loss: 0.030525477438242035
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853 0.0313888  0.03096634 0.03078324 0.03037098 0.03047298
 0.03008189 0.03006741 0.02965847 0.02966954 0.02982575]
------------------------------
Epoch: 162
Training loss: 0.02436839387015124 | Validation loss: 0.03077274331806508
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853 0.0313888  0.03096634 0.03078324 0.03037098 0.03047298
 0.03008189 0.03006741 0.02965847 0.02966954 0.02982575]
------------------------------
Epoch: 163
Training loss: 0.025670240620356492 | Validation loss: 0.03336958942390912
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853 0.0313888  0.03096634 0.03078324 0.03037098 0.03047298
 0.03008189 0.03006741 0.02965847 0.02966954 0.02982575]
------------------------------
Epoch: 164
Training loss: 0.026950350978162403 | Validation loss: 0.03207134384563012
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853 0.0313888  0.03096634 0.03078324 0.03037098 0.03047298
 0.03008189 0.03006741 0.02965847 0.02966954 0.02982575]
------------------------------
Epoch: 165
Training loss: 0.02825807590371247 | Validation loss: 0.03577499762507139
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853 0.0313888  0.03096634 0.03078324 0.03037098 0.03047298
 0.03008189 0.03006741 0.02965847 0.02966954 0.02982575]
------------------------------
Epoch: 166
Training loss: 0.027466312799727412 | Validation loss: 0.034949956346402127
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853 0.0313888  0.03096634 0.03078324 0.03037098 0.03047298
 0.03008189 0.03006741 0.02965847 0.02966954 0.02982575]
------------------------------
Epoch: 167
Training loss: 0.025565710374315483 | Validation loss: 0.03352450120336978
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853 0.0313888  0.03096634 0.03078324 0.03037098 0.03047298
 0.03008189 0.03006741 0.02965847 0.02966954 0.02982575]
------------------------------
Epoch: 168
Training loss: 0.024536826447987065 | Validation loss: 0.030833024449184933
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853 0.0313888  0.03096634 0.03078324 0.03037098 0.03047298
 0.03008189 0.03006741 0.02965847 0.02966954 0.02982575]
------------------------------
Epoch: 169
Training loss: 0.023544609255164745 | Validation loss: 0.030002048099001425
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853 0.0313888  0.03096634 0.03078324 0.03037098 0.03047298
 0.03008189 0.03006741 0.02965847 0.02966954 0.02982575]
------------------------------
Epoch: 170
Training loss: 0.02321960965692117 | Validation loss: 0.02953936348347801
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853 0.0313888  0.03096634 0.03078324 0.03037098 0.03047298
 0.03008189 0.03006741 0.02965847 0.02966954 0.02982575 0.02953936]
------------------------------
Epoch: 171
Training loss: 0.02307436841917701 | Validation loss: 0.030252535151630903
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853 0.0313888  0.03096634 0.03078324 0.03037098 0.03047298
 0.03008189 0.03006741 0.02965847 0.02966954 0.02982575 0.02953936]
------------------------------
Epoch: 172
Training loss: 0.02397868466677188 | Validation loss: 0.031158533524227355
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853 0.0313888  0.03096634 0.03078324 0.03037098 0.03047298
 0.03008189 0.03006741 0.02965847 0.02966954 0.02982575 0.02953936]
------------------------------
Epoch: 173
Training loss: 0.025020400550781096 | Validation loss: 0.032842995162097224
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853 0.0313888  0.03096634 0.03078324 0.03037098 0.03047298
 0.03008189 0.03006741 0.02965847 0.02966954 0.02982575 0.02953936]
------------------------------
Epoch: 174
Training loss: 0.026263095045693248 | Validation loss: 0.033730272541597356
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853 0.0313888  0.03096634 0.03078324 0.03037098 0.03047298
 0.03008189 0.03006741 0.02965847 0.02966954 0.02982575 0.02953936]
------------------------------
Epoch: 175
Training loss: 0.02800633968862404 | Validation loss: 0.03604198408733427
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853 0.0313888  0.03096634 0.03078324 0.03037098 0.03047298
 0.03008189 0.03006741 0.02965847 0.02966954 0.02982575 0.02953936]
------------------------------
Epoch: 176
Training loss: 0.02651089855550429 | Validation loss: 0.03513341484410045
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853 0.0313888  0.03096634 0.03078324 0.03037098 0.03047298
 0.03008189 0.03006741 0.02965847 0.02966954 0.02982575 0.02953936]
------------------------------
Epoch: 177
Training loss: 0.02560398147412114 | Validation loss: 0.0321963859095642
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853 0.0313888  0.03096634 0.03078324 0.03037098 0.03047298
 0.03008189 0.03006741 0.02965847 0.02966954 0.02982575 0.02953936]
------------------------------
Epoch: 178
Training loss: 0.02422986511053063 | Validation loss: 0.03157056310106959
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853 0.0313888  0.03096634 0.03078324 0.03037098 0.03047298
 0.03008189 0.03006741 0.02965847 0.02966954 0.02982575 0.02953936]
------------------------------
Epoch: 179
Training loss: 0.023086803899271282 | Validation loss: 0.030515983684268673
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853 0.0313888  0.03096634 0.03078324 0.03037098 0.03047298
 0.03008189 0.03006741 0.02965847 0.02966954 0.02982575 0.02953936]
------------------------------
Epoch: 180
Training loss: 0.022762703376558176 | Validation loss: 0.02935167956701686
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853 0.0313888  0.03096634 0.03078324 0.03037098 0.03047298
 0.03008189 0.03006741 0.02965847 0.02966954 0.02982575 0.02953936
 0.02935168]
------------------------------
Epoch: 181
Training loss: 0.022699793172343892 | Validation loss: 0.030329733221602123
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853 0.0313888  0.03096634 0.03078324 0.03037098 0.03047298
 0.03008189 0.03006741 0.02965847 0.02966954 0.02982575 0.02953936
 0.02935168]
------------------------------
Epoch: 182
Training loss: 0.023518949007344116 | Validation loss: 0.030502196112894907
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853 0.0313888  0.03096634 0.03078324 0.03037098 0.03047298
 0.03008189 0.03006741 0.02965847 0.02966954 0.02982575 0.02953936
 0.02935168]
------------------------------
Epoch: 183
Training loss: 0.024388733594935007 | Validation loss: 0.03174259513616562
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853 0.0313888  0.03096634 0.03078324 0.03037098 0.03047298
 0.03008189 0.03006741 0.02965847 0.02966954 0.02982575 0.02953936
 0.02935168]
------------------------------
Epoch: 184
Training loss: 0.025955424753135056 | Validation loss: 0.03257236281096672
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853 0.0313888  0.03096634 0.03078324 0.03037098 0.03047298
 0.03008189 0.03006741 0.02965847 0.02966954 0.02982575 0.02953936
 0.02935168]
------------------------------
Epoch: 185
Training loss: 0.027296889776833297 | Validation loss: 0.03400973833899582
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853 0.0313888  0.03096634 0.03078324 0.03037098 0.03047298
 0.03008189 0.03006741 0.02965847 0.02966954 0.02982575 0.02953936
 0.02935168]
------------------------------
Epoch: 186
Training loss: 0.026026336777786627 | Validation loss: 0.03210558142282267
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853 0.0313888  0.03096634 0.03078324 0.03037098 0.03047298
 0.03008189 0.03006741 0.02965847 0.02966954 0.02982575 0.02953936
 0.02935168]
------------------------------
Epoch: 187
Training loss: 0.025013451092311834 | Validation loss: 0.03260836849170449
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853 0.0313888  0.03096634 0.03078324 0.03037098 0.03047298
 0.03008189 0.03006741 0.02965847 0.02966954 0.02982575 0.02953936
 0.02935168]
------------------------------
Epoch: 188
Training loss: 0.02365047061141199 | Validation loss: 0.03127120152544395
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853 0.0313888  0.03096634 0.03078324 0.03037098 0.03047298
 0.03008189 0.03006741 0.02965847 0.02966954 0.02982575 0.02953936
 0.02935168]
------------------------------
Epoch: 189
Training loss: 0.022748118945705432 | Validation loss: 0.030008847141160373
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853 0.0313888  0.03096634 0.03078324 0.03037098 0.03047298
 0.03008189 0.03006741 0.02965847 0.02966954 0.02982575 0.02953936
 0.02935168]
------------------------------
Epoch: 190
Training loss: 0.022331331235972623 | Validation loss: 0.029065387826248082
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853 0.0313888  0.03096634 0.03078324 0.03037098 0.03047298
 0.03008189 0.03006741 0.02965847 0.02966954 0.02982575 0.02953936
 0.02935168 0.02906539]
------------------------------
Epoch: 191
Training loss: 0.022117540770114994 | Validation loss: 0.029946955860452314
[... output for epochs 192-199 truncated ...]
------------------------------
Epoch: 200
Training loss: 0.022008079754951313 | Validation loss: 0.029146107779074032
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
 0.03203853 0.0313888  0.03096634 0.03078324 0.03037098 0.03047298
 0.03008189 0.03006741 0.02965847 0.02966954 0.02982575 0.02953936
 0.02935168 0.02906539 0.02914611]
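
With mode='triangular', step_size_up=5 and the scheduler stepped once per epoch, the learning rate climbs for 5 epochs and descends for 5, so one full cycle spans 10 epochs; that is why the "Validation loss (ends of cycles)" array gains one entry at epochs 0, 10, 20, and so on. A minimal, self-contained sketch of that bookkeeping (a toy linear model and random data stand in for the CNN and the DataLoaders; this illustrates the scheduler mechanics only, not the mirzai Learner itself):

import numpy as np
import torch
from torch import nn
from torch.optim import Adam
from torch.optim.lr_scheduler import CyclicLR

torch.manual_seed(0)
X = torch.randn(256, 10)   # toy stand-ins for the spectra and targets
y = torch.randn(256, 1)
model = nn.Linear(10, 1)
loss_fn = nn.MSELoss()
optimizer = Adam(model.parameters())
scheduler = CyclicLR(optimizer, base_lr=3e-5, max_lr=1e-3,
                     step_size_up=5, mode='triangular',
                     cycle_momentum=False)

cycle_len = 2 * 5  # step_size_up + step_size_down: the LR returns to base_lr every 10 steps
losses_at_cycle_ends = []
for epoch in range(21):
    optimizer.zero_grad()
    loss = loss_fn(model(X), y)  # training loss used here in place of a validation pass
    loss.backward()
    optimizer.step()
    scheduler.step()  # stepped once per epoch, so one LR cycle = 10 epochs
    if epoch % cycle_len == 0:
        losses_at_cycle_ends.append(loss.item())
print(np.array(losses_at_cycle_ends))  # one entry per completed cycle, as in the log above
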
--------------------------------------------------------------------------------
Seed: 16
--------------------------------------------------------------------------------
------------------------------
Epoch: 0
Training loss: 0.2737787661369978 | Validation loss: 0.20023219089592453
Validation loss (ends of cycles): [0.20023219]
[... output for epochs 1-199 truncated; the cycle-end validation losses accumulate in the array below ...]
------------------------------
Epoch: 200
Training loss: 0.020685331293704413 | Validation loss: 0.029657005612631286
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
 0.03248945 0.03172997 0.03149043 0.03084426 0.03091098 0.03058768
 0.03025069 0.03034323 0.03005178 0.02991048 0.02968777 0.02954729
 0.0296039  0.02960559 0.02965701]
Early stopping!
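
The run for seed 16 stops once the cycle-end validation loss plateaus: the last three entries of the array above (0.0296039, 0.02960559, 0.02965701) never improve on the 0.02954729 reached at the end of the epoch-170 cycle. A patience-style rule over the cycle-end losses reproduces this behaviour; note that the patience value of 3 and the exact comparison are assumptions for illustration, not necessarily the criterion implemented in the mirzai Learner:

def should_stop(cycle_end_losses, patience=3):
    """Hypothetical early-stopping rule: stop when the last `patience`
    cycle-end validation losses all fail to improve on the best loss
    seen before them. An assumption, not the documented mirzai rule."""
    if len(cycle_end_losses) <= patience:
        return False
    best_so_far = min(cycle_end_losses[:-patience])
    return all(loss >= best_so_far for loss in cycle_end_losses[-patience:])

# The cycle-end losses from the seed-16 run above:
losses = [0.20023219, 0.04559225, 0.03990888, 0.03666433, 0.03473438,
          0.03342017, 0.03248945, 0.03172997, 0.03149043, 0.03084426,
          0.03091098, 0.03058768, 0.03025069, 0.03034323, 0.03005178,
          0.02991048, 0.02968777, 0.02954729, 0.0296039, 0.02960559,
          0.02965701]
print(should_stop(losses))  # True: the last three entries never beat 0.02954729

Under the same rule the seed-15 run above would not trigger, since its epoch-190 cycle end (0.02906539) still improved on every earlier value, which matches the absence of an early-stopping message there.
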
--------------------------------------------------------------------------------
Seed: 17
--------------------------------------------------------------------------------
------------------------------
Epoch: 0
Training loss: 0.19528153301839987 | Validation loss: 0.15108045474090406
Validation loss (ends of cycles): [0.15108045]
[... output for epochs 1-31 truncated ...]
------------------------------
Epoch: 32
Training loss: 0.03958883121752686 | Validation loss: 0.03590556875332794
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358]
------------------------------
Epoch: 33
Training loss: 0.04092563144536322 | Validation loss: 0.03797215149136244
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358]
------------------------------
Epoch: 34
Training loss: 0.042431336150167316 | Validation loss: 0.03862651510048756
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358]
------------------------------
Epoch: 35
Training loss: 0.04385021059612531 | Validation loss: 0.04650221689216859
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358]
------------------------------
Epoch: 36
Training loss: 0.042333089369789176 | Validation loss: 0.04138898033549828
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358]
------------------------------
Epoch: 37
Training loss: 0.040143472958341475 | Validation loss: 0.04083230859080775
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358]
------------------------------
Epoch: 38
Training loss: 0.038212460662810296 | Validation loss: 0.03604175413307627
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358]
------------------------------
Epoch: 39
Training loss: 0.03621550740795137 | Validation loss: 0.03490787063750018
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358]
------------------------------
Epoch: 40
Training loss: 0.0347418464531418 | Validation loss: 0.033902818650270984
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282]
------------------------------
Epoch: 41
Training loss: 0.03551816663844144 | Validation loss: 0.0339503469918154
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282]
------------------------------
Epoch: 42
Training loss: 0.03664073076623484 | Validation loss: 0.036111429589182405
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282]
------------------------------
Epoch: 43
Training loss: 0.03794130406940106 | Validation loss: 0.038581867809448625
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282]
------------------------------
Epoch: 44
Training loss: 0.039544879462511284 | Validation loss: 0.03753844731017551
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282]
------------------------------
Epoch: 45
Training loss: 0.04120968023817399 | Validation loss: 0.040164445156017234
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282]
------------------------------
Epoch: 46
Training loss: 0.03963290285832417 | Validation loss: 0.03767492653455882
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282]
------------------------------
Epoch: 47
Training loss: 0.037655649592139295 | Validation loss: 0.03940686863739934
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282]
------------------------------
Epoch: 48
Training loss: 0.0360215248523544 | Validation loss: 0.03606964662605155
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282]
------------------------------
Epoch: 49
Training loss: 0.03379709184294435 | Validation loss: 0.03300323327426362
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282]
------------------------------
Epoch: 50
Training loss: 0.032713453326645624 | Validation loss: 0.032592986331247124
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299]
------------------------------
Epoch: 51
Training loss: 0.03327064899779035 | Validation loss: 0.0332814001074407
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299]
------------------------------
Epoch: 52
Training loss: 0.03456818787784382 | Validation loss: 0.03814794470976412
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299]
------------------------------
Epoch: 53
Training loss: 0.035899267839011186 | Validation loss: 0.03423762608286554
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299]
------------------------------
Epoch: 54
Training loss: 0.03718895989829513 | Validation loss: 0.037633575235320404
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299]
------------------------------
Epoch: 55
Training loss: 0.038761256454241146 | Validation loss: 0.0388596230145313
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299]
------------------------------
Epoch: 56
Training loss: 0.03721329200770852 | Validation loss: 0.03893185608023036
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299]
------------------------------
Epoch: 57
Training loss: 0.035715234900392065 | Validation loss: 0.03628284927556472
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299]
------------------------------
Epoch: 58
Training loss: 0.03370676121645145 | Validation loss: 0.033211706354554775
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299]
------------------------------
Epoch: 59
Training loss: 0.0323047479999611 | Validation loss: 0.03253601488978725
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299]
------------------------------
Epoch: 60
Training loss: 0.030942986885225034 | Validation loss: 0.03169571242369382
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571]
------------------------------
Epoch: 61
Training loss: 0.03151171489281389 | Validation loss: 0.03195847763754098
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571]
------------------------------
Epoch: 62
Training loss: 0.0326535613743559 | Validation loss: 0.03367336982020498
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571]
------------------------------
Epoch: 63
Training loss: 0.03397060671223577 | Validation loss: 0.0392299928265599
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571]
------------------------------
Epoch: 64
Training loss: 0.03557851326406208 | Validation loss: 0.036114410315928734
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571]
------------------------------
Epoch: 65
Training loss: 0.036995837769258445 | Validation loss: 0.046228273697527106
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571]
------------------------------
Epoch: 66
Training loss: 0.03573583279219346 | Validation loss: 0.035999696652314304
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571]
------------------------------
Epoch: 67
Training loss: 0.03398162927240221 | Validation loss: 0.033870757285472564
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571]
------------------------------
Epoch: 68
Training loss: 0.032125317916919395 | Validation loss: 0.03275708983594601
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571]
------------------------------
Epoch: 69
Training loss: 0.030659903030600545 | Validation loss: 0.030907584725162095
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571]
------------------------------
Epoch: 70
Training loss: 0.029677720091445006 | Validation loss: 0.030892113479167486
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571 0.03089211]
------------------------------
Epoch: 71
Training loss: 0.030018180326419317 | Validation loss: 0.030626152551411528
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571 0.03089211]
------------------------------
Epoch: 72
Training loss: 0.03106435915953883 | Validation loss: 0.033222900274976166
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571 0.03089211]
------------------------------
Epoch: 73
Training loss: 0.032380191656929534 | Validation loss: 0.03328017588803726
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571 0.03089211]
------------------------------
Epoch: 74
Training loss: 0.03383174836525591 | Validation loss: 0.03539824518745979
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571 0.03089211]
------------------------------
Epoch: 75
Training loss: 0.03549576116777135 | Validation loss: 0.03629769839807949
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571 0.03089211]
------------------------------
Epoch: 76
Training loss: 0.034087463657877695 | Validation loss: 0.03376856591321726
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571 0.03089211]
------------------------------
Epoch: 77
Training loss: 0.03234960998346164 | Validation loss: 0.03851411953172852
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571 0.03089211]
------------------------------
Epoch: 78
Training loss: 0.03068069219369236 | Validation loss: 0.0314846167884833
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571 0.03089211]
------------------------------
Epoch: 79
Training loss: 0.029369526592222608 | Validation loss: 0.030833860068062765
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571 0.03089211]
------------------------------
Epoch: 80
Training loss: 0.02836777383780591 | Validation loss: 0.030553860598103662
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571 0.03089211 0.03055386]
------------------------------
Epoch: 81
Training loss: 0.028889779986879662 | Validation loss: 0.031743324707896835
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571 0.03089211 0.03055386]
------------------------------
Epoch: 82
Training loss: 0.029690888467406137 | Validation loss: 0.031142891442353746
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571 0.03089211 0.03055386]
------------------------------
Epoch: 83
Training loss: 0.031053277377889852 | Validation loss: 0.03351418073165469
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571 0.03089211 0.03055386]
------------------------------
Epoch: 84
Training loss: 0.03267707209850801 | Validation loss: 0.042135526879435094
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571 0.03089211 0.03055386]
------------------------------
Epoch: 85
Training loss: 0.03411234047458192 | Validation loss: 0.0370925584132165
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571 0.03089211 0.03055386]
------------------------------
Epoch: 86
Training loss: 0.03266451155994175 | Validation loss: 0.03274107290909881
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571 0.03089211 0.03055386]
------------------------------
Epoch: 87
Training loss: 0.031198181013034027 | Validation loss: 0.0340254512013851
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571 0.03089211 0.03055386]
------------------------------
Epoch: 88
Training loss: 0.029595627138706466 | Validation loss: 0.031916605308651924
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571 0.03089211 0.03055386]
------------------------------
Epoch: 89
Training loss: 0.028043366407638225 | Validation loss: 0.030369010976458017
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571 0.03089211 0.03055386]
------------------------------
Epoch: 90
Training loss: 0.027346802133824823 | Validation loss: 0.030075445589897908
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571 0.03089211 0.03055386 0.03007545]
------------------------------
Epoch: 91
Training loss: 0.027608434050013935 | Validation loss: 0.030171571629105415
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571 0.03089211 0.03055386 0.03007545]
------------------------------
Epoch: 92
Training loss: 0.028327987452338294 | Validation loss: 0.03024076547605538
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571 0.03089211 0.03055386 0.03007545]
------------------------------
Epoch: 93
Training loss: 0.029624219871984107 | Validation loss: 0.03404002234471583
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571 0.03089211 0.03055386 0.03007545]
------------------------------
Epoch: 94
Training loss: 0.03148827917679528 | Validation loss: 0.03371854062167417
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571 0.03089211 0.03055386 0.03007545]
------------------------------
Epoch: 95
Training loss: 0.03307377927529618 | Validation loss: 0.04036632132411531
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571 0.03089211 0.03055386 0.03007545]
------------------------------
Epoch: 96
Training loss: 0.031799388683665046 | Validation loss: 0.03298156123311646
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571 0.03089211 0.03055386 0.03007545]
------------------------------
Epoch: 97
Training loss: 0.03007377838939575 | Validation loss: 0.033205109755549814
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571 0.03089211 0.03055386 0.03007545]
------------------------------
Epoch: 98
Training loss: 0.028580836875650183 | Validation loss: 0.030641627497971058
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571 0.03089211 0.03055386 0.03007545]
------------------------------
Epoch: 99
Training loss: 0.027427082016008106 | Validation loss: 0.0313515687625097
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571 0.03089211 0.03055386 0.03007545]
------------------------------
Epoch: 100
Training loss: 0.026483976918338672 | Validation loss: 0.030348778084184215
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571 0.03089211 0.03055386 0.03007545 0.03034878]
------------------------------
Epoch: 101
Training loss: 0.026674966022599576 | Validation loss: 0.03149610185023166
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571 0.03089211 0.03055386 0.03007545 0.03034878]
------------------------------
Epoch: 102
Training loss: 0.02751217969953574 | Validation loss: 0.03084385859887157
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571 0.03089211 0.03055386 0.03007545 0.03034878]
------------------------------
Epoch: 103
Training loss: 0.028731230297125876 | Validation loss: 0.031040501080255592
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571 0.03089211 0.03055386 0.03007545 0.03034878]
------------------------------
Epoch: 104
Training loss: 0.030062061833496405 | Validation loss: 0.03604654427123281
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571 0.03089211 0.03055386 0.03007545 0.03034878]
------------------------------
Epoch: 105
Training loss: 0.0318882070439611 | Validation loss: 0.039417957194742906
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571 0.03089211 0.03055386 0.03007545 0.03034878]
------------------------------
Epoch: 106
Training loss: 0.030514564989391334 | Validation loss: 0.03316041232144411
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571 0.03089211 0.03055386 0.03007545 0.03034878]
------------------------------
Epoch: 107
Training loss: 0.028916336958091267 | Validation loss: 0.035398528771062865
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571 0.03089211 0.03055386 0.03007545 0.03034878]
------------------------------
Epoch: 108
Training loss: 0.02764455142969955 | Validation loss: 0.031008046623154553
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571 0.03089211 0.03055386 0.03007545 0.03034878]
------------------------------
Epoch: 109
Training loss: 0.02622884780894525 | Validation loss: 0.031359491927499795
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571 0.03089211 0.03055386 0.03007545 0.03034878]
------------------------------
Epoch: 110
Training loss: 0.02562284080117325 | Validation loss: 0.030416654530022524
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665]
------------------------------
Epoch: 111
Training loss: 0.02577647100859065 | Validation loss: 0.03090014162811294
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665]
------------------------------
Epoch: 112
Training loss: 0.026568011741894555 | Validation loss: 0.030865917913615704
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665]
------------------------------
Epoch: 113
Training loss: 0.028003387864473768 | Validation loss: 0.03234168109937315
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665]
------------------------------
Epoch: 114
Training loss: 0.029407237495999695 | Validation loss: 0.032332020902396306
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665]
------------------------------
Epoch: 115
Training loss: 0.03110336661650469 | Validation loss: 0.03286148103333152
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665]
------------------------------
Epoch: 116
Training loss: 0.02993302761877733 | Validation loss: 0.035776853462499855
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665]
------------------------------
Epoch: 117
Training loss: 0.02837571760045555 | Validation loss: 0.03131979187966975
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665]
------------------------------
Epoch: 118
Training loss: 0.02665195077214038 | Validation loss: 0.031021781488264028
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665]
------------------------------
Epoch: 119
Training loss: 0.02563444459370858 | Validation loss: 0.030956934082560835
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665]
------------------------------
Epoch: 120
Training loss: 0.024944440375529522 | Validation loss: 0.030078876598746376
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
 0.03007888]
------------------------------
Epoch: 121
Training loss: 0.02519254589051844 | Validation loss: 0.03037429168259939
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
 0.03007888]
------------------------------
Epoch: 122
Training loss: 0.02574926438213392 | Validation loss: 0.030591894485122336
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
 0.03007888]
------------------------------
Epoch: 123
Training loss: 0.027375541950070012 | Validation loss: 0.03259252623316988
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
 0.03007888]
------------------------------
Epoch: 124
Training loss: 0.028651660226659454 | Validation loss: 0.03243902798709089
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
 0.03007888]
------------------------------
Epoch: 125
Training loss: 0.030220652338278694 | Validation loss: 0.03472108380720679
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
 0.03007888]
------------------------------
Epoch: 126
Training loss: 0.02893120789346762 | Validation loss: 0.0334128510635511
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
 0.03007888]
------------------------------
Epoch: 127
Training loss: 0.02742837137619664 | Validation loss: 0.03295834002340526
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
 0.03007888]
------------------------------
Epoch: 128
Training loss: 0.02586745542719755 | Validation loss: 0.03359922730421598
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
 0.03007888]
------------------------------
Epoch: 129
Training loss: 0.02518866923601022 | Validation loss: 0.030972652326840742
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
 0.03007888]
------------------------------
Epoch: 130
Training loss: 0.024370399135069585 | Validation loss: 0.029321946928986407
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
 0.03007888 0.02932195]
------------------------------
Epoch: 131
Training loss: 0.024452643489142455 | Validation loss: 0.03170669283223363
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
 0.03007888 0.02932195]
------------------------------
Epoch: 132
Training loss: 0.02514903414646149 | Validation loss: 0.02934865020545183
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
 0.03007888 0.02932195]
------------------------------
Epoch: 133
Training loss: 0.026473972576076355 | Validation loss: 0.03256165103541803
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
 0.03007888 0.02932195]
------------------------------
Epoch: 134
Training loss: 0.027980016625126985 | Validation loss: 0.037934470789886154
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
 0.03007888 0.02932195]
------------------------------
Epoch: 135
Training loss: 0.02950241145545866 | Validation loss: 0.033272428256748
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
 0.03007888 0.02932195]
------------------------------
Epoch: 136
Training loss: 0.02817217055726503 | Validation loss: 0.03544298366569312
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
 0.03007888 0.02932195]
------------------------------
Epoch: 137
Training loss: 0.02676052859971505 | Validation loss: 0.03308902620708784
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
 0.03007888 0.02932195]
------------------------------
Epoch: 138
Training loss: 0.025434287322692456 | Validation loss: 0.0314219071554531
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
 0.03007888 0.02932195]
------------------------------
Epoch: 139
Training loss: 0.024347204041271286 | Validation loss: 0.03068882338208171
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
 0.03007888 0.02932195]
------------------------------
Epoch: 140
Training loss: 0.023864611201391623 | Validation loss: 0.029601984030970956
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
 0.03007888 0.02932195 0.02960198]
------------------------------
Epoch: 141
Training loss: 0.023795595388918175 | Validation loss: 0.032649262017051206
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
 0.03007888 0.02932195 0.02960198]
------------------------------
Epoch: 142
Training loss: 0.0245595584966345 | Validation loss: 0.03269626537288448
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
 0.03007888 0.02932195 0.02960198]
------------------------------
Epoch: 143
Training loss: 0.025921545380095796 | Validation loss: 0.03559081084136151
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
 0.03007888 0.02932195 0.02960198]
------------------------------
Epoch: 144
Training loss: 0.02733462275735535 | Validation loss: 0.033264496363699436
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
 0.03007888 0.02932195 0.02960198]
------------------------------
Epoch: 145
Training loss: 0.029037256825964634 | Validation loss: 0.0364198326833744
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
 0.03007888 0.02932195 0.02960198]
------------------------------
Epoch: 146
Training loss: 0.027521122688808897 | Validation loss: 0.03114709215222207
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
 0.03007888 0.02932195 0.02960198]
------------------------------
Epoch: 147
Training loss: 0.026036638752777334 | Validation loss: 0.03610302006776354
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
 0.03007888 0.02932195 0.02960198]
------------------------------
Epoch: 148
Training loss: 0.024805288894769302 | Validation loss: 0.030778389053204947
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
 0.03007888 0.02932195 0.02960198]
------------------------------
Epoch: 149
Training loss: 0.023793241487610646 | Validation loss: 0.030905219281207673
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
 0.03007888 0.02932195 0.02960198]
------------------------------
Epoch: 150
Training loss: 0.02341242203824442 | Validation loss: 0.029345920012719864
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
 0.03007888 0.02932195 0.02960198 0.02934592]
------------------------------
Epoch: 151
Training loss: 0.023260569254919067 | Validation loss: 0.030717732623812898
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
 0.03007888 0.02932195 0.02960198 0.02934592]
------------------------------
Epoch: 152
Training loss: 0.023884564654637626 | Validation loss: 0.03010071575872402
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
 0.03007888 0.02932195 0.02960198 0.02934592]
------------------------------
Epoch: 153
Training loss: 0.025170819309547426 | Validation loss: 0.029870342207759356
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
 0.03007888 0.02932195 0.02960198 0.02934592]
------------------------------
Epoch: 154
Training loss: 0.026484384465463987 | Validation loss: 0.04114546122408546
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
 0.03007888 0.02932195 0.02960198 0.02934592]
------------------------------
Epoch: 155
Training loss: 0.028278595922830125 | Validation loss: 0.037228376563407675
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
 0.03007888 0.02932195 0.02960198 0.02934592]
------------------------------
Epoch: 156
Training loss: 0.026891657869023543 | Validation loss: 0.035947980639417614
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
 0.03007888 0.02932195 0.02960198 0.02934592]
------------------------------
Epoch: 157
Training loss: 0.025646816195707446 | Validation loss: 0.03072655048485087
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
 0.03007888 0.02932195 0.02960198 0.02934592]
------------------------------
Epoch: 158
Training loss: 0.024305647449003254 | Validation loss: 0.03094838262922996
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
 0.03007888 0.02932195 0.02960198 0.02934592]
------------------------------
Epoch: 159
Training loss: 0.0235144635589104 | Validation loss: 0.03033747435011695
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
 0.03007888 0.02932195 0.02960198 0.02934592]
------------------------------
Epoch: 160
Training loss: 0.02274166181863205 | Validation loss: 0.028855146320981788
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
 0.03007888 0.02932195 0.02960198 0.02934592 0.02885515]
------------------------------
Epoch: 161
Training loss: 0.022850188836418678 | Validation loss: 0.030827936905938966
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
 0.03007888 0.02932195 0.02960198 0.02934592 0.02885515]
------------------------------
Epoch: 162
Training loss: 0.023490403991056914 | Validation loss: 0.03056321562622237
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
 0.03007888 0.02932195 0.02960198 0.02934592 0.02885515]
------------------------------
Epoch: 163
Training loss: 0.024536157205634877 | Validation loss: 0.030726340179026656
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
 0.03007888 0.02932195 0.02960198 0.02934592 0.02885515]
------------------------------
Epoch: 164
Training loss: 0.025968177275233498 | Validation loss: 0.03182310807164264
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
 0.03007888 0.02932195 0.02960198 0.02934592 0.02885515]
------------------------------
Epoch: 165
Training loss: 0.02745464058156587 | Validation loss: 0.03240670689043745
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
 0.03007888 0.02932195 0.02960198 0.02934592 0.02885515]
------------------------------
Epoch: 166
Training loss: 0.02638791023944248 | Validation loss: 0.03207409606867396
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
 0.03007888 0.02932195 0.02960198 0.02934592 0.02885515]
------------------------------
Epoch: 167
Training loss: 0.025040658959004237 | Validation loss: 0.03346892037486608
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
 0.03007888 0.02932195 0.02960198 0.02934592 0.02885515]
------------------------------
Epoch: 168
Training loss: 0.023840111028586373 | Validation loss: 0.03250710043924308
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
 0.03007888 0.02932195 0.02960198 0.02934592 0.02885515]
------------------------------
Epoch: 169
Training loss: 0.022957657927676567 | Validation loss: 0.03261066463866592
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
 0.03007888 0.02932195 0.02960198 0.02934592 0.02885515]
------------------------------
Epoch: 170
Training loss: 0.02253573403536071 | Validation loss: 0.028775880929181534
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
 0.03007888 0.02932195 0.02960198 0.02934592 0.02885515 0.02877588]
------------------------------
Epoch: 171
Training loss: 0.022440487484990317 | Validation loss: 0.030062347749429466
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
 0.03007888 0.02932195 0.02960198 0.02934592 0.02885515 0.02877588]
------------------------------
Epoch: 172
Training loss: 0.022867488988315848 | Validation loss: 0.03976280561986223
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
 0.03007888 0.02932195 0.02960198 0.02934592 0.02885515 0.02877588]
------------------------------
Epoch: 173
Training loss: 0.02382546374629303 | Validation loss: 0.03142198853491418
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
 0.03007888 0.02932195 0.02960198 0.02934592 0.02885515 0.02877588]
------------------------------
Epoch: 174
Training loss: 0.025262574162004208 | Validation loss: 0.03314230741059358
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
 0.03007888 0.02932195 0.02960198 0.02934592 0.02885515 0.02877588]
------------------------------
Epoch: 175
Training loss: 0.02710123380965106 | Validation loss: 0.03711360927044818
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
 0.03007888 0.02932195 0.02960198 0.02934592 0.02885515 0.02877588]
------------------------------
Epoch: 176
Training loss: 0.025813494960220135 | Validation loss: 0.034733611777161076
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
 0.03007888 0.02932195 0.02960198 0.02934592 0.02885515 0.02877588]
------------------------------
Epoch: 177
Training loss: 0.0244471754999758 | Validation loss: 0.030967152215936017
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
 0.03007888 0.02932195 0.02960198 0.02934592 0.02885515 0.02877588]
------------------------------
Epoch: 178
Training loss: 0.023251139880598 | Validation loss: 0.03004695099275724
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
 0.03007888 0.02932195 0.02960198 0.02934592 0.02885515 0.02877588]
------------------------------
Epoch: 179
Training loss: 0.0224692894349344 | Validation loss: 0.03162622818542001
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
 0.03007888 0.02932195 0.02960198 0.02934592 0.02885515 0.02877588]
------------------------------
Epoch: 180
Training loss: 0.021959586049738068 | Validation loss: 0.0287618160874179
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
 0.03007888 0.02932195 0.02960198 0.02934592 0.02885515 0.02877588
 0.02876182]
------------------------------
Epoch: 181
Training loss: 0.021884380696766723 | Validation loss: 0.03256968013216964
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
 0.03007888 0.02932195 0.02960198 0.02934592 0.02885515 0.02877588
 0.02876182]
------------------------------
Epoch: 182
Training loss: 0.02237591364670281 | Validation loss: 0.03341920127416343
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
 0.03007888 0.02932195 0.02960198 0.02934592 0.02885515 0.02877588
 0.02876182]
------------------------------
Epoch: 183
Training loss: 0.023713711671179204 | Validation loss: 0.029729280975210454
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
 0.03007888 0.02932195 0.02960198 0.02934592 0.02885515 0.02877588
 0.02876182]
------------------------------
Epoch: 184
Training loss: 0.025079944019763194 | Validation loss: 0.03119673509051842
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
 0.03007888 0.02932195 0.02960198 0.02934592 0.02885515 0.02877588
 0.02876182]
------------------------------
Epoch: 185
Training loss: 0.02635635260866559 | Validation loss: 0.03225741138170778
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
 0.03007888 0.02932195 0.02960198 0.02934592 0.02885515 0.02877588
 0.02876182]
------------------------------
Epoch: 186
Training loss: 0.025441277809771085 | Validation loss: 0.03134904677394481
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
 0.03007888 0.02932195 0.02960198 0.02934592 0.02885515 0.02877588
 0.02876182]
------------------------------
Epoch: 187
Training loss: 0.024010141896208616 | Validation loss: 0.031274090043013604
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
 0.03007888 0.02932195 0.02960198 0.02934592 0.02885515 0.02877588
 0.02876182]
------------------------------
Epoch: 188
Training loss: 0.023025057314552264 | Validation loss: 0.03273896375193005
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
 0.03007888 0.02932195 0.02960198 0.02934592 0.02885515 0.02877588
 0.02876182]
------------------------------
Epoch: 189
Training loss: 0.022124788933800255 | Validation loss: 0.03352637900517578
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
 0.03007888 0.02932195 0.02960198 0.02934592 0.02885515 0.02877588
 0.02876182]
------------------------------
Epoch: 190
Training loss: 0.02186805220626984 | Validation loss: 0.029281317882767294
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568  0.03534358 0.03390282 0.03259299
 0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
 0.03007888 0.02932195 0.02960198 0.02934592 0.02885515 0.02877588
 0.02876182 0.02928132]
Early stopping!
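
Seed 17 stops early at epoch 190: the final cycle-end validation loss (0.02928) rises above the best value reached in the preceding cycles (0.02876). A quick way to see this plateau is to plot the cycle-end losses, copied verbatim from the printed array above (a minimal sketch, assuming matplotlib is available):

import numpy as np
import matplotlib.pyplot as plt

# Cycle-end validation losses for seed 17, copied from the log above
cycle_end = np.array([0.15108045, 0.04201392, 0.0373568 , 0.03534358, 0.03390282,
                      0.03259299, 0.03169571, 0.03089211, 0.03055386, 0.03007545,
                      0.03034878, 0.03041665, 0.03007888, 0.02932195, 0.02960198,
                      0.02934592, 0.02885515, 0.02877588, 0.02876182, 0.02928132])
epochs = np.arange(len(cycle_end)) * 10  # one cycle end every 10 epochs

plt.plot(epochs, cycle_end, marker='o')
plt.xlabel('Epoch (end of learning-rate cycle)')
plt.ylabel('Validation loss (MSE)')
plt.title('Seed 17: validation loss at cycle ends')
plt.show()
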
--------------------------------------------------------------------------------
Seed: 18
--------------------------------------------------------------------------------
------------------------------
Epoch: 0
Training loss: 0.14745687956987755 | Validation loss: 0.11181736810017476
Validation loss (ends of cycles): [0.11181737]
------------------------------
Epoch: 1
Training loss: 0.09623743342162352 | Validation loss: 0.08580441937773628
Validation loss (ends of cycles): [0.11181737]
------------------------------
Epoch: 2
Training loss: 0.08750657575556964 | Validation loss: 0.08224849567740364
Validation loss (ends of cycles): [0.11181737]
------------------------------
Epoch: 3
Training loss: 0.08265806757294991 | Validation loss: 0.07812433627195063
Validation loss (ends of cycles): [0.11181737]
------------------------------
Epoch: 4
Training loss: 0.07909638743011618 | Validation loss: 0.10523036336608693
Validation loss (ends of cycles): [0.11181737]
------------------------------
Epoch: 5
Training loss: 0.07519952726997728 | Validation loss: 0.07433472433646696
Validation loss (ends of cycles): [0.11181737]
------------------------------
Epoch: 6
Training loss: 0.06963092798191145 | Validation loss: 0.0681168861621249
Validation loss (ends of cycles): [0.11181737]
------------------------------
Epoch: 7
Training loss: 0.0647309234413487 | Validation loss: 0.061909659196977065
Validation loss (ends of cycles): [0.11181737]
------------------------------
Epoch: 8
Training loss: 0.0603061709351339 | Validation loss: 0.06635272014985043
Validation loss (ends of cycles): [0.11181737]
------------------------------
Epoch: 9
Training loss: 0.056970927845980006 | Validation loss: 0.053171325606846176
Validation loss (ends of cycles): [0.11181737]
------------------------------
Epoch: 10
Training loss: 0.054451444431934067 | Validation loss: 0.050428496311064316
Validation loss (ends of cycles): [0.11181737 0.0504285 ]
------------------------------
Epoch: 11
Training loss: 0.05530370387989353 | Validation loss: 0.05998936325179792
Validation loss (ends of cycles): [0.11181737 0.0504285 ]
------------------------------
Epoch: 12
Training loss: 0.0562868577292497 | Validation loss: 0.08638219957330585
Validation loss (ends of cycles): [0.11181737 0.0504285 ]
------------------------------
Epoch: 13
Training loss: 0.0572645927277049 | Validation loss: 0.06002994229506075
Validation loss (ends of cycles): [0.11181737 0.0504285 ]
------------------------------
Epoch: 14
Training loss: 0.05791082459604588 | Validation loss: 0.07916613048420543
Validation loss (ends of cycles): [0.11181737 0.0504285 ]
------------------------------
Epoch: 15
Training loss: 0.058337017147633154 | Validation loss: 0.06136379625021884
Validation loss (ends of cycles): [0.11181737 0.0504285 ]
------------------------------
Epoch: 16
Training loss: 0.0556387303206395 | Validation loss: 0.05869993445488204
Validation loss (ends of cycles): [0.11181737 0.0504285 ]
------------------------------
Epoch: 17
Training loss: 0.05294741382895727 | Validation loss: 0.05184250018369835
Validation loss (ends of cycles): [0.11181737 0.0504285 ]
------------------------------
Epoch: 18
Training loss: 0.05023024163912894 | Validation loss: 0.05074060479162541
Validation loss (ends of cycles): [0.11181737 0.0504285 ]
------------------------------
Epoch: 19
Training loss: 0.047944747648517215 | Validation loss: 0.04994288424804675
Validation loss (ends of cycles): [0.11181737 0.0504285 ]
------------------------------
Epoch: 20
Training loss: 0.045913187161777315 | Validation loss: 0.04389677417621148
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677]
------------------------------
Epoch: 21
Training loss: 0.047116746218493724 | Validation loss: 0.04913257856943966
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677]
------------------------------
Epoch: 22
Training loss: 0.048278160490403614 | Validation loss: 0.04979463033707796
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677]
------------------------------
Epoch: 23
Training loss: 0.04970042983906006 | Validation loss: 0.05419276432191904
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677]
------------------------------
Epoch: 24
Training loss: 0.05063182357918677 | Validation loss: 0.05380100643146882
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677]
------------------------------
Epoch: 25
Training loss: 0.05186005225991757 | Validation loss: 0.05329234860178116
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677]
------------------------------
Epoch: 26
Training loss: 0.04985330287572436 | Validation loss: 0.054778525721182865
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677]
------------------------------
Epoch: 27
Training loss: 0.047700814294122804 | Validation loss: 0.04927220066959879
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677]
------------------------------
Epoch: 28
Training loss: 0.04578730350316275 | Validation loss: 0.04668996552318598
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677]
------------------------------
Epoch: 29
Training loss: 0.04332482842838494 | Validation loss: 0.04906922478261774
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677]
------------------------------
Epoch: 30
Training loss: 0.04196088215130873 | Validation loss: 0.04077261508302351
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262]
------------------------------
Epoch: 31
Training loss: 0.042725179914575744 | Validation loss: 0.04192748965810886
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262]
------------------------------
Epoch: 32
Training loss: 0.043890829696723324 | Validation loss: 0.0423654904506639
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262]
------------------------------
Epoch: 33
Training loss: 0.04479648191188499 | Validation loss: 0.045411624986909135
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262]
------------------------------
Epoch: 34
Training loss: 0.046417209704765885 | Validation loss: 0.04752046215982564
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262]
------------------------------
Epoch: 35
Training loss: 0.04789944863548194 | Validation loss: 0.04866404211626644
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262]
------------------------------
Epoch: 36
Training loss: 0.04611892869886686 | Validation loss: 0.049108236747901
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262]
------------------------------
Epoch: 37
Training loss: 0.044292686737186504 | Validation loss: 0.049626477540726154
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262]
------------------------------
Epoch: 38
Training loss: 0.042089813981043896 | Validation loss: 0.05028616577650594
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262]
------------------------------
Epoch: 39
Training loss: 0.040471516651336074 | Validation loss: 0.04539883885104044
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262]
------------------------------
Epoch: 40
Training loss: 0.038953275320772814 | Validation loss: 0.038260011332093086
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001]
------------------------------
Epoch: 41
Training loss: 0.039753880176700535 | Validation loss: 0.03953419919166945
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001]
------------------------------
Epoch: 42
Training loss: 0.04075378145267233 | Validation loss: 0.04775480268341777
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001]
------------------------------
Epoch: 43
Training loss: 0.04191550611716321 | Validation loss: 0.05315274405664047
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001]
------------------------------
Epoch: 44
Training loss: 0.04337615864333059 | Validation loss: 0.04678723331441922
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001]
------------------------------
Epoch: 45
Training loss: 0.0448561115373718 | Validation loss: 0.04518167654761171
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001]
------------------------------
Epoch: 46
Training loss: 0.04328001322276069 | Validation loss: 0.04366327006270928
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001]
------------------------------
Epoch: 47
Training loss: 0.04148955256348168 | Validation loss: 0.04564844665274156
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001]
------------------------------
Epoch: 48
Training loss: 0.03969575352251794 | Validation loss: 0.04270295615810736
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001]
------------------------------
Epoch: 49
Training loss: 0.03778248318328016 | Validation loss: 0.045216823161571426
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001]
------------------------------
Epoch: 50
Training loss: 0.036728900827020114 | Validation loss: 0.036400728900216325
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073]
------------------------------
Epoch: 51
Training loss: 0.037182638656318656 | Validation loss: 0.039920183492049705
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073]
------------------------------
Epoch: 52
Training loss: 0.03852524513958066 | Validation loss: 0.04021004818182076
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073]
------------------------------
Epoch: 53
Training loss: 0.039626261542045224 | Validation loss: 0.039501024509030105
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073]
------------------------------
Epoch: 54
Training loss: 0.04119911792644072 | Validation loss: 0.04457332704842618
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073]
------------------------------
Epoch: 55
Training loss: 0.042616045866968245 | Validation loss: 0.045998089163836124
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073]
------------------------------
Epoch: 56
Training loss: 0.04113453015039756 | Validation loss: 0.050729175304285196
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073]
------------------------------
Epoch: 57
Training loss: 0.03976550421266868 | Validation loss: 0.04352686356390472
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073]
------------------------------
Epoch: 58
Training loss: 0.03769710026296428 | Validation loss: 0.0416220211283823
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073]
------------------------------
Epoch: 59
Training loss: 0.03594528460933819 | Validation loss: 0.041781470377360824
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073]
------------------------------
Epoch: 60
Training loss: 0.034993118661507144 | Validation loss: 0.03542220634531922
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221]
------------------------------
Epoch: 61
Training loss: 0.03528238928649487 | Validation loss: 0.041782269765317966
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221]
------------------------------
Epoch: 62
Training loss: 0.036277946592565245 | Validation loss: 0.048255273973387955
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221]
------------------------------
Epoch: 63
Training loss: 0.037810007194570436 | Validation loss: 0.04529560640849899
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221]
------------------------------
Epoch: 64
Training loss: 0.039236139712349224 | Validation loss: 0.046266291455357475
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221]
------------------------------
Epoch: 65
Training loss: 0.040906800351318294 | Validation loss: 0.06313739067553419
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221]
------------------------------
Epoch: 66
Training loss: 0.03917124048407065 | Validation loss: 0.04793259138818336
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221]
------------------------------
Epoch: 67
Training loss: 0.037500780060205345 | Validation loss: 0.04579635729304457
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221]
------------------------------
Epoch: 68
Training loss: 0.0361412501221112 | Validation loss: 0.04605197491107789
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221]
------------------------------
Epoch: 69
Training loss: 0.03438679540280517 | Validation loss: 0.0456059823983011
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221]
------------------------------
Epoch: 70
Training loss: 0.033415974244314035 | Validation loss: 0.03462613776194311
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614]
------------------------------
Epoch: 71
Training loss: 0.03395316714611579 | Validation loss: 0.038469126250232216
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614]
------------------------------
Epoch: 72
Training loss: 0.03479183258353109 | Validation loss: 0.03594474876876426
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614]
------------------------------
Epoch: 73
Training loss: 0.03633535791401143 | Validation loss: 0.04774723141000862
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614]
------------------------------
Epoch: 74
Training loss: 0.0374829898621359 | Validation loss: 0.04101652198726625
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614]
------------------------------
Epoch: 75
Training loss: 0.0391945250370855 | Validation loss: 0.0401327273915563
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614]
------------------------------
Epoch: 76
Training loss: 0.03788902773430175 | Validation loss: 0.03995021490330717
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614]
------------------------------
Epoch: 77
Training loss: 0.03639131400176859 | Validation loss: 0.03722566730482916
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614]
------------------------------
Epoch: 78
Training loss: 0.0348304618996986 | Validation loss: 0.04006522394453002
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614]
------------------------------
Epoch: 79
Training loss: 0.03319978419834323 | Validation loss: 0.04412269613713817
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614]
------------------------------
Epoch: 80
Training loss: 0.03219999324468031 | Validation loss: 0.03391640984445019
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614 0.03391641]
------------------------------
Epoch: 81
Training loss: 0.032688427642244465 | Validation loss: 0.037923850531203555
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614 0.03391641]
------------------------------
Epoch: 82
Training loss: 0.03355343060890638 | Validation loss: 0.04907491460310674
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614 0.03391641]
------------------------------
Epoch: 83
Training loss: 0.03503366941531137 | Validation loss: 0.040808099231361285
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614 0.03391641]
------------------------------
Epoch: 84
Training loss: 0.036361888898023234 | Validation loss: 0.04570449341451172
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614 0.03391641]
------------------------------
Epoch: 85
Training loss: 0.037688679410159354 | Validation loss: 0.03851071792959639
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614 0.03391641]
------------------------------
Epoch: 86
Training loss: 0.036509045416225246 | Validation loss: 0.05178985225481797
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614 0.03391641]
------------------------------
Epoch: 87
Training loss: 0.03476176010243096 | Validation loss: 0.04965500327535963
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614 0.03391641]
------------------------------
Epoch: 88
Training loss: 0.033466790973274846 | Validation loss: 0.04419837694252487
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614 0.03391641]
------------------------------
Epoch: 89
Training loss: 0.03203875968768078 | Validation loss: 0.037252725571789574
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614 0.03391641]
------------------------------
Epoch: 90
Training loss: 0.0312534848193264 | Validation loss: 0.03305692382288718
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614 0.03391641 0.03305692]
------------------------------
Epoch: 91
Training loss: 0.031425257888072586 | Validation loss: 0.035670075104036164
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614 0.03391641 0.03305692]
------------------------------
Epoch: 92
Training loss: 0.032341907633189844 | Validation loss: 0.05314765029908281
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614 0.03391641 0.03305692]
------------------------------
Epoch: 93
Training loss: 0.03363316069544095 | Validation loss: 0.05042708106338978
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614 0.03391641 0.03305692]
------------------------------
Epoch: 94
Training loss: 0.03508961931472336 | Validation loss: 0.04901893657788766
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614 0.03391641 0.03305692]
------------------------------
Epoch: 95
Training loss: 0.036707183874146204 | Validation loss: 0.06835057373793252
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614 0.03391641 0.03305692]
------------------------------
Epoch: 96
Training loss: 0.0352418556319931 | Validation loss: 0.05205297728885064
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614 0.03391641 0.03305692]
------------------------------
Epoch: 97
Training loss: 0.03360738364271966 | Validation loss: 0.03979173940565206
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614 0.03391641 0.03305692]
------------------------------
Epoch: 98
Training loss: 0.032393003445426664 | Validation loss: 0.04202516406642652
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614 0.03391641 0.03305692]
------------------------------
Epoch: 99
Training loss: 0.030928450876729578 | Validation loss: 0.03461811653610352
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614 0.03391641 0.03305692]
------------------------------
Epoch: 100
Training loss: 0.030302010686503445 | Validation loss: 0.032624518037237954
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614 0.03391641 0.03305692 0.03262452]
------------------------------
Epoch: 101
Training loss: 0.030482625337610914 | Validation loss: 0.03657771112908304
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614 0.03391641 0.03305692 0.03262452]
------------------------------
Epoch: 102
Training loss: 0.031544560573877785 | Validation loss: 0.04379735982655424
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614 0.03391641 0.03305692 0.03262452]
------------------------------
Epoch: 103
Training loss: 0.03267700951289767 | Validation loss: 0.05044605840096431
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614 0.03391641 0.03305692 0.03262452]
------------------------------
Epoch: 104
Training loss: 0.033956850249151606 | Validation loss: 0.039384911364290565
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614 0.03391641 0.03305692 0.03262452]
------------------------------
Epoch: 105
Training loss: 0.035652619702228175 | Validation loss: 0.040526895561313205
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614 0.03391641 0.03305692 0.03262452]
------------------------------
Epoch: 106
Training loss: 0.0343283357183031 | Validation loss: 0.037462938774739746
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614 0.03391641 0.03305692 0.03262452]
------------------------------
Epoch: 107
Training loss: 0.0329232818115224 | Validation loss: 0.04072877669097048
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614 0.03391641 0.03305692 0.03262452]
------------------------------
Epoch: 108
Training loss: 0.03152846630963343 | Validation loss: 0.039494824257835875
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614 0.03391641 0.03305692 0.03262452]
------------------------------
Epoch: 109
Training loss: 0.030297402225574113 | Validation loss: 0.035708916085088145
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614 0.03391641 0.03305692 0.03262452]
------------------------------
Epoch: 110
Training loss: 0.02962929017907815 | Validation loss: 0.032125119894611094
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512]
------------------------------
Epoch: 111
Training loss: 0.029658222022855025 | Validation loss: 0.03462422362207312
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512]
------------------------------
Epoch: 112
Training loss: 0.030390083743992637 | Validation loss: 0.037167447627381944
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512]
------------------------------
Epoch: 113
Training loss: 0.03176091816716307 | Validation loss: 0.035366525972443344
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512]
------------------------------
Epoch: 114
Training loss: 0.0330082182680792 | Validation loss: 0.03707461589864925
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512]
------------------------------
Epoch: 115
Training loss: 0.03469608666642943 | Validation loss: 0.05098501968700274
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512]
------------------------------
Epoch: 116
Training loss: 0.033389072871664316 | Validation loss: 0.05167623942272853
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512]
------------------------------
Epoch: 117
Training loss: 0.0319534340987026 | Validation loss: 0.03803503140807152
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512]
------------------------------
Epoch: 118
Training loss: 0.03054207461651444 | Validation loss: 0.037278614163530614
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512]
------------------------------
Epoch: 119
Training loss: 0.029319434328642712 | Validation loss: 0.03323572590551545
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512]
------------------------------
Epoch: 120
Training loss: 0.028277252811762528 | Validation loss: 0.031856293105973606
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
 0.03185629]
------------------------------
Epoch: 121
Training loss: 0.028927232044396966 | Validation loss: 0.03381007744938926
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
 0.03185629]
------------------------------
Epoch: 122
Training loss: 0.029823541352488686 | Validation loss: 0.036562808286563483
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
 0.03185629]
------------------------------
Epoch: 123
Training loss: 0.030995890572410457 | Validation loss: 0.03735244338425387
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
 0.03185629]
------------------------------
Epoch: 124
Training loss: 0.03232222717754015 | Validation loss: 0.0393577231737101
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
 0.03185629]
------------------------------
Epoch: 125
Training loss: 0.033686123715024295 | Validation loss: 0.039191240626098835
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
 0.03185629]
------------------------------
Epoch: 126
Training loss: 0.0328642097200949 | Validation loss: 0.04119740207308689
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
 0.03185629]
------------------------------
Epoch: 127
Training loss: 0.03124009068563991 | Validation loss: 0.045967824147206494
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
 0.03185629]
------------------------------
Epoch: 128
Training loss: 0.029692287028833166 | Validation loss: 0.03530660837855751
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
 0.03185629]
------------------------------
Epoch: 129
Training loss: 0.028460362538380007 | Validation loss: 0.034659543915330306
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
 0.03185629]
------------------------------
Epoch: 130
Training loss: 0.02782241098296748 | Validation loss: 0.03151542620848766
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
 0.03185629 0.03151543]
------------------------------
Epoch: 131
Training loss: 0.028115404061203105 | Validation loss: 0.0364043337779235
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
 0.03185629 0.03151543]
------------------------------
Epoch: 132
Training loss: 0.028829176158563594 | Validation loss: 0.0347792377069065
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
 0.03185629 0.03151543]
------------------------------
Epoch: 133
Training loss: 0.030203928205727298 | Validation loss: 0.03419680051109959
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
 0.03185629 0.03151543]
------------------------------
Epoch: 134
Training loss: 0.03136087996760515 | Validation loss: 0.04011434100700163
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
 0.03185629 0.03151543]
------------------------------
Epoch: 135
Training loss: 0.033034575681429444 | Validation loss: 0.040510393481338974
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
 0.03185629 0.03151543]
------------------------------
Epoch: 136
Training loss: 0.03180332440336594 | Validation loss: 0.03888363813140751
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
 0.03185629 0.03151543]
------------------------------
Epoch: 137
Training loss: 0.030477852163667694 | Validation loss: 0.03862581931186461
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
 0.03185629 0.03151543]
------------------------------
Epoch: 138
Training loss: 0.0292505194876695 | Validation loss: 0.03288874167751158
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
 0.03185629 0.03151543]
------------------------------
Epoch: 139
Training loss: 0.027981118863727165 | Validation loss: 0.03388794129961623
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
 0.03185629 0.03151543]
------------------------------
Epoch: 140
Training loss: 0.027372297242386486 | Validation loss: 0.031212307529243748
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
 0.03185629 0.03151543 0.03121231]
------------------------------
Epoch: 141
Training loss: 0.0276586958958848 | Validation loss: 0.03246931614667441
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
 0.03185629 0.03151543 0.03121231]
------------------------------
Epoch: 142
Training loss: 0.028360963867808595 | Validation loss: 0.034810532453115536
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
 0.03185629 0.03151543 0.03121231]
------------------------------
Epoch: 143
Training loss: 0.029655160085393453 | Validation loss: 0.03974294517420034
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
 0.03185629 0.03151543 0.03121231]
------------------------------
Epoch: 144
Training loss: 0.030910991771168655 | Validation loss: 0.04399756938878414
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
 0.03185629 0.03151543 0.03121231]
------------------------------
Epoch: 145
Training loss: 0.0325270962652083 | Validation loss: 0.061640194658420785
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
 0.03185629 0.03151543 0.03121231]
------------------------------
Epoch: 146
Training loss: 0.03149095214683357 | Validation loss: 0.03760540243073375
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
 0.03185629 0.03151543 0.03121231]
------------------------------
Epoch: 147
Training loss: 0.029875266428039535 | Validation loss: 0.035697247552792584
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
 0.03185629 0.03151543 0.03121231]
------------------------------
Epoch: 148
Training loss: 0.028500897502783305 | Validation loss: 0.03381003710581402
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
 0.03185629 0.03151543 0.03121231]
------------------------------
Epoch: 149
Training loss: 0.027681679243045883 | Validation loss: 0.032980115103444695
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
 0.03185629 0.03151543 0.03121231]
------------------------------
Epoch: 150
Training loss: 0.026877311056509146 | Validation loss: 0.031143162226452765
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
 0.03185629 0.03151543 0.03121231 0.03114316]
------------------------------
Epoch: 151
Training loss: 0.026977618478832576 | Validation loss: 0.03266228110719044
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
 0.03185629 0.03151543 0.03121231 0.03114316]
------------------------------
Epoch: 152
Training loss: 0.027789666411818893 | Validation loss: 0.034961970028492204
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
 0.03185629 0.03151543 0.03121231 0.03114316]
------------------------------
Epoch: 153
Training loss: 0.02894545074526631 | Validation loss: 0.03532901065077929
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
 0.03185629 0.03151543 0.03121231 0.03114316]
------------------------------
Epoch: 154
Training loss: 0.030454562234464945 | Validation loss: 0.040225627527168364
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
 0.03185629 0.03151543 0.03121231 0.03114316]
------------------------------
Epoch: 155
Training loss: 0.03164946059806375 | Validation loss: 0.037975336828854234
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
 0.03185629 0.03151543 0.03121231 0.03114316]
------------------------------
Epoch: 156
Training loss: 0.03068030412000875 | Validation loss: 0.04220050962361614
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
 0.03185629 0.03151543 0.03121231 0.03114316]
------------------------------
Epoch: 157
Training loss: 0.029348198092527922 | Validation loss: 0.03511155945603299
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
 0.03185629 0.03151543 0.03121231 0.03114316]
------------------------------
Epoch: 158
Training loss: 0.028144483990769277 | Validation loss: 0.03515485296668732
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
 0.03185629 0.03151543 0.03121231 0.03114316]
------------------------------
Epoch: 159
Training loss: 0.027031416854432482 | Validation loss: 0.034209251535677276
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
 0.03185629 0.03151543 0.03121231 0.03114316]
------------------------------
Epoch: 160
Training loss: 0.026445907158373787 | Validation loss: 0.030806712303475467
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
 0.03185629 0.03151543 0.03121231 0.03114316 0.03080671]
------------------------------
Epoch: 161
Training loss: 0.026552394952801033 | Validation loss: 0.03321779652836576
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
 0.03185629 0.03151543 0.03121231 0.03114316 0.03080671]
------------------------------
Epoch: 162
Training loss: 0.027329204999108424 | Validation loss: 0.035172222956883166
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
 0.03185629 0.03151543 0.03121231 0.03114316 0.03080671]
------------------------------
Epoch: 163
Training loss: 0.028377699224813657 | Validation loss: 0.03415004107936294
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
 0.03185629 0.03151543 0.03121231 0.03114316 0.03080671]
------------------------------
Epoch: 164
Training loss: 0.02962424848998684 | Validation loss: 0.0380124523576382
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
 0.03185629 0.03151543 0.03121231 0.03114316 0.03080671]
------------------------------
Epoch: 165
Training loss: 0.031245342592386104 | Validation loss: 0.03870395737477636
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
 0.03185629 0.03151543 0.03121231 0.03114316 0.03080671]
------------------------------
Epoch: 166
Training loss: 0.03022429422295733 | Validation loss: 0.03497945312080921
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
 0.03185629 0.03151543 0.03121231 0.03114316 0.03080671]
------------------------------
Epoch: 167
Training loss: 0.02867190224232196 | Validation loss: 0.034943901974938615
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
 0.03185629 0.03151543 0.03121231 0.03114316 0.03080671]
------------------------------
Epoch: 168
Training loss: 0.02769064278008167 | Validation loss: 0.0328868226667421
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
 0.03185629 0.03151543 0.03121231 0.03114316 0.03080671]
------------------------------
Epoch: 169
Training loss: 0.026110646025255674 | Validation loss: 0.032032206835868084
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
 0.03185629 0.03151543 0.03121231 0.03114316 0.03080671]
------------------------------
Epoch: 170
Training loss: 0.025901495435964463 | Validation loss: 0.03086405956244047
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
 0.03185629 0.03151543 0.03121231 0.03114316 0.03080671 0.03086406]
------------------------------
Epoch: 171
Training loss: 0.025832738417959734 | Validation loss: 0.03191902321161686
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
 0.03185629 0.03151543 0.03121231 0.03114316 0.03080671 0.03086406]
------------------------------
Epoch: 172
Training loss: 0.02658675519811855 | Validation loss: 0.032392649120131956
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
 0.03185629 0.03151543 0.03121231 0.03114316 0.03080671 0.03086406]
------------------------------
Epoch: 173
Training loss: 0.02785930638549864 | Validation loss: 0.04055770080272866
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
 0.03185629 0.03151543 0.03121231 0.03114316 0.03080671 0.03086406]
------------------------------
Epoch: 174
Training loss: 0.029304441305676724 | Validation loss: 0.03938528715707032
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
 0.03185629 0.03151543 0.03121231 0.03114316 0.03080671 0.03086406]
------------------------------
Epoch: 175
Training loss: 0.030693826810126817 | Validation loss: 0.042542108787899525
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
 0.03185629 0.03151543 0.03121231 0.03114316 0.03080671 0.03086406]
------------------------------
Epoch: 176
Training loss: 0.02968439479967154 | Validation loss: 0.03487302249182114
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
 0.03185629 0.03151543 0.03121231 0.03114316 0.03080671 0.03086406]
------------------------------
Epoch: 177
Training loss: 0.028509433252010934 | Validation loss: 0.035466368128834046
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
 0.03185629 0.03151543 0.03121231 0.03114316 0.03080671 0.03086406]
------------------------------
Epoch: 178
Training loss: 0.027170329408786133 | Validation loss: 0.03241105497180097
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
 0.03185629 0.03151543 0.03121231 0.03114316 0.03080671 0.03086406]
------------------------------
Epoch: 179
Training loss: 0.026051353732871964 | Validation loss: 0.031891154834127
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
 0.03185629 0.03151543 0.03121231 0.03114316 0.03080671 0.03086406]
------------------------------
Epoch: 180
Training loss: 0.025475371672082546 | Validation loss: 0.030502695054541118
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
 0.03185629 0.03151543 0.03121231 0.03114316 0.03080671 0.03086406
 0.0305027 ]
------------------------------
Epoch: 181
Training loss: 0.02571635264591644 | Validation loss: 0.03273352388852993
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
 0.03185629 0.03151543 0.03121231 0.03114316 0.03080671 0.03086406
 0.0305027 ]
------------------------------
Epoch: 182
Training loss: 0.02636262705855761 | Validation loss: 0.03367737308968749
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
 0.03185629 0.03151543 0.03121231 0.03114316 0.03080671 0.03086406
 0.0305027 ]
------------------------------
Epoch: 183
Training loss: 0.02711716690467392 | Validation loss: 0.03316663689416858
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
 0.03185629 0.03151543 0.03121231 0.03114316 0.03080671 0.03086406
 0.0305027 ]
------------------------------
Epoch: 184
Training loss: 0.028923610236319737 | Validation loss: 0.03617072011451278
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
 0.03185629 0.03151543 0.03121231 0.03114316 0.03080671 0.03086406
 0.0305027 ]
------------------------------
Epoch: 185
Training loss: 0.029695096961955915 | Validation loss: 0.03914791269771821
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
 0.03185629 0.03151543 0.03121231 0.03114316 0.03080671 0.03086406
 0.0305027 ]
------------------------------
Epoch: 186
Training loss: 0.02894029467949629 | Validation loss: 0.033959697874312376
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
 0.03185629 0.03151543 0.03121231 0.03114316 0.03080671 0.03086406
 0.0305027 ]
------------------------------
Epoch: 187
Training loss: 0.02806392555055392 | Validation loss: 0.032832851015295074
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
 0.03185629 0.03151543 0.03121231 0.03114316 0.03080671 0.03086406
 0.0305027 ]
------------------------------
Epoch: 188
Training loss: 0.026699587195840348 | Validation loss: 0.03351602898663388
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
 0.03185629 0.03151543 0.03121231 0.03114316 0.03080671 0.03086406
 0.0305027 ]
------------------------------
Epoch: 189
Training loss: 0.02575078903052105 | Validation loss: 0.032145418597599576
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
 0.03185629 0.03151543 0.03121231 0.03114316 0.03080671 0.03086406
 0.0305027 ]
------------------------------
Epoch: 190
Training loss: 0.025277233614740293 | Validation loss: 0.030475220247377866
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
 0.03185629 0.03151543 0.03121231 0.03114316 0.03080671 0.03086406
 0.0305027  0.03047522]
------------------------------
Epoch: 191
Training loss: 0.02529354150293351 | Validation loss: 0.031475040253944105
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
 0.03185629 0.03151543 0.03121231 0.03114316 0.03080671 0.03086406
 0.0305027  0.03047522]
------------------------------
Epoch: 192
Training loss: 0.02586796419332024 | Validation loss: 0.03231935241811835
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
 0.03185629 0.03151543 0.03121231 0.03114316 0.03080671 0.03086406
 0.0305027  0.03047522]
------------------------------
Epoch: 193
Training loss: 0.026974914326904503 | Validation loss: 0.03522281436068294
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
 0.03185629 0.03151543 0.03121231 0.03114316 0.03080671 0.03086406
 0.0305027  0.03047522]
------------------------------
Epoch: 194
Training loss: 0.028522872728090294 | Validation loss: 0.03937122993896493
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
 0.03185629 0.03151543 0.03121231 0.03114316 0.03080671 0.03086406
 0.0305027  0.03047522]
------------------------------
Epoch: 195
Training loss: 0.029662396922876224 | Validation loss: 0.04080926054940287
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
 0.03185629 0.03151543 0.03121231 0.03114316 0.03080671 0.03086406
 0.0305027  0.03047522]
------------------------------
Epoch: 196
Training loss: 0.02874099444799624 | Validation loss: 0.03661016435815697
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
 0.03185629 0.03151543 0.03121231 0.03114316 0.03080671 0.03086406
 0.0305027  0.03047522]
------------------------------
Epoch: 197
Training loss: 0.027630861122601144 | Validation loss: 0.03896229999321225
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
 0.03185629 0.03151543 0.03121231 0.03114316 0.03080671 0.03086406
 0.0305027  0.03047522]
------------------------------
Epoch: 198
Training loss: 0.026198979643151515 | Validation loss: 0.03245780790900498
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
 0.03185629 0.03151543 0.03121231 0.03114316 0.03080671 0.03086406
 0.0305027  0.03047522]
------------------------------
Epoch: 199
Training loss: 0.025587171060399806 | Validation loss: 0.0329771255332548
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
 0.03185629 0.03151543 0.03121231 0.03114316 0.03080671 0.03086406
 0.0305027  0.03047522]
------------------------------
Epoch: 200
Training loss: 0.02489157276609399 | Validation loss: 0.030435980206965346
Validation loss (ends of cycles): [0.11181737 0.0504285  0.04389677 0.04077262 0.03826001 0.03640073
 0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
 0.03185629 0.03151543 0.03121231 0.03114316 0.03080671 0.03086406
 0.0305027  0.03047522 0.03043598]
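With step_size_up set to 5 and a triangular schedule, one learning-rate cycle spans 10 epochs, which is why a new entry is appended to the "ends of cycles" list at epochs 0, 10, 20, and so on. Over the run above (seed 18) these end-of-cycle validation losses fall from roughly 0.112 to 0.030, but not strictly monotonically (epoch 170 records 0.03086 against 0.03081 at epoch 160), so the best cycle is found by scanning the whole list rather than taking the last entry. Below is a minimal sketch of that bookkeeping (illustrative only, not part of mirzai), reusing the losses printed above:

# Minimal sketch (not from mirzai): locate the best end-of-cycle checkpoint
import numpy as np

step_size_up = 5              # from params_scheduler
cycle_len = 2 * step_size_up  # triangular cycle: 5 epochs up + 5 epochs down

# End-of-cycle validation losses printed above (one entry per 10 epochs)
val_losses = np.array([0.11181737, 0.0504285,  0.04389677, 0.04077262,
                       0.03826001, 0.03640073, 0.03542221, 0.03462614,
                       0.03391641, 0.03305692, 0.03262452, 0.03212512,
                       0.03185629, 0.03151543, 0.03121231, 0.03114316,
                       0.03080671, 0.03086406, 0.0305027,  0.03047522,
                       0.03043598])

best = val_losses.argmin()
print(f'Best cycle: {best} (epoch {best * cycle_len}) '
      f'with validation loss {val_losses[best]:.5f}')
# Best cycle: 20 (epoch 200) with validation loss 0.03044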
--------------------------------------------------------------------------------
Seed: 19
--------------------------------------------------------------------------------
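Each seed retrains the model from scratch on its own train/validation split, which is why the losses below start high again at epoch 0. The seeds passed to Learners drive this reinitialization; purely as an illustration (set_seed is a hypothetical helper, not taken from mirzai), propagating one seed to every random number generator involved typically looks like this:

# Hypothetical helper (not from mirzai): seed every RNG touched by a run
# so that weight initialization, shuffling and splits are reproducible.
import random

import numpy as np
import torch

def set_seed(seed: int) -> None:
    random.seed(seed)                     # Python stdlib RNG
    np.random.seed(seed)                  # NumPy (e.g. data splits)
    torch.manual_seed(seed)               # PyTorch CPU ops and weight init
    if torch.cuda.is_available():
        torch.cuda.manual_seed_all(seed)  # all CUDA devices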
------------------------------
Epoch: 0
Training loss: 0.13013751572364662 | Validation loss: 0.10866411221502102
Validation loss (ends of cycles): [0.10866411]
------------------------------
Epoch: 1
Training loss: 0.0962619811872123 | Validation loss: 0.08431469047425595
Validation loss (ends of cycles): [0.10866411]
------------------------------
Epoch: 2
Training loss: 0.08808117919403502 | Validation loss: 0.08335764244594406
Validation loss (ends of cycles): [0.10866411]
------------------------------
Epoch: 3
Training loss: 0.08320651522573583 | Validation loss: 0.07264648778446481
Validation loss (ends of cycles): [0.10866411]
------------------------------
Epoch: 4
Training loss: 0.078821630328967 | Validation loss: 0.086400652452644
Validation loss (ends of cycles): [0.10866411]
------------------------------
Epoch: 5
Training loss: 0.07551912005668081 | Validation loss: 0.07294148150666625
Validation loss (ends of cycles): [0.10866411]
------------------------------
Epoch: 6
Training loss: 0.06994353534147789 | Validation loss: 0.07389080859061363
Validation loss (ends of cycles): [0.10866411]
------------------------------
Epoch: 7
Training loss: 0.06581751633926815 | Validation loss: 0.05924121888799477
Validation loss (ends of cycles): [0.10866411]
------------------------------
Epoch: 8
Training loss: 0.061769033043359324 | Validation loss: 0.05550177810730132
Validation loss (ends of cycles): [0.10866411]
------------------------------
Epoch: 9
Training loss: 0.05849388224901411 | Validation loss: 0.05526111535398306
Validation loss (ends of cycles): [0.10866411]
------------------------------
Epoch: 10
Training loss: 0.05599109683640477 | Validation loss: 0.05028300414240993
Validation loss (ends of cycles): [0.10866411 0.050283  ]
------------------------------
Epoch: 11
Training loss: 0.05651184237070792 | Validation loss: 0.05119481994317169
Validation loss (ends of cycles): [0.10866411 0.050283  ]
------------------------------
Epoch: 12
Training loss: 0.05796342328608799 | Validation loss: 0.05256845900970223
Validation loss (ends of cycles): [0.10866411 0.050283  ]
------------------------------
Epoch: 13
Training loss: 0.05864124310452227 | Validation loss: 0.05311432339053238
Validation loss (ends of cycles): [0.10866411 0.050283  ]
------------------------------
Epoch: 14
Training loss: 0.05971920895060216 | Validation loss: 0.053835257805422344
Validation loss (ends of cycles): [0.10866411 0.050283  ]
------------------------------
Epoch: 15
Training loss: 0.0600251086206564 | Validation loss: 0.05795030089804029
Validation loss (ends of cycles): [0.10866411 0.050283  ]
------------------------------
Epoch: 16
Training loss: 0.05733736497020041 | Validation loss: 0.05345632078175524
Validation loss (ends of cycles): [0.10866411 0.050283  ]
------------------------------
Epoch: 17
Training loss: 0.05464613109489712 | Validation loss: 0.05284101431942092
Validation loss (ends of cycles): [0.10866411 0.050283  ]
------------------------------
Epoch: 18
Training loss: 0.05230115667557505 | Validation loss: 0.04781449882857568
Validation loss (ends of cycles): [0.10866411 0.050283  ]
------------------------------
Epoch: 19
Training loss: 0.04979933290119012 | Validation loss: 0.04415781805868697
Validation loss (ends of cycles): [0.10866411 0.050283  ]
------------------------------
Epoch: 20
Training loss: 0.048008861920363674 | Validation loss: 0.04327868562257659
Validation loss (ends of cycles): [0.10866411 0.050283   0.04327869]
------------------------------
Epoch: 21
Training loss: 0.04882487160425017 | Validation loss: 0.043836381352317016
Validation loss (ends of cycles): [0.10866411 0.050283   0.04327869]
------------------------------
Epoch: 22
Training loss: 0.049844463726168305 | Validation loss: 0.044874199962787395
Validation loss (ends of cycles): [0.10866411 0.050283   0.04327869]
------------------------------
Epoch: 23
Training loss: 0.05121253936104183 | Validation loss: 0.04625493370458088
Validation loss (ends of cycles): [0.10866411 0.050283   0.04327869]
------------------------------
Epoch: 24
Training loss: 0.05227855082021982 | Validation loss: 0.06580113441543242
Validation loss (ends of cycles): [0.10866411 0.050283   0.04327869]
------------------------------
Epoch: 25
Training loss: 0.05329341503725512 | Validation loss: 0.04736212479461611
Validation loss (ends of cycles): [0.10866411 0.050283   0.04327869]
------------------------------
Epoch: 26
Training loss: 0.05157633041174657 | Validation loss: 0.04704422052059554
Validation loss (ends of cycles): [0.10866411 0.050283   0.04327869]
------------------------------
Epoch: 27
Training loss: 0.04926364007405937 | Validation loss: 0.05678830411186261
Validation loss (ends of cycles): [0.10866411 0.050283   0.04327869]
------------------------------
Epoch: 28
Training loss: 0.047098851822294646 | Validation loss: 0.043971751379755744
Validation loss (ends of cycles): [0.10866411 0.050283   0.04327869]
------------------------------
Epoch: 29
Training loss: 0.044588426964936825 | Validation loss: 0.041111332207786296
Validation loss (ends of cycles): [0.10866411 0.050283   0.04327869]
------------------------------
Epoch: 30
Training loss: 0.043297484392432244 | Validation loss: 0.04007098248508652
Validation loss (ends of cycles): [0.10866411 0.050283   0.04327869 0.04007098]
------------------------------
Epoch: 31
Training loss: 0.044140496467218154 | Validation loss: 0.041449001322673484
Validation loss (ends of cycles): [0.10866411 0.050283   0.04327869 0.04007098]
------------------------------
Epoch: 32
Training loss: 0.04528621606360094 | Validation loss: 0.043515847155214414
Validation loss (ends of cycles): [0.10866411 0.050283   0.04327869 0.04007098]
------------------------------
Epoch: 33
Training loss: 0.046532035345123623 | Validation loss: 0.04557813516100951
Validation loss (ends of cycles): [0.10866411 0.050283   0.04327869 0.04007098]
------------------------------
Epoch: 34
Training loss: 0.04797187640779014 | Validation loss: 0.04983178259129018
Validation loss (ends of cycles): [0.10866411 0.050283   0.04327869 0.04007098]
------------------------------
Epoch: 35
Training loss: 0.04931510559569194 | Validation loss: 0.047406337331090356
Validation loss (ends of cycles): [0.10866411 0.050283   0.04327869 0.04007098]
------------------------------
Epoch: 36
Training loss: 0.04766238858651986 | Validation loss: 0.045556718137411946
Validation loss (ends of cycles): [0.10866411 0.050283   0.04327869 0.04007098]
------------------------------
Epoch: 37
Training loss: 0.045729071468364184 | Validation loss: 0.043208298611298074
Validation loss (ends of cycles): [0.10866411 0.050283   0.04327869 0.04007098]
------------------------------
Epoch: 38
Training loss: 0.04352147483237557 | Validation loss: 0.04185216669487742
Validation loss (ends of cycles): [0.10866411 0.050283   0.04327869 0.04007098]
------------------------------
Epoch: 39
Training loss: 0.041625120196726144 | Validation loss: 0.03896412177555329
Validation loss (ends of cycles): [0.10866411 0.050283   0.04327869 0.04007098]
------------------------------
Epoch: 40
Training loss: 0.04038160263494713 | Validation loss: 0.03796320309681175
Validation loss (ends of cycles): [0.10866411 0.050283   0.04327869 0.04007098 0.0379632 ]
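The within-cycle pattern is visible again here: the training loss reaches a local minimum at each cycle boundary (epochs 30 and 40 above), climbs for the next five epochs while the learning rate ramps from base_lr up to max_lr, then decays with it. The ten-epoch period of the recorded cycle ends suggests the scheduler is stepped once per epoch; under that assumption, a minimal sketch of the triangular schedule used here (equivalent to torch's CyclicLR in 'triangular' mode) is:

# Triangular learning-rate schedule, assuming one scheduler step per epoch
base_lr, max_lr, step_size_up = 3e-5, 1e-3, 5  # from params_scheduler

def lr_at(epoch: int) -> float:
    cycle = epoch // (2 * step_size_up)
    x = abs(epoch / step_size_up - 2 * cycle - 1)
    return base_lr + (max_lr - base_lr) * max(0.0, 1 - x)

print([round(lr_at(e), 6) for e in range(11)])
# [3e-05, 0.000224, 0.000418, 0.000612, 0.000806, 0.001,
#  0.000806, 0.000612, 0.000418, 0.000224, 3e-05]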
------------------------------
Epoch: 41
Training loss: 0.04075280056100368 | Validation loss: 0.03851696993924875
Validation loss (ends of cycles): [0.10866411 0.050283   0.04327869 0.04007098 0.0379632 ]
------------------------------
Epoch: 42
Training loss: 0.04215318926217724 | Validation loss: 0.03938400522863443
Validation loss (ends of cycles): [0.10866411 0.050283   0.04327869 0.04007098 0.0379632 ]
------------------------------
Epoch: 43
Training loss: 0.04349201472703455 | Validation loss: 0.04077490941679056
Validation loss (ends of cycles): [0.10866411 0.050283   0.04327869 0.04007098 0.0379632 ]
------------------------------
Epoch: 44
Training loss: 0.04480456488250982 | Validation loss: 0.04196629445769091
Validation loss (ends of cycles): [0.10866411 0.050283   0.04327869 0.04007098 0.0379632 ]
------------------------------
Epoch: 45
Training loss: 0.046417191071897804 | Validation loss: 0.04734774543780141
Validation loss (ends of cycles): [0.10866411 0.050283   0.04327869 0.04007098 0.0379632 ]
------------------------------
Epoch: 46
Training loss: 0.044565415281186425 | Validation loss: 0.042343570760129824
Validation loss (ends of cycles): [0.10866411 0.050283   0.04327869 0.04007098 0.0379632 ]
------------------------------
Epoch: 47
Training loss: 0.04292275310736003 | Validation loss: 0.046334737252477
Validation loss (ends of cycles): [0.10866411 0.050283   0.04327869 0.04007098 0.0379632 ]
------------------------------
Epoch: 48
Training loss: 0.04080903969382442 | Validation loss: 0.039976048242069975
Validation loss (ends of cycles): [0.10866411 0.050283   0.04327869 0.04007098 0.0379632 ]
------------------------------
Epoch: 49
Training loss: 0.038945722352442015 | Validation loss: 0.03901533678635559
Validation loss (ends of cycles): [0.10866411 0.050283   0.04327869 0.04007098 0.0379632 ]
------------------------------
Epoch: 50
Training loss: 0.03775392111123165 | Validation loss: 0.03635692800832006
Validation loss (ends of cycles): [0.10866411 0.050283   0.04327869 0.04007098 0.0379632  0.03635693]
------------------------------
Epoch: 51
Training loss: 0.03847538466898974 | Validation loss: 0.037682123169039204
Validation loss (ends of cycles): [0.10866411 0.050283   0.04327869 0.04007098 0.0379632  0.03635693]
------------------------------
Epoch: 52
Training loss: 0.03952817849660894 | Validation loss: 0.038835352138344166
Validation loss (ends of cycles): [0.10866411 0.050283   0.04327869 0.04007098 0.0379632  0.03635693]
------------------------------
Epoch: 53
Training loss: 0.040770487166853164 | Validation loss: 0.04136987426112183
Validation loss (ends of cycles): [0.10866411 0.050283   0.04327869 0.04007098 0.0379632  0.03635693]
------------------------------
Epoch: 54
Training loss: 0.04249348099743141 | Validation loss: 0.04505128795857978
Validation loss (ends of cycles): [0.10866411 0.050283   0.04327869 0.04007098 0.0379632  0.03635693]
------------------------------
Epoch: 55
Training loss: 0.04351341669029725 | Validation loss: 0.054758314850979144
Validation loss (ends of cycles): [0.10866411 0.050283   0.04327869 0.04007098 0.0379632  0.03635693]
------------------------------
Epoch: 56
Training loss: 0.04219748325440593 | Validation loss: 0.044231936352047245
Validation loss (ends of cycles): [0.10866411 0.050283   0.04327869 0.04007098 0.0379632  0.03635693]
------------------------------
Epoch: 57
Training loss: 0.040449189506177825 | Validation loss: 0.04580309190911002
Validation loss (ends of cycles): [0.10866411 0.050283   0.04327869 0.04007098 0.0379632  0.03635693]
------------------------------
Epoch: 58
Training loss: 0.03866173452184367 | Validation loss: 0.0378917996994162
Validation loss (ends of cycles): [0.10866411 0.050283   0.04327869 0.04007098 0.0379632  0.03635693]
------------------------------
Epoch: 59
Training loss: 0.037179108895178094 | Validation loss: 0.03695562399462261
Validation loss (ends of cycles): [0.10866411 0.050283   0.04327869 0.04007098 0.0379632  0.03635693]
------------------------------
Epoch: 60
Training loss: 0.035941219553131405 | Validation loss: 0.034984574340020134
Validation loss (ends of cycles): [0.10866411 0.050283   0.04327869 0.04007098 0.0379632  0.03635693
 0.03498457]
------------------------------
Epoch: 61
Training loss: 0.03639172680041657 | Validation loss: 0.03606705074157335
Validation loss (ends of cycles): [0.10866411 0.050283   0.04327869 0.04007098 0.0379632  0.03635693
 0.03498457]
------------------------------
Epoch: 62
Training loss: 0.0375638076628551 | Validation loss: 0.03725699788635283
Validation loss (ends of cycles): [0.10866411 0.050283   0.04327869 0.04007098 0.0379632  0.03635693
 0.03498457]
------------------------------
Epoch: 63
Training loss: 0.039039047017847515 | Validation loss: 0.038210892637746526
Validation loss (ends of cycles): [0.10866411 0.050283   0.04327869 0.04007098 0.0379632  0.03635693
 0.03498457]
------------------------------
Epoch: 64
Training loss: 0.04020949403707701 | Validation loss: 0.04285801162498187
Validation loss (ends of cycles): [0.10866411 0.050283   0.04327869 0.04007098 0.0379632  0.03635693
 0.03498457]
------------------------------
Epoch: 65
Training loss: 0.041740124291689024 | Validation loss: 0.041410352474292825
Validation loss (ends of cycles): [0.10866411 0.050283   0.04327869 0.04007098 0.0379632  0.03635693
 0.03498457]
------------------------------
Epoch: 66
Training loss: 0.040310035649445054 | Validation loss: 0.05140106689877215
Validation loss (ends of cycles): [0.10866411 0.050283   0.04327869 0.04007098 0.0379632  0.03635693
 0.03498457]
------------------------------
Epoch: 67
Training loss: 0.03865856288173273 | Validation loss: 0.04588074636010997
Validation loss (ends of cycles): [0.10866411 0.050283   0.04327869 0.04007098 0.0379632  0.03635693
 0.03498457]
------------------------------
Epoch: 68
Training loss: 0.036848236220030806 | Validation loss: 0.039373264755163576
Validation loss (ends of cycles): [0.10866411 0.050283   0.04327869 0.04007098 0.0379632  0.03635693
 0.03498457]
------------------------------
Epoch: 69
Training loss: 0.03542020992562908 | Validation loss: 0.036005012377832844
Validation loss (ends of cycles): [0.10866411 0.050283   0.04327869 0.04007098 0.0379632  0.03635693
 0.03498457]
------------------------------
Epoch: 70
Training loss: 0.034385565286098915 | Validation loss: 0.03407285567702709
Validation loss (ends of cycles): [0.10866411 0.050283   0.04327869 0.04007098 0.0379632  0.03635693
 0.03498457 0.03407286]
------------------------------
Epoch: 71
Training loss: 0.034998325296442985 | Validation loss: 0.03499888496852554
Validation loss (ends of cycles): [0.10866411 0.050283   0.04327869 0.04007098 0.0379632  0.03635693
 0.03498457 0.03407286]
------------------------------
Epoch: 72
Training loss: 0.03608989844824679 | Validation loss: 0.037975290641847965
Validation loss (ends of cycles): [0.10866411 0.050283   0.04327869 0.04007098 0.0379632  0.03635693
 0.03498457 0.03407286]
------------------------------
Epoch: 73
Training loss: 0.037150994481291534 | Validation loss: 0.03802902005876588
Validation loss (ends of cycles): [0.10866411 0.050283   0.04327869 0.04007098 0.0379632  0.03635693
 0.03498457 0.03407286]
------------------------------
Epoch: 74
Training loss: 0.03874208623778046 | Validation loss: 0.041800051696796334
Validation loss (ends of cycles): [0.10866411 0.050283   0.04327869 0.04007098 0.0379632  0.03635693
 0.03498457 0.03407286]
------------------------------
Epoch: 75
Training loss: 0.04030366619135891 | Validation loss: 0.04402384821292574
Validation loss (ends of cycles): [0.10866411 0.050283   0.04327869 0.04007098 0.0379632  0.03635693
 0.03498457 0.03407286]
------------------------------
Epoch: 76
Training loss: 0.03897080734265807 | Validation loss: 0.038405700957616876
Validation loss (ends of cycles): [0.10866411 0.050283   0.04327869 0.04007098 0.0379632  0.03635693
 0.03498457 0.03407286]
------------------------------
Epoch: 77
Training loss: 0.03700368836206773 | Validation loss: 0.03706241132839855
Validation loss (ends of cycles): [0.10866411 0.050283   0.04327869 0.04007098 0.0379632  0.03635693
 0.03498457 0.03407286]
------------------------------
Epoch: 78
Training loss: 0.03561326385912023 | Validation loss: 0.03773653503935949
Validation loss (ends of cycles): [0.10866411 0.050283   0.04327869 0.04007098 0.0379632  0.03635693
 0.03498457 0.03407286]
------------------------------
Epoch: 79
Training loss: 0.03413904608890971 | Validation loss: 0.034595715953449235
Validation loss (ends of cycles): [0.10866411 0.050283   0.04327869 0.04007098 0.0379632  0.03635693
 0.03498457 0.03407286]
------------------------------
Epoch: 80
Training loss: 0.033077710618880964 | Validation loss: 0.03346761355621625
Validation loss (ends of cycles): [0.10866411 0.050283   0.04327869 0.04007098 0.0379632  0.03635693
 0.03498457 0.03407286 0.03346761]
------------------------------
Epoch: 81
Training loss: 0.03339501076990255 | Validation loss: 0.03464310675595714
Validation loss (ends of cycles): [0.10866411 0.050283   0.04327869 0.04007098 0.0379632  0.03635693
 0.03498457 0.03407286 0.03346761]
------------------------------
Epoch: 82
Training loss: 0.03449362341413553 | Validation loss: 0.03528704326105329
Validation loss (ends of cycles): [0.10866411 0.050283   0.04327869 0.04007098 0.0379632  0.03635693
 0.03498457 0.03407286 0.03346761]
------------------------------
Epoch: 83
Training loss: 0.03574517990213152 | Validation loss: 0.03956749140227263
Validation loss (ends of cycles): [0.10866411 0.050283   0.04327869 0.04007098 0.0379632  0.03635693
 0.03498457 0.03407286 0.03346761]
------------------------------
Epoch: 84
Training loss: 0.03712140995547117 | Validation loss: 0.04075488192647432
Validation loss (ends of cycles): [0.10866411 0.050283   0.04327869 0.04007098 0.0379632  0.03635693
 0.03498457 0.03407286 0.03346761]
------------------------------
Epoch: 85
Training loss: 0.03851752373111236 | Validation loss: 0.04053122321127263
Validation loss (ends of cycles): [0.10866411 0.050283   0.04327869 0.04007098 0.0379632  0.03635693
 0.03498457 0.03407286 0.03346761]
------------------------------
Epoch: 86
Training loss: 0.037289504063550354 | Validation loss: 0.038621348683285504
Validation loss (ends of cycles): [0.10866411 0.050283   0.04327869 0.04007098 0.0379632  0.03635693
 0.03498457 0.03407286 0.03346761]
------------------------------
Epoch: 87
Training loss: 0.03606580819487278 | Validation loss: 0.03755889262641426
Validation loss (ends of cycles): [0.10866411 0.050283   0.04327869 0.04007098 0.0379632  0.03635693
 0.03498457 0.03407286 0.03346761]
------------------------------
Epoch: 88
Training loss: 0.03403875287750545 | Validation loss: 0.03664958490207132
Validation loss (ends of cycles): [0.10866411 0.050283   0.04327869 0.04007098 0.0379632  0.03635693
 0.03498457 0.03407286 0.03346761]
------------------------------
Epoch: 89
Training loss: 0.032725286617779764 | Validation loss: 0.03518285962497502
Validation loss (ends of cycles): [0.10866411 0.050283   0.04327869 0.04007098 0.0379632  0.03635693
 0.03498457 0.03407286 0.03346761]
------------------------------
Epoch: 90
Training loss: 0.03179220572630985 | Validation loss: 0.03308977194097454
Validation loss (ends of cycles): [0.10866411 0.050283   0.04327869 0.04007098 0.0379632  0.03635693
 0.03498457 0.03407286 0.03346761 0.03308977]
------------------------------
Epoch: 91
Training loss: 0.03204954804313611 | Validation loss: 0.03412141737453969
Validation loss (ends of cycles): [0.10866411 0.050283   0.04327869 0.04007098 0.0379632  0.03635693
 0.03498457 0.03407286 0.03346761 0.03308977]
------------------------------
Epoch: 92
Training loss: 0.033113976240187415 | Validation loss: 0.03650290599768668
Validation loss (ends of cycles): [0.10866411 0.050283   0.04327869 0.04007098 0.0379632  0.03635693
 0.03498457 0.03407286 0.03346761 0.03308977]
------------------------------
Epoch: 93
Training loss: 0.03438642540282944 | Validation loss: 0.03640461193842698
Validation loss (ends of cycles): [0.10866411 0.050283   0.04327869 0.04007098 0.0379632  0.03635693
 0.03498457 0.03407286 0.03346761 0.03308977]
------------------------------
Epoch: 94
Training loss: 0.035784698719356765 | Validation loss: 0.03660603991783826
Validation loss (ends of cycles): [0.10866411 0.050283   0.04327869 0.04007098 0.0379632  0.03635693
 0.03498457 0.03407286 0.03346761 0.03308977]
------------------------------
Epoch: 95
Training loss: 0.03738688092631119 | Validation loss: 0.04603489715836744
Validation loss (ends of cycles): [0.10866411 0.050283   0.04327869 0.04007098 0.0379632  0.03635693
 0.03498457 0.03407286 0.03346761 0.03308977]
------------------------------
Epoch: 96
Training loss: 0.0360823278718694 | Validation loss: 0.03907236244232781
Validation loss (ends of cycles): [0.10866411 0.050283   0.04327869 0.04007098 0.0379632  0.03635693
 0.03498457 0.03407286 0.03346761 0.03308977]
------------------------------
Epoch: 97
Training loss: 0.03440698718571463 | Validation loss: 0.03614757999579991
Validation loss (ends of cycles): [0.10866411 0.050283   0.04327869 0.04007098 0.0379632  0.03635693
 0.03498457 0.03407286 0.03346761 0.03308977]
------------------------------
Epoch: 98
Training loss: 0.032923828401624805 | Validation loss: 0.03688841782905887
Validation loss (ends of cycles): [0.10866411 0.050283   0.04327869 0.04007098 0.0379632  0.03635693
 0.03498457 0.03407286 0.03346761 0.03308977]
------------------------------
Epoch: 99
Training loss: 0.03173442275582365 | Validation loss: 0.034847187158544506
Validation loss (ends of cycles): [0.10866411 0.050283   0.04327869 0.04007098 0.0379632  0.03635693
 0.03498457 0.03407286 0.03346761 0.03308977]
------------------------------
Epoch: 100
Training loss: 0.030615978687268307 | Validation loss: 0.03256259926190946
Validation loss (ends of cycles): [0.10866411 0.050283   0.04327869 0.04007098 0.0379632  0.03635693
 0.03498457 0.03407286 0.03346761 0.03308977 0.0325626 ]
------------------------------
Epoch: 101
Training loss: 0.030597679183165742 | Validation loss: 0.03352231219971338
Validation loss (ends of cycles): [0.10866411 0.050283   0.04327869 0.04007098 0.0379632  0.03635693
 0.03498457 0.03407286 0.03346761 0.03308977 0.0325626 ]
------------------------------
Epoch: 102
Training loss: 0.03192661127614547 | Validation loss: 0.034362965502438295
Validation loss (ends of cycles): [0.10866411 0.050283   0.04327869 0.04007098 0.0379632  0.03635693
 0.03498457 0.03407286 0.03346761 0.03308977 0.0325626 ]
------------------------------
Epoch: 103
Training loss: 0.03307850291681161 | Validation loss: 0.035471899154703175
Validation loss (ends of cycles): [0.10866411 0.050283   0.04327869 0.04007098 0.0379632  0.03635693
 0.03498457 0.03407286 0.03346761 0.03308977 0.0325626 ]
------------------------------
Epoch: 104
Training loss: 0.03460319688741675 | Validation loss: 0.04395314779099638
Validation loss (ends of cycles): [0.10866411 0.050283   0.04327869 0.04007098 0.0379632  0.03635693
 0.03498457 0.03407286 0.03346761 0.03308977 0.0325626 ]
------------------------------
Epoch: 105
Training loss: 0.03603993922812996 | Validation loss: 0.03989762382631281
Validation loss (ends of cycles): [0.10866411 0.050283   0.04327869 0.04007098 0.0379632  0.03635693
 0.03498457 0.03407286 0.03346761 0.03308977 0.0325626 ]
------------------------------
Epoch: 106
Training loss: 0.03485340225568965 | Validation loss: 0.03633215443222924
Validation loss (ends of cycles): [0.10866411 0.050283   0.04327869 0.04007098 0.0379632  0.03635693
 0.03498457 0.03407286 0.03346761 0.03308977 0.0325626 ]
------------------------------
Epoch: 107
Training loss: 0.03364605744420661 | Validation loss: 0.03456024982167029
Validation loss (ends of cycles): [0.10866411 0.050283   0.04327869 0.04007098 0.0379632  0.03635693
 0.03498457 0.03407286 0.03346761 0.03308977 0.0325626 ]
------------------------------
Epoch: 108
Training loss: 0.03178448307852253 | Validation loss: 0.03427424061779691
Validation loss (ends of cycles): [0.10866411 0.050283   0.04327869 0.04007098 0.0379632  0.03635693
 0.03498457 0.03407286 0.03346761 0.03308977 0.0325626 ]
------------------------------
Epoch: 109
Training loss: 0.03050562876870665 | Validation loss: 0.03418495161541268
Validation loss (ends of cycles): [0.10866411 0.050283   0.04327869 0.04007098 0.0379632  0.03635693
 0.03498457 0.03407286 0.03346761 0.03308977 0.0325626 ]
------------------------------
Epoch: 110
Training loss: 0.029790386914158255 | Validation loss: 0.03241037907886558
Validation loss (ends of cycles): [0.10866411 0.050283   0.04327869 0.04007098 0.0379632  0.03635693
 0.03498457 0.03407286 0.03346761 0.03308977 0.0325626  0.03241038]
------------------------------
Epoch: 111
Training loss: 0.03013055137904933 | Validation loss: 0.03372434017339111
Validation loss (ends of cycles): [0.10866411 0.050283   0.04327869 0.04007098 0.0379632  0.03635693
 0.03498457 0.03407286 0.03346761 0.03308977 0.0325626  0.03241038]
------------------------------
Epoch: 112
Training loss: 0.030869591665842872 | Validation loss: 0.03451865892527641
Validation loss (ends of cycles): [0.10866411 0.050283   0.04327869 0.04007098 0.0379632  0.03635693
 0.03498457 0.03407286 0.03346761 0.03308977 0.0325626  0.03241038]
------------------------------
Epoch: 113
Training loss: 0.032268728440312124 | Validation loss: 0.036281384115593625
Validation loss (ends of cycles): [0.10866411 0.050283   0.04327869 0.04007098 0.0379632  0.03635693
 0.03498457 0.03407286 0.03346761 0.03308977 0.0325626  0.03241038]
------------------------------
Epoch: 114
Training loss: 0.03397129182373797 | Validation loss: 0.037896803192860255
Validation loss (ends of cycles): [0.10866411 0.050283   0.04327869 0.04007098 0.0379632  0.03635693
 0.03498457 0.03407286 0.03346761 0.03308977 0.0325626  0.03241038]
------------------------------
Epoch: 115
Training loss: 0.03532382988418621 | Validation loss: 0.04804187614174016
Validation loss (ends of cycles): [0.10866411 0.050283   0.04327869 0.04007098 0.0379632  0.03635693
 0.03498457 0.03407286 0.03346761 0.03308977 0.0325626  0.03241038]
------------------------------
Epoch: 116
Training loss: 0.03400072505730608 | Validation loss: 0.03747494302821898
Validation loss (ends of cycles): [0.10866411 0.050283   0.04327869 0.04007098 0.0379632  0.03635693
 0.03498457 0.03407286 0.03346761 0.03308977 0.0325626  0.03241038]
------------------------------
Epoch: 117
Training loss: 0.032401689846163956 | Validation loss: 0.03554237289436623
Validation loss (ends of cycles): [0.10866411 0.050283   0.04327869 0.04007098 0.0379632  0.03635693
 0.03498457 0.03407286 0.03346761 0.03308977 0.0325626  0.03241038]
------------------------------
Epoch: 118
Training loss: 0.03092662235776945 | Validation loss: 0.03437768438814488
Validation loss (ends of cycles): [0.10866411 0.050283   0.04327869 0.04007098 0.0379632  0.03635693
 0.03498457 0.03407286 0.03346761 0.03308977 0.0325626  0.03241038]
------------------------------
Epoch: 119
Training loss: 0.029526078819634583 | Validation loss: 0.03346088269840827
Validation loss (ends of cycles): [0.10866411 0.050283   0.04327869 0.04007098 0.0379632  0.03635693
 0.03498457 0.03407286 0.03346761 0.03308977 0.0325626  0.03241038]
------------------------------
Epoch: 120
Training loss: 0.028847131215395245 | Validation loss: 0.03171695443282349
Validation loss (ends of cycles): [0.10866411 0.050283   0.04327869 0.04007098 0.0379632  0.03635693
 0.03498457 0.03407286 0.03346761 0.03308977 0.0325626  0.03241038
 0.03171695]
------------------------------
Epoch: 121
Training loss: 0.029099270120248433 | Validation loss: 0.03273795564057289
Validation loss (ends of cycles): [0.10866411 0.050283   0.04327869 0.04007098 0.0379632  0.03635693
 0.03498457 0.03407286 0.03346761 0.03308977 0.0325626  0.03241038
 0.03171695]
------------------------------
Epoch: 122
Training loss: 0.03011367364243905 | Validation loss: 0.033902542846920215
Validation loss (ends of cycles): [0.10866411 0.050283   0.04327869 0.04007098 0.0379632  0.03635693
 0.03498457 0.03407286 0.03346761 0.03308977 0.0325626  0.03241038
 0.03171695]
------------------------------
Epoch: 123
Training loss: 0.03149592533238291 | Validation loss: 0.04488189828343096
Validation loss (ends of cycles): [0.10866411 0.050283   0.04327869 0.04007098 0.0379632  0.03635693
 0.03498457 0.03407286 0.03346761 0.03308977 0.0325626  0.03241038
 0.03171695]
------------------------------
Epoch: 124
Training loss: 0.03292691715219329 | Validation loss: 0.03982129518305306
Validation loss (ends of cycles): [0.10866411 0.050283   0.04327869 0.04007098 0.0379632  0.03635693
 0.03498457 0.03407286 0.03346761 0.03308977 0.0325626  0.03241038
 0.03171695]
------------------------------
Epoch: 125
Training loss: 0.03405568103294995 | Validation loss: 0.049960068979226384
Validation loss (ends of cycles): [0.10866411 0.050283   0.04327869 0.04007098 0.0379632  0.03635693
 0.03498457 0.03407286 0.03346761 0.03308977 0.0325626  0.03241038
 0.03171695]
------------------------------
Epoch: 126
Training loss: 0.0330583095444205 | Validation loss: 0.03734910000214534
Validation loss (ends of cycles): [0.10866411 0.050283   0.04327869 0.04007098 0.0379632  0.03635693
 0.03498457 0.03407286 0.03346761 0.03308977 0.0325626  0.03241038
 0.03171695]
------------------------------
Epoch: 127
Training loss: 0.03171656856324348 | Validation loss: 0.04288117178773458
Validation loss (ends of cycles): [0.10866411 0.050283   0.04327869 0.04007098 0.0379632  0.03635693
 0.03498457 0.03407286 0.03346761 0.03308977 0.0325626  0.03241038
 0.03171695]
------------------------------
Epoch: 128
Training loss: 0.03015985843582504 | Validation loss: 0.03478364519511176
Validation loss (ends of cycles): [0.10866411 0.050283   0.04327869 0.04007098 0.0379632  0.03635693
 0.03498457 0.03407286 0.03346761 0.03308977 0.0325626  0.03241038
 0.03171695]
------------------------------
Epoch: 129
Training loss: 0.02886176664715882 | Validation loss: 0.033183983189210425
Validation loss (ends of cycles): [0.10866411 0.050283   0.04327869 0.04007098 0.0379632  0.03635693
 0.03498457 0.03407286 0.03346761 0.03308977 0.0325626  0.03241038
 0.03171695]
------------------------------
Epoch: 130
Training loss: 0.02814806290759199 | Validation loss: 0.03104466441359404
Validation loss (ends of cycles): [0.10866411 0.050283   0.04327869 0.04007098 0.0379632  0.03635693
 0.03498457 0.03407286 0.03346761 0.03308977 0.0325626  0.03241038
 0.03171695 0.03104466]
------------------------------
Epoch: 131
Training loss: 0.028255866756149398 | Validation loss: 0.0327843960325143
Validation loss (ends of cycles): [0.10866411 0.050283   0.04327869 0.04007098 0.0379632  0.03635693
 0.03498457 0.03407286 0.03346761 0.03308977 0.0325626  0.03241038
 0.03171695 0.03104466]
------------------------------
Epoch: 132
Training loss: 0.02914011667071893 | Validation loss: 0.033506754794017934
Validation loss (ends of cycles): [0.10866411 0.050283   0.04327869 0.04007098 0.0379632  0.03635693
 0.03498457 0.03407286 0.03346761 0.03308977 0.0325626  0.03241038
 0.03171695 0.03104466]
------------------------------
Epoch: 133
Training loss: 0.030517671123425676 | Validation loss: 0.034617729789981275
Validation loss (ends of cycles): [0.10866411 0.050283   0.04327869 0.04007098 0.0379632  0.03635693
 0.03498457 0.03407286 0.03346761 0.03308977 0.0325626  0.03241038
 0.03171695 0.03104466]
------------------------------
Epoch: 134
Training loss: 0.03195116536941115 | Validation loss: 0.03802656727653425
Validation loss (ends of cycles): [0.10866411 0.050283   0.04327869 0.04007098 0.0379632  0.03635693
 0.03498457 0.03407286 0.03346761 0.03308977 0.0325626  0.03241038
 0.03171695 0.03104466]
------------------------------
Epoch: 135

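The run above is truncated mid-stream, but the `Validation loss (ends of cycles)` arrays it prints are the values worth tracking: with `step_size_up=5`, one triangular cycle spans 10 epochs, so checkpoints are most comparable at epochs 0, 10, 20, and so on. A minimal sketch (not part of the original run; the array below is copied from the seed-15 log above) to locate the best cycle:

import numpy as np

# Cycle-end validation losses copied from the seed-15 log above
val_losses = np.array([0.10866411, 0.050283,   0.04327869, 0.04007098,
                       0.0379632,  0.03635693, 0.03498457, 0.03407286,
                       0.03346761, 0.03308977, 0.0325626,  0.03241038,
                       0.03171695, 0.03104466])

# Index i corresponds to epoch 10 * i (one entry per completed cycle)
best = int(val_losses.argmin())
print(f'Best cycle: {best} (epoch {10 * best}) | validation loss: {val_losses[best]:.5f}')
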
Evaluate on all

# Replace following Paths with yours
src_dir_model = Path('/content/drive/MyDrive/research/predict-k-mirs-dl/dumps/cnn/train_eval/all/models')
seeds = range(20)
learners = Learners(Model, tax_lookup, seeds=seeds, device=device)
perfs_global_all, y_hats_all, y_trues_all, ns_all = learners.evaluate((X, y, depth_order[:, -1]),
                                                                      src_dir_model=src_dir_model)
print(f'# of test samples: {ns_all.mean().item()}')
# of test samples: 4032.0
# Save a specific seed's y_hat, y_true to plot "Observed vs. predicted" scatterplots
# Replace following Paths with yours
dest_dir_predicted = Path('/content/drive/MyDrive/research/predict-k-mirs-dl/dumps/')
seed = 1
with open(dest_dir_predicted/f'predicted-true-cnn-seed-{seed}.pickle', 'wb') as f: 
    pickle.dump((y_hats_all[seed].to_numpy(), y_trues_all[seed].to_numpy()), f)
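
A minimal sketch (assumed, not from the original notebook) of how this dump can be reloaded later to draw the scatterplot; matplotlib is assumed (ax.axline needs matplotlib 3.3+):

import pickle
from pathlib import Path

import matplotlib.pyplot as plt

dest_dir_predicted = Path('/content/drive/MyDrive/research/predict-k-mirs-dl/dumps/')
seed = 1
with open(dest_dir_predicted/f'predicted-true-cnn-seed-{seed}.pickle', 'rb') as f:
    y_hat, y_true = pickle.load(f)

# Observed vs. predicted with a 1:1 reference line
fig, ax = plt.subplots(figsize=(5, 5))
ax.scatter(y_true, y_hat, s=2, alpha=0.3)
ax.axline((0, 0), slope=1, color='black', linewidth=1)
ax.set_xlabel('Observed')
ax.set_ylabel('Predicted')
plt.show()
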
perfs_global_all.describe()
             rpd       rpiq         r2       lccc       rmse        mse  \
count  20.000000  20.000000  20.000000  20.000000  20.000000  20.000000   
mean    2.184830   2.992808   0.790243   0.884580   0.595624   0.377839   
std     0.041494   0.067646   0.007929   0.004986   0.155836   0.259428   
min     2.114635   2.889447   0.776315   0.874575   0.459073   0.210748   
25%     2.159791   2.930840   0.785559   0.881369   0.504516   0.254547   
50%     2.183314   3.015281   0.790165   0.884643   0.571964   0.327175   
75%     2.205628   3.028623   0.794389   0.887016   0.617017   0.380868   
max     2.262492   3.141478   0.804596   0.895479   1.197708   1.434504   

             mae       mape       bias        stb  
count  20.000000  20.000000  20.000000  20.000000  
mean    0.237318  30.946025   0.004749   0.009024  
std     0.012061   0.932127   0.012705   0.024443  
min     0.221478  29.163000  -0.012504  -0.024802  
25%     0.225294  30.303971  -0.005159  -0.010186  
50%     0.236342  30.827668   0.004057   0.007958  
75%     0.246811  31.663001   0.016258   0.031279  
max     0.260846  32.642108   0.032962   0.062948  

Evaluate on Soil Tax. Orders

# Replace following Paths with yours
src_dir_model = Path('/content/drive/MyDrive/research/predict-k-mirs-dl/dumps/cnn/train_eval/all/models')
seeds = range(20)

for k, v in tax_lookup.items():
    print(80*'-')
    print(f'Test metrics on {k}')
    print(80*'-')
    learners = Learners(Model, tax_lookup, seeds=seeds, device=device)
    perfs_global, _, _, ns = learners.evaluate((X, y, depth_order[:, -1]),
                                               order=v,
                                               src_dir_model=src_dir_model)

    print(f'# of test samples: {ns.mean().item()}')
    print(perfs_global.describe())
--------------------------------------------------------------------------------
Test metrics on alfisols
--------------------------------------------------------------------------------
# of test samples: 422.4
             rpd       rpiq         r2       lccc       rmse        mse  \
count  20.000000  20.000000  20.000000  20.000000  20.000000  20.000000   
mean    1.809327   2.462286   0.691663   0.822509   0.382106   0.174798   
std     0.087705   0.182733   0.030946   0.018080   0.174094   0.157874   
min     1.602639   2.154843   0.609675   0.773057   0.150495   0.022649   
25%     1.752131   2.328294   0.673422   0.810919   0.233184   0.054400   
50%     1.814577   2.449380   0.695557   0.823634   0.371884   0.138515   
75%     1.875471   2.573424   0.715007   0.835819   0.459433   0.211082   
max     1.949812   2.768350   0.736324   0.847946   0.794326   0.630953   

             mae       mape       bias        stb  
count  20.000000  20.000000  20.000000  20.000000  
mean    0.134967  27.355655   0.003228   0.008986  
std     0.020114   1.268820   0.011029   0.028718  
min     0.101224  24.950266  -0.013376  -0.032392  
25%     0.122148  26.654700  -0.005885  -0.015329  
50%     0.130240  27.405189   0.004066   0.010273  
75%     0.149955  27.915920   0.012193   0.033151  
max     0.168116  29.876232   0.020493   0.053057  
--------------------------------------------------------------------------------
Test metrics on mollisols
--------------------------------------------------------------------------------
# of test samples: 977.6
             rpd       rpiq         r2       lccc       rmse        mse  \
count  20.000000  20.000000  20.000000  20.000000  20.000000  20.000000   
mean    2.082906   2.750063   0.767633   0.868226   0.441974   0.200178   
std     0.105370   0.152068   0.022648   0.014038   0.071353   0.074478   
min     1.910614   2.503716   0.725773   0.839147   0.351621   0.123637   
25%     2.016213   2.645241   0.753751   0.858901   0.398954   0.159184   
50%     2.073633   2.727649   0.767201   0.867132   0.434792   0.189089   
75%     2.158195   2.803545   0.785071   0.877551   0.462096   0.213538   
max     2.297545   3.113704   0.810361   0.892300   0.693749   0.481287   

             mae       mape       bias        stb  
count  20.000000  20.000000  20.000000  20.000000  
mean    0.217482  27.362061   0.006410   0.014927  
std     0.011783   1.171648   0.014516   0.034156  
min     0.198063  25.863001  -0.017837  -0.042497  
25%     0.208970  26.442070  -0.002144  -0.005110  
50%     0.217809  27.306423   0.006673   0.016067  
75%     0.226259  27.765553   0.015423   0.036009  
max     0.238283  30.162075   0.034060   0.082286  
--------------------------------------------------------------------------------
Test metrics on inceptisols
--------------------------------------------------------------------------------
# of test samples: 289.6
             rpd       rpiq         r2       lccc       rmse        mse  \
count  20.000000  20.000000  20.000000  20.000000  20.000000  20.000000   
mean    1.909866   2.651945   0.722069   0.838262   0.395929   0.161811   
std     0.112304   0.174091   0.033546   0.022861   0.072917   0.060037   
min     1.644649   2.362685   0.628917   0.773094   0.297366   0.088427   
25%     1.843177   2.472268   0.704578   0.825389   0.341724   0.116911   
50%     1.907174   2.692387   0.724081   0.840439   0.403799   0.163472   
75%     1.972514   2.807575   0.742076   0.852019   0.446248   0.199140   
max     2.168335   2.898377   0.786479   0.878068   0.566843   0.321311   

             mae       mape       bias        stb  
count  20.000000  20.000000  20.000000  20.000000  
mean    0.188652  35.004717  -0.003812  -0.007661  
std     0.019507   3.099745   0.016768   0.033159  
min     0.167560  31.472638  -0.023707  -0.052528  
25%     0.173922  33.050188  -0.018816  -0.037482  
50%     0.185284  34.055609  -0.007071  -0.014775  
75%     0.193686  36.080701   0.008284   0.017426  
max     0.240127  43.579751   0.026359   0.048525  
--------------------------------------------------------------------------------
Test metrics on entisols
--------------------------------------------------------------------------------
# of test samples: 164.8
             rpd       rpiq         r2       lccc       rmse        mse  \
count  20.000000  20.000000  20.000000  20.000000  20.000000  20.000000   
mean    2.146993   3.000346   0.772961   0.875231   0.323115   0.107320   
std     0.250396   0.376957   0.053555   0.032886   0.055413   0.036646   
min     1.703938   2.254587   0.653383   0.784883   0.225837   0.051002   
25%     2.015869   2.812905   0.752028   0.869245   0.281971   0.079520   
50%     2.098403   3.022831   0.771459   0.877648   0.317529   0.100827   
75%     2.324253   3.244836   0.813595   0.896911   0.360752   0.130172   
max     2.658273   3.657772   0.857475   0.920406   0.431522   0.186212   

             mae       mape       bias        stb  
count  20.000000  20.000000  20.000000  20.000000  
mean    0.164777  30.832117   0.004347   0.008009  
std     0.019656   3.450412   0.020474   0.042995  
min     0.122440  24.067251  -0.025000  -0.061117  
25%     0.151904  27.745744  -0.010036  -0.018291  
50%     0.168026  31.689824   0.003999   0.007015  
75%     0.175205  32.814956   0.017534   0.035175  
max     0.199315  37.243327   0.037462   0.079188  
--------------------------------------------------------------------------------
Test metrics on spodosols
--------------------------------------------------------------------------------
# of test samples: 64.0
             rpd       rpiq         r2       lccc       rmse        mse  \
count  20.000000  20.000000  20.000000  20.000000  20.000000  20.000000   
mean    2.181398   3.140114   0.779113   0.880473   0.412119   0.180965   
std     0.226818   0.688425   0.046961   0.025426   0.108204   0.098489   
min     1.776501   2.128022   0.677271   0.820691   0.268197   0.071930   
25%     2.026443   2.670819   0.752034   0.863932   0.326987   0.106931   
50%     2.153188   2.983713   0.780577   0.884321   0.386405   0.149414   
75%     2.367428   3.278641   0.818189   0.898756   0.483470   0.233907   
max     2.553907   4.704971   0.843790   0.914914   0.681660   0.464660   

             mae       mape       bias        stb  
count  20.000000  20.000000  20.000000  20.000000  
mean    0.223380  37.213633  -0.008654  -0.013208  
std     0.050800   4.705197   0.028514   0.043828  
min     0.148266  31.124797  -0.073955  -0.085358  
25%     0.184255  32.850098  -0.031603  -0.049806  
50%     0.215128  37.193228  -0.010030  -0.018675  
75%     0.268383  40.618369   0.015755   0.023395  
max     0.314407  45.255888   0.037978   0.053857  
--------------------------------------------------------------------------------
Test metrics on undefined
--------------------------------------------------------------------------------
# of test samples: 1553.6
             rpd       rpiq         r2       lccc       rmse        mse  \
count  20.000000  20.000000  20.000000  20.000000  20.000000  20.000000   
mean    2.292085   3.140178   0.809147   0.896603   0.756287   0.646963   
std     0.061811   0.126350   0.010024   0.006282   0.280963   0.667752   
min     2.200176   2.961841   0.793285   0.887092   0.533183   0.284284   
25%     2.250938   3.058737   0.802503   0.892838   0.609880   0.372009   
50%     2.294312   3.141787   0.809902   0.896849   0.712569   0.507759   
75%     2.315500   3.190854   0.813364   0.899591   0.796151   0.633870   
max     2.437377   3.432564   0.831563   0.912160   1.841250   3.390202   

             mae       mape       bias        stb  
count  20.000000  20.000000  20.000000  20.000000  
mean    0.292059  31.645606   0.004302   0.007476  
std     0.023362   1.247020   0.014321   0.025455  
min     0.258135  29.001465  -0.016584  -0.031293  
25%     0.272517  30.930875  -0.006044  -0.011075  
50%     0.289404  31.708111   0.001588   0.002819  
75%     0.306450  32.771565   0.014035   0.025830  
max     0.339195  33.584201   0.036192   0.061086  
--------------------------------------------------------------------------------
Test metrics on ultisols
--------------------------------------------------------------------------------
# of test samples: 192.0
             rpd       rpiq         r2       lccc       rmse        mse  \
count  20.000000  20.000000  20.000000  20.000000  20.000000  20.000000   
mean    1.632526   2.265209   0.617841   0.763237   0.260545   0.071859   
std     0.108925   0.250811   0.051179   0.038303   0.064686   0.036546   
min     1.428423   1.788526   0.507145   0.672963   0.169072   0.028586   
25%     1.568656   2.078099   0.591292   0.742977   0.216448   0.046878   
50%     1.622599   2.291745   0.617950   0.762776   0.246636   0.060842   
75%     1.695520   2.441266   0.650373   0.784582   0.298181   0.089221   
max     1.858865   2.726562   0.708979   0.823643   0.394320   0.155488   

             mae       mape       bias        stb  
count  20.000000  20.000000  20.000000  20.000000  
mean    0.135663  32.535614   0.017157   0.040477  
std     0.020282   2.442567   0.013812   0.032344  
min     0.107650  28.119987  -0.010331  -0.025534  
25%     0.121366  31.080629   0.011006   0.025485  
50%     0.136634  32.268478   0.015768   0.035893  
75%     0.147672  33.884998   0.027038   0.058695  
max     0.179378  38.015869   0.047486   0.105611  
--------------------------------------------------------------------------------
Test metrics on andisols
--------------------------------------------------------------------------------
# of test samples: 132.8
             rpd       rpiq         r2       lccc       rmse        mse  \
count  20.000000  20.000000  20.000000  20.000000  20.000000  20.000000   
mean    1.982082   2.557273   0.738402   0.856554   0.476687   0.248451   
std     0.162387   0.330953   0.041831   0.025708   0.149459   0.166614   
min     1.738631   1.923586   0.666517   0.798802   0.276538   0.076474   
25%     1.859672   2.319364   0.708526   0.839169   0.382923   0.146634   
50%     1.946365   2.628500   0.733634   0.856545   0.454000   0.206157   
75%     2.118545   2.801427   0.775115   0.878573   0.557137   0.310493   
max     2.280665   3.039670   0.806156   0.894918   0.833418   0.694586   

             mae       mape       bias        stb  
count  20.000000  20.000000  20.000000  20.000000  
mean    0.229848  32.559297   0.015201   0.033912  
std     0.026493   3.362845   0.023263   0.050849  
min     0.175337  27.092579  -0.025021  -0.052350  
25%     0.206640  30.636463  -0.003132  -0.006866  
50%     0.234442  33.061846   0.017000   0.037901  
75%     0.248909  35.253529   0.023922   0.056046  
max     0.262889  38.183209   0.064641   0.131825  
--------------------------------------------------------------------------------
Test metrics on histosols
--------------------------------------------------------------------------------
# of test samples: 80.0
             rpd       rpiq         r2       lccc       rmse        mse  \
count  20.000000  20.000000  20.000000  20.000000  20.000000  20.000000   
mean    2.093646   3.055053   0.758036   0.870220   0.866301   0.819420   
std     0.274214   0.770695   0.056379   0.028888   0.269391   0.556293   
min     1.656537   2.138313   0.630659   0.818658   0.570947   0.325981   
25%     1.920327   2.384933   0.725072   0.850471   0.698688   0.488250   
50%     2.058352   2.852104   0.760004   0.867652   0.787495   0.621255   
75%     2.199841   3.507436   0.790265   0.882838   0.946578   0.896546   
max     2.885291   4.842908   0.877941   0.932256   1.453742   2.113365   

             mae       mape       bias        stb  
count  20.000000  20.000000  20.000000  20.000000  
mean    0.449648  45.448765   0.024104   0.034142  
std     0.118631   7.463257   0.034970   0.048042  
min     0.294663  34.994602  -0.071150  -0.081119  
25%     0.362897  39.577908   0.011031   0.011864  
50%     0.415026  44.506969   0.034138   0.050279  
75%     0.547573  51.371342   0.046856   0.066266  
max     0.660869  57.288700   0.067678   0.099684  
--------------------------------------------------------------------------------
Test metrics on oxisols
--------------------------------------------------------------------------------
# of test samples: 32.0
            rpd      rpiq          r2      lccc      rmse       mse       mae  \
count  2.000000  8.000000    2.000000  2.000000  8.000000  8.000000  8.000000   
mean   0.118367  0.026377 -134.414165  0.030174  0.087786  0.013825  0.083611   
std    0.046499  0.051292   98.771373  0.054914  0.083623  0.022839  0.081877   
min    0.085487  0.000000 -204.256073 -0.008656  0.009060  0.000082  0.009060   
25%    0.101927  0.000000 -169.335119  0.010759  0.025124  0.000655  0.025124   
50%    0.118367  0.000000 -134.414165  0.030174  0.067840  0.004688  0.065089   
75%    0.134807  0.019050  -99.493211  0.049589  0.118719  0.014272  0.111745   
max    0.151247  0.134818  -64.572257  0.069005  0.260096  0.067650  0.260096   

             mape      bias        stb  
count    8.000000  8.000000   8.000000  
mean    65.191675 -0.177251        NaN  
std     60.839932  0.164703        NaN  
min      7.445914 -0.467451       -inf  
25%     20.544780 -0.258737        NaN  
50%     52.729677 -0.180752        NaN  
75%     88.397117 -0.074745 -10.384091  
max    193.393838  0.064107        inf  
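
(Note: the Oxisols statistics above are degenerate; with only ~32 test samples, most seeds yield too few valid predictions for stable metrics, hence counts of 2 to 8 instead of 20, a strongly negative r2, and NaN/inf stb. This order should be interpreted with caution.)
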
--------------------------------------------------------------------------------
Test metrics on vertisols
--------------------------------------------------------------------------------
# of test samples: 94.4
             rpd       rpiq         r2       lccc       rmse        mse  \
count  20.000000  20.000000  20.000000  20.000000  20.000000  20.000000   
mean    2.039982   2.815862   0.744782   0.856862   0.273233   0.078001   
std     0.271953   0.511288   0.063033   0.036968   0.059333   0.036126   
min     1.577847   2.128196   0.592591   0.768764   0.182002   0.033125   
25%     1.851439   2.431125   0.703943   0.837489   0.232396   0.054013   
50%     1.992413   2.714564   0.744894   0.855618   0.253052   0.064036   
75%     2.178701   3.148894   0.786316   0.881447   0.303549   0.092144   
max     2.713312   4.022168   0.862141   0.923391   0.427089   0.182405   

             mae       mape       bias        stb  
count  20.000000  20.000000  20.000000  20.000000  
mean    0.173488  26.855655  -0.001182  -0.003823  
std     0.021406   3.272257   0.020647   0.055384  
min     0.138288  21.111543  -0.052648  -0.154459  
25%     0.157317  24.537949  -0.011511  -0.028144  
50%     0.173311  26.192354  -0.001295  -0.003575  
75%     0.188509  28.037003   0.009025   0.019823  
max     0.208709  33.903366   0.040271   0.099009  
--------------------------------------------------------------------------------
Test metrics on aridisols
--------------------------------------------------------------------------------
# of test samples: 163.2
             rpd       rpiq         r2       lccc       rmse        mse  \
count  20.000000  20.000000  20.000000  20.000000  20.000000  20.000000   
mean    1.814492   2.405396   0.691015   0.821971   0.662013   0.489155   
std     0.109405   0.241063   0.037256   0.022591   0.231457   0.352484   
min     1.613652   1.941601   0.613362   0.776206   0.373065   0.139178   
25%     1.750923   2.250058   0.671518   0.809034   0.485274   0.235720   
50%     1.814514   2.415395   0.694165   0.819871   0.598650   0.359678   
75%     1.885174   2.577828   0.716658   0.838137   0.773024   0.597940   
max     2.036902   2.856824   0.757279   0.864022   1.208685   1.460919   

             mae       mape       bias        stb  
count  20.000000  20.000000  20.000000  20.000000  
mean    0.300450  35.089144   0.008393   0.015292  
std     0.059640   3.065990   0.021957   0.044950  
min     0.205739  28.651589  -0.029821  -0.072086  
25%     0.256783  33.024969  -0.006753  -0.014939  
50%     0.295750  34.871383   0.012934   0.028759  
75%     0.330856  37.073732   0.020540   0.042719  
max     0.408710  41.000089   0.048986   0.085159  
--------------------------------------------------------------------------------
Test metrics on gelisols
--------------------------------------------------------------------------------
# of test samples: 60.8
             rpd       rpiq         r2       lccc       rmse        mse  \
count  20.000000  20.000000  20.000000  20.000000  20.000000  20.000000   
mean    2.094545   3.140563   0.744767   0.860555   0.588931   0.365489   
std     0.397299   1.088457   0.082866   0.047535   0.140113   0.173223   
min     1.563934   1.337414   0.579469   0.746647   0.378497   0.143260   
25%     1.827473   2.396965   0.692855   0.838851   0.495282   0.245315   
50%     2.025772   2.948529   0.749510   0.854638   0.568492   0.323183   
75%     2.235309   3.706015   0.795246   0.890133   0.710962   0.505690   
max     3.026351   5.757985   0.888340   0.943058   0.834707   0.696736   

             mae       mape       bias        stb  
count  20.000000  20.000000  20.000000  20.000000  
mean    0.308468  47.301260  -0.041032  -0.068766  
std     0.062994   9.997578   0.036220   0.072472  
min     0.194099  31.988180  -0.094267  -0.236790  
25%     0.259407  38.336892  -0.074615  -0.102293  
50%     0.303302  50.013152  -0.037441  -0.057775  
75%     0.363768  55.542475  -0.021546  -0.025043  
max     0.404708  60.821646   0.054468   0.089767  
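
Reading thirteen separate describe() blocks makes cross-order comparison tedious. A minimal sketch (assumed, reusing tax_lookup, seeds, and src_dir_model from the loop above; the summary dict and final DataFrame are illustrative additions) that collects the per-order mean metrics into a single table:

import pandas as pd

summary = {}
for k, v in tax_lookup.items():
    learners = Learners(Model, tax_lookup, seeds=seeds, device=device)
    perfs, _, _, _ = learners.evaluate((X, y, depth_order[:, -1]),
                                       order=v,
                                       src_dir_model=src_dir_model)
    summary[k] = perfs.mean()  # mean of each metric across seeds

pd.DataFrame(summary).T  # one row per Soil Taxonomic Order, one column per metric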

Evaluate on Mollisols

# Replace following Paths with yours
src_dir_model = Path('/content/drive/MyDrive/research/predict-k-mirs-dl/dumps/cnn/train_eval/all/models')
seeds = range(20)
order = 1  # index of Mollisols in tax_lookup
learners = Learners(Model, tax_lookup, seeds=seeds, device=device)
perfs_global_mollisols, _, _, _ = learners.evaluate((X, y, depth_order[:, -1]),
                                                    order=order,
                                                    src_dir_model=src_dir_model)

perfs_global_mollisols.describe()
             rpd       rpiq         r2       lccc       rmse        mse  \
count  20.000000  20.000000  20.000000  20.000000  20.000000  20.000000   
mean    2.082906   2.750063   0.767633   0.868226   0.441974   0.200178   
std     0.105370   0.152068   0.022648   0.014038   0.071353   0.074478   
min     1.910614   2.503716   0.725773   0.839147   0.351621   0.123637   
25%     2.016213   2.645241   0.753751   0.858901   0.398954   0.159184   
50%     2.073633   2.727649   0.767201   0.867132   0.434792   0.189089   
75%     2.158195   2.803545   0.785071   0.877551   0.462096   0.213538   
max     2.297545   3.113704   0.810361   0.892300   0.693749   0.481287   

             mae       mape       bias        stb  
count  20.000000  20.000000  20.000000  20.000000  
mean    0.217482  27.362061   0.006410   0.014927  
std     0.011783   1.171648   0.014516   0.034156  
min     0.198063  25.863001  -0.017837  -0.042497  
25%     0.208970  26.442070  -0.002144  -0.005110  
50%     0.217809  27.306423   0.006673   0.016067  
75%     0.226259  27.765553   0.015423   0.036009  
max     0.238283  30.162075   0.034060   0.082286  

Evaluate on Gelisols

# Replace following Paths with yours
src_dir_model = Path('/content/drive/MyDrive/research/predict-k-mirs-dl/dumps/cnn/train_eval/all/models')
seeds = range(20)
order = 12  # index of Gelisols in tax_lookup
learners = Learners(Model, tax_lookup, seeds=seeds, device=device)
perfs_global_gelisols, _, _, _ = learners.evaluate((X, y, depth_order[:, -1]),
                                                   order=order,
                                                   src_dir_model=src_dir_model)

perfs_global_gelisols.describe()
             rpd       rpiq         r2       lccc       rmse        mse  \
count  18.000000  18.000000  18.000000  18.000000  18.000000  18.000000   
mean    2.064524   3.052019   0.742612   0.858711   0.584043   0.357354   
std     0.339669   0.922599   0.076703   0.045312   0.131164   0.160157   
min     1.563934   1.337414   0.579469   0.746647   0.378497   0.143260   
25%     1.848496   2.458523   0.699683   0.841321   0.499613   0.249631   
50%     2.025773   2.948530   0.749510   0.854638   0.568492   0.323183   
75%     2.215420   3.613597   0.791696   0.889089   0.680335   0.464308   
max     3.023628   4.793782   0.888340   0.943058   0.834707   0.696736   

             mae       mape       bias        stb  
count  18.000000  18.000000  18.000000  18.000000  
mean    0.308433  47.883342  -0.042954  -0.072261  
std     0.061630  10.194859   0.037772   0.075571  
min     0.194099  31.988180  -0.094267  -0.236790  
25%     0.266246  38.717125  -0.076805  -0.105620  
50%     0.303302  51.015234  -0.042409  -0.066190  
75%     0.360686  55.561434  -0.021409  -0.028615  
max     0.404708  60.821658   0.054468   0.089767  

Evaluate on Vertisols

# Replace following Paths with yours
src_dir_model = Path('/content/drive/MyDrive/research/predict-k-mirs-dl/dumps/cnn/train_eval/all/models')
seeds = range(20)
order = 10  # index of Vertisols in tax_lookup
learners = Learners(Model, tax_lookup, seeds=seeds, device=device)
perfs_global_vertisols, _, _, _ = learners.evaluate((X, y, depth_order[:, -1]),
                                                    order=order,
                                                    src_dir_model=src_dir_model)

perfs_global_vertisols.describe()
             rpd       rpiq         r2       lccc       rmse        mse  \
count  20.000000  20.000000  20.000000  20.000000  20.000000  20.000000   
mean    2.039982   2.815862   0.744782   0.856862   0.273233   0.078001   
std     0.271953   0.511288   0.063033   0.036968   0.059333   0.036126   
min     1.577848   2.128196   0.592591   0.768764   0.182002   0.033125   
25%     1.851439   2.431125   0.703943   0.837489   0.232396   0.054013   
50%     1.992414   2.714563   0.744894   0.855618   0.253052   0.064036   
75%     2.178701   3.148894   0.786316   0.881447   0.303549   0.092144   
max     2.713312   4.022168   0.862141   0.923391   0.427089   0.182405   

             mae       mape       bias        stb  
count  20.000000  20.000000  20.000000  20.000000  
mean    0.173488  26.855653  -0.001182  -0.003823  
std     0.021406   3.272256   0.020647   0.055384  
min     0.138288  21.111539  -0.052648  -0.154459  
25%     0.157317  24.537946  -0.011511  -0.028144  
50%     0.173311  26.192354  -0.001295  -0.003575  
75%     0.188509  28.037003   0.009025   0.019823  
max     0.208709  33.903363   0.040271   0.099009  
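
A minimal sketch (assumed) to compare the three orders evaluated above on a single metric, using the perfs_global_* DataFrames already in memory; matplotlib is assumed:

import pandas as pd
import matplotlib.pyplot as plt

# r2 distributions across seeds, one column per order
r2 = pd.DataFrame({'mollisols': perfs_global_mollisols['r2'],
                   'gelisols': perfs_global_gelisols['r2'],
                   'vertisols': perfs_global_vertisols['r2']})
r2.boxplot()
plt.ylabel('$R^2$')
plt.show()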

Train and test on Mollisols

# Replace following Paths with yours
dest_dir_loss = Path('/content/drive/MyDrive/research/predict-k-mirs-dl/dumps/cnn/train_eval/mollisols/losses')
dest_dir_model = Path('/content/drive/MyDrive/research/predict-k-mirs-dl/dumps/cnn/train_eval/mollisols/models')

order = 1  # index of Mollisols in tax_lookup
seeds = range(20)
learners = Learners(Model, tax_lookup, seeds=seeds, device=device)
learners.train((X, y, depth_order[:, -1]), 
               order=order,
               dest_dir_loss=dest_dir_loss,
               dest_dir_model=dest_dir_model,
               n_epochs=n_epochs,
               sc_kwargs=params_scheduler)
Streaming output truncated to the last 5000 lines.
 0.0254631  0.02506297 0.02542974]
------------------------------
Epoch: 148
Training loss: 0.01703691177108154 | Validation loss: 0.03745046781440233
Validation loss (ends of cycles): [0.09721728 0.04142509 0.03426372 0.03054056 0.02912892 0.02823934
 0.02746296 0.02677113 0.02637389 0.02613157 0.02631584 0.02581771
 0.0254631  0.02506297 0.02542974]
------------------------------
Epoch: 149
Training loss: 0.01636921336912379 | Validation loss: 0.029602385222398
Validation loss (ends of cycles): [0.09721728 0.04142509 0.03426372 0.03054056 0.02912892 0.02823934
 0.02746296 0.02677113 0.02637389 0.02613157 0.02631584 0.02581771
 0.0254631  0.02506297 0.02542974]
------------------------------
Epoch: 150
Training loss: 0.01618192989227115 | Validation loss: 0.02575919661542465
Validation loss (ends of cycles): [0.09721728 0.04142509 0.03426372 0.03054056 0.02912892 0.02823934
 0.02746296 0.02677113 0.02637389 0.02613157 0.02631584 0.02581771
 0.0254631  0.02506297 0.02542974 0.0257592 ]
------------------------------
Epoch: 151
Training loss: 0.016052027107501518 | Validation loss: 0.02995985579387895
Validation loss (ends of cycles): [0.09721728 0.04142509 0.03426372 0.03054056 0.02912892 0.02823934
 0.02746296 0.02677113 0.02637389 0.02613157 0.02631584 0.02581771
 0.0254631  0.02506297 0.02542974 0.0257592 ]
------------------------------
Epoch: 152
Training loss: 0.01630766587521957 | Validation loss: 0.036192602445853164
Validation loss (ends of cycles): [0.09721728 0.04142509 0.03426372 0.03054056 0.02912892 0.02823934
 0.02746296 0.02677113 0.02637389 0.02613157 0.02631584 0.02581771
 0.0254631  0.02506297 0.02542974 0.0257592 ]
------------------------------
Epoch: 153
Training loss: 0.016913761476016774 | Validation loss: 0.043863068752247714
Validation loss (ends of cycles): [0.09721728 0.04142509 0.03426372 0.03054056 0.02912892 0.02823934
 0.02746296 0.02677113 0.02637389 0.02613157 0.02631584 0.02581771
 0.0254631  0.02506297 0.02542974 0.0257592 ]
------------------------------
Epoch: 154
Training loss: 0.01821006020264966 | Validation loss: 0.028571049008389998
Validation loss (ends of cycles): [0.09721728 0.04142509 0.03426372 0.03054056 0.02912892 0.02823934
 0.02746296 0.02677113 0.02637389 0.02613157 0.02631584 0.02581771
 0.0254631  0.02506297 0.02542974 0.0257592 ]
------------------------------
Epoch: 155
Training loss: 0.0200055970297176 | Validation loss: 0.03199511515940058
Validation loss (ends of cycles): [0.09721728 0.04142509 0.03426372 0.03054056 0.02912892 0.02823934
 0.02746296 0.02677113 0.02637389 0.02613157 0.02631584 0.02581771
 0.0254631  0.02506297 0.02542974 0.0257592 ]
------------------------------
Epoch: 156
Training loss: 0.01871272486881638 | Validation loss: 0.0318490580238145
Validation loss (ends of cycles): [0.09721728 0.04142509 0.03426372 0.03054056 0.02912892 0.02823934
 0.02746296 0.02677113 0.02637389 0.02613157 0.02631584 0.02581771
 0.0254631  0.02506297 0.02542974 0.0257592 ]
------------------------------
Epoch: 157
Training loss: 0.01749704931957685 | Validation loss: 0.04083131170221444
Validation loss (ends of cycles): [0.09721728 0.04142509 0.03426372 0.03054056 0.02912892 0.02823934
 0.02746296 0.02677113 0.02637389 0.02613157 0.02631584 0.02581771
 0.0254631  0.02506297 0.02542974 0.0257592 ]
------------------------------
Epoch: 158
Training loss: 0.016811477638096834 | Validation loss: 0.036076820102231254
Validation loss (ends of cycles): [0.09721728 0.04142509 0.03426372 0.03054056 0.02912892 0.02823934
 0.02746296 0.02677113 0.02637389 0.02613157 0.02631584 0.02581771
 0.0254631  0.02506297 0.02542974 0.0257592 ]
------------------------------
Epoch: 159
Training loss: 0.016043470830333476 | Validation loss: 0.032022082824902286
Validation loss (ends of cycles): [0.09721728 0.04142509 0.03426372 0.03054056 0.02912892 0.02823934
 0.02746296 0.02677113 0.02637389 0.02613157 0.02631584 0.02581771
 0.0254631  0.02506297 0.02542974 0.0257592 ]
------------------------------
Epoch: 160
Training loss: 0.01610600998130988 | Validation loss: 0.024903937371383453
Validation loss (ends of cycles): [0.09721728 0.04142509 0.03426372 0.03054056 0.02912892 0.02823934
 0.02746296 0.02677113 0.02637389 0.02613157 0.02631584 0.02581771
 0.0254631  0.02506297 0.02542974 0.0257592  0.02490394]
------------------------------
Epoch: 161
Training loss: 0.015034414651062415 | Validation loss: 0.02793283073295807
Validation loss (ends of cycles): [0.09721728 0.04142509 0.03426372 0.03054056 0.02912892 0.02823934
 0.02746296 0.02677113 0.02637389 0.02613157 0.02631584 0.02581771
 0.0254631  0.02506297 0.02542974 0.0257592  0.02490394]
------------------------------
Epoch: 162
Training loss: 0.01557806123010054 | Validation loss: 0.028809203798400945
Validation loss (ends of cycles): [0.09721728 0.04142509 0.03426372 0.03054056 0.02912892 0.02823934
 0.02746296 0.02677113 0.02637389 0.02613157 0.02631584 0.02581771
 0.0254631  0.02506297 0.02542974 0.0257592  0.02490394]
------------------------------
Epoch: 163
Training loss: 0.01631182431984617 | Validation loss: 0.02934042409319302
Validation loss (ends of cycles): [0.09721728 0.04142509 0.03426372 0.03054056 0.02912892 0.02823934
 0.02746296 0.02677113 0.02637389 0.02613157 0.02631584 0.02581771
 0.0254631  0.02506297 0.02542974 0.0257592  0.02490394]
------------------------------
Epoch: 164
Training loss: 0.017696567615304064 | Validation loss: 0.04410623669110496
Validation loss (ends of cycles): [0.09721728 0.04142509 0.03426372 0.03054056 0.02912892 0.02823934
 0.02746296 0.02677113 0.02637389 0.02613157 0.02631584 0.02581771
 0.0254631  0.02506297 0.02542974 0.0257592  0.02490394]
------------------------------
Epoch: 165
Training loss: 0.0189071940410198 | Validation loss: 0.05060041310458348
Validation loss (ends of cycles): [0.09721728 0.04142509 0.03426372 0.03054056 0.02912892 0.02823934
 0.02746296 0.02677113 0.02637389 0.02613157 0.02631584 0.02581771
 0.0254631  0.02506297 0.02542974 0.0257592  0.02490394]
------------------------------
Epoch: 166
Training loss: 0.018668438469496916 | Validation loss: 0.030868354253470898
Validation loss (ends of cycles): [0.09721728 0.04142509 0.03426372 0.03054056 0.02912892 0.02823934
 0.02746296 0.02677113 0.02637389 0.02613157 0.02631584 0.02581771
 0.0254631  0.02506297 0.02542974 0.0257592  0.02490394]
------------------------------
Epoch: 167
Training loss: 0.017217437183598475 | Validation loss: 0.04276854185194805
Validation loss (ends of cycles): [0.09721728 0.04142509 0.03426372 0.03054056 0.02912892 0.02823934
 0.02746296 0.02677113 0.02637389 0.02613157 0.02631584 0.02581771
 0.0254631  0.02506297 0.02542974 0.0257592  0.02490394]
------------------------------
Epoch: 168
Training loss: 0.016072474085554785 | Validation loss: 0.037263108963339495
Validation loss (ends of cycles): [0.09721728 0.04142509 0.03426372 0.03054056 0.02912892 0.02823934
 0.02746296 0.02677113 0.02637389 0.02613157 0.02631584 0.02581771
 0.0254631  0.02506297 0.02542974 0.0257592  0.02490394]
------------------------------
Epoch: 169
Training loss: 0.015596854644922577 | Validation loss: 0.02746792192217605
Validation loss (ends of cycles): [0.09721728 0.04142509 0.03426372 0.03054056 0.02912892 0.02823934
 0.02746296 0.02677113 0.02637389 0.02613157 0.02631584 0.02581771
 0.0254631  0.02506297 0.02542974 0.0257592  0.02490394]
------------------------------
Epoch: 170
Training loss: 0.015756507012612966 | Validation loss: 0.025396741316493214
Validation loss (ends of cycles): [0.09721728 0.04142509 0.03426372 0.03054056 0.02912892 0.02823934
 0.02746296 0.02677113 0.02637389 0.02613157 0.02631584 0.02581771
 0.0254631  0.02506297 0.02542974 0.0257592  0.02490394 0.02539674]
------------------------------
Epoch: 171
Training loss: 0.01487494743220052 | Validation loss: 0.02894811505644486
Validation loss (ends of cycles): [0.09721728 0.04142509 0.03426372 0.03054056 0.02912892 0.02823934
 0.02746296 0.02677113 0.02637389 0.02613157 0.02631584 0.02581771
 0.0254631  0.02506297 0.02542974 0.0257592  0.02490394 0.02539674]
------------------------------
Epoch: 172
Training loss: 0.014975935009745311 | Validation loss: 0.034921294720522286
Validation loss (ends of cycles): [0.09721728 0.04142509 0.03426372 0.03054056 0.02912892 0.02823934
 0.02746296 0.02677113 0.02637389 0.02613157 0.02631584 0.02581771
 0.0254631  0.02506297 0.02542974 0.0257592  0.02490394 0.02539674]
------------------------------
Epoch: 173
Training loss: 0.01575764608664476 | Validation loss: 0.035113322580682824
Validation loss (ends of cycles): [0.09721728 0.04142509 0.03426372 0.03054056 0.02912892 0.02823934
 0.02746296 0.02677113 0.02637389 0.02613157 0.02631584 0.02581771
 0.0254631  0.02506297 0.02542974 0.0257592  0.02490394 0.02539674]
------------------------------
Epoch: 174
Training loss: 0.01644058152272993 | Validation loss: 0.041267819253021275
Validation loss (ends of cycles): [0.09721728 0.04142509 0.03426372 0.03054056 0.02912892 0.02823934
 0.02746296 0.02677113 0.02637389 0.02613157 0.02631584 0.02581771
 0.0254631  0.02506297 0.02542974 0.0257592  0.02490394 0.02539674]
------------------------------
Epoch: 175
Training loss: 0.018470817106794945 | Validation loss: 0.05743228978124158
Validation loss (ends of cycles): [0.09721728 0.04142509 0.03426372 0.03054056 0.02912892 0.02823934
 0.02746296 0.02677113 0.02637389 0.02613157 0.02631584 0.02581771
 0.0254631  0.02506297 0.02542974 0.0257592  0.02490394 0.02539674]
------------------------------
Epoch: 176
Training loss: 0.01725016314802425 | Validation loss: 0.028639680866537422
Validation loss (ends of cycles): [0.09721728 0.04142509 0.03426372 0.03054056 0.02912892 0.02823934
 0.02746296 0.02677113 0.02637389 0.02613157 0.02631584 0.02581771
 0.0254631  0.02506297 0.02542974 0.0257592  0.02490394 0.02539674]
------------------------------
Epoch: 177
Training loss: 0.016041831985800243 | Validation loss: 0.0426863385685559
Validation loss (ends of cycles): [0.09721728 0.04142509 0.03426372 0.03054056 0.02912892 0.02823934
 0.02746296 0.02677113 0.02637389 0.02613157 0.02631584 0.02581771
 0.0254631  0.02506297 0.02542974 0.0257592  0.02490394 0.02539674]
------------------------------
Epoch: 178
Training loss: 0.014942902400709536 | Validation loss: 0.03376636066441906
Validation loss (ends of cycles): [0.09721728 0.04142509 0.03426372 0.03054056 0.02912892 0.02823934
 0.02746296 0.02677113 0.02637389 0.02613157 0.02631584 0.02581771
 0.0254631  0.02506297 0.02542974 0.0257592  0.02490394 0.02539674]
------------------------------
Epoch: 179
Training loss: 0.015016029911990069 | Validation loss: 0.02791190057479102
Validation loss (ends of cycles): [0.09721728 0.04142509 0.03426372 0.03054056 0.02912892 0.02823934
 0.02746296 0.02677113 0.02637389 0.02613157 0.02631584 0.02581771
 0.0254631  0.02506297 0.02542974 0.0257592  0.02490394 0.02539674]
------------------------------
Epoch: 180
Training loss: 0.015328187172358133 | Validation loss: 0.024651944123465438
Validation loss (ends of cycles): [0.09721728 0.04142509 0.03426372 0.03054056 0.02912892 0.02823934
 0.02746296 0.02677113 0.02637389 0.02613157 0.02631584 0.02581771
 0.0254631  0.02506297 0.02542974 0.0257592  0.02490394 0.02539674
 0.02465194]
------------------------------
Epoch: 181 | Training loss: 0.014163121622892058 | Validation loss: 0.029626857617805744
Epoch: 182 | Training loss: 0.01449333244212428 | Validation loss: 0.02998114056114493
Epoch: 183 | Training loss: 0.015647320470259504 | Validation loss: 0.042618878442665625
Epoch: 184 | Training loss: 0.01614186215043372 | Validation loss: 0.029807858361766255
Epoch: 185 | Training loss: 0.01824572786346686 | Validation loss: 0.03824911529904809
Epoch: 186 | Training loss: 0.01688562039171859 | Validation loss: 0.03462533713800126
Epoch: 187 | Training loss: 0.015406720159689382 | Validation loss: 0.027114455973536802
Epoch: 188 | Training loss: 0.015270027453650017 | Validation loss: 0.03875390183309029
Epoch: 189 | Training loss: 0.014476752381923856 | Validation loss: 0.028192470909963394
Epoch: 190 | Training loss: 0.015035120354091026 | Validation loss: 0.024417148613981133
Validation loss (ends of cycles): [0.09721728 0.04142509 0.03426372 0.03054056 0.02912892 0.02823934
 0.02746296 0.02677113 0.02637389 0.02613157 0.02631584 0.02581771
 0.0254631  0.02506297 0.02542974 0.0257592  0.02490394 0.02539674
 0.02465194 0.02441715]
------------------------------
Epoch: 191 | Training loss: 0.01392070555641335 | Validation loss: 0.027935292764470494
Epoch: 192 | Training loss: 0.014260541837738484 | Validation loss: 0.038586160296510005
Epoch: 193 | Training loss: 0.015028002004766342 | Validation loss: 0.03351751615389668
Epoch: 194 | Training loss: 0.015989612158844056 | Validation loss: 0.06615991421557706
Epoch: 195 | Training loss: 0.017272643091119064 | Validation loss: 0.04459637872360904
Epoch: 196 | Training loss: 0.01664832424638527 | Validation loss: 0.04218823045235256
Epoch: 197 | Training loss: 0.015222868038227363 | Validation loss: 0.04500981189053634
Epoch: 198 | Training loss: 0.014712775491025983 | Validation loss: 0.03708768857578779
Epoch: 199 | Training loss: 0.014625412261835774 | Validation loss: 0.028367176780412937
Epoch: 200 | Training loss: 0.014733538981907221 | Validation loss: 0.024383184050434624
Validation loss (ends of cycles): [0.09721728 0.04142509 0.03426372 0.03054056 0.02912892 0.02823934
 0.02746296 0.02677113 0.02637389 0.02613157 0.02631584 0.02581771
 0.0254631  0.02506297 0.02542974 0.0257592  0.02490394 0.02539674
 0.02465194 0.02441715 0.02438318]
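
Each entry in the array above is the validation loss at the end of one 10-epoch learning-rate cycle (epochs 0, 10, ..., 200). A minimal sketch of how the best cycle end could be read off such an array, using only numpy; val_loss_cycle_ends is a hypothetical name holding the values printed above:

import numpy as np

# End-of-cycle validation losses copied from the log above:
# one entry per 10-epoch cycle, i.e. epochs 0, 10, ..., 200.
val_loss_cycle_ends = np.array([
    0.09721728, 0.04142509, 0.03426372, 0.03054056, 0.02912892, 0.02823934,
    0.02746296, 0.02677113, 0.02637389, 0.02613157, 0.02631584, 0.02581771,
    0.0254631,  0.02506297, 0.02542974, 0.0257592,  0.02490394, 0.02539674,
    0.02465194, 0.02441715, 0.02438318])

cycle_len = 10  # cycles end every 10 epochs in the log above
best = int(val_loss_cycle_ends.argmin())
print(f'Best cycle end: epoch {best * cycle_len} '
      f'(validation loss: {val_loss_cycle_ends[best]:.5f})')
Best cycle end: epoch 200 (validation loss: 0.02438)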
--------------------------------------------------------------------------------
Seed: 11
--------------------------------------------------------------------------------
------------------------------
Epoch: 0 | Training loss: 0.23029391500886862 | Validation loss: 0.2263513808803899
Validation loss (ends of cycles): [0.22635138]
Epoch: 1 | Training loss: 0.1298492884460261 | Validation loss: 0.07065250990646225
Epoch: 2 | Training loss: 0.06877072449258673 | Validation loss: 0.06425394796367202
Epoch: 3 | Training loss: 0.061665143847950105 | Validation loss: 0.06861826524670635
Epoch: 4 | Training loss: 0.058033256072217855 | Validation loss: 0.0625213146475809
Epoch: 5 | Training loss: 0.055559850668882936 | Validation loss: 0.08042318240872451
Epoch: 6 | Training loss: 0.0519350595560258 | Validation loss: 0.04697381053119898
Epoch: 7 | Training loss: 0.048388288273074766 | Validation loss: 0.11100427699940545
Epoch: 8 | Training loss: 0.04608837391696567 | Validation loss: 0.07308738904872111
Epoch: 9 | Training loss: 0.04391489770051425 | Validation loss: 0.10111017578414508
Epoch: 10 | Training loss: 0.0414222451229769 | Validation loss: 0.04029037490753191
Validation loss (ends of cycles): [0.22635138 0.04029037]
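
Note how the running array gains a new entry only every tenth epoch (0, 10, 20, ...), i.e. whenever a triangular learning-rate cycle closes. A minimal sketch of that bookkeeping, not mirzai's actual implementation; validate is a hypothetical stand-in for a real pass over the validation loader:

cycle_len = 10  # one triangular cycle spans 10 epochs in this run
n_epochs = 201

def validate(epoch):
    # Hypothetical stand-in for computing the mean validation loss.
    return 1.0 / (epoch + 1)

val_losses_cycle_ends = []
for epoch in range(n_epochs):
    val_loss = validate(epoch)
    if epoch % cycle_len == 0:  # a cycle just ended: record its loss
        val_losses_cycle_ends.append(val_loss)

len(val_losses_cycle_ends)  # -> 21 entries, for epochs 0, 10, ..., 200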
------------------------------
Epoch: 11 | Training loss: 0.04222617237412227 | Validation loss: 0.05305742405887161
Epoch: 12 | Training loss: 0.042907760452055105 | Validation loss: 0.04166700045711228
Epoch: 13 | Training loss: 0.043801927363606004 | Validation loss: 0.11412931340081352
Epoch: 14 | Training loss: 0.04472829210685521 | Validation loss: 0.05646906566939184
Epoch: 15 | Training loss: 0.046035228539409674 | Validation loss: 0.04421177878975868
Epoch: 16 | Training loss: 0.043207717533185475 | Validation loss: 0.042158282095832486
Epoch: 17 | Training loss: 0.040840370588125736 | Validation loss: 0.038402439294649024
Epoch: 18 | Training loss: 0.03911800839279483 | Validation loss: 0.042062657086976936
Epoch: 19 | Training loss: 0.03648519006792486 | Validation loss: 0.03675581481573837
Epoch: 20 | Training loss: 0.034782703212515365 | Validation loss: 0.03432650278721537
Validation loss (ends of cycles): [0.22635138 0.04029037 0.0343265 ]
------------------------------
Epoch: 21 | Training loss: 0.035905772971549656 | Validation loss: 0.03975177935457656
Epoch: 22 | Training loss: 0.037213405435223404 | Validation loss: 0.03562755955915366
Epoch: 23 | Training loss: 0.03865513603043992 | Validation loss: 0.11057296767830849
Epoch: 24 | Training loss: 0.03931918034587449 | Validation loss: 0.0369640770368278
Epoch: 25 | Training loss: 0.039821294779942285 | Validation loss: 0.045848207348691564
Epoch: 26 | Training loss: 0.038668711453948804 | Validation loss: 0.042994737558599026
Epoch: 27 | Training loss: 0.03670025159551845 | Validation loss: 0.04410590357812388
Epoch: 28 | Training loss: 0.03455162040389529 | Validation loss: 0.034312766577516286
Epoch: 29 | Training loss: 0.032675492149452125 | Validation loss: 0.03203583408945373
Epoch: 30 | Training loss: 0.03106907684719417 | Validation loss: 0.03267500549554825
Validation loss (ends of cycles): [0.22635138 0.04029037 0.0343265  0.03267501]
------------------------------
Epoch: 31 | Training loss: 0.03156270837502145 | Validation loss: 0.03273458672421319
Epoch: 32 | Training loss: 0.03294173397532687 | Validation loss: 0.03579591427530561
Epoch: 33 | Training loss: 0.03416234837481525 | Validation loss: 0.03482184831851295
Epoch: 34 | Training loss: 0.03490711622909317 | Validation loss: 0.050317411577062945
Epoch: 35 | Training loss: 0.03727862610655829 | Validation loss: 0.03666511264496616
Epoch: 36 | Training loss: 0.03482806728774212 | Validation loss: 0.04250362701714039
Epoch: 37 | Training loss: 0.0331079214924901 | Validation loss: 0.036069130791085105
Epoch: 38 | Training loss: 0.031223616937584266 | Validation loss: 0.03177707828581333
Epoch: 39 | Training loss: 0.029834562658930454 | Validation loss: 0.03063737727435572
Epoch: 40 | Training loss: 0.027999205529932084 | Validation loss: 0.030917407213045017
Validation loss (ends of cycles): [0.22635138 0.04029037 0.0343265  0.03267501 0.03091741]
------------------------------
Epoch: 41 | Training loss: 0.028703854284892843 | Validation loss: 0.029636949034673826
Epoch: 42 | Training loss: 0.029869006272799117 | Validation loss: 0.033834958083129356
Epoch: 43 | Training loss: 0.031108734364492623 | Validation loss: 0.03308493041965578
Epoch: 44 | Training loss: 0.03213380325615891 | Validation loss: 0.03766348745141711
Epoch: 45 | Training loss: 0.03385435201121661 | Validation loss: 0.06962902418204717
Epoch: 46 | Training loss: 0.032608688203239346 | Validation loss: 0.03770013412992869
Epoch: 47 | Training loss: 0.03003532220268758 | Validation loss: 0.03331893762307508
Epoch: 48 | Training loss: 0.029064043122154427 | Validation loss: 0.030430115326972946
Epoch: 49 | Training loss: 0.027540825190007445 | Validation loss: 0.03614342541966055
Epoch: 50 | Training loss: 0.02594501869109406 | Validation loss: 0.02946952005316104
Validation loss (ends of cycles): [0.22635138 0.04029037 0.0343265  0.03267501 0.03091741 0.02946952]
------------------------------
Epoch: 51 | Training loss: 0.026418998663321258 | Validation loss: 0.02940933160217745
Epoch: 52 | Training loss: 0.027487545262840463 | Validation loss: 0.031994336284697056
Epoch: 53 | Training loss: 0.02819501824025822 | Validation loss: 0.03246153142702367
Epoch: 54 | Training loss: 0.029957246288990345 | Validation loss: 0.04025745957291552
Epoch: 55 | Training loss: 0.03173960026020441 | Validation loss: 0.036120299715548754
Epoch: 56 | Training loss: 0.029832631266274588 | Validation loss: 0.038290795271417925
Epoch: 57 | Training loss: 0.02817412253991678 | Validation loss: 0.03558004015524473
Epoch: 58 | Training loss: 0.026782008531556382 | Validation loss: 0.03356315528175661
Epoch: 59 | Training loss: 0.0255094195689582 | Validation loss: 0.035678128977971416
Epoch: 60 | Training loss: 0.024149992295003277 | Validation loss: 0.029395997058600187
Validation loss (ends of cycles): [0.22635138 0.04029037 0.0343265  0.03267501 0.03091741 0.02946952
 0.029396  ]
------------------------------
Epoch: 61 | Training loss: 0.024749841983997968 | Validation loss: 0.033817525553916185
Epoch: 62 | Training loss: 0.025393155030527612 | Validation loss: 0.030692029611340592
Epoch: 63 | Training loss: 0.02652351208016034 | Validation loss: 0.03221092772270952
Epoch: 64 | Training loss: 0.0284297236192184 | Validation loss: 0.05041489251224058
Epoch: 65 | Training loss: 0.029818793497525337 | Validation loss: 0.05839175677725247
Epoch: 66 | Training loss: 0.0283982064877826 | Validation loss: 0.035961875425917764
Epoch: 67 | Training loss: 0.026623119829161986 | Validation loss: 0.031139185119952475
Epoch: 68 | Training loss: 0.025278138358342816 | Validation loss: 0.028934034053236246
Epoch: 69 | Training loss: 0.024122276865854497 | Validation loss: 0.03136570605316332
Epoch: 70 | Training loss: 0.02287417855227321 | Validation loss: 0.030645103260342563
Validation loss (ends of cycles): [0.22635138 0.04029037 0.0343265  0.03267501 0.03091741 0.02946952
 0.029396   0.0306451 ]
------------------------------
Epoch: 71 | Training loss: 0.023657108806603686 | Validation loss: 0.03877665574795434
Epoch: 72 | Training loss: 0.02401751356305388 | Validation loss: 0.03245842074310141
Epoch: 73 | Training loss: 0.0250010577000193 | Validation loss: 0.031518987978675535
Epoch: 74 | Training loss: 0.0268116102750769 | Validation loss: 0.03552219265007547
Epoch: 75 | Training loss: 0.0279629380249123 | Validation loss: 0.030541598929890564
Epoch: 76 | Training loss: 0.026884139392418953 | Validation loss: 0.040323719648378234
Epoch: 77 | Training loss: 0.025402794497435897 | Validation loss: 0.030501875061807886
Epoch: 78 | Training loss: 0.024031604201025594 | Validation loss: 0.029525416969720806
Epoch: 79 | Training loss: 0.02290574425634572 | Validation loss: 0.03327698606465544
Epoch: 80 | Training loss: 0.02188899334921403 | Validation loss: 0.02941775681184871
Validation loss (ends of cycles): [0.22635138 0.04029037 0.0343265  0.03267501 0.03091741 0.02946952
 0.029396   0.0306451  0.02941776]
------------------------------
Epoch: 81 | Training loss: 0.022578422642668815 | Validation loss: 0.03252738980310304
Epoch: 82 | Training loss: 0.02271142152395493 | Validation loss: 0.029476982774212956
Epoch: 83 | Training loss: 0.02377902618722945 | Validation loss: 0.028569937317765186
Epoch: 84 | Training loss: 0.025357732032539278 | Validation loss: 0.029860973424677337
Epoch: 85 | Training loss: 0.02673468557262142 | Validation loss: 0.030372924005080546
Epoch: 86 | Training loss: 0.025845129039077982 | Validation loss: 0.030393184827906743
Epoch: 87 | Training loss: 0.024658304374150144 | Validation loss: 0.03845536522567272
Epoch: 88 | Training loss: 0.023075178920526088 | Validation loss: 0.04256981957171645
Epoch: 89 | Training loss: 0.022049285026600328 | Validation loss: 0.02848286697241877
Epoch: 90 | Training loss: 0.021476434740593763 | Validation loss: 0.027760279697499106
Validation loss (ends of cycles): [0.22635138 0.04029037 0.0343265  0.03267501 0.03091741 0.02946952
 0.029396   0.0306451  0.02941776 0.02776028]
------------------------------
Epoch: 91 | Training loss: 0.021816314943891957 | Validation loss: 0.03098618079509054
Epoch: 92 | Training loss: 0.022551793093997533 | Validation loss: 0.028238401216055666
Epoch: 93 | Training loss: 0.02308060474527197 | Validation loss: 0.058629569464496205
Epoch: 94 | Training loss: 0.02402396142725053 | Validation loss: 0.03021541171308075
Epoch: 95 | Training loss: 0.025654969449208035 | Validation loss: 0.031186954717018774
Epoch: 96 | Training loss: 0.024714468421823368 | Validation loss: 0.03125418509755816
Epoch: 97 | Training loss: 0.023940351401735855 | Validation loss: 0.03264554476897631
Epoch: 98 | Training loss: 0.022440335754971435 | Validation loss: 0.028461951529607177
Epoch: 99 | Training loss: 0.021251822916442543 | Validation loss: 0.02842777268961072
Epoch: 100 | Training loss: 0.020347900972588033 | Validation loss: 0.028554076927581003
Validation loss (ends of cycles): [0.22635138 0.04029037 0.0343265  0.03267501 0.03091741 0.02946952
 0.029396   0.0306451  0.02941776 0.02776028 0.02855408]
------------------------------
Epoch: 101 | Training loss: 0.020555462850999785 | Validation loss: 0.02733869418235762
Epoch: 102 | Training loss: 0.021171740918776126 | Validation loss: 0.029619899511869465
Epoch: 103 | Training loss: 0.02220173811585438 | Validation loss: 0.0321574957509126
Epoch: 104 | Training loss: 0.023414065705506297 | Validation loss: 0.044605502991804054
Epoch: 105 | Training loss: 0.024443574798694714 | Validation loss: 0.07190789042838983
Epoch: 106 | Training loss: 0.023700900576493846 | Validation loss: 0.03251718570079122
Epoch: 107 | Training loss: 0.02247146178960679 | Validation loss: 0.0362437991425395
Epoch: 108 | Training loss: 0.022056101961439947 | Validation loss: 0.0278464819171599
Epoch: 109 | Training loss: 0.020700616911583678 | Validation loss: 0.028671946642654284
Epoch: 110 | Training loss: 0.01958366129282347 | Validation loss: 0.027183285082823465
Validation loss (ends of cycles): [0.22635138 0.04029037 0.0343265  0.03267501 0.03091741 0.02946952
 0.029396   0.0306451  0.02941776 0.02776028 0.02855408 0.02718329]
------------------------------
Epoch: 111 | Training loss: 0.019975665894268854 | Validation loss: 0.028586781550464884
Epoch: 112 | Training loss: 0.020705172235555038 | Validation loss: 0.03128787662301745
Epoch: 113 | Training loss: 0.02107667978436542 | Validation loss: 0.030051063679690872
Epoch: 114 | Training loss: 0.022610661867112528 | Validation loss: 0.04852165055594274
Epoch: 115 | Training loss: 0.023561506458308276 | Validation loss: 0.06899371064667191
Epoch: 116 | Training loss: 0.0233066179448875 | Validation loss: 0.04566473427361676
Epoch: 117 | Training loss: 0.021634616137565513 | Validation loss: 0.04841635775353227
Epoch: 118 | Training loss: 0.020792985552679596 | Validation loss: 0.03520082090316074
Epoch: 119 | Training loss: 0.019773167533406275 | Validation loss: 0.03094479950544025
Epoch: 120 | Training loss: 0.01904741757950647 | Validation loss: 0.026810297376609275
Validation loss (ends of cycles): [0.22635138 0.04029037 0.0343265  0.03267501 0.03091741 0.02946952
 0.029396   0.0306451  0.02941776 0.02776028 0.02855408 0.02718329
 0.0268103 ]
------------------------------
Epoch: 121 | Training loss: 0.019134292223801214 | Validation loss: 0.02902802246223603
Epoch: 122 | Training loss: 0.02028068473820037 | Validation loss: 0.03634647925251296
Epoch: 123 | Training loss: 0.021072582406090286 | Validation loss: 0.040620867628604174
Epoch: 124 | Training loss: 0.022031776501032396 | Validation loss: 0.06249447592667171
Epoch: 125 | Training loss: 0.023192723667839678 | Validation loss: 0.03574155330924051
Epoch: 126 | Training loss: 0.02227437675635262 | Validation loss: 0.07030766138008662
Epoch: 127 | Training loss: 0.020985245057268113 | Validation loss: 0.03414160332509449
Epoch: 128 | Training loss: 0.019905526866772917 | Validation loss: 0.033079460595867465
Epoch: 129 | Training loss: 0.01939150384756002 | Validation loss: 0.029185153178072402
Epoch: 130 | Training loss: 0.01826417470366005 | Validation loss: 0.026967533764296343
Validation loss (ends of cycles): [0.22635138 0.04029037 0.0343265  0.03267501 0.03091741 0.02946952
 0.029396   0.0306451  0.02941776 0.02776028 0.02855408 0.02718329
 0.0268103  0.02696753]
------------------------------
Epoch: 131 | Training loss: 0.018468463992154818 | Validation loss: 0.031554549100941846
Epoch: 132 | Training loss: 0.019273764086599515 | Validation loss: 0.033024010847189596
Epoch: 133 | Training loss: 0.019958532049994523 | Validation loss: 0.030514458859605447
Epoch: 134 | Training loss: 0.02119076339626397 | Validation loss: 0.03831826276811106
Epoch: 135 | Training loss: 0.02249043524583302 | Validation loss: 0.07168984226882458
Epoch: 136 | Training loss: 0.021602770646776612 | Validation loss: 0.03142295591533184
Epoch: 137 | Training loss: 0.02026850242182855 | Validation loss: 0.035348252459828346
Epoch: 138 | Training loss: 0.01939214747474809 | Validation loss: 0.030120848145868098
Epoch: 139 | Training loss: 0.018289907337582813 | Validation loss: 0.029019300393494114
Epoch: 140 | Training loss: 0.01761306544038944 | Validation loss: 0.026833688773747002
Validation loss (ends of cycles): [0.22635138 0.04029037 0.0343265  0.03267501 0.03091741 0.02946952
 0.029396   0.0306451  0.02941776 0.02776028 0.02855408 0.02718329
 0.0268103  0.02696753 0.02683369]
------------------------------
Epoch: 141 | Training loss: 0.017443752841933106 | Validation loss: 0.028782523203907267
Epoch: 142 | Training loss: 0.018654791837012987 | Validation loss: 0.028098878982876028
Epoch: 143 | Training loss: 0.019123788587976157 | Validation loss: 0.03758659366784351
Epoch: 144 | Training loss: 0.020131324921409045 | Validation loss: 0.029096519175384725
Epoch: 145 | Training loss: 0.02155784703980435 | Validation loss: 0.03155151415350182
Epoch: 146 | Training loss: 0.020959843873856514 | Validation loss: 0.058786045626870224
Epoch: 147 | Training loss: 0.019591272861613492 | Validation loss: 0.03170138590836099
Epoch: 148 | Training loss: 0.01901846544673227 | Validation loss: 0.027040495770052075
Epoch: 149 | Training loss: 0.017848043925908764 | Validation loss: 0.030091944117365137
Epoch: 150 | Training loss: 0.017326634074372006 | Validation loss: 0.026434293615498712
Validation loss (ends of cycles): [0.22635138 0.04029037 0.0343265  0.03267501 0.03091741 0.02946952
 0.029396   0.0306451  0.02941776 0.02776028 0.02855408 0.02718329
 0.0268103  0.02696753 0.02683369 0.02643429]
------------------------------
Epoch: 151 | Training loss: 0.017380717077966387 | Validation loss: 0.029182153953505412
Epoch: 152 | Training loss: 0.017231200886057403 | Validation loss: 0.030624900878007923
Epoch: 153 | Training loss: 0.018560519161808297 | Validation loss: 0.03458159559938524
Epoch: 154 | Training loss: 0.019787704303285213 | Validation loss: 0.05616758417870317
Epoch: 155 | Training loss: 0.020979577028079004 | Validation loss: 0.06432282871433667
Epoch: 156 | Training loss: 0.01990094185973389 | Validation loss: 0.028011672664433718
Epoch: 157 | Training loss: 0.019052211199016348 | Validation loss: 0.03703709126317075
Epoch: 158 | Training loss: 0.017911518242482733 | Validation loss: 0.03025994263589382
Epoch: 159 | Training loss: 0.017369525834569723 | Validation loss: 0.02964053342917136
Epoch: 160 | Training loss: 0.016482023953846316 | Validation loss: 0.025826098497158716
Validation loss (ends of cycles): [0.22635138 0.04029037 0.0343265  0.03267501 0.03091741 0.02946952
 0.029396   0.0306451  0.02941776 0.02776028 0.02855408 0.02718329
 0.0268103  0.02696753 0.02683369 0.02643429 0.0258261 ]
------------------------------
Epoch: 161 | Training loss: 0.01659158083261937 | Validation loss: 0.02873330456869943
Epoch: 162 | Training loss: 0.01691494110453026 | Validation loss: 0.027677631338260004
Epoch: 163 | Training loss: 0.017991937094981352 | Validation loss: 0.04074185960260885
Epoch: 164 | Training loss: 0.018723719999406155 | Validation loss: 0.030231072633926357
Epoch: 165 | Training loss: 0.020276146350292172 | Validation loss: 0.043174420217318196
Epoch: 166 | Training loss: 0.019531042235562714 | Validation loss: 0.03581643250903913
Epoch: 167 | Training loss: 0.018447091965746832 | Validation loss: 0.044610560472522466
Epoch: 168 | Training loss: 0.017366024497063544 | Validation loss: 0.028554132840197
Epoch: 169 | Training loss: 0.016597856987055723 | Validation loss: 0.03181937616318464
Epoch: 170 | Training loss: 0.0163807039247538 | Validation loss: 0.02585350861772895
Validation loss (ends of cycles): [0.22635138 0.04029037 0.0343265  0.03267501 0.03091741 0.02946952
 0.029396   0.0306451  0.02941776 0.02776028 0.02855408 0.02718329
 0.0268103  0.02696753 0.02683369 0.02643429 0.0258261  0.02585351]
------------------------------
Epoch: 171 | Training loss: 0.01622825662597893 | Validation loss: 0.03245220848891352
Epoch: 172 | Training loss: 0.01662803715004063 | Validation loss: 0.02856071732406105
Epoch: 173 | Training loss: 0.01692930633595925 | Validation loss: 0.030707208572753837
Epoch: 174 | Training loss: 0.018463662787666167 | Validation loss: 0.03464765181498868
Epoch: 175 | Training loss: 0.019447343614202264 | Validation loss: 0.03708033948870642
Epoch: 176 | Training loss: 0.018903135850162404 | Validation loss: 0.03530201914587191
Epoch: 177 | Training loss: 0.01786497042786966 | Validation loss: 0.03155623323151043
Epoch: 178 | Training loss: 0.016593552877884207 | Validation loss: 0.028088790896747793
Epoch: 179 | Training loss: 0.015996790239284558 | Validation loss: 0.030268458050808737
Epoch: 180 | Training loss: 0.015754348903182683 | Validation loss: 0.026033666616837894
Validation loss (ends of cycles): [0.22635138 0.04029037 0.0343265  0.03267501 0.03091741 0.02946952
 0.029396   0.0306451  0.02941776 0.02776028 0.02855408 0.02718329
 0.0268103  0.02696753 0.02683369 0.02643429 0.0258261  0.02585351
 0.02603367]
------------------------------
Epoch: 181 | Training loss: 0.015584597916804194 | Validation loss: 0.029790076526946256
Epoch: 182 | Training loss: 0.016471456264986133 | Validation loss: 0.028614665381610394
Epoch: 183 | Training loss: 0.01635930025987933 | Validation loss: 0.028159564627068385
Epoch: 184 | Training loss: 0.01770466239933621 | Validation loss: 0.030318220911015357
Epoch: 185 | Training loss: 0.019188743718801353 | Validation loss: 0.028221322023975
Epoch: 186 | Training loss: 0.018213050498634697 | Validation loss: 0.03194981573947838
Epoch: 187 | Training loss: 0.017296552865357116 | Validation loss: 0.040214699027793746
Epoch: 188 | Training loss: 0.01603712801957821 | Validation loss: 0.03133211996672409
Epoch: 189 | Training loss: 0.015969472931607102 | Validation loss: 0.029272598114662936
Epoch: 190 | Training loss: 0.015087590197941697 | Validation loss: 0.024819848526801382
Validation loss (ends of cycles): [0.22635138 0.04029037 0.0343265  0.03267501 0.03091741 0.02946952
 0.029396   0.0306451  0.02941776 0.02776028 0.02855408 0.02718329
 0.0268103  0.02696753 0.02683369 0.02643429 0.0258261  0.02585351
 0.02603367 0.02481985]
------------------------------
Epoch: 191
Training loss: 0.015174830080638451 | Validation loss: 0.03065409930422902
Validation loss (ends of cycles): [0.22635138 0.04029037 0.0343265  0.03267501 0.03091741 0.02946952
 0.029396   0.0306451  0.02941776 0.02776028 0.02855408 0.02718329
 0.0268103  0.02696753 0.02683369 0.02643429 0.0258261  0.02585351
 0.02603367 0.02481985]
------------------------------
Epoch: 192
Training loss: 0.015296503244634203 | Validation loss: 0.031787786699299304
Validation loss (ends of cycles): [0.22635138 0.04029037 0.0343265  0.03267501 0.03091741 0.02946952
 0.029396   0.0306451  0.02941776 0.02776028 0.02855408 0.02718329
 0.0268103  0.02696753 0.02683369 0.02643429 0.0258261  0.02585351
 0.02603367 0.02481985]
------------------------------
Epoch: 193
Training loss: 0.015783516960824286 | Validation loss: 0.03863530486289944
Validation loss (ends of cycles): [0.22635138 0.04029037 0.0343265  0.03267501 0.03091741 0.02946952
 0.029396   0.0306451  0.02941776 0.02776028 0.02855408 0.02718329
 0.0268103  0.02696753 0.02683369 0.02643429 0.0258261  0.02585351
 0.02603367 0.02481985]
------------------------------
Epoch: 194
Training loss: 0.017338460010317403 | Validation loss: 0.027448586221518263
Validation loss (ends of cycles): [0.22635138 0.04029037 0.0343265  0.03267501 0.03091741 0.02946952
 0.029396   0.0306451  0.02941776 0.02776028 0.02855408 0.02718329
 0.0268103  0.02696753 0.02683369 0.02643429 0.0258261  0.02585351
 0.02603367 0.02481985]
------------------------------
Epoch: 195
Training loss: 0.018564120671386276 | Validation loss: 0.032875948186431615
Validation loss (ends of cycles): [0.22635138 0.04029037 0.0343265  0.03267501 0.03091741 0.02946952
 0.029396   0.0306451  0.02941776 0.02776028 0.02855408 0.02718329
 0.0268103  0.02696753 0.02683369 0.02643429 0.0258261  0.02585351
 0.02603367 0.02481985]
------------------------------
Epoch: 196
Training loss: 0.0181109133248437 | Validation loss: 0.03741413594356605
Validation loss (ends of cycles): [0.22635138 0.04029037 0.0343265  0.03267501 0.03091741 0.02946952
 0.029396   0.0306451  0.02941776 0.02776028 0.02855408 0.02718329
 0.0268103  0.02696753 0.02683369 0.02643429 0.0258261  0.02585351
 0.02603367 0.02481985]
------------------------------
Epoch: 197
Training loss: 0.016321961830438273 | Validation loss: 0.03071831968346877
Validation loss (ends of cycles): [0.22635138 0.04029037 0.0343265  0.03267501 0.03091741 0.02946952
 0.029396   0.0306451  0.02941776 0.02776028 0.02855408 0.02718329
 0.0268103  0.02696753 0.02683369 0.02643429 0.0258261  0.02585351
 0.02603367 0.02481985]
------------------------------
Epoch: 198
Training loss: 0.015929741894720288 | Validation loss: 0.0271761506529791
Validation loss (ends of cycles): [0.22635138 0.04029037 0.0343265  0.03267501 0.03091741 0.02946952
 0.029396   0.0306451  0.02941776 0.02776028 0.02855408 0.02718329
 0.0268103  0.02696753 0.02683369 0.02643429 0.0258261  0.02585351
 0.02603367 0.02481985]
------------------------------
Epoch: 199
Training loss: 0.015383159098162399 | Validation loss: 0.026942191678764566
Validation loss (ends of cycles): [0.22635138 0.04029037 0.0343265  0.03267501 0.03091741 0.02946952
 0.029396   0.0306451  0.02941776 0.02776028 0.02855408 0.02718329
 0.0268103  0.02696753 0.02683369 0.02643429 0.0258261  0.02585351
 0.02603367 0.02481985]
------------------------------
Epoch: 200
Training loss: 0.014515927291429805 | Validation loss: 0.024746225814202
Validation loss (ends of cycles): [0.22635138 0.04029037 0.0343265  0.03267501 0.03091741 0.02946952
 0.029396   0.0306451  0.02941776 0.02776028 0.02855408 0.02718329
 0.0268103  0.02696753 0.02683369 0.02643429 0.0258261  0.02585351
 0.02603367 0.02481985 0.02474623]
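A note on the bookkeeping in these logs: with `step_size_up=5` and `step_size_down` left at its default (equal to `step_size_up`), one triangular `CyclicLR` cycle spans 10 scheduler steps, and the ten-epoch spacing of new entries in the "ends of cycles" array implies the scheduler is stepped once per epoch. The validation loss is snapshotted at epoch 0 and whenever a cycle closes, so a 201-epoch run accumulates 21 entries, as above. Below is a minimal sketch of that pattern; the toy linear model, dummy batch, and placeholder validation loss stand in for the real CNN and data loaders, so treat it as illustrative rather than as the `Learner` internals:

import numpy as np
import torch
from torch.nn import Linear
from torch.optim import Adam
from torch.optim.lr_scheduler import CyclicLR

model = Linear(10, 1)  # toy stand-in for the spectral CNN
optimizer = Adam(model.parameters(), lr=3e-5)
scheduler = CyclicLR(optimizer, base_lr=3e-5, max_lr=1e-3,
                     step_size_up=5, mode='triangular',
                     cycle_momentum=False)  # required with Adam (no momentum param)

cycle_len = 2 * 5  # step_size_up + step_size_down (defaults to step_size_up)
cycle_end_losses = []

for epoch in range(201):
    # Dummy training step; a pass over the training DataLoader would go here
    optimizer.zero_grad()
    loss = model(torch.randn(8, 10)).pow(2).mean()
    loss.backward()
    optimizer.step()
    scheduler.step()  # one scheduler step per epoch
    val_loss = float(np.exp(-epoch / 50))  # placeholder for the real validation MSE
    if epoch % cycle_len == 0:  # epoch 0, 10, 20, ... as in the logs above
        cycle_end_losses.append(val_loss)
        print(f'Validation loss (ends of cycles): {np.array(cycle_end_losses)}')

Within a cycle the validation loss swings with the learning rate (compare, e.g., epochs 190-199 above), which is presumably why the cumulative cycle-end series, rather than the raw per-epoch value, is what the log accumulates.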
--------------------------------------------------------------------------------
Seed: 12
--------------------------------------------------------------------------------
------------------------------
Epoch: 0
Training loss: 0.2637900622629444 | Validation loss: 0.2553698234260082
Validation loss (ends of cycles): [0.25536982]
------------------------------
Epoch: 1
Training loss: 0.16978094037546804 | Validation loss: 0.08061320481023618
Validation loss (ends of cycles): [0.25536982]
------------------------------
Epoch: 2
Training loss: 0.07621631738145342 | Validation loss: 0.0826724971245442
Validation loss (ends of cycles): [0.25536982]
------------------------------
Epoch: 3
Training loss: 0.061927354770150746 | Validation loss: 0.15350222560976232
Validation loss (ends of cycles): [0.25536982]
------------------------------
Epoch: 4
Training loss: 0.05844452919114215 | Validation loss: 0.18728351167270116
Validation loss (ends of cycles): [0.25536982]
------------------------------
Epoch: 5
Training loss: 0.055577036785150344 | Validation loss: 0.061056972414787324
Validation loss (ends of cycles): [0.25536982]
------------------------------
Epoch: 6
Training loss: 0.05109959298948407 | Validation loss: 0.05445564238886748
Validation loss (ends of cycles): [0.25536982]
------------------------------
Epoch: 7
Training loss: 0.04711966514059527 | Validation loss: 0.05329546033005629
Validation loss (ends of cycles): [0.25536982]
------------------------------
Epoch: 8
Training loss: 0.044755841250934826 | Validation loss: 0.049413079262844155
Validation loss (ends of cycles): [0.25536982]
------------------------------
Epoch: 9
Training loss: 0.04260337657412054 | Validation loss: 0.04562658929665174
Validation loss (ends of cycles): [0.25536982]
------------------------------
Epoch: 10
Training loss: 0.040551968409042606 | Validation loss: 0.042534173532788246
Validation loss (ends of cycles): [0.25536982 0.04253417]
------------------------------
Epoch: 11
Training loss: 0.04085618404196462 | Validation loss: 0.042740331896181614
Validation loss (ends of cycles): [0.25536982 0.04253417]
------------------------------
Epoch: 12
Training loss: 0.04147623869616855 | Validation loss: 0.04740859594728265
Validation loss (ends of cycles): [0.25536982 0.04253417]
------------------------------
Epoch: 13
Training loss: 0.04226049646614534 | Validation loss: 0.06226673981707011
Validation loss (ends of cycles): [0.25536982 0.04253417]
------------------------------
Epoch: 14
Training loss: 0.04316315458340925 | Validation loss: 0.0443901874324573
Validation loss (ends of cycles): [0.25536982 0.04253417]
------------------------------
Epoch: 15
Training loss: 0.04279391495850284 | Validation loss: 0.060327282308467796
Validation loss (ends of cycles): [0.25536982 0.04253417]
------------------------------
Epoch: 16
Training loss: 0.040710862269738184 | Validation loss: 0.05306545631693942
Validation loss (ends of cycles): [0.25536982 0.04253417]
------------------------------
Epoch: 17
Training loss: 0.0389511743522728 | Validation loss: 0.03878095995501748
Validation loss (ends of cycles): [0.25536982 0.04253417]
------------------------------
Epoch: 18
Training loss: 0.03631727450606432 | Validation loss: 0.03926701827107796
Validation loss (ends of cycles): [0.25536982 0.04253417]
------------------------------
Epoch: 19
Training loss: 0.03461150976599228 | Validation loss: 0.035595839111400504
Validation loss (ends of cycles): [0.25536982 0.04253417]
------------------------------
Epoch: 20
Training loss: 0.03270029825976624 | Validation loss: 0.034391016932204366
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102]
------------------------------
Epoch: 21
Training loss: 0.032733950070162054 | Validation loss: 0.033897070114367774
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102]
------------------------------
Epoch: 22
Training loss: 0.03373902814409994 | Validation loss: 0.03615709903117802
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102]
------------------------------
Epoch: 23
Training loss: 0.035701153863990594 | Validation loss: 0.04238018014335206
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102]
------------------------------
Epoch: 24
Training loss: 0.03666398287769633 | Validation loss: 0.053025079325639775
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102]
------------------------------
Epoch: 25
Training loss: 0.03761743522637528 | Validation loss: 0.040236693840207796
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102]
------------------------------
Epoch: 26
Training loss: 0.03550851068681913 | Validation loss: 0.03966777375899255
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102]
------------------------------
Epoch: 27
Training loss: 0.03362350801179404 | Validation loss: 0.03458143038941281
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102]
------------------------------
Epoch: 28
Training loss: 0.03134352446492264 | Validation loss: 0.03413207765801677
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102]
------------------------------
Epoch: 29
Training loss: 0.03009785374679305 | Validation loss: 0.039865221961268356
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102]
------------------------------
Epoch: 30
Training loss: 0.02849840317373937 | Validation loss: 0.031089227979204485
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923]
------------------------------
Epoch: 31
Training loss: 0.02911056212794322 | Validation loss: 0.03221330076589116
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923]
------------------------------
Epoch: 32
Training loss: 0.030087379848034033 | Validation loss: 0.03465819009579718
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923]
------------------------------
Epoch: 33
Training loss: 0.03126076528960876 | Validation loss: 0.034185842610895634
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923]
------------------------------
Epoch: 34
Training loss: 0.03246060802190289 | Validation loss: 0.03449801728129387
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923]
------------------------------
Epoch: 35
Training loss: 0.0336248841860637 | Validation loss: 0.053725846244820526
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923]
------------------------------
Epoch: 36
Training loss: 0.031415612436831 | Validation loss: 0.037223817381475656
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923]
------------------------------
Epoch: 37
Training loss: 0.030353294953191088 | Validation loss: 0.04098063687394772
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923]
------------------------------
Epoch: 38
Training loss: 0.028800708188973217 | Validation loss: 0.052007128085408895
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923]
------------------------------
Epoch: 39
Training loss: 0.02750550863532885 | Validation loss: 0.03739549871534109
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923]
------------------------------
Epoch: 40
Training loss: 0.02563297042721196 | Validation loss: 0.02868142564381872
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143]
------------------------------
Epoch: 41
Training loss: 0.02616311686852502 | Validation loss: 0.03076405274415655
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143]
------------------------------
Epoch: 42
Training loss: 0.027380637066657484 | Validation loss: 0.03508420488131898
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143]
------------------------------
Epoch: 43
Training loss: 0.02831252516711108 | Validation loss: 0.04598719187613044
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143]
------------------------------
Epoch: 44
Training loss: 0.029504929771005866 | Validation loss: 0.03175034220995648
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143]
------------------------------
Epoch: 45
Training loss: 0.03159959042663516 | Validation loss: 0.07434647623449564
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143]
------------------------------
Epoch: 46
Training loss: 0.030014191938918612 | Validation loss: 0.033090820303186774
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143]
------------------------------
Epoch: 47
Training loss: 0.02731679374284228 | Validation loss: 0.04520116526899593
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143]
------------------------------
Epoch: 48
Training loss: 0.0262628064155277 | Validation loss: 0.034826266346499324
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143]
------------------------------
Epoch: 49
Training loss: 0.024662429327333747 | Validation loss: 0.036349371408245394
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143]
------------------------------
Epoch: 50
Training loss: 0.02363490600983503 | Validation loss: 0.02791736850381962
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737]
------------------------------
Epoch: 51
Training loss: 0.023922254276737148 | Validation loss: 0.03908925157572542
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737]
------------------------------
Epoch: 52
Training loss: 0.025039831698619522 | Validation loss: 0.038353501952120235
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737]
------------------------------
Epoch: 53
Training loss: 0.0258442997924893 | Validation loss: 0.039914444155458896
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737]
------------------------------
Epoch: 54
Training loss: 0.027072010339362178 | Validation loss: 0.058511382301471064
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737]
------------------------------
Epoch: 55
Training loss: 0.028190431893927607 | Validation loss: 0.04542659169861248
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737]
------------------------------
Epoch: 56
Training loss: 0.027136848568313034 | Validation loss: 0.035249374906665513
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737]
------------------------------
Epoch: 57
Training loss: 0.026047875482573923 | Validation loss: 0.04109857104984777
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737]
------------------------------
Epoch: 58
Training loss: 0.02399788330863362 | Validation loss: 0.038921072048002055
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737]
------------------------------
Epoch: 59
Training loss: 0.022918796129072244 | Validation loss: 0.03578687781867172
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737]
------------------------------
Epoch: 60
Training loss: 0.02215754752413102 | Validation loss: 0.027188095430444394
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881 ]
------------------------------
Epoch: 61
Training loss: 0.022274650759648094 | Validation loss: 0.03344160255177745
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881 ]
------------------------------
Epoch: 62
Training loss: 0.02273730670482765 | Validation loss: 0.040124644458826096
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881 ]
------------------------------
Epoch: 63
Training loss: 0.02412148073375949 | Validation loss: 0.049811005658869235
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881 ]
------------------------------
Epoch: 64
Training loss: 0.025627500584974944 | Validation loss: 0.040126007582460134
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881 ]
------------------------------
Epoch: 65
Training loss: 0.027548986309875362 | Validation loss: 0.03670580784923264
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881 ]
------------------------------
Epoch: 66
Training loss: 0.025487642145153845 | Validation loss: 0.04663573281972536
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881 ]
------------------------------
Epoch: 67
Training loss: 0.02363148095606551 | Validation loss: 0.043784839500273974
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881 ]
------------------------------
Epoch: 68
Training loss: 0.022655583384563686 | Validation loss: 0.052880666351744106
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881 ]
------------------------------
Epoch: 69
Training loss: 0.021290708928998665 | Validation loss: 0.03772427241450974
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881 ]
------------------------------
Epoch: 70
Training loss: 0.020642276361267937 | Validation loss: 0.026211059918361052
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881  0.02621106]
------------------------------
Epoch: 71
Training loss: 0.020620109798077508 | Validation loss: 0.03342627454549074
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881  0.02621106]
------------------------------
Epoch: 72
Training loss: 0.021390452662384825 | Validation loss: 0.04179773193651012
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881  0.02621106]
------------------------------
Epoch: 73
Training loss: 0.022596948584051507 | Validation loss: 0.05714830097609332
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881  0.02621106]
------------------------------
Epoch: 74
Training loss: 0.023441489446561346 | Validation loss: 0.06146918742784432
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881  0.02621106]
------------------------------
Epoch: 75
Training loss: 0.025943865520721265 | Validation loss: 0.07486197179449457
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881  0.02621106]
------------------------------
Epoch: 76
Training loss: 0.02424021406091659 | Validation loss: 0.03962666341768844
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881  0.02621106]
------------------------------
Epoch: 77
Training loss: 0.022813132383411955 | Validation loss: 0.0878413732030562
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881  0.02621106]
------------------------------
Epoch: 78
Training loss: 0.0214004504234202 | Validation loss: 0.036098147343311994
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881  0.02621106]
------------------------------
Epoch: 79
Training loss: 0.020235511447917593 | Validation loss: 0.030150488950312138
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881  0.02621106]
------------------------------
Epoch: 80
Training loss: 0.019402427983428786 | Validation loss: 0.025317091960459948
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881  0.02621106 0.02531709]
------------------------------
Epoch: 81
Training loss: 0.019609720393381862 | Validation loss: 0.033331186211268814
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881  0.02621106 0.02531709]
------------------------------
Epoch: 82
Training loss: 0.020388572548444454 | Validation loss: 0.046018143982759545
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881  0.02621106 0.02531709]
------------------------------
Epoch: 83
Training loss: 0.021280125978776078 | Validation loss: 0.04034127773983138
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881  0.02621106 0.02531709]
------------------------------
Epoch: 84
Training loss: 0.022711353089946967 | Validation loss: 0.03409364766308239
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881  0.02621106 0.02531709]
------------------------------
Epoch: 85
Training loss: 0.02453193292097162 | Validation loss: 0.08150071637438876
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881  0.02621106 0.02531709]
------------------------------
Epoch: 86
Training loss: 0.02312512177717589 | Validation loss: 0.050399243499019315
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881  0.02621106 0.02531709]
------------------------------
Epoch: 87
Training loss: 0.021199130441857734 | Validation loss: 0.04424859756337745
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881  0.02621106 0.02531709]
------------------------------
Epoch: 88
Training loss: 0.02017700191217697 | Validation loss: 0.04524432375494923
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881  0.02621106 0.02531709]
------------------------------
Epoch: 89
Training loss: 0.019404342235914366 | Validation loss: 0.0321854111805026
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881  0.02621106 0.02531709]
------------------------------
Epoch: 90
Training loss: 0.01875441001826211 | Validation loss: 0.024785825766489973
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881  0.02621106 0.02531709 0.02478583]
------------------------------
Epoch: 91
Training loss: 0.018730865393648384 | Validation loss: 0.02874932675955019
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881  0.02621106 0.02531709 0.02478583]
------------------------------
Epoch: 92
Training loss: 0.01903830907545109 | Validation loss: 0.049793892606560676
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881  0.02621106 0.02531709 0.02478583]
------------------------------
Epoch: 93
Training loss: 0.019815182877018264 | Validation loss: 0.03450491282689784
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881  0.02621106 0.02531709 0.02478583]
------------------------------
Epoch: 94
Training loss: 0.02128261590549941 | Validation loss: 0.03380642605147192
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881  0.02621106 0.02531709 0.02478583]
------------------------------
Epoch: 95
Training loss: 0.023460421202183977 | Validation loss: 0.04820313796933208
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881  0.02621106 0.02531709 0.02478583]
------------------------------
Epoch: 96
Training loss: 0.022719959653009048 | Validation loss: 0.04494413600436279
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881  0.02621106 0.02531709 0.02478583]
------------------------------
Epoch: 97
Training loss: 0.020536305857935415 | Validation loss: 0.07395922485738993
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881  0.02621106 0.02531709 0.02478583]
------------------------------
Epoch: 98
Training loss: 0.019407542622252274 | Validation loss: 0.04475157501708184
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881  0.02621106 0.02531709 0.02478583]
------------------------------
Epoch: 99
Training loss: 0.018422949634990107 | Validation loss: 0.029646523080633154
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881  0.02621106 0.02531709 0.02478583]
------------------------------
Epoch: 100
Training loss: 0.01798046646481342 | Validation loss: 0.02428950648754835
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881  0.02621106 0.02531709 0.02478583 0.02428951]
------------------------------
Epoch: 101
Training loss: 0.01773889101702373 | Validation loss: 0.026878266878026937
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881  0.02621106 0.02531709 0.02478583 0.02428951]
------------------------------
Epoch: 102
Training loss: 0.017975333689708217 | Validation loss: 0.0366562896940325
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881  0.02621106 0.02531709 0.02478583 0.02428951]
------------------------------
Epoch: 103
Training loss: 0.01922888393223527 | Validation loss: 0.03991323919035494
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881  0.02621106 0.02531709 0.02478583 0.02428951]
------------------------------
Epoch: 104
Training loss: 0.021070435833608212 | Validation loss: 0.02945878236953701
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881  0.02621106 0.02531709 0.02478583 0.02428951]
------------------------------
Epoch: 105
Training loss: 0.022624185305043513 | Validation loss: 0.039455125973160775
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881  0.02621106 0.02531709 0.02478583 0.02428951]
------------------------------
Epoch: 106
Training loss: 0.021214887884404014 | Validation loss: 0.04384329728782177
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881  0.02621106 0.02531709 0.02478583 0.02428951]
------------------------------
Epoch: 107
Training loss: 0.019580216475042253 | Validation loss: 0.033993630370657356
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881  0.02621106 0.02531709 0.02478583 0.02428951]
------------------------------
Epoch: 108
Training loss: 0.01824560266568774 | Validation loss: 0.034660790281902464
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881  0.02621106 0.02531709 0.02478583 0.02428951]
------------------------------
Epoch: 109
Training loss: 0.017534620688496694 | Validation loss: 0.02668355663107442
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881  0.02621106 0.02531709 0.02478583 0.02428951]
------------------------------
Epoch: 110
Training loss: 0.017282390727582368 | Validation loss: 0.023429298324377408
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881  0.02621106 0.02531709 0.02478583 0.02428951 0.0234293 ]
------------------------------
Epoch: 111
Training loss: 0.017200304728815792 | Validation loss: 0.028306924737989902
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881  0.02621106 0.02531709 0.02478583 0.02428951 0.0234293 ]
------------------------------
Epoch: 112
Training loss: 0.017457029978303534 | Validation loss: 0.039004568476229906
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881  0.02621106 0.02531709 0.02478583 0.02428951 0.0234293 ]
------------------------------
Epoch: 113
Training loss: 0.018181345012672396 | Validation loss: 0.037324963070984395
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881  0.02621106 0.02531709 0.02478583 0.02428951 0.0234293 ]
------------------------------
Epoch: 114
Training loss: 0.019610544768452403 | Validation loss: 0.05263229812096272
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881  0.02621106 0.02531709 0.02478583 0.02428951 0.0234293 ]
------------------------------
Epoch: 115
Training loss: 0.021719489280541657 | Validation loss: 0.10329486296645232
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881  0.02621106 0.02531709 0.02478583 0.02428951 0.0234293 ]
------------------------------
Epoch: 116
Training loss: 0.01992288611731247 | Validation loss: 0.03785114236442106
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881  0.02621106 0.02531709 0.02478583 0.02428951 0.0234293 ]
------------------------------
Epoch: 117
Training loss: 0.018402819861073484 | Validation loss: 0.037453519978693554
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881  0.02621106 0.02531709 0.02478583 0.02428951 0.0234293 ]
------------------------------
Epoch: 118
Training loss: 0.017824132010521677 | Validation loss: 0.04232800778533731
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881  0.02621106 0.02531709 0.02478583 0.02428951 0.0234293 ]
------------------------------
Epoch: 119
Training loss: 0.016857583978805345 | Validation loss: 0.027738639802139784
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881  0.02621106 0.02531709 0.02478583 0.02428951 0.0234293 ]
------------------------------
Epoch: 120
Training loss: 0.016807740439212154 | Validation loss: 0.02330138411239854
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881  0.02621106 0.02531709 0.02478583 0.02428951 0.0234293
 0.02330138]
------------------------------
Epoch: 121
Training loss: 0.01654348725845155 | Validation loss: 0.026412564745571996
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881  0.02621106 0.02531709 0.02478583 0.02428951 0.0234293
 0.02330138]
------------------------------
Epoch: 122
Training loss: 0.016701313612326556 | Validation loss: 0.040867413128060956
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881  0.02621106 0.02531709 0.02478583 0.02428951 0.0234293
 0.02330138]
------------------------------
Epoch: 123
Training loss: 0.017480136147937794 | Validation loss: 0.04603183016713176
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881  0.02621106 0.02531709 0.02478583 0.02428951 0.0234293
 0.02330138]
------------------------------
Epoch: 124
Training loss: 0.01918658996302529 | Validation loss: 0.06513113408748593
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881  0.02621106 0.02531709 0.02478583 0.02428951 0.0234293
 0.02330138]
------------------------------
Epoch: 125
Training loss: 0.02093221848973861 | Validation loss: 0.03038291127554008
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881  0.02621106 0.02531709 0.02478583 0.02428951 0.0234293
 0.02330138]
------------------------------
Epoch: 126
Training loss: 0.019247326766129447 | Validation loss: 0.02680572501516768
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881  0.02621106 0.02531709 0.02478583 0.02428951 0.0234293
 0.02330138]
------------------------------
Epoch: 127
Training loss: 0.018294873881165075 | Validation loss: 0.03915104289938297
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881  0.02621106 0.02531709 0.02478583 0.02428951 0.0234293
 0.02330138]
------------------------------
Epoch: 128
Training loss: 0.017199005021031208 | Validation loss: 0.03470036167917507
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881  0.02621106 0.02531709 0.02478583 0.02428951 0.0234293
 0.02330138]
------------------------------
Epoch: 129
Training loss: 0.01629751831903933 | Validation loss: 0.027707418227302178
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881  0.02621106 0.02531709 0.02478583 0.02428951 0.0234293
 0.02330138]
------------------------------
Epoch: 130
Training loss: 0.016322553382153333 | Validation loss: 0.023030078088465546
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881  0.02621106 0.02531709 0.02478583 0.02428951 0.0234293
 0.02330138 0.02303008]
------------------------------
Epoch: 131
Training loss: 0.01574328486517313 | Validation loss: 0.025941874748761102
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881  0.02621106 0.02531709 0.02478583 0.02428951 0.0234293
 0.02330138 0.02303008]
------------------------------
Epoch: 132
Training loss: 0.01595830355736951 | Validation loss: 0.026795019695003117
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881  0.02621106 0.02531709 0.02478583 0.02428951 0.0234293
 0.02330138 0.02303008]
------------------------------
Epoch: 133
Training loss: 0.01674997114278527 | Validation loss: 0.032723173864984086
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881  0.02621106 0.02531709 0.02478583 0.02428951 0.0234293
 0.02330138 0.02303008]
------------------------------
Epoch: 134
Training loss: 0.018060487297246693 | Validation loss: 0.05259248175259147
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881  0.02621106 0.02531709 0.02478583 0.02428951 0.0234293
 0.02330138 0.02303008]
------------------------------
Epoch: 135
Training loss: 0.019997143852128554 | Validation loss: 0.029469374334439635
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881  0.02621106 0.02531709 0.02478583 0.02428951 0.0234293
 0.02330138 0.02303008]
------------------------------
Epoch: 136
Training loss: 0.018740022800078516 | Validation loss: 0.03724197328223714
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881  0.02621106 0.02531709 0.02478583 0.02428951 0.0234293
 0.02330138 0.02303008]
------------------------------
Epoch: 137
Training loss: 0.017349006890918804 | Validation loss: 0.03206472541205585
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881  0.02621106 0.02531709 0.02478583 0.02428951 0.0234293
 0.02330138 0.02303008]
------------------------------
Epoch: 138
Training loss: 0.016172322676505757 | Validation loss: 0.031014696528602923
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881  0.02621106 0.02531709 0.02478583 0.02428951 0.0234293
 0.02330138 0.02303008]
------------------------------
Epoch: 139
Training loss: 0.015613548719674832 | Validation loss: 0.02622273386389549
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881  0.02621106 0.02531709 0.02478583 0.02428951 0.0234293
 0.02330138 0.02303008]
------------------------------
Epoch: 140
Training loss: 0.015545637761563183 | Validation loss: 0.02298504370264709
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881  0.02621106 0.02531709 0.02478583 0.02428951 0.0234293
 0.02330138 0.02303008 0.02298504]
------------------------------
Epoch: 141
Training loss: 0.015161730000289225 | Validation loss: 0.025938816684564308
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881  0.02621106 0.02531709 0.02478583 0.02428951 0.0234293
 0.02330138 0.02303008 0.02298504]
------------------------------
Epoch: 142
Training loss: 0.015637453324972135 | Validation loss: 0.031124545161479285
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881  0.02621106 0.02531709 0.02478583 0.02428951 0.0234293
 0.02330138 0.02303008 0.02298504]
------------------------------
Epoch: 143
Training loss: 0.016111005887555086 | Validation loss: 0.030322041545462395
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881  0.02621106 0.02531709 0.02478583 0.02428951 0.0234293
 0.02330138 0.02303008 0.02298504]
------------------------------
Epoch: 144
Training loss: 0.017304543302081494 | Validation loss: 0.030219923222570548
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881  0.02621106 0.02531709 0.02478583 0.02428951 0.0234293
 0.02330138 0.02303008 0.02298504]
------------------------------
Epoch: 145
Training loss: 0.01862924370673383 | Validation loss: 0.037004510373143215
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881  0.02621106 0.02531709 0.02478583 0.02428951 0.0234293
 0.02330138 0.02303008 0.02298504]
------------------------------
Epoch: 146
Training loss: 0.01818571688403726 | Validation loss: 0.031312630412035754
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881  0.02621106 0.02531709 0.02478583 0.02428951 0.0234293
 0.02330138 0.02303008 0.02298504]
------------------------------
Epoch: 147
Training loss: 0.016826508900429676 | Validation loss: 0.025547978468239307
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881  0.02621106 0.02531709 0.02478583 0.02428951 0.0234293
 0.02330138 0.02303008 0.02298504]
------------------------------
Epoch: 148
Training loss: 0.015718223280103704 | Validation loss: 0.025384996768220196
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881  0.02621106 0.02531709 0.02478583 0.02428951 0.0234293
 0.02330138 0.02303008 0.02298504]
------------------------------
Epoch: 149
Training loss: 0.01519297607250602 | Validation loss: 0.026305198087356985
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881  0.02621106 0.02531709 0.02478583 0.02428951 0.0234293
 0.02330138 0.02303008 0.02298504]
------------------------------
Epoch: 150
Training loss: 0.015273088334799416 | Validation loss: 0.02278466535998242
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881  0.02621106 0.02531709 0.02478583 0.02428951 0.0234293
 0.02330138 0.02303008 0.02298504 0.02278467]
------------------------------
Epoch: 151
Training loss: 0.014718375066936257 | Validation loss: 0.025838033728567616
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881  0.02621106 0.02531709 0.02478583 0.02428951 0.0234293
 0.02330138 0.02303008 0.02298504 0.02278467]
------------------------------
Epoch: 152
Training loss: 0.015016126022240532 | Validation loss: 0.02520066301804036
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881  0.02621106 0.02531709 0.02478583 0.02428951 0.0234293
 0.02330138 0.02303008 0.02298504 0.02278467]
------------------------------
Epoch: 153
Training loss: 0.015792525130199637 | Validation loss: 0.033811263440709026
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881  0.02621106 0.02531709 0.02478583 0.02428951 0.0234293
 0.02330138 0.02303008 0.02298504 0.02278467]
------------------------------
Epoch: 154
Training loss: 0.016720541877302562 | Validation loss: 0.03242217247108264
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881  0.02621106 0.02531709 0.02478583 0.02428951 0.0234293
 0.02330138 0.02303008 0.02298504 0.02278467]
------------------------------
Epoch: 155
Training loss: 0.01912484302370232 | Validation loss: 0.0287974347925878
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881  0.02621106 0.02531709 0.02478583 0.02428951 0.0234293
 0.02330138 0.02303008 0.02298504 0.02278467]
------------------------------
Epoch: 156
Training loss: 0.017655496700451925 | Validation loss: 0.028920547171894993
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881  0.02621106 0.02531709 0.02478583 0.02428951 0.0234293
 0.02330138 0.02303008 0.02298504 0.02278467]
------------------------------
Epoch: 157
Training loss: 0.016262264513158 | Validation loss: 0.026407693163491786
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881  0.02621106 0.02531709 0.02478583 0.02428951 0.0234293
 0.02330138 0.02303008 0.02298504 0.02278467]
------------------------------
Epoch: 158
Training loss: 0.015085211046311537 | Validation loss: 0.026688831109952713
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881  0.02621106 0.02531709 0.02478583 0.02428951 0.0234293
 0.02330138 0.02303008 0.02298504 0.02278467]
------------------------------
Epoch: 159
Training loss: 0.014528502254871282 | Validation loss: 0.026294793401445662
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881  0.02621106 0.02531709 0.02478583 0.02428951 0.0234293
 0.02330138 0.02303008 0.02298504 0.02278467]
------------------------------
Epoch: 160
Training loss: 0.014659694655183778 | Validation loss: 0.022498133392738446
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881  0.02621106 0.02531709 0.02478583 0.02428951 0.0234293
 0.02330138 0.02303008 0.02298504 0.02278467 0.02249813]
------------------------------
Epoch: 161
Training loss: 0.014347941379028896 | Validation loss: 0.026282045541198125
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881  0.02621106 0.02531709 0.02478583 0.02428951 0.0234293
 0.02330138 0.02303008 0.02298504 0.02278467 0.02249813]
------------------------------
Epoch: 162
Training loss: 0.014263723311410379 | Validation loss: 0.03165137199019747
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881  0.02621106 0.02531709 0.02478583 0.02428951 0.0234293
 0.02330138 0.02303008 0.02298504 0.02278467 0.02249813]
------------------------------
Epoch: 163
Training loss: 0.015251761050668625 | Validation loss: 0.027164413177940463
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881  0.02621106 0.02531709 0.02478583 0.02428951 0.0234293
 0.02330138 0.02303008 0.02298504 0.02278467 0.02249813]
------------------------------
Epoch: 164
Training loss: 0.016272441059848677 | Validation loss: 0.029640018258110752
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881  0.02621106 0.02531709 0.02478583 0.02428951 0.0234293
 0.02330138 0.02303008 0.02298504 0.02278467 0.02249813]
------------------------------
Epoch: 165
Training loss: 0.017680507535982107 | Validation loss: 0.029534945397504737
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881  0.02621106 0.02531709 0.02478583 0.02428951 0.0234293
 0.02330138 0.02303008 0.02298504 0.02278467 0.02249813]
------------------------------
Epoch: 166
Training loss: 0.017353308862188326 | Validation loss: 0.02775626107385116
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881  0.02621106 0.02531709 0.02478583 0.02428951 0.0234293
 0.02330138 0.02303008 0.02298504 0.02278467 0.02249813]
------------------------------
Epoch: 167
Training loss: 0.016023863398239982 | Validation loss: 0.02645636486288692
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881  0.02621106 0.02531709 0.02478583 0.02428951 0.0234293
 0.02330138 0.02303008 0.02298504 0.02278467 0.02249813]
------------------------------
Epoch: 168
Training loss: 0.01442366958923849 | Validation loss: 0.02888286848818617
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881  0.02621106 0.02531709 0.02478583 0.02428951 0.0234293
 0.02330138 0.02303008 0.02298504 0.02278467 0.02249813]
------------------------------
Epoch: 169
Training loss: 0.014458691623996989 | Validation loss: 0.027762371946924498
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881  0.02621106 0.02531709 0.02478583 0.02428951 0.0234293
 0.02330138 0.02303008 0.02298504 0.02278467 0.02249813]
------------------------------
Epoch: 170
Training loss: 0.014444907576690319 | Validation loss: 0.0224708960325058
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881  0.02621106 0.02531709 0.02478583 0.02428951 0.0234293
 0.02330138 0.02303008 0.02298504 0.02278467 0.02249813 0.0224709 ]
------------------------------
Epoch: 171
Training loss: 0.013903563040938333 | Validation loss: 0.025974585957426046
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881  0.02621106 0.02531709 0.02478583 0.02428951 0.0234293
 0.02330138 0.02303008 0.02298504 0.02278467 0.02249813 0.0224709 ]
------------------------------
Epoch: 172
Training loss: 0.014055204803976212 | Validation loss: 0.027087848567004715
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881  0.02621106 0.02531709 0.02478583 0.02428951 0.0234293
 0.02330138 0.02303008 0.02298504 0.02278467 0.02249813 0.0224709 ]
------------------------------
Epoch: 173
Training loss: 0.014722083803635799 | Validation loss: 0.03538105211087635
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881  0.02621106 0.02531709 0.02478583 0.02428951 0.0234293
 0.02330138 0.02303008 0.02298504 0.02278467 0.02249813 0.0224709 ]
------------------------------
Epoch: 174
Training loss: 0.015269574309559728 | Validation loss: 0.030134127608367374
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881  0.02621106 0.02531709 0.02478583 0.02428951 0.0234293
 0.02330138 0.02303008 0.02298504 0.02278467 0.02249813 0.0224709 ]
------------------------------
Epoch: 175
Training loss: 0.01760749817686344 | Validation loss: 0.032296386281294484
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881  0.02621106 0.02531709 0.02478583 0.02428951 0.0234293
 0.02330138 0.02303008 0.02298504 0.02278467 0.02249813 0.0224709 ]
------------------------------
Epoch: 176
Training loss: 0.016067436583310005 | Validation loss: 0.02902796821269606
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881  0.02621106 0.02531709 0.02478583 0.02428951 0.0234293
 0.02330138 0.02303008 0.02298504 0.02278467 0.02249813 0.0224709 ]
------------------------------
Epoch: 177
Training loss: 0.015462717003667886 | Validation loss: 0.027272284696144716
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881  0.02621106 0.02531709 0.02478583 0.02428951 0.0234293
 0.02330138 0.02303008 0.02298504 0.02278467 0.02249813 0.0224709 ]
------------------------------
Epoch: 178
Training loss: 0.014177237382829793 | Validation loss: 0.0292433856853417
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881  0.02621106 0.02531709 0.02478583 0.02428951 0.0234293
 0.02330138 0.02303008 0.02298504 0.02278467 0.02249813 0.0224709 ]
------------------------------
Epoch: 179
Training loss: 0.014127726506381503 | Validation loss: 0.02626519976183772
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881  0.02621106 0.02531709 0.02478583 0.02428951 0.0234293
 0.02330138 0.02303008 0.02298504 0.02278467 0.02249813 0.0224709 ]
------------------------------
Epoch: 180
Training loss: 0.014095621309827576 | Validation loss: 0.02228838494712753
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881  0.02621106 0.02531709 0.02478583 0.02428951 0.0234293
 0.02330138 0.02303008 0.02298504 0.02278467 0.02249813 0.0224709
 0.02228838]
------------------------------
Epoch: 181
Training loss: 0.013186523555080418 | Validation loss: 0.026403546100482345
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881  0.02621106 0.02531709 0.02478583 0.02428951 0.0234293
 0.02330138 0.02303008 0.02298504 0.02278467 0.02249813 0.0224709
 0.02228838]
------------------------------
Epoch: 182
Training loss: 0.013458487034779087 | Validation loss: 0.03125196968072227
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881  0.02621106 0.02531709 0.02478583 0.02428951 0.0234293
 0.02330138 0.02303008 0.02298504 0.02278467 0.02249813 0.0224709
 0.02228838]
------------------------------
Epoch: 183
Training loss: 0.014144350702003369 | Validation loss: 0.027981760312936137
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881  0.02621106 0.02531709 0.02478583 0.02428951 0.0234293
 0.02330138 0.02303008 0.02298504 0.02278467 0.02249813 0.0224709
 0.02228838]
------------------------------
Epoch: 184
Training loss: 0.01527679824790642 | Validation loss: 0.02871349892978157
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881  0.02621106 0.02531709 0.02478583 0.02428951 0.0234293
 0.02330138 0.02303008 0.02298504 0.02278467 0.02249813 0.0224709
 0.02228838]
------------------------------
Epoch: 185
Training loss: 0.01699371934351832 | Validation loss: 0.03547387194287564
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881  0.02621106 0.02531709 0.02478583 0.02428951 0.0234293
 0.02330138 0.02303008 0.02298504 0.02278467 0.02249813 0.0224709
 0.02228838]
------------------------------
Epoch: 186
Training loss: 0.016371346983741892 | Validation loss: 0.029047775315120816
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881  0.02621106 0.02531709 0.02478583 0.02428951 0.0234293
 0.02330138 0.02303008 0.02298504 0.02278467 0.02249813 0.0224709
 0.02228838]
------------------------------
Epoch: 187
Training loss: 0.014853418538882304 | Validation loss: 0.029449129876281534
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881  0.02621106 0.02531709 0.02478583 0.02428951 0.0234293
 0.02330138 0.02303008 0.02298504 0.02278467 0.02249813 0.0224709
 0.02228838]
[... per-epoch output for epochs 188-199 elided: training loss stayed in the 0.013-0.016 range, validation loss in the 0.021-0.034 range, and the end-of-cycle array was reprinted unchanged between cycle boundaries ...]
------------------------------
Epoch: 200
Training loss: 0.013492380542235697 | Validation loss: 0.021781244115637883
Validation loss (ends of cycles): [0.25536982 0.04253417 0.03439102 0.03108923 0.02868143 0.02791737
 0.0271881  0.02621106 0.02531709 0.02478583 0.02428951 0.0234293
 0.02330138 0.02303008 0.02298504 0.02278467 0.02249813 0.0224709
 0.02228838 0.02158284 0.02178124]
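The snapshot cadence above comes from the scheduler, not from the loss itself: with mode='triangular' and step_size_up=5, a full CyclicLR cycle takes 2 x 5 = 10 scheduler steps, so stepping once per epoch puts cycle boundaries at epochs 0, 10, 20, ..., 200 and yields 21 end-of-cycle entries over 201 epochs. The sketch below is a minimal illustration of that bookkeeping, not the mirzai Learner implementation; the stand-in model, the val_loss placeholder, and the epoch % cycle_length == 0 snapshot rule are assumptions made for the example.

import torch
from torch.optim import Adam
from torch.optim.lr_scheduler import CyclicLR

model = torch.nn.Linear(10, 1)  # stand-in for the CNN
optimizer = Adam(model.parameters(), lr=3e-5)
scheduler = CyclicLR(optimizer, base_lr=3e-5, max_lr=1e-3,
                     step_size_up=5, mode='triangular',
                     cycle_momentum=False)  # Adam has no classical momentum

cycle_length = 2 * 5  # step_size_up steps up + step_size_up steps down
end_of_cycle_losses = []
for epoch in range(201):
    # ... training and validation passes would run here ...
    val_loss = float('nan')  # placeholder for the real validation loss
    if epoch % cycle_length == 0:  # cycle boundary -> record a snapshot
        end_of_cycle_losses.append(val_loss)
    scheduler.step()  # one scheduler step per epoch

print(len(end_of_cycle_losses))  # -> 21, matching the arrays in the log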
--------------------------------------------------------------------------------
Seed: 13
--------------------------------------------------------------------------------
------------------------------
Epoch: 0
Training loss: 0.1930667787182088 | Validation loss: 0.1819245464823864
Validation loss (ends of cycles): [0.18192455]
[... per-epoch output for epochs 1-199 elided: training loss fell from 0.193 to roughly 0.013 and validation loss from 0.182 to roughly 0.022, with one end-of-cycle snapshot appended every 10 epochs ...]
------------------------------
Epoch: 200
Training loss: 0.012971938055540835 | Validation loss: 0.02208654147883256
Validation loss (ends of cycles): [0.18192455 0.0358297  0.03047521 0.02814174 0.02621252 0.02547023
 0.02477948 0.02416824 0.02346659 0.0231621  0.02286665 0.02326076
 0.0227573  0.0229696  0.0228028  0.0219293  0.02237625 0.02224644
 0.02210114 0.02189505 0.02208654]
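Since the validation loss oscillates within each learning-rate cycle, the end-of-cycle array is the natural summary of a run. A hypothetical post-processing sketch: report each run's best cycle rather than its last epoch. The array below is copied verbatim from the seed-13 log above; in practice it would be reloaded from the pickles written to dest_dir_loss (e.g. with load_dumps) rather than retyped.

import numpy as np

seed_13 = np.array([0.18192455, 0.0358297, 0.03047521, 0.02814174, 0.02621252,
                    0.02547023, 0.02477948, 0.02416824, 0.02346659, 0.0231621,
                    0.02286665, 0.02326076, 0.0227573, 0.0229696, 0.0228028,
                    0.0219293, 0.02237625, 0.02224644, 0.02210114, 0.02189505,
                    0.02208654])

best = int(np.argmin(seed_13))  # index of the best cycle-end snapshot
print(f'Best end-of-cycle validation loss: {seed_13.min():.6f} '
      f'at cycle {best} (epoch {best * 10})')
# -> Best end-of-cycle validation loss: 0.021895 at cycle 19 (epoch 190)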
--------------------------------------------------------------------------------
Seed: 14
--------------------------------------------------------------------------------
------------------------------
Epoch: 0
Training loss: 0.12228791815478628 | Validation loss: 0.1054369103577402
Validation loss (ends of cycles): [0.10543691]
[... per-epoch output for epochs 1-24 elided: training loss fell to about 0.036 and validation loss to about 0.032, with end-of-cycle snapshots appended at epochs 10 and 20 ...]
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704]
------------------------------
Epoch: 25
Training loss: 0.04047986775697247 | Validation loss: 0.03522724899704809
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704]
------------------------------
Epoch: 26
Training loss: 0.038433285021200414 | Validation loss: 0.039532348375629495
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704]
------------------------------
Epoch: 27
Training loss: 0.03698946019195444 | Validation loss: 0.03227757614243914
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704]
------------------------------
Epoch: 28
Training loss: 0.03505631038404214 | Validation loss: 0.03230649564001295
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704]
------------------------------
Epoch: 29
Training loss: 0.033620467814579 | Validation loss: 0.03223192781485893
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704]
------------------------------
Epoch: 30
Training loss: 0.03178789988690155 | Validation loss: 0.030227238519324198
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724]
------------------------------
Epoch: 31
Training loss: 0.03240554717679819 | Validation loss: 0.030879514326375944
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724]
------------------------------
Epoch: 32
Training loss: 0.03349711600978806 | Validation loss: 0.031250168948813724
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724]
------------------------------
Epoch: 33
Training loss: 0.034300035622909786 | Validation loss: 0.031172568392422464
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724]
------------------------------
Epoch: 34
Training loss: 0.03561411505731625 | Validation loss: 0.035509896154205
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724]
------------------------------
Epoch: 35
Training loss: 0.035965731965635364 | Validation loss: 0.037915085131923355
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724]
------------------------------
Epoch: 36
Training loss: 0.03542989279650818 | Validation loss: 0.03176571290802072
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724]
------------------------------
Epoch: 37
Training loss: 0.03348708177757699 | Validation loss: 0.04181969317573088
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724]
------------------------------
Epoch: 38
Training loss: 0.032106196553241914 | Validation loss: 0.029362012459724036
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724]
------------------------------
Epoch: 39
Training loss: 0.030441816941630548 | Validation loss: 0.029443050079323626
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724]
------------------------------
Epoch: 40
Training loss: 0.028951545102082615 | Validation loss: 0.027948243957426813
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824]
------------------------------
Epoch: 41
Training loss: 0.02955955447159647 | Validation loss: 0.02866512626685478
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824]
------------------------------
Epoch: 42
Training loss: 0.030229009668971223 | Validation loss: 0.029844649274040153
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824]
------------------------------
Epoch: 43
Training loss: 0.031777786844357 | Validation loss: 0.029600525412846496
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824]
------------------------------
Epoch: 44
Training loss: 0.03273992139724939 | Validation loss: 0.03340166689897025
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824]
------------------------------
Epoch: 45
Training loss: 0.03442120896198037 | Validation loss: 0.03195419293586855
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824]
------------------------------
Epoch: 46
Training loss: 0.03261524584235215 | Validation loss: 0.0315606604433722
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824]
------------------------------
Epoch: 47
Training loss: 0.03111256630258347 | Validation loss: 0.029374615530724877
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824]
------------------------------
Epoch: 48
Training loss: 0.029745885477651182 | Validation loss: 0.029515129479545134
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824]
------------------------------
Epoch: 49
Training loss: 0.028203826734599303 | Validation loss: 0.027964135907866335
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824]
------------------------------
Epoch: 50
Training loss: 0.02741435214089669 | Validation loss: 0.02691561297548038
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561]
------------------------------
Epoch: 51
Training loss: 0.02772532103628647 | Validation loss: 0.028478951848767423
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561]
------------------------------
Epoch: 52
Training loss: 0.028424028443490585 | Validation loss: 0.028361442671329888
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561]
------------------------------
Epoch: 53
Training loss: 0.028827177053998884 | Validation loss: 0.03354962459868855
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561]
------------------------------
Epoch: 54
Training loss: 0.030674236258719026 | Validation loss: 0.02970579656323901
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561]
------------------------------
Epoch: 55
Training loss: 0.03177045115337866 | Validation loss: 0.029968159445733937
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561]
------------------------------
Epoch: 56
Training loss: 0.03085786577435286 | Validation loss: 0.029443291947245598
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561]
------------------------------
Epoch: 57
Training loss: 0.02905051384832922 | Validation loss: 0.02832780895685708
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561]
------------------------------
Epoch: 58
Training loss: 0.02775720836276688 | Validation loss: 0.027414744236954936
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561]
------------------------------
Epoch: 59
Training loss: 0.02642078349760514 | Validation loss: 0.02696808020549792
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561]
------------------------------
Epoch: 60
Training loss: 0.025448385527645185 | Validation loss: 0.02623730621956013
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731]
------------------------------
Epoch: 61
Training loss: 0.025777471902954385 | Validation loss: 0.0270316894683573
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731]
------------------------------
Epoch: 62
Training loss: 0.026880092464569138 | Validation loss: 0.02829531321509017
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731]
------------------------------
Epoch: 63
Training loss: 0.02747904618338841 | Validation loss: 0.029463160783052444
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731]
------------------------------
Epoch: 64
Training loss: 0.029024402031720412 | Validation loss: 0.030591066995704616
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731]
------------------------------
Epoch: 65
Training loss: 0.03056626948641568 | Validation loss: 0.03102897756077625
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731]
------------------------------
Epoch: 66
Training loss: 0.02904554159282063 | Validation loss: 0.03191932718510981
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731]
------------------------------
Epoch: 67
Training loss: 0.02752647077573872 | Validation loss: 0.02758541154778666
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731]
------------------------------
Epoch: 68
Training loss: 0.026392553770172644 | Validation loss: 0.027598489075899124
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731]
------------------------------
Epoch: 69
Training loss: 0.024825475034796124 | Validation loss: 0.027477786013925518
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731]
------------------------------
Epoch: 70
Training loss: 0.02389349034851099 | Validation loss: 0.025481665368985246
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167]
------------------------------
Epoch: 71
Training loss: 0.024810818970445694 | Validation loss: 0.0258330095352398
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167]
------------------------------
Epoch: 72
Training loss: 0.025311344723421628 | Validation loss: 0.026171780950217334
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167]
------------------------------
Epoch: 73
Training loss: 0.026799078659737498 | Validation loss: 0.027304544803444988
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167]
------------------------------
Epoch: 74
Training loss: 0.02739570381468147 | Validation loss: 0.029297827053125256
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167]
------------------------------
Epoch: 75
Training loss: 0.02901256551996358 | Validation loss: 0.03376480612765859
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167]
------------------------------
Epoch: 76
Training loss: 0.027922531536863585 | Validation loss: 0.031056516907281347
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167]
------------------------------
Epoch: 77
Training loss: 0.026418042119319845 | Validation loss: 0.02869530960365578
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167]
------------------------------
Epoch: 78
Training loss: 0.024494294804043887 | Validation loss: 0.02879283722076151
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167]
------------------------------
Epoch: 79
Training loss: 0.023640055410049068 | Validation loss: 0.025536342110070918
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167]
------------------------------
Epoch: 80
Training loss: 0.02292518796611244 | Validation loss: 0.024924556069352007
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167 0.02492456]
------------------------------
Epoch: 81
Training loss: 0.022964879946132017 | Validation loss: 0.025884984164602227
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167 0.02492456]
------------------------------
Epoch: 82
Training loss: 0.024365100468228745 | Validation loss: 0.02613054729860138
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167 0.02492456]
------------------------------
Epoch: 83
Training loss: 0.024947036228074534 | Validation loss: 0.026084760548891844
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167 0.02492456]
------------------------------
Epoch: 84
Training loss: 0.026113691951746378 | Validation loss: 0.029804825920749595
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167 0.02492456]
------------------------------
Epoch: 85
Training loss: 0.02732016758357243 | Validation loss: 0.028548474147639894
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167 0.02492456]
------------------------------
Epoch: 86
Training loss: 0.026706833134942907 | Validation loss: 0.02933104053415634
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167 0.02492456]
------------------------------
Epoch: 87
Training loss: 0.02524915052108406 | Validation loss: 0.028903299735652074
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167 0.02492456]
------------------------------
Epoch: 88
Training loss: 0.023512191850904043 | Validation loss: 0.02478121103787864
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167 0.02492456]
------------------------------
Epoch: 89
Training loss: 0.0227027192325672 | Validation loss: 0.024699235369485838
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167 0.02492456]
------------------------------
Epoch: 90
Training loss: 0.02195627812099287 | Validation loss: 0.024092087763603085
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167 0.02492456 0.02409209]
------------------------------
Epoch: 91
Training loss: 0.022029758766066374 | Validation loss: 0.024637732999744238
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167 0.02492456 0.02409209]
------------------------------
Epoch: 92
Training loss: 0.02214197951134264 | Validation loss: 0.02562687725380615
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167 0.02492456 0.02409209]
------------------------------
Epoch: 93
Training loss: 0.023682874324542236 | Validation loss: 0.025451905301047698
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167 0.02492456 0.02409209]
------------------------------
Epoch: 94
Training loss: 0.0249582189759921 | Validation loss: 0.026010741134760557
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167 0.02492456 0.02409209]
------------------------------
Epoch: 95
Training loss: 0.026149797552393945 | Validation loss: 0.029183034267690446
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167 0.02492456 0.02409209]
------------------------------
Epoch: 96
Training loss: 0.025613661204290584 | Validation loss: 0.029470085580315854
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167 0.02492456 0.02409209]
------------------------------
Epoch: 97
Training loss: 0.02367819772987831 | Validation loss: 0.02606600364325223
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167 0.02492456 0.02409209]
------------------------------
Epoch: 98
Training loss: 0.022782948269410346 | Validation loss: 0.025672975306709606
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167 0.02492456 0.02409209]
------------------------------
Epoch: 99
Training loss: 0.02164456419025858 | Validation loss: 0.024995564966014138
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167 0.02492456 0.02409209]
------------------------------
Epoch: 100
Training loss: 0.020927290345080257 | Validation loss: 0.023898190818727016
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167 0.02492456 0.02409209 0.02389819]
------------------------------
Epoch: 101
Training loss: 0.020838848724052672 | Validation loss: 0.024341666174155695
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167 0.02492456 0.02409209 0.02389819]
------------------------------
Epoch: 102
Training loss: 0.02157418485882321 | Validation loss: 0.02549752972468182
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167 0.02492456 0.02409209 0.02389819]
------------------------------
Epoch: 103
Training loss: 0.02215171731796449 | Validation loss: 0.027644692678694373
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167 0.02492456 0.02409209 0.02389819]
------------------------------
Epoch: 104
Training loss: 0.023533740482194636 | Validation loss: 0.02766746641309173
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167 0.02492456 0.02409209 0.02389819]
------------------------------
Epoch: 105
Training loss: 0.02504744735826564 | Validation loss: 0.03269333695923841
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167 0.02492456 0.02409209 0.02389819]
------------------------------
Epoch: 106
Training loss: 0.02399577457851511 | Validation loss: 0.026279965553570678
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167 0.02492456 0.02409209 0.02389819]
------------------------------
Epoch: 107
Training loss: 0.022653392671296995 | Validation loss: 0.025546919761432543
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167 0.02492456 0.02409209 0.02389819]
------------------------------
Epoch: 108
Training loss: 0.021445270692096736 | Validation loss: 0.024732283109592065
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167 0.02492456 0.02409209 0.02389819]
------------------------------
Epoch: 109
Training loss: 0.02024538448928454 | Validation loss: 0.024346998823737656
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167 0.02492456 0.02409209 0.02389819]
------------------------------
Epoch: 110
Training loss: 0.020088354396308218 | Validation loss: 0.023614108493482625
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167 0.02492456 0.02409209 0.02389819 0.02361411]
------------------------------
Epoch: 111
Training loss: 0.020159628817705603 | Validation loss: 0.023966707841113762
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167 0.02492456 0.02409209 0.02389819 0.02361411]
------------------------------
Epoch: 112
Training loss: 0.02066820897767699 | Validation loss: 0.024948603266643152
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167 0.02492456 0.02409209 0.02389819 0.02361411]
------------------------------
Epoch: 113
Training loss: 0.021366572594345826 | Validation loss: 0.02552163469846602
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167 0.02492456 0.02409209 0.02389819 0.02361411]
------------------------------
Epoch: 114
Training loss: 0.022936123722361597 | Validation loss: 0.027489966126503767
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167 0.02492456 0.02409209 0.02389819 0.02361411]
------------------------------
Epoch: 115
Training loss: 0.02412121834960289 | Validation loss: 0.027228782815789734
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167 0.02492456 0.02409209 0.02389819 0.02361411]
------------------------------
Epoch: 116
Training loss: 0.023651672402260508 | Validation loss: 0.025690350233128777
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167 0.02492456 0.02409209 0.02389819 0.02361411]
------------------------------
Epoch: 117
Training loss: 0.021718305630261094 | Validation loss: 0.027504071075883176
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167 0.02492456 0.02409209 0.02389819 0.02361411]
------------------------------
Epoch: 118
Training loss: 0.020649877437428247 | Validation loss: 0.02455218826179151
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167 0.02492456 0.02409209 0.02389819 0.02361411]
------------------------------
Epoch: 119
Training loss: 0.02011051802986824 | Validation loss: 0.024612521649234824
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167 0.02492456 0.02409209 0.02389819 0.02361411]
------------------------------
Epoch: 120
Training loss: 0.019264938839809682 | Validation loss: 0.023097472437829884
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167 0.02492456 0.02409209 0.02389819 0.02361411
 0.02309747]
------------------------------
Epoch: 121
Training loss: 0.019316403066542576 | Validation loss: 0.02441213297209254
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167 0.02492456 0.02409209 0.02389819 0.02361411
 0.02309747]
------------------------------
Epoch: 122
Training loss: 0.019887421912384955 | Validation loss: 0.02437097444716427
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167 0.02492456 0.02409209 0.02389819 0.02361411
 0.02309747]
------------------------------
Epoch: 123
Training loss: 0.020397775916276666 | Validation loss: 0.027738412048805644
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167 0.02492456 0.02409209 0.02389819 0.02361411
 0.02309747]
------------------------------
Epoch: 124
Training loss: 0.02181776584792367 | Validation loss: 0.02554246283101815
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167 0.02492456 0.02409209 0.02389819 0.02361411
 0.02309747]
------------------------------
Epoch: 125
Training loss: 0.023289408678628082 | Validation loss: 0.032003110343659366
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167 0.02492456 0.02409209 0.02389819 0.02361411
 0.02309747]
------------------------------
Epoch: 126
Training loss: 0.022173569053108615 | Validation loss: 0.025447359757015
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167 0.02492456 0.02409209 0.02389819 0.02361411
 0.02309747]
------------------------------
Epoch: 127
Training loss: 0.02074599415593879 | Validation loss: 0.02787999822585671
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167 0.02492456 0.02409209 0.02389819 0.02361411
 0.02309747]
------------------------------
Epoch: 128
Training loss: 0.019872636575918128 | Validation loss: 0.02400662622380036
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167 0.02492456 0.02409209 0.02389819 0.02361411
 0.02309747]
------------------------------
Epoch: 129
Training loss: 0.0191136942778873 | Validation loss: 0.023773066078623135
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167 0.02492456 0.02409209 0.02389819 0.02361411
 0.02309747]
------------------------------
Epoch: 130
Training loss: 0.018855253730453853 | Validation loss: 0.022893787844589463
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167 0.02492456 0.02409209 0.02389819 0.02361411
 0.02309747 0.02289379]
------------------------------
Epoch: 131
Training loss: 0.018389209398878663 | Validation loss: 0.023396671577184287
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167 0.02492456 0.02409209 0.02389819 0.02361411
 0.02309747 0.02289379]
------------------------------
Epoch: 132
Training loss: 0.01898594183574726 | Validation loss: 0.024825873094852322
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167 0.02492456 0.02409209 0.02389819 0.02361411
 0.02309747 0.02289379]
------------------------------
Epoch: 133
Training loss: 0.019571426332118065 | Validation loss: 0.03233330883085728
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167 0.02492456 0.02409209 0.02389819 0.02361411
 0.02309747 0.02289379]
------------------------------
Epoch: 134
Training loss: 0.020783360165399993 | Validation loss: 0.027140274229976866
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167 0.02492456 0.02409209 0.02389819 0.02361411
 0.02309747 0.02289379]
------------------------------
Epoch: 135
Training loss: 0.022257593823823987 | Validation loss: 0.02835464863865464
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167 0.02492456 0.02409209 0.02389819 0.02361411
 0.02309747 0.02289379]
------------------------------
Epoch: 136
Training loss: 0.02122602771922219 | Validation loss: 0.024941712376420146
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167 0.02492456 0.02409209 0.02389819 0.02361411
 0.02309747 0.02289379]
------------------------------
Epoch: 137
Training loss: 0.020035431871750976 | Validation loss: 0.02537165092373336
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167 0.02492456 0.02409209 0.02389819 0.02361411
 0.02309747 0.02289379]
------------------------------
Epoch: 138
Training loss: 0.019187722245337276 | Validation loss: 0.025162397124977025
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167 0.02492456 0.02409209 0.02389819 0.02361411
 0.02309747 0.02289379]
------------------------------
Epoch: 139
Training loss: 0.018331920370323267 | Validation loss: 0.024782232664249563
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167 0.02492456 0.02409209 0.02389819 0.02361411
 0.02309747 0.02289379]
------------------------------
Epoch: 140
Training loss: 0.018385974502917832 | Validation loss: 0.022544961250214664
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167 0.02492456 0.02409209 0.02389819 0.02361411
 0.02309747 0.02289379 0.02254496]
------------------------------
Epoch: 141
Training loss: 0.01765795958008829 | Validation loss: 0.023712188798796247
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167 0.02492456 0.02409209 0.02389819 0.02361411
 0.02309747 0.02289379 0.02254496]
------------------------------
Epoch: 142
Training loss: 0.018420973618522407 | Validation loss: 0.023575709166902083
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167 0.02492456 0.02409209 0.02389819 0.02361411
 0.02309747 0.02289379 0.02254496]
------------------------------
Epoch: 143
Training loss: 0.019126582281224852 | Validation loss: 0.025220924919402157
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167 0.02492456 0.02409209 0.02389819 0.02361411
 0.02309747 0.02289379 0.02254496]
------------------------------
Epoch: 144
Training loss: 0.01961053673526257 | Validation loss: 0.026265721975101367
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167 0.02492456 0.02409209 0.02389819 0.02361411
 0.02309747 0.02289379 0.02254496]
------------------------------
Epoch: 145
Training loss: 0.02205375614538183 | Validation loss: 0.02857490042569461
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167 0.02492456 0.02409209 0.02389819 0.02361411
 0.02309747 0.02289379 0.02254496]
------------------------------
Epoch: 146
Training loss: 0.020693585647618383 | Validation loss: 0.025435910698164393
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167 0.02492456 0.02409209 0.02389819 0.02361411
 0.02309747 0.02289379 0.02254496]
------------------------------
Epoch: 147
Training loss: 0.01922064234797911 | Validation loss: 0.02503646554908267
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167 0.02492456 0.02409209 0.02389819 0.02361411
 0.02309747 0.02289379 0.02254496]
------------------------------
Epoch: 148
Training loss: 0.018394955019353003 | Validation loss: 0.024210673067028874
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167 0.02492456 0.02409209 0.02389819 0.02361411
 0.02309747 0.02289379 0.02254496]
------------------------------
Epoch: 149
Training loss: 0.017704154535128577 | Validation loss: 0.0237839009474825
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167 0.02492456 0.02409209 0.02389819 0.02361411
 0.02309747 0.02289379 0.02254496]
------------------------------
Epoch: 150
Training loss: 0.017490101173686667 | Validation loss: 0.022615981495214835
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167 0.02492456 0.02409209 0.02389819 0.02361411
 0.02309747 0.02289379 0.02254496 0.02261598]
------------------------------
Epoch: 151
Training loss: 0.017312443612627987 | Validation loss: 0.02387337434898924
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167 0.02492456 0.02409209 0.02389819 0.02361411
 0.02309747 0.02289379 0.02254496 0.02261598]
------------------------------
Epoch: 152
Training loss: 0.017103146326281432 | Validation loss: 0.023941471551855404
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167 0.02492456 0.02409209 0.02389819 0.02361411
 0.02309747 0.02289379 0.02254496 0.02261598]
------------------------------
Epoch: 153
Training loss: 0.018206471003728304 | Validation loss: 0.02593925995407281
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167 0.02492456 0.02409209 0.02389819 0.02361411
 0.02309747 0.02289379 0.02254496 0.02261598]
------------------------------
Epoch: 154
Training loss: 0.019363362325082827 | Validation loss: 0.02541112327189357
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167 0.02492456 0.02409209 0.02389819 0.02361411
 0.02309747 0.02289379 0.02254496 0.02261598]
------------------------------
Epoch: 155
Training loss: 0.020997219991574927 | Validation loss: 0.025342813727480394
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167 0.02492456 0.02409209 0.02389819 0.02361411
 0.02309747 0.02289379 0.02254496 0.02261598]
------------------------------
Epoch: 156
Training loss: 0.019623544086073714 | Validation loss: 0.027581549491043442
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167 0.02492456 0.02409209 0.02389819 0.02361411
 0.02309747 0.02289379 0.02254496 0.02261598]
------------------------------
Epoch: 157
Training loss: 0.01856963353125545 | Validation loss: 0.025144454367734766
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167 0.02492456 0.02409209 0.02389819 0.02361411
 0.02309747 0.02289379 0.02254496 0.02261598]
------------------------------
Epoch: 158
Training loss: 0.017901460539807026 | Validation loss: 0.02464213549952816
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167 0.02492456 0.02409209 0.02389819 0.02361411
 0.02309747 0.02289379 0.02254496 0.02261598]
------------------------------
Epoch: 159
Training loss: 0.017084604182197313 | Validation loss: 0.023683185516684142
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167 0.02492456 0.02409209 0.02389819 0.02361411
 0.02309747 0.02289379 0.02254496 0.02261598]
------------------------------
Epoch: 160
Training loss: 0.01706684923473352 | Validation loss: 0.022672119033005502
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167 0.02492456 0.02409209 0.02389819 0.02361411
 0.02309747 0.02289379 0.02254496 0.02261598 0.02267212]
------------------------------
Epoch: 161
Training loss: 0.01673129112914023 | Validation loss: 0.023932423822029873
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167 0.02492456 0.02409209 0.02389819 0.02361411
 0.02309747 0.02289379 0.02254496 0.02261598 0.02267212]
------------------------------
Epoch: 162
Training loss: 0.017171250194371716 | Validation loss: 0.02393032217191325
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167 0.02492456 0.02409209 0.02389819 0.02361411
 0.02309747 0.02289379 0.02254496 0.02261598 0.02267212]
------------------------------
Epoch: 163
Training loss: 0.017759795351771683 | Validation loss: 0.026334970016722328
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167 0.02492456 0.02409209 0.02389819 0.02361411
 0.02309747 0.02289379 0.02254496 0.02261598 0.02267212]
------------------------------
Epoch: 164
Training loss: 0.01868446520340394 | Validation loss: 0.025735552940103743
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167 0.02492456 0.02409209 0.02389819 0.02361411
 0.02309747 0.02289379 0.02254496 0.02261598 0.02267212]
------------------------------
Epoch: 165
Training loss: 0.019864219000441878 | Validation loss: 0.026584884738204657
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167 0.02492456 0.02409209 0.02389819 0.02361411
 0.02309747 0.02289379 0.02254496 0.02261598 0.02267212]
------------------------------
Epoch: 166
Training loss: 0.019493408527649272 | Validation loss: 0.02552235554213877
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167 0.02492456 0.02409209 0.02389819 0.02361411
 0.02309747 0.02289379 0.02254496 0.02261598 0.02267212]
------------------------------
Epoch: 167
Training loss: 0.0178543335580214 | Validation loss: 0.02480841196935486
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167 0.02492456 0.02409209 0.02389819 0.02361411
 0.02309747 0.02289379 0.02254496 0.02261598 0.02267212]
------------------------------
Epoch: 168
Training loss: 0.017060703226948172 | Validation loss: 0.024558599520888593
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167 0.02492456 0.02409209 0.02389819 0.02361411
 0.02309747 0.02289379 0.02254496 0.02261598 0.02267212]
------------------------------
Epoch: 169
Training loss: 0.01673183792717452 | Validation loss: 0.023999774656086055
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167 0.02492456 0.02409209 0.02389819 0.02361411
 0.02309747 0.02289379 0.02254496 0.02261598 0.02267212]
------------------------------
Epoch: 170
Training loss: 0.01663350821964867 | Validation loss: 0.02244613198908391
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167 0.02492456 0.02409209 0.02389819 0.02361411
 0.02309747 0.02289379 0.02254496 0.02261598 0.02267212 0.02244613]
------------------------------
Epoch: 171
Training loss: 0.016103041254180838 | Validation loss: 0.02298469769044055
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167 0.02492456 0.02409209 0.02389819 0.02361411
 0.02309747 0.02289379 0.02254496 0.02261598 0.02267212 0.02244613]
------------------------------
Epoch: 172
Training loss: 0.01630746733351815 | Validation loss: 0.02470569105611907
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167 0.02492456 0.02409209 0.02389819 0.02361411
 0.02309747 0.02289379 0.02254496 0.02261598 0.02267212 0.02244613]
------------------------------
Epoch: 173
Training loss: 0.017092946812707354 | Validation loss: 0.024231813089163216
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167 0.02492456 0.02409209 0.02389819 0.02361411
 0.02309747 0.02289379 0.02254496 0.02261598 0.02267212 0.02244613]
------------------------------
Epoch: 174
Training loss: 0.018157666808418262 | Validation loss: 0.02737403698955421
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167 0.02492456 0.02409209 0.02389819 0.02361411
 0.02309747 0.02289379 0.02254496 0.02261598 0.02267212 0.02244613]
------------------------------
Epoch: 175
Training loss: 0.019655547646154476 | Validation loss: 0.027440111980670027
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167 0.02492456 0.02409209 0.02389819 0.02361411
 0.02309747 0.02289379 0.02254496 0.02261598 0.02267212 0.02244613]
------------------------------
Epoch: 176
Training loss: 0.01850139775230148 | Validation loss: 0.025333901763790183
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167 0.02492456 0.02409209 0.02389819 0.02361411
 0.02309747 0.02289379 0.02254496 0.02261598 0.02267212 0.02244613]
------------------------------
Epoch: 177
Training loss: 0.017738321042672648 | Validation loss: 0.026279438459486874
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167 0.02492456 0.02409209 0.02389819 0.02361411
 0.02309747 0.02289379 0.02254496 0.02261598 0.02267212 0.02244613]
------------------------------
Epoch: 178
Training loss: 0.01703572527853757 | Validation loss: 0.02614127082267293
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167 0.02492456 0.02409209 0.02389819 0.02361411
 0.02309747 0.02289379 0.02254496 0.02261598 0.02267212 0.02244613]
------------------------------
Epoch: 179
Training loss: 0.0160271596784393 | Validation loss: 0.02310966721011533
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167 0.02492456 0.02409209 0.02389819 0.02361411
 0.02309747 0.02289379 0.02254496 0.02261598 0.02267212 0.02244613]
------------------------------
Epoch: 180
Training loss: 0.016043832913661998 | Validation loss: 0.022625376642854127
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167 0.02492456 0.02409209 0.02389819 0.02361411
 0.02309747 0.02289379 0.02254496 0.02261598 0.02267212 0.02244613
 0.02262538]
------------------------------
Epoch: 181
Training loss: 0.015809292392048046 | Validation loss: 0.023693999517019147
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167 0.02492456 0.02409209 0.02389819 0.02361411
 0.02309747 0.02289379 0.02254496 0.02261598 0.02267212 0.02244613
 0.02262538]
------------------------------
Epoch: 182
Training loss: 0.016034167735057513 | Validation loss: 0.024256648495793343
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167 0.02492456 0.02409209 0.02389819 0.02361411
 0.02309747 0.02289379 0.02254496 0.02261598 0.02267212 0.02244613
 0.02262538]
------------------------------
Epoch: 183
Training loss: 0.016748781766500173 | Validation loss: 0.024400403792107547
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167 0.02492456 0.02409209 0.02389819 0.02361411
 0.02309747 0.02289379 0.02254496 0.02261598 0.02267212 0.02244613
 0.02262538]
------------------------------
Epoch: 184
Training loss: 0.017791693662936852 | Validation loss: 0.02896435482910386
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167 0.02492456 0.02409209 0.02389819 0.02361411
 0.02309747 0.02289379 0.02254496 0.02261598 0.02267212 0.02244613
 0.02262538]
------------------------------
Epoch: 185
Training loss: 0.019187645261683237 | Validation loss: 0.029182869561568455
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167 0.02492456 0.02409209 0.02389819 0.02361411
 0.02309747 0.02289379 0.02254496 0.02261598 0.02267212 0.02244613
 0.02262538]
------------------------------
Epoch: 186
Training loss: 0.018036984204413083 | Validation loss: 0.024585284361684764
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167 0.02492456 0.02409209 0.02389819 0.02361411
 0.02309747 0.02289379 0.02254496 0.02261598 0.02267212 0.02244613
 0.02262538]
------------------------------
Epoch: 187
Training loss: 0.0170689941084815 | Validation loss: 0.02560491528775957
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167 0.02492456 0.02409209 0.02389819 0.02361411
 0.02309747 0.02289379 0.02254496 0.02261598 0.02267212 0.02244613
 0.02262538]
------------------------------
Epoch: 188
Training loss: 0.016152752020085853 | Validation loss: 0.025931633905404143
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167 0.02492456 0.02409209 0.02389819 0.02361411
 0.02309747 0.02289379 0.02254496 0.02261598 0.02267212 0.02244613
 0.02262538]
------------------------------
Epoch: 189
Training loss: 0.015870558355397326 | Validation loss: 0.022788437876712392
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167 0.02492456 0.02409209 0.02389819 0.02361411
 0.02309747 0.02289379 0.02254496 0.02261598 0.02267212 0.02244613
 0.02262538]
------------------------------
Epoch: 190
Training loss: 0.01561052555301628 | Validation loss: 0.022453050267089297
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167 0.02492456 0.02409209 0.02389819 0.02361411
 0.02309747 0.02289379 0.02254496 0.02261598 0.02267212 0.02244613
 0.02262538 0.02245305]
------------------------------
Epoch: 191
Training loss: 0.015502604122114618 | Validation loss: 0.02315064930115585
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167 0.02492456 0.02409209 0.02389819 0.02361411
 0.02309747 0.02289379 0.02254496 0.02261598 0.02267212 0.02244613
 0.02262538 0.02245305]
------------------------------
Epoch: 192
Training loss: 0.015278358620069012 | Validation loss: 0.023724095206017846
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167 0.02492456 0.02409209 0.02389819 0.02361411
 0.02309747 0.02289379 0.02254496 0.02261598 0.02267212 0.02244613
 0.02262538 0.02245305]
------------------------------
Epoch: 193
Training loss: 0.01631206663219425 | Validation loss: 0.02449931452671687
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167 0.02492456 0.02409209 0.02389819 0.02361411
 0.02309747 0.02289379 0.02254496 0.02261598 0.02267212 0.02244613
 0.02262538 0.02245305]
------------------------------
Epoch: 194
Training loss: 0.01718841513951983 | Validation loss: 0.02486682427978074
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167 0.02492456 0.02409209 0.02389819 0.02361411
 0.02309747 0.02289379 0.02254496 0.02261598 0.02267212 0.02244613
 0.02262538 0.02245305]
------------------------------
Epoch: 195
Training loss: 0.01918403732370797 | Validation loss: 0.02663724600440926
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167 0.02492456 0.02409209 0.02389819 0.02361411
 0.02309747 0.02289379 0.02254496 0.02261598 0.02267212 0.02244613
 0.02262538 0.02245305]
------------------------------
Epoch: 196
Training loss: 0.018109782920711165 | Validation loss: 0.02764201440193035
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167 0.02492456 0.02409209 0.02389819 0.02361411
 0.02309747 0.02289379 0.02254496 0.02261598 0.02267212 0.02244613
 0.02262538 0.02245305]
------------------------------
Epoch: 197
Training loss: 0.016803728043654462 | Validation loss: 0.026375215983501187
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167 0.02492456 0.02409209 0.02389819 0.02361411
 0.02309747 0.02289379 0.02254496 0.02261598 0.02267212 0.02244613
 0.02262538 0.02245305]
------------------------------
Epoch: 198
Training loss: 0.015610241978542834 | Validation loss: 0.02521015338047787
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167 0.02492456 0.02409209 0.02389819 0.02361411
 0.02309747 0.02289379 0.02254496 0.02261598 0.02267212 0.02244613
 0.02262538 0.02245305]
------------------------------
Epoch: 199
Training loss: 0.015274432143272182 | Validation loss: 0.023028226431321214
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167 0.02492456 0.02409209 0.02389819 0.02361411
 0.02309747 0.02289379 0.02254496 0.02261598 0.02267212 0.02244613
 0.02262538 0.02245305]
------------------------------
Epoch: 200
Training loss: 0.015197526428818218 | Validation loss: 0.022503956462498063
Validation loss (ends of cycles): [0.10543691 0.03865039 0.03249704 0.03022724 0.02794824 0.02691561
 0.02623731 0.02548167 0.02492456 0.02409209 0.02389819 0.02361411
 0.02309747 0.02289379 0.02254496 0.02261598 0.02267212 0.02244613
 0.02262538 0.02245305 0.02250396]
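The cycle-end losses printed above can be re-plotted to eyeball convergence; for this seed the curve plateaus around 0.0225 after roughly the twelfth cycle. A minimal sketch using the seed-14 values copied verbatim from the log (matplotlib assumed available in the runtime):

import matplotlib.pyplot as plt
import numpy as np

# Cycle-end validation losses, copied from the seed-14 log above
val_cycle_ends = np.array([
    0.10543691, 0.03865039, 0.03249704, 0.03022724, 0.02794824, 0.02691561,
    0.02623731, 0.02548167, 0.02492456, 0.02409209, 0.02389819, 0.02361411,
    0.02309747, 0.02289379, 0.02254496, 0.02261598, 0.02267212, 0.02244613,
    0.02262538, 0.02245305, 0.02250396,
])
epochs = np.arange(len(val_cycle_ends)) * 10  # one snapshot every 10 epochs
plt.plot(epochs, val_cycle_ends, marker='o')
plt.xlabel('Epoch (end of cycle)')
plt.ylabel('Validation loss (MSE)')
plt.show()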
--------------------------------------------------------------------------------
Seed: 15
--------------------------------------------------------------------------------
------------------------------
Epoch: 0
Training loss: 0.15279348028695536 | Validation loss: 0.14506536836807543
Validation loss (ends of cycles): [0.14506537]
------------------------------
Epoch: 1
Training loss: 0.0992763647545687 | Validation loss: 0.08038875761513527
Validation loss (ends of cycles): [0.14506537]
------------------------------
Epoch: 2
Training loss: 0.06854788238853819 | Validation loss: 0.07798023822789009
Validation loss (ends of cycles): [0.14506537]
------------------------------
Epoch: 3
Training loss: 0.06264783143393907 | Validation loss: 0.06590831738251907
Validation loss (ends of cycles): [0.14506537]
------------------------------
Epoch: 4
Training loss: 0.06060648671076124 | Validation loss: 0.11362000554800034
Validation loss (ends of cycles): [0.14506537]
------------------------------
Epoch: 5
Training loss: 0.05775577272571292 | Validation loss: 0.16101208042639953
Validation loss (ends of cycles): [0.14506537]
------------------------------
Epoch: 6
Training loss: 0.05431262623171816 | Validation loss: 0.05958796557612144
Validation loss (ends of cycles): [0.14506537]
------------------------------
Epoch: 7
Training loss: 0.05047389544425947 | Validation loss: 0.05647731199860573
Validation loss (ends of cycles): [0.14506537]
------------------------------
Epoch: 8
Training loss: 0.04694410883312525 | Validation loss: 0.04427507806282777
Validation loss (ends of cycles): [0.14506537]
------------------------------
Epoch: 9
Training loss: 0.04459618917300633 | Validation loss: 0.04090933745297102
Validation loss (ends of cycles): [0.14506537]
------------------------------
Epoch: 10
Training loss: 0.04245714181921019 | Validation loss: 0.03974307392938779
Validation loss (ends of cycles): [0.14506537 0.03974307]
------------------------------
Epoch: 11
Training loss: 0.04292430049977322 | Validation loss: 0.04144837728773172
Validation loss (ends of cycles): [0.14506537 0.03974307]
------------------------------
Epoch: 12
Training loss: 0.043996825655884586 | Validation loss: 0.042766958904954105
Validation loss (ends of cycles): [0.14506537 0.03974307]
------------------------------
Epoch: 13
Training loss: 0.04482655116330515 | Validation loss: 0.046519542686068095
Validation loss (ends of cycles): [0.14506537 0.03974307]
------------------------------
Epoch: 14
Training loss: 0.04563175601062746 | Validation loss: 0.040774742857767984
Validation loss (ends of cycles): [0.14506537 0.03974307]
------------------------------
Epoch: 15
Training loss: 0.044861849880049584 | Validation loss: 0.047626798757566854
Validation loss (ends of cycles): [0.14506537 0.03974307]
------------------------------
Epoch: 16
Training loss: 0.0434767716914898 | Validation loss: 0.05095124581398872
Validation loss (ends of cycles): [0.14506537 0.03974307]
------------------------------
Epoch: 17
Training loss: 0.0410251230046696 | Validation loss: 0.045552261030444734
Validation loss (ends of cycles): [0.14506537 0.03974307]
------------------------------
Epoch: 18
Training loss: 0.03894493085622546 | Validation loss: 0.03951437076410422
Validation loss (ends of cycles): [0.14506537 0.03974307]
------------------------------
Epoch: 19
Training loss: 0.03676822814366475 | Validation loss: 0.036260907824795977
Validation loss (ends of cycles): [0.14506537 0.03974307]
------------------------------
Epoch: 20
Training loss: 0.03533417369551987 | Validation loss: 0.0353879089682148
Validation loss (ends of cycles): [0.14506537 0.03974307 0.03538791]
------------------------------
Epoch: 21
Training loss: 0.03569188686502486 | Validation loss: 0.037464807741343975
Validation loss (ends of cycles): [0.14506537 0.03974307 0.03538791]
------------------------------
Epoch: 22
Training loss: 0.03668812884070612 | Validation loss: 0.03660466314221804
Validation loss (ends of cycles): [0.14506537 0.03974307 0.03538791]
------------------------------
Epoch: 23
Training loss: 0.037543252314090246 | Validation loss: 0.041893985194082446
Validation loss (ends of cycles): [0.14506537 0.03974307 0.03538791]
------------------------------
Epoch: 24
Training loss: 0.03912920638285426 | Validation loss: 0.05439267536768547
Validation loss (ends of cycles): [0.14506537 0.03974307 0.03538791]
------------------------------
Epoch: 25
Training loss: 0.040333300218106764 | Validation loss: 0.03696370225113172
Validation loss (ends of cycles): [0.14506537 0.03974307 0.03538791]
------------------------------
Epoch: 26
Training loss: 0.03786210742918586 | Validation loss: 0.03918445941347342
Validation loss (ends of cycles): [0.14506537 0.03974307 0.03538791]
------------------------------
Epoch: 27
Training loss: 0.03594942207670646 | Validation loss: 0.037800938105927065
Validation loss (ends of cycles): [0.14506537 0.03974307 0.03538791]
------------------------------
Epoch: 28
Training loss: 0.0335141976216906 | Validation loss: 0.03604494651349691
Validation loss (ends of cycles): [0.14506537 0.03974307 0.03538791]
------------------------------
Epoch: 29
Training loss: 0.03202974798557488 | Validation loss: 0.03684409148991108
Validation loss (ends of cycles): [0.14506537 0.03974307 0.03538791]
------------------------------
Epoch: 30
Training loss: 0.031134030864638115 | Validation loss: 0.03303491732535454
Validation loss (ends of cycles): [0.14506537 0.03974307 0.03538791 0.03303492]
------------------------------
Epoch: 31
Training loss: 0.03132372897359644 | Validation loss: 0.05156394910927002
Validation loss (ends of cycles): [0.14506537 0.03974307 0.03538791 0.03303492]
------------------------------
Epoch: 32
Training loss: 0.03225202846382311 | Validation loss: 0.03565296494903473
Validation loss (ends of cycles): [0.14506537 0.03974307 0.03538791 0.03303492]
------------------------------
Epoch: 33
Training loss: 0.033366888260039 | Validation loss: 0.04518821405676695
Validation loss (ends of cycles): [0.14506537 0.03974307 0.03538791 0.03303492]
------------------------------
Epoch: 34
Training loss: 0.03463769986139618 | Validation loss: 0.0466082334661713
Validation loss (ends of cycles): [0.14506537 0.03974307 0.03538791 0.03303492]
------------------------------
Epoch: 35
Training loss: 0.035970898729707546 | Validation loss: 0.035931507841898844
Validation loss (ends of cycles): [0.14506537 0.03974307 0.03538791 0.03303492]
------------------------------
Epoch: 36
Training loss: 0.033702543582239376 | Validation loss: 0.039530598415205113
Validation loss (ends of cycles): [0.14506537 0.03974307 0.03538791 0.03303492]
------------------------------
Epoch: 37
Training loss: 0.03226060464390015 | Validation loss: 0.035033257463230535
Validation loss (ends of cycles): [0.14506537 0.03974307 0.03538791 0.03303492]
------------------------------
Epoch: 38
Training loss: 0.030439186973218253 | Validation loss: 0.03421707866856685
Validation loss (ends of cycles): [0.14506537 0.03974307 0.03538791 0.03303492]
------------------------------
Epoch: 39
Training loss: 0.028959295595524764 | Validation loss: 0.03411362642565599
Validation loss (ends of cycles): [0.14506537 0.03974307 0.03538791 0.03303492]
------------------------------
Epoch: 40
Training loss: 0.028355397585673854 | Validation loss: 0.03144805756612466
Validation loss (ends of cycles): [0.14506537 0.03974307 0.03538791 0.03303492 0.03144806]
------------------------------
Epoch: 41
Training loss: 0.028508084403871283 | Validation loss: 0.034525489721160665
Validation loss (ends of cycles): [0.14506537 0.03974307 0.03538791 0.03303492 0.03144806]
------------------------------
Epoch: 42
Training loss: 0.02908853772152894 | Validation loss: 0.03552745999051975
Validation loss (ends of cycles): [0.14506537 0.03974307 0.03538791 0.03303492 0.03144806]
------------------------------
Epoch: 43
Training loss: 0.030279508965308607 | Validation loss: 0.0412328397282041
Validation loss (ends of cycles): [0.14506537 0.03974307 0.03538791 0.03303492 0.03144806]
------------------------------
Epoch: 44
Training loss: 0.03161932406607668 | Validation loss: 0.04157860963963545
Validation loss (ends of cycles): [0.14506537 0.03974307 0.03538791 0.03303492 0.03144806]
------------------------------
Epoch: 45
Training loss: 0.032413170465573606 | Validation loss: 0.03746562243367617
Validation loss (ends of cycles): [0.14506537 0.03974307 0.03538791 0.03303492 0.03144806]
------------------------------
Epoch: 46
Training loss: 0.03155249784802377 | Validation loss: 0.03418631808689007
Validation loss (ends of cycles): [0.14506537 0.03974307 0.03538791 0.03303492 0.03144806]
------------------------------
Epoch: 47
Training loss: 0.029698396640780726 | Validation loss: 0.0433016954562985
Validation loss (ends of cycles): [0.14506537 0.03974307 0.03538791 0.03303492 0.03144806]
------------------------------
Epoch: 48
Training loss: 0.02817893827613066 | Validation loss: 0.032205368463809676
Validation loss (ends of cycles): [0.14506537 0.03974307 0.03538791 0.03303492 0.03144806]
------------------------------
Epoch: 49
Training loss: 0.026418235477225983 | Validation loss: 0.031983047186468654
Validation loss (ends of cycles): [0.14506537 0.03974307 0.03538791 0.03303492 0.03144806]
------------------------------
Epoch: 50
Training loss: 0.02586102904940424 | Validation loss: 0.03005178444660627
Validation loss (ends of cycles): [0.14506537 0.03974307 0.03538791 0.03303492 0.03144806 0.03005178]
------------------------------
Epoch: 51
Training loss: 0.025684328844039307 | Validation loss: 0.0325129132789488
Validation loss (ends of cycles): [0.14506537 0.03974307 0.03538791 0.03303492 0.03144806 0.03005178]
------------------------------
Epoch: 52
Training loss: 0.026494500268748413 | Validation loss: 0.035909466803647004
Validation loss (ends of cycles): [0.14506537 0.03974307 0.03538791 0.03303492 0.03144806 0.03005178]
------------------------------
Epoch: 53
Training loss: 0.02768527107079502 | Validation loss: 0.03419312082517605
Validation loss (ends of cycles): [0.14506537 0.03974307 0.03538791 0.03303492 0.03144806 0.03005178]
------------------------------
Epoch: 54
Training loss: 0.029022690255632283 | Validation loss: 0.0756713766604662
Validation loss (ends of cycles): [0.14506537 0.03974307 0.03538791 0.03303492 0.03144806 0.03005178]
------------------------------
Epoch: 55
Training loss: 0.030787047692518003 | Validation loss: 0.05678711172479849
Validation loss (ends of cycles): [0.14506537 0.03974307 0.03538791 0.03303492 0.03144806 0.03005178]
------------------------------
Epoch: 56
Training loss: 0.029405175485772643 | Validation loss: 0.03094115208547849
Validation loss (ends of cycles): [0.14506537 0.03974307 0.03538791 0.03303492 0.03144806 0.03005178]
------------------------------
Epoch: 57
Training loss: 0.027223537728823872 | Validation loss: 0.03423833968834235
Validation loss (ends of cycles): [0.14506537 0.03974307 0.03538791 0.03303492 0.03144806 0.03005178]
------------------------------
Epoch: 58
Training loss: 0.025910726122497788 | Validation loss: 0.03238570869255524
Validation loss (ends of cycles): [0.14506537 0.03974307 0.03538791 0.03303492 0.03144806 0.03005178]
------------------------------
Epoch: 59
Training loss: 0.024620303329879696 | Validation loss: 0.032775767075900845
Validation loss (ends of cycles): [0.14506537 0.03974307 0.03538791 0.03303492 0.03144806 0.03005178]
------------------------------
Epoch: 60
Training loss: 0.024254488279884644 | Validation loss: 0.02928792341397359
Validation loss (ends of cycles): [0.14506537 0.03974307 0.03538791 0.03303492 0.03144806 0.03005178
 0.02928792]
------------------------------
Epoch: 61
Training loss: 0.023843600908633668 | Validation loss: 0.03604337021421928
Validation loss (ends of cycles): [0.14506537 0.03974307 0.03538791 0.03303492 0.03144806 0.03005178
 0.02928792]
------------------------------
Epoch: 62
Training loss: 0.024692987382170642 | Validation loss: 0.03211906702759174
Validation loss (ends of cycles): [0.14506537 0.03974307 0.03538791 0.03303492 0.03144806 0.03005178
 0.02928792]
------------------------------
Epoch: 63
Training loss: 0.025721851939916128 | Validation loss: 0.0374490234714288
Validation loss (ends of cycles): [0.14506537 0.03974307 0.03538791 0.03303492 0.03144806 0.03005178
 0.02928792]
------------------------------
Epoch: 64
Training loss: 0.027135886828818543 | Validation loss: 0.03476525499270512
Validation loss (ends of cycles): [0.14506537 0.03974307 0.03538791 0.03303492 0.03144806 0.03005178
 0.02928792]
------------------------------
Epoch: 65
Training loss: 0.028328854775410674 | Validation loss: 0.06682556294477902
Validation loss (ends of cycles): [0.14506537 0.03974307 0.03538791 0.03303492 0.03144806 0.03005178
 0.02928792]
------------------------------
Epoch: 66
Training loss: 0.027366366840687842 | Validation loss: 0.03239961333859425
Validation loss (ends of cycles): [0.14506537 0.03974307 0.03538791 0.03303492 0.03144806 0.03005178
 0.02928792]
------------------------------
Epoch: 67
Training loss: 0.02528798375052479 | Validation loss: 0.0460323214244384
Validation loss (ends of cycles): [0.14506537 0.03974307 0.03538791 0.03303492 0.03144806 0.03005178
 0.02928792]
------------------------------
Epoch: 68
Training loss: 0.024234542412012214 | Validation loss: 0.03999455079722863
Validation loss (ends of cycles): [0.14506537 0.03974307 0.03538791 0.03303492 0.03144806 0.03005178
 0.02928792]
------------------------------
Epoch: 69
Training loss: 0.02321765907033373 | Validation loss: 0.03608835130356825
Validation loss (ends of cycles): [0.14506537 0.03974307 0.03538791 0.03303492 0.03144806 0.03005178
 0.02928792]
------------------------------
Epoch: 70
Training loss: 0.0221617315492408 | Validation loss: 0.028585847467184067
Validation loss (ends of cycles): [0.14506537 0.03974307 0.03538791 0.03303492 0.03144806 0.03005178
 0.02928792 0.02858585]
------------------------------
Epoch: 71
Training loss: 0.022177805008012273 | Validation loss: 0.030094460082741883
Validation loss (ends of cycles): [0.14506537 0.03974307 0.03538791 0.03303492 0.03144806 0.03005178
 0.02928792 0.02858585]
------------------------------
Epoch: 72
Training loss: 0.022704321559262178 | Validation loss: 0.03219181979791476
Validation loss (ends of cycles): [0.14506537 0.03974307 0.03538791 0.03303492 0.03144806 0.03005178
 0.02928792 0.02858585]
------------------------------
Epoch: 73
Training loss: 0.023915208310552456 | Validation loss: 0.031648835382209375
Validation loss (ends of cycles): [0.14506537 0.03974307 0.03538791 0.03303492 0.03144806 0.03005178
 0.02928792 0.02858585]
------------------------------
Epoch: 74
Training loss: 0.02473944200858896 | Validation loss: 0.03561788174108817
Validation loss (ends of cycles): [0.14506537 0.03974307 0.03538791 0.03303492 0.03144806 0.03005178
 0.02928792 0.02858585]
------------------------------
Epoch: 75
Training loss: 0.027072073639132957 | Validation loss: 0.03957040794193745
Validation loss (ends of cycles): [0.14506537 0.03974307 0.03538791 0.03303492 0.03144806 0.03005178
 0.02928792 0.02858585]
------------------------------
Epoch: 76
Training loss: 0.025478746504466302 | Validation loss: 0.04291144543542312
Validation loss (ends of cycles): [0.14506537 0.03974307 0.03538791 0.03303492 0.03144806 0.03005178
 0.02928792 0.02858585]
------------------------------
Epoch: 77
Training loss: 0.02397834242280075 | Validation loss: 0.03551239255242623
Validation loss (ends of cycles): [0.14506537 0.03974307 0.03538791 0.03303492 0.03144806 0.03005178
 0.02928792 0.02858585]
------------------------------
Epoch: 78
Training loss: 0.022762301095161842 | Validation loss: 0.035777714963142686
Validation loss (ends of cycles): [0.14506537 0.03974307 0.03538791 0.03303492 0.03144806 0.03005178
 0.02928792 0.02858585]
------------------------------
Epoch: 79
Training loss: 0.021810038727877835 | Validation loss: 0.03426963635362112
Validation loss (ends of cycles): [0.14506537 0.03974307 0.03538791 0.03303492 0.03144806 0.03005178
 0.02928792 0.02858585]
------------------------------
Epoch: 80
Training loss: 0.021307517216363658 | Validation loss: 0.028310040656763773
Validation loss (ends of cycles): [0.14506537 0.03974307 0.03538791 0.03303492 0.03144806 0.03005178
 0.02928792 0.02858585 0.02831004]
------------------------------
Epoch: 81
Training loss: 0.021035488740152677 | Validation loss: 0.04373047608309067
Validation loss (ends of cycles): [0.14506537 0.03974307 0.03538791 0.03303492 0.03144806 0.03005178
 0.02928792 0.02858585 0.02831004]
------------------------------
Epoch: 82
Training loss: 0.021291860884530583 | Validation loss: 0.044406829974972285
Validation loss (ends of cycles): [0.14506537 0.03974307 0.03538791 0.03303492 0.03144806 0.03005178
 0.02928792 0.02858585 0.02831004]
------------------------------
Epoch: 83
Training loss: 0.022908585479762027 | Validation loss: 0.034169698492265664
Validation loss (ends of cycles): [0.14506537 0.03974307 0.03538791 0.03303492 0.03144806 0.03005178
 0.02928792 0.02858585 0.02831004]
------------------------------
Epoch: 84
Training loss: 0.024446617925034362 | Validation loss: 0.034328691518077485
Validation loss (ends of cycles): [0.14506537 0.03974307 0.03538791 0.03303492 0.03144806 0.03005178
 0.02928792 0.02858585 0.02831004]
------------------------------
Epoch: 85
Training loss: 0.025942979482292888 | Validation loss: 0.060097817689753495
Validation loss (ends of cycles): [0.14506537 0.03974307 0.03538791 0.03303492 0.03144806 0.03005178
 0.02928792 0.02858585 0.02831004]
------------------------------
Epoch: 86
Training loss: 0.024310822792818793 | Validation loss: 0.03501157462596893
Validation loss (ends of cycles): [0.14506537 0.03974307 0.03538791 0.03303492 0.03144806 0.03005178
 0.02928792 0.02858585 0.02831004]
------------------------------
Epoch: 87
Training loss: 0.022634415940083714 | Validation loss: 0.03473313940832248
Validation loss (ends of cycles): [0.14506537 0.03974307 0.03538791 0.03303492 0.03144806 0.03005178
 0.02928792 0.02858585 0.02831004]
------------------------------
Epoch: 88
Training loss: 0.021089079691783377 | Validation loss: 0.03671561274677515
Validation loss (ends of cycles): [0.14506537 0.03974307 0.03538791 0.03303492 0.03144806 0.03005178
 0.02928792 0.02858585 0.02831004]
------------------------------
Epoch: 89
Training loss: 0.0206126121479852 | Validation loss: 0.038643558796208635
Validation loss (ends of cycles): [0.14506537 0.03974307 0.03538791 0.03303492 0.03144806 0.03005178
 0.02928792 0.02858585 0.02831004]
------------------------------
Epoch: 90
Training loss: 0.02040816003999669 | Validation loss: 0.027402662170621064
Validation loss (ends of cycles): [0.14506537 0.03974307 0.03538791 0.03303492 0.03144806 0.03005178
 0.02928792 0.02858585 0.02831004 0.02740266]
------------------------------
Epoch: 91
Training loss: 0.019678906183161957 | Validation loss: 0.034164513962773174
Validation loss (ends of cycles): [0.14506537 0.03974307 0.03538791 0.03303492 0.03144806 0.03005178
 0.02928792 0.02858585 0.02831004 0.02740266]
------------------------------
Epoch: 92
Training loss: 0.020458108885025205 | Validation loss: 0.043395852025311724
Validation loss (ends of cycles): [0.14506537 0.03974307 0.03538791 0.03303492 0.03144806 0.03005178
 0.02928792 0.02858585 0.02831004 0.02740266]
------------------------------
Epoch: 93
Training loss: 0.02149372065801792 | Validation loss: 0.04887792203002251
Validation loss (ends of cycles): [0.14506537 0.03974307 0.03538791 0.03303492 0.03144806 0.03005178
 0.02928792 0.02858585 0.02831004 0.02740266]
------------------------------
Epoch: 94
Training loss: 0.022725580859672925 | Validation loss: 0.03178696441822327
Validation loss (ends of cycles): [0.14506537 0.03974307 0.03538791 0.03303492 0.03144806 0.03005178
 0.02928792 0.02858585 0.02831004 0.02740266]
------------------------------
Epoch: 95
Training loss: 0.024382544900120994 | Validation loss: 0.04414457856462552
Validation loss (ends of cycles): [0.14506537 0.03974307 0.03538791 0.03303492 0.03144806 0.03005178
 0.02928792 0.02858585 0.02831004 0.02740266]
------------------------------
Epoch: 96
Training loss: 0.022855830495717072 | Validation loss: 0.041788846684189945
Validation loss (ends of cycles): [0.14506537 0.03974307 0.03538791 0.03303492 0.03144806 0.03005178
 0.02928792 0.02858585 0.02831004 0.02740266]
------------------------------
Epoch: 97
Training loss: 0.021017923792786445 | Validation loss: 0.039392892127999894
Validation loss (ends of cycles): [0.14506537 0.03974307 0.03538791 0.03303492 0.03144806 0.03005178
 0.02928792 0.02858585 0.02831004 0.02740266]
------------------------------
Epoch: 98
Training loss: 0.020251478479519063 | Validation loss: 0.035716857617864244
Validation loss (ends of cycles): [0.14506537 0.03974307 0.03538791 0.03303492 0.03144806 0.03005178
 0.02928792 0.02858585 0.02831004 0.02740266]
------------------------------
Epoch: 99
Training loss: 0.01932824108945696 | Validation loss: 0.034940155796133555
Validation loss (ends of cycles): [0.14506537 0.03974307 0.03538791 0.03303492 0.03144806 0.03005178
 0.02928792 0.02858585 0.02831004 0.02740266]
------------------------------
Epoch: 100
Training loss: 0.020082863217393155 | Validation loss: 0.026871812601502124
Validation loss (ends of cycles): [0.14506537 0.03974307 0.03538791 0.03303492 0.03144806 0.03005178
 0.02928792 0.02858585 0.02831004 0.02740266 0.02687181]
------------------------------
Epoch: 101
Training loss: 0.018461225810743536 | Validation loss: 0.03337421948806597
Validation loss (ends of cycles): [0.14506537 0.03974307 0.03538791 0.03303492 0.03144806 0.03005178
 0.02928792 0.02858585 0.02831004 0.02740266 0.02687181]
------------------------------
Epoch: 102
Training loss: 0.01906144128818261 | Validation loss: 0.044319855908934884
Validation loss (ends of cycles): [0.14506537 0.03974307 0.03538791 0.03303492 0.03144806 0.03005178
 0.02928792 0.02858585 0.02831004 0.02740266 0.02687181]
------------------------------
Epoch: 103
... (remaining training output truncated)
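Note how the "Validation loss (ends of cycles)" array grows only every 10 epochs: with step_size_up=5 and one scheduler step per epoch, a triangular cycle spans 10 epochs, so the learner records the validation loss each time the learning rate returns to base_lr. The toy loop below (stand-in model and random losses, not mirzai's actual Learner) sketches that cadence using the notebook's params_scheduler:

import numpy as np
import torch
from torch import nn
from torch.optim import Adam
from torch.optim.lr_scheduler import CyclicLR

# Stand-in model/optimizer so the bookkeeping can run on its own
toy_model = nn.Linear(10, 1)
toy_opt = Adam(toy_model.parameters(), lr=3e-5)
toy_scheduler = CyclicLR(toy_opt, **params_scheduler)

cycle_len = 2 * params_scheduler['step_size_up']  # up + down phases = 10 epochs
cycle_end_losses = []  # mirrors "Validation loss (ends of cycles)"
for epoch in range(31):
    val_loss = float(np.random.rand())  # placeholder for a real validation pass
    if epoch % cycle_len == 0:          # learning rate is back at base_lr
        cycle_end_losses.append(val_loss)
    toy_scheduler.step()                # one scheduler step per epoch
print(np.array(cycle_end_losses))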
# Replace the following paths with yours
src_dir_model = Path('/content/drive/MyDrive/research/predict-k-mirs-dl/dumps/cnn/train_eval/mollisols/models')

seeds = range(20)
order = 1  # taxonomic order index for Mollisols (matches the model path above)
learners = Learners(Model, tax_lookup, seeds=seeds, device=device)
perfs_local_mollisols, _, _, _ = learners.evaluate((X, y, depth_order[:, -1]),
                                                   order=order,
                                                   src_dir_model=src_dir_model)

perfs_local_mollisols.describe()
             rpd       rpiq         r2       lccc       rmse        mse        mae       mape       bias        stb
count  20.000000  20.000000  20.000000  20.000000  20.000000  20.000000  20.000000  20.000000  20.000000  20.000000
mean    2.103741   2.777945   0.772714   0.873048   0.454543   0.212442   0.214790  26.963759   0.008362   0.019571
std     0.085642   0.136850   0.019085   0.011321   0.078355   0.080627   0.010765   1.352271   0.013197   0.031140
min     1.923080   2.550751   0.729316   0.846127   0.360252   0.129782   0.200901  25.281143  -0.008894  -0.021487
25%     2.058124   2.688481   0.763672   0.865870   0.408399   0.166824   0.206547  25.622950  -0.004228  -0.009841
50%     2.111893   2.778513   0.775535   0.873508   0.428046   0.183224   0.212339  27.126652   0.009768   0.022086
75%     2.166518   2.872471   0.786733   0.881705   0.470035   0.220976   0.221762  27.655639   0.017485   0.041642
max     2.256515   3.048478   0.803395   0.891445   0.664916   0.442113   0.240433  30.713740   0.039076   0.092671
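For the less common column names: rpd is the ratio of the standard deviation of the observations to the RMSE, rpiq is the ratio of their inter-quartile range to the RMSE (higher is better for both), and lccc is Lin's concordance correlation coefficient. A minimal NumPy sketch of the two ratio metrics (not mirzai's own implementation):

import numpy as np

def rpd(y_true, y_pred):
    # Ratio of performance to deviation: SD of observations over RMSE
    rmse = np.sqrt(np.mean((y_true - y_pred) ** 2))
    return np.std(y_true) / rmse

def rpiq(y_true, y_pred):
    # Ratio of performance to inter-quartile range: IQR over RMSE
    rmse = np.sqrt(np.mean((y_true - y_pred) ** 2))
    q1, q3 = np.percentile(y_true, [25, 75])
    return (q3 - q1) / rmse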

Train and test on Gelisols
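Gelisols are a comparatively rare order in the KSSL library, which is presumably why the schedule below is much shorter (31 epochs, i.e. three full 10-epoch learning-rate cycles after the initial epoch). Passing order to Learners.train restricts training to samples of that taxonomic order; conceptually the selection is just a boolean mask over the last column of depth_order, as in this sketch (the actual filtering happens inside Learners):

import numpy as np

# Conceptual per-order subsetting (a sketch; Learners does this internally)
gelisols_code = 12  # order index used in the cell below
mask = depth_order[:, -1] == gelisols_code
X_sub, y_sub = X[mask], y[mask]
print(f'{mask.sum()} Gelisols samples out of {len(mask)}')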

# Replace the following paths with yours
dest_dir_loss = Path('/content/drive/MyDrive/research/predict-k-mirs-dl/dumps/cnn/train_eval/gelisols/losses')
dest_dir_model = Path('/content/drive/MyDrive/research/predict-k-mirs-dl/dumps/cnn/train_eval/gelisols/models')

order = 12  # taxonomic order index for Gelisols (matches the dump paths above)
seeds = range(20)
n_epochs = 31  # stop on a cycle boundary: epochs 0-30, three full 10-epoch LR cycles
learners = Learners(Model, tax_lookup, seeds=seeds, device=device)
learners.train((X, y, depth_order[:, -1]),
               order=order,
               dest_dir_loss=dest_dir_loss,
               dest_dir_model=dest_dir_model,
               n_epochs=n_epochs,
               sc_kwargs=params_scheduler)
--------------------------------------------------------------------------------
Seed: 0
--------------------------------------------------------------------------------
------------------------------
Epoch: 0
Training loss: 0.44518339904871856 | Validation loss: 0.41080141067504883
Validation loss (ends of cycles): [0.41080141]
------------------------------
Epoch: 1
Training loss: 0.4287309023466977 | Validation loss: 0.4031795859336853
Validation loss (ends of cycles): [0.41080141]
------------------------------
Epoch: 2
Training loss: 0.4021740339019082 | Validation loss: 0.3900226950645447
Validation loss (ends of cycles): [0.41080141]
------------------------------
Epoch: 3
Training loss: 0.37238084186207165 | Validation loss: 0.37106457352638245
Validation loss (ends of cycles): [0.41080141]
------------------------------
Epoch: 4
Training loss: 0.3336567445234819 | Validation loss: 0.3466283082962036
Validation loss (ends of cycles): [0.41080141]
------------------------------
Epoch: 5
Training loss: 0.28763867101886054 | Validation loss: 0.31739506125450134
Validation loss (ends of cycles): [0.41080141]
------------------------------
Epoch: 6
Training loss: 0.2438681653954766 | Validation loss: 0.28248274326324463
Validation loss (ends of cycles): [0.41080141]
------------------------------
Epoch: 7
Training loss: 0.21182731132615695 | Validation loss: 0.2361939251422882
Validation loss (ends of cycles): [0.41080141]
------------------------------
Epoch: 8
Training loss: 0.19058756801215085 | Validation loss: 0.1982204169034958
Validation loss (ends of cycles): [0.41080141]
------------------------------
Epoch: 9
Training loss: 0.17826715314930136 | Validation loss: 0.16747665405273438
Validation loss (ends of cycles): [0.41080141]
------------------------------
Epoch: 10
Training loss: 0.17355785789814862 | Validation loss: 0.1784353256225586
Validation loss (ends of cycles): [0.41080141 0.17843533]
------------------------------
Epoch: 11
Training loss: 0.16932719945907593 | Validation loss: 0.15043631196022034
Validation loss (ends of cycles): [0.41080141 0.17843533]
------------------------------
Epoch: 12
Training loss: 0.15943419459191235 | Validation loss: 0.12366648763418198
Validation loss (ends of cycles): [0.41080141 0.17843533]
------------------------------
Epoch: 13
Training loss: 0.14414918016303668 | Validation loss: 0.14775261282920837
Validation loss (ends of cycles): [0.41080141 0.17843533]
------------------------------
Epoch: 14
Training loss: 0.12919475544582715 | Validation loss: 0.10592546314001083
Validation loss (ends of cycles): [0.41080141 0.17843533]
------------------------------
Epoch: 15
Training loss: 0.1137502295049754 | Validation loss: 0.09366549551486969
Validation loss (ends of cycles): [0.41080141 0.17843533]
------------------------------
Epoch: 16
Training loss: 0.09954378415237773 | Validation loss: 0.13239187002182007
Validation loss (ends of cycles): [0.41080141 0.17843533]
------------------------------
Epoch: 17
Training loss: 0.09346607327461243 | Validation loss: 0.09203055500984192
Validation loss (ends of cycles): [0.41080141 0.17843533]
------------------------------
Epoch: 18
Training loss: 0.08725625784559683 | Validation loss: 0.08837558329105377
Validation loss (ends of cycles): [0.41080141 0.17843533]
------------------------------
Epoch: 19
Training loss: 0.08394723622636362 | Validation loss: 0.08583536744117737
Validation loss (ends of cycles): [0.41080141 0.17843533]
------------------------------
Epoch: 20
Training loss: 0.08184474774382332 | Validation loss: 0.08680430799722672
Validation loss (ends of cycles): [0.41080141 0.17843533 0.08680431]
------------------------------
Epoch: 21
Training loss: 0.07987467301162807 | Validation loss: 0.09593759477138519
Validation loss (ends of cycles): [0.41080141 0.17843533 0.08680431]
------------------------------
Epoch: 22
Training loss: 0.07881784202022986 | Validation loss: 0.15475192666053772
Validation loss (ends of cycles): [0.41080141 0.17843533 0.08680431]
------------------------------
Epoch: 23
Training loss: 0.08277476172555577 | Validation loss: 0.11691433191299438
Validation loss (ends of cycles): [0.41080141 0.17843533 0.08680431]
------------------------------
Epoch: 24
Training loss: 0.07523983920162375 | Validation loss: 0.09792322665452957
Validation loss (ends of cycles): [0.41080141 0.17843533 0.08680431]
------------------------------
Epoch: 25
Training loss: 0.08776683563535864 | Validation loss: 0.14275996387004852
Validation loss (ends of cycles): [0.41080141 0.17843533 0.08680431]
------------------------------
Epoch: 26
Training loss: 0.08074803311716426 | Validation loss: 0.09750550240278244
Validation loss (ends of cycles): [0.41080141 0.17843533 0.08680431]
------------------------------
Epoch: 27
Training loss: 0.07898637584664604 | Validation loss: 0.08195220679044724
Validation loss (ends of cycles): [0.41080141 0.17843533 0.08680431]
------------------------------
Epoch: 28
Training loss: 0.07386642017147758 | Validation loss: 0.08875860273838043
Validation loss (ends of cycles): [0.41080141 0.17843533 0.08680431]
------------------------------
Epoch: 29
Training loss: 0.06991859241812066 | Validation loss: 0.08599168062210083
Validation loss (ends of cycles): [0.41080141 0.17843533 0.08680431]
------------------------------
Epoch: 30
Training loss: 0.07041569883850488 | Validation loss: 0.08440250158309937
Validation loss (ends of cycles): [0.41080141 0.17843533 0.08680431 0.0844025 ]
--------------------------------------------------------------------------------
Seed: 1
--------------------------------------------------------------------------------
------------------------------
Epoch: 0
Training loss: 0.2712295380505649 | Validation loss: 0.29694056510925293
Validation loss (ends of cycles): [0.29694057]
------------------------------
Epoch: 1
Training loss: 0.2557703337886117 | Validation loss: 0.2916700839996338
Validation loss (ends of cycles): [0.29694057]
------------------------------
Epoch: 2
Training loss: 0.23915838382460855 | Validation loss: 0.2821319103240967
Validation loss (ends of cycles): [0.29694057]
------------------------------
Epoch: 3
Training loss: 0.21806168962608685 | Validation loss: 0.27006006240844727
Validation loss (ends of cycles): [0.29694057]
------------------------------
Epoch: 4
Training loss: 0.19106543605977838 | Validation loss: 0.2555059790611267
Validation loss (ends of cycles): [0.29694057]
------------------------------
Epoch: 5
Training loss: 0.1593422293663025 | Validation loss: 0.23977532982826233
Validation loss (ends of cycles): [0.29694057]
------------------------------
Epoch: 6
Training loss: 0.1341805329377001 | Validation loss: 0.21363312005996704
Validation loss (ends of cycles): [0.29694057]
------------------------------
Epoch: 7
Training loss: 0.11576782234690407 | Validation loss: 0.17318867146968842
Validation loss (ends of cycles): [0.29694057]
------------------------------
Epoch: 8
Training loss: 0.10450286960059946 | Validation loss: 0.14469444751739502
Validation loss (ends of cycles): [0.29694057]
------------------------------
Epoch: 9
Training loss: 0.0977388471364975 | Validation loss: 0.1382673680782318
Validation loss (ends of cycles): [0.29694057]
------------------------------
Epoch: 10
Training loss: 0.0953068191354925 | Validation loss: 0.1350218504667282
Validation loss (ends of cycles): [0.29694057 0.13502185]
------------------------------
Epoch: 11
Training loss: 0.09208406778899106 | Validation loss: 0.13349907100200653
Validation loss (ends of cycles): [0.29694057 0.13502185]
------------------------------
Epoch: 12
Training loss: 0.08720466291362589 | Validation loss: 0.1306271106004715
Validation loss (ends of cycles): [0.29694057 0.13502185]
------------------------------
Epoch: 13
Training loss: 0.08318295160477812 | Validation loss: 0.12258781492710114
Validation loss (ends of cycles): [0.29694057 0.13502185]
------------------------------
Epoch: 14
Training loss: 0.0795554786243222 | Validation loss: 0.1179313138127327
Validation loss (ends of cycles): [0.29694057 0.13502185]
------------------------------
Epoch: 15
Training loss: 0.0752499575980685 | Validation loss: 0.21455103158950806
Validation loss (ends of cycles): [0.29694057 0.13502185]
------------------------------
Epoch: 16
Training loss: 0.07196846248751337 | Validation loss: 0.10781153291463852
Validation loss (ends of cycles): [0.29694057 0.13502185]
------------------------------
Epoch: 17
Training loss: 0.07144239443269643 | Validation loss: 0.11611378192901611
Validation loss (ends of cycles): [0.29694057 0.13502185]
------------------------------
Epoch: 18
Training loss: 0.061659027060324494 | Validation loss: 0.09441842883825302
Validation loss (ends of cycles): [0.29694057 0.13502185]
------------------------------
Epoch: 19
Training loss: 0.0629522180692716 | Validation loss: 0.09655869007110596
Validation loss (ends of cycles): [0.29694057 0.13502185]
------------------------------
Epoch: 20
Training loss: 0.056854997338219124 | Validation loss: 0.09356872737407684
Validation loss (ends of cycles): [0.29694057 0.13502185 0.09356873]
------------------------------
Epoch: 21
Training loss: 0.05831065249036659 | Validation loss: 0.10779251158237457
Validation loss (ends of cycles): [0.29694057 0.13502185 0.09356873]
------------------------------
Epoch: 22
Training loss: 0.05809896981174296 | Validation loss: 0.08884022384881973
Validation loss (ends of cycles): [0.29694057 0.13502185 0.09356873]
------------------------------
Epoch: 23
Training loss: 0.05916342342441732 | Validation loss: 0.11716677248477936
Validation loss (ends of cycles): [0.29694057 0.13502185 0.09356873]
------------------------------
Epoch: 24
Training loss: 0.060772972012108024 | Validation loss: 0.09863302856683731
Validation loss (ends of cycles): [0.29694057 0.13502185 0.09356873]
------------------------------
Epoch: 25
Training loss: 0.06027982553297823 | Validation loss: 0.10262470692396164
Validation loss (ends of cycles): [0.29694057 0.13502185 0.09356873]
------------------------------
Epoch: 26
Training loss: 0.06343780009245331 | Validation loss: 0.10198401659727097
Validation loss (ends of cycles): [0.29694057 0.13502185 0.09356873]
------------------------------
Epoch: 27
Training loss: 0.057109395719387314 | Validation loss: 0.08557749539613724
Validation loss (ends of cycles): [0.29694057 0.13502185 0.09356873]
------------------------------
Epoch: 28
Training loss: 0.05680512789298187 | Validation loss: 0.08958760648965836
Validation loss (ends of cycles): [0.29694057 0.13502185 0.09356873]
------------------------------
Epoch: 29
Training loss: 0.05255689133297314 | Validation loss: 0.08726699650287628
Validation loss (ends of cycles): [0.29694057 0.13502185 0.09356873]
------------------------------
Epoch: 30
Training loss: 0.05449707111851736 | Validation loss: 0.0852150097489357
Validation loss (ends of cycles): [0.29694057 0.13502185 0.09356873 0.08521501]
--------------------------------------------------------------------------------
Seed: 2
--------------------------------------------------------------------------------
------------------------------
Epoch: 0
Training loss: 0.3403178215026855 | Validation loss: 0.22224114835262299
Validation loss (ends of cycles): [0.22224115]
------------------------------
Epoch: 1
Training loss: 0.3237610936164856 | Validation loss: 0.2175069898366928
Validation loss (ends of cycles): [0.22224115]
------------------------------
Epoch: 2
Training loss: 0.3036596342921257 | Validation loss: 0.2090708464384079
Validation loss (ends of cycles): [0.22224115]
------------------------------
Epoch: 3
Training loss: 0.278908571600914 | Validation loss: 0.1986195296049118
Validation loss (ends of cycles): [0.22224115]
------------------------------
Epoch: 4
Training loss: 0.24769878536462783 | Validation loss: 0.19036133587360382
Validation loss (ends of cycles): [0.22224115]
------------------------------
Epoch: 5
Training loss: 0.2119302600622177 | Validation loss: 0.1850602626800537
Validation loss (ends of cycles): [0.22224115]
------------------------------
Epoch: 6
Training loss: 0.17966288328170776 | Validation loss: 0.17428995668888092
Validation loss (ends of cycles): [0.22224115]
------------------------------
Epoch: 7
Training loss: 0.1580403283238411 | Validation loss: 0.14990781247615814
Validation loss (ends of cycles): [0.22224115]
------------------------------
Epoch: 8
Training loss: 0.1425566166639328 | Validation loss: 0.12038183957338333
Validation loss (ends of cycles): [0.22224115]
------------------------------
Epoch: 9
Training loss: 0.1347818359732628 | Validation loss: 0.10062086582183838
Validation loss (ends of cycles): [0.22224115]
------------------------------
Epoch: 10
Training loss: 0.13022587075829506 | Validation loss: 0.09236151725053787
Validation loss (ends of cycles): [0.22224115 0.09236152]
------------------------------
Epoch: 11
Training loss: 0.128421900421381 | Validation loss: 0.08484034240245819
Validation loss (ends of cycles): [0.22224115 0.09236152]
------------------------------
Epoch: 12
Training loss: 0.12211894914507866 | Validation loss: 0.0809514969587326
Validation loss (ends of cycles): [0.22224115 0.09236152]
------------------------------
Epoch: 13
Training loss: 0.11671075597405434 | Validation loss: 0.09415645897388458
Validation loss (ends of cycles): [0.22224115 0.09236152]
------------------------------
Epoch: 14
Training loss: 0.1062808632850647 | Validation loss: 0.0794791579246521
Validation loss (ends of cycles): [0.22224115 0.09236152]
------------------------------
Epoch: 15
Training loss: 0.09774378426373005 | Validation loss: 0.07385452091693878
Validation loss (ends of cycles): [0.22224115 0.09236152]
------------------------------
Epoch: 16
Training loss: 0.09059763960540294 | Validation loss: 0.06823520362377167
Validation loss (ends of cycles): [0.22224115 0.09236152]
------------------------------
Epoch: 17
Training loss: 0.09244940280914307 | Validation loss: 0.10553069412708282
Validation loss (ends of cycles): [0.22224115 0.09236152]
------------------------------
Epoch: 18
Training loss: 0.0844466209411621 | Validation loss: 0.06962426751852036
Validation loss (ends of cycles): [0.22224115 0.09236152]
------------------------------
Epoch: 19
Training loss: 0.08120766542851925 | Validation loss: 0.06540507078170776
Validation loss (ends of cycles): [0.22224115 0.09236152]
------------------------------
Epoch: 20
Training loss: 0.08298086747527122 | Validation loss: 0.06289994716644287
Validation loss (ends of cycles): [0.22224115 0.09236152 0.06289995]
------------------------------
Epoch: 21
Training loss: 0.07931055799126625 | Validation loss: 0.06145286187529564
Validation loss (ends of cycles): [0.22224115 0.09236152 0.06289995]
------------------------------
Epoch: 22
Training loss: 0.07616247236728668 | Validation loss: 0.05724290385842323
Validation loss (ends of cycles): [0.22224115 0.09236152 0.06289995]
------------------------------
Epoch: 23
Training loss: 0.07831368632614613 | Validation loss: 0.061430856585502625
Validation loss (ends of cycles): [0.22224115 0.09236152 0.06289995]
------------------------------
Epoch: 24
Training loss: 0.08261116072535515 | Validation loss: 0.1134483739733696
Validation loss (ends of cycles): [0.22224115 0.09236152 0.06289995]
------------------------------
Epoch: 25
Training loss: 0.07242584079504014 | Validation loss: 0.07263419032096863
Validation loss (ends of cycles): [0.22224115 0.09236152 0.06289995]
------------------------------
Epoch: 26
Training loss: 0.08369440548121929 | Validation loss: 0.05704887583851814
Validation loss (ends of cycles): [0.22224115 0.09236152 0.06289995]
------------------------------
Epoch: 27
Training loss: 0.07497833706438542 | Validation loss: 0.05711729824542999
Validation loss (ends of cycles): [0.22224115 0.09236152 0.06289995]
------------------------------
Epoch: 28
Training loss: 0.06945027969777584 | Validation loss: 0.05534932762384415
Validation loss (ends of cycles): [0.22224115 0.09236152 0.06289995]
------------------------------
Epoch: 29
Training loss: 0.06933295093476773 | Validation loss: 0.054547473788261414
Validation loss (ends of cycles): [0.22224115 0.09236152 0.06289995]
------------------------------
Epoch: 30
Training loss: 0.06596663519740105 | Validation loss: 0.05319888889789581
Validation loss (ends of cycles): [0.22224115 0.09236152 0.06289995 0.05319889]
--------------------------------------------------------------------------------
Seed: 3
--------------------------------------------------------------------------------
------------------------------
Epoch: 0
Training loss: 0.3599785728888078 | Validation loss: 0.4432780146598816
Validation loss (ends of cycles): [0.44327801]
------------------------------
Epoch: 1
Training loss: 0.3369556678967042 | Validation loss: 0.43250834941864014
Validation loss (ends of cycles): [0.44327801]
------------------------------
Epoch: 2
Training loss: 0.30670858242295007 | Validation loss: 0.4114508032798767
Validation loss (ends of cycles): [0.44327801]
------------------------------
Epoch: 3
Training loss: 0.272465627301823 | Validation loss: 0.3817843198776245
Validation loss (ends of cycles): [0.44327801]
------------------------------
Epoch: 4
Training loss: 0.23151018267328088 | Validation loss: 0.3536441922187805
Validation loss (ends of cycles): [0.44327801]
------------------------------
Epoch: 5
Training loss: 0.18663159283724698 | Validation loss: 0.32844647765159607
Validation loss (ends of cycles): [0.44327801]
------------------------------
Epoch: 6
Training loss: 0.15000829168341376 | Validation loss: 0.28304171562194824
Validation loss (ends of cycles): [0.44327801]
------------------------------
Epoch: 7
Training loss: 0.12784207612276077 | Validation loss: 0.21613815426826477
Validation loss (ends of cycles): [0.44327801]
------------------------------
Epoch: 8
Training loss: 0.11503372409126976 | Validation loss: 0.16611164808273315
Validation loss (ends of cycles): [0.44327801]
------------------------------
Epoch: 9
Training loss: 0.10725069452415813 | Validation loss: 0.15188439190387726
Validation loss (ends of cycles): [0.44327801]
------------------------------
Epoch: 10
Training loss: 0.10527931221506813 | Validation loss: 0.13959506154060364
Validation loss (ends of cycles): [0.44327801 0.13959506]
------------------------------
Epoch: 11
Training loss: 0.1036420301957564 | Validation loss: 0.1270444095134735
Validation loss (ends of cycles): [0.44327801 0.13959506]
------------------------------
Epoch: 12
Training loss: 0.09993583234873685 | Validation loss: 0.1236078217625618
Validation loss (ends of cycles): [0.44327801 0.13959506]
------------------------------
Epoch: 13
Training loss: 0.09465839510614221 | Validation loss: 0.13082820177078247
Validation loss (ends of cycles): [0.44327801 0.13959506]
------------------------------
Epoch: 14
Training loss: 0.08756625313650478 | Validation loss: 0.10408846288919449
Validation loss (ends of cycles): [0.44327801 0.13959506]
------------------------------
Epoch: 15
Training loss: 0.0838624509898099 | Validation loss: 0.08756735175848007
Validation loss (ends of cycles): [0.44327801 0.13959506]
------------------------------
Epoch: 16
Training loss: 0.08244303072040732 | Validation loss: 0.09559406340122223
Validation loss (ends of cycles): [0.44327801 0.13959506]
------------------------------
Epoch: 17
Training loss: 0.0741471800614487 | Validation loss: 0.10337742418050766
Validation loss (ends of cycles): [0.44327801 0.13959506]
------------------------------
Epoch: 18
Training loss: 0.07662723992358554 | Validation loss: 0.06411650031805038
Validation loss (ends of cycles): [0.44327801 0.13959506]
------------------------------
Epoch: 19
Training loss: 0.06983052329583601 | Validation loss: 0.07078924030065536
Validation loss (ends of cycles): [0.44327801 0.13959506]
------------------------------
Epoch: 20
Training loss: 0.0687717121433128 | Validation loss: 0.06904434412717819
Validation loss (ends of cycles): [0.44327801 0.13959506 0.06904434]
------------------------------
Epoch: 21
Training loss: 0.06762443178079346 | Validation loss: 0.06313017755746841
Validation loss (ends of cycles): [0.44327801 0.13959506 0.06904434]
------------------------------
Epoch: 22
Training loss: 0.0678871947933327 | Validation loss: 0.067040354013443
Validation loss (ends of cycles): [0.44327801 0.13959506 0.06904434]
------------------------------
Epoch: 23
Training loss: 0.06613235378807242 | Validation loss: 0.07850442826747894
Validation loss (ends of cycles): [0.44327801 0.13959506 0.06904434]
------------------------------
Epoch: 24
Training loss: 0.06488648564978079 | Validation loss: 0.051457930356264114
Validation loss (ends of cycles): [0.44327801 0.13959506 0.06904434]
------------------------------
Epoch: 25
Training loss: 0.0654671046544205 | Validation loss: 0.061340026557445526
Validation loss (ends of cycles): [0.44327801 0.13959506 0.06904434]
------------------------------
Epoch: 26
Training loss: 0.06411620466546579 | Validation loss: 0.06412345170974731
Validation loss (ends of cycles): [0.44327801 0.13959506 0.06904434]
------------------------------
Epoch: 27
Training loss: 0.060056577013297516 | Validation loss: 0.05233113467693329
Validation loss (ends of cycles): [0.44327801 0.13959506 0.06904434]
------------------------------
Epoch: 28
Training loss: 0.05766375464471904 | Validation loss: 0.04654216393828392
Validation loss (ends of cycles): [0.44327801 0.13959506 0.06904434]
------------------------------
Epoch: 29
Training loss: 0.05332683399319649 | Validation loss: 0.04950576648116112
Validation loss (ends of cycles): [0.44327801 0.13959506 0.06904434]
------------------------------
Epoch: 30
Training loss: 0.05111788822845979 | Validation loss: 0.05081142485141754
Validation loss (ends of cycles): [0.44327801 0.13959506 0.06904434 0.05081142]
--------------------------------------------------------------------------------
Seed: 4
--------------------------------------------------------------------------------
------------------------------
Epoch: 0
Training loss: 0.5454315461895682 | Validation loss: 0.4730074107646942
Validation loss (ends of cycles): [0.47300741]
------------------------------
Epoch: 1
Training loss: 0.5257494151592255 | Validation loss: 0.4644114375114441
Validation loss (ends of cycles): [0.47300741]
------------------------------
Epoch: 2
Training loss: 0.49964225021275604 | Validation loss: 0.4493078589439392
Validation loss (ends of cycles): [0.47300741]
------------------------------
Epoch: 3
Training loss: 0.4701995172283866 | Validation loss: 0.4282127916812897
Validation loss (ends of cycles): [0.47300741]
------------------------------
Epoch: 4
Training loss: 0.43141826445406134 | Validation loss: 0.4020078182220459
Validation loss (ends of cycles): [0.47300741]
------------------------------
Epoch: 5
Training loss: 0.3827357996593822 | Validation loss: 0.362201452255249
Validation loss (ends of cycles): [0.47300741]
------------------------------
Epoch: 6
Training loss: 0.3341675915501334 | Validation loss: 0.3249988853931427
Validation loss (ends of cycles): [0.47300741]
------------------------------
Epoch: 7
Training loss: 0.2958235442638397 | Validation loss: 0.2761434316635132
Validation loss (ends of cycles): [0.47300741]
------------------------------
Epoch: 8
Training loss: 0.2697145681489598 | Validation loss: 0.25178632140159607
Validation loss (ends of cycles): [0.47300741]
------------------------------
Epoch: 9
Training loss: 0.25373841009356757 | Validation loss: 0.22395309805870056
Validation loss (ends of cycles): [0.47300741]
------------------------------
Epoch: 10
Training loss: 0.24752120673656464 | Validation loss: 0.22707553207874298
Validation loss (ends of cycles): [0.47300741 0.22707553]
------------------------------
Epoch: 11
Training loss: 0.24136914312839508 | Validation loss: 0.2116468846797943
Validation loss (ends of cycles): [0.47300741 0.22707553]
------------------------------
Epoch: 12
Training loss: 0.2281808080998334 | Validation loss: 0.2141018956899643
Validation loss (ends of cycles): [0.47300741 0.22707553]
------------------------------
Epoch: 13
Training loss: 0.20625835250724445 | Validation loss: 0.2063857465982437
Validation loss (ends of cycles): [0.47300741 0.22707553]
------------------------------
Epoch: 14
Training loss: 0.1818149442022497 | Validation loss: 0.1678750216960907
Validation loss (ends of cycles): [0.47300741 0.22707553]
------------------------------
Epoch: 15
Training loss: 0.1542614088817076 | Validation loss: 0.24311645328998566
Validation loss (ends of cycles): [0.47300741 0.22707553]
------------------------------
Epoch: 16
Training loss: 0.13340282982045953 | Validation loss: 0.11346697062253952
Validation loss (ends of cycles): [0.47300741 0.22707553]
------------------------------
Epoch: 17
Training loss: 0.11361510171131654 | Validation loss: 0.08446861058473587
Validation loss (ends of cycles): [0.47300741 0.22707553]
------------------------------
Epoch: 18
Training loss: 0.10257813706994057 | Validation loss: 0.1038476824760437
Validation loss (ends of cycles): [0.47300741 0.22707553]
------------------------------
Epoch: 19
Training loss: 0.09851626374504784 | Validation loss: 0.10009600967168808
Validation loss (ends of cycles): [0.47300741 0.22707553]
------------------------------
Epoch: 20
Training loss: 0.09389783983880823 | Validation loss: 0.10324864834547043
Validation loss (ends of cycles): [0.47300741 0.22707553 0.10324865]
------------------------------
Epoch: 21
Training loss: 0.09191824529658664 | Validation loss: 0.08838817477226257
Validation loss (ends of cycles): [0.47300741 0.22707553 0.10324865]
------------------------------
Epoch: 22
Training loss: 0.08743048831820488 | Validation loss: 0.11275480687618256
Validation loss (ends of cycles): [0.47300741 0.22707553 0.10324865]
------------------------------
Epoch: 23
Training loss: 0.08301741765304045 | Validation loss: 0.08835740387439728
Validation loss (ends of cycles): [0.47300741 0.22707553 0.10324865]
------------------------------
Epoch: 24
Training loss: 0.07729637080972845 | Validation loss: 0.15197023749351501
Validation loss (ends of cycles): [0.47300741 0.22707553 0.10324865]
------------------------------
Epoch: 25
Training loss: 0.07531545412811366 | Validation loss: 0.19605480134487152
Validation loss (ends of cycles): [0.47300741 0.22707553 0.10324865]
------------------------------
Epoch: 26
Training loss: 0.0744694381613623 | Validation loss: 0.08726125210523605
Validation loss (ends of cycles): [0.47300741 0.22707553 0.10324865]
------------------------------
Epoch: 27
Training loss: 0.07172296805815263 | Validation loss: 0.08883035182952881
Validation loss (ends of cycles): [0.47300741 0.22707553 0.10324865]
------------------------------
Epoch: 28
Training loss: 0.05978721922094172 | Validation loss: 0.10437580943107605
Validation loss (ends of cycles): [0.47300741 0.22707553 0.10324865]
------------------------------
Epoch: 29
Training loss: 0.058711225844242355 | Validation loss: 0.07623975723981857
Validation loss (ends of cycles): [0.47300741 0.22707553 0.10324865]
------------------------------
Epoch: 30
Training loss: 0.05624144473536448 | Validation loss: 0.0785088762640953
Validation loss (ends of cycles): [0.47300741 0.22707553 0.10324865 0.07850888]
--------------------------------------------------------------------------------
Seed: 5
--------------------------------------------------------------------------------
------------------------------
Epoch: 0
Training loss: 0.3115223288536072 | Validation loss: 0.35713261365890503
Validation loss (ends of cycles): [0.35713261]
------------------------------
Epoch: 1
Training loss: 0.29661754220724107 | Validation loss: 0.3485311195254326
Validation loss (ends of cycles): [0.35713261]
------------------------------
Epoch: 2
Training loss: 0.27701484113931657 | Validation loss: 0.3320363536477089
Validation loss (ends of cycles): [0.35713261]
------------------------------
Epoch: 3
Training loss: 0.2542939618229866 | Validation loss: 0.3089185282588005
Validation loss (ends of cycles): [0.35713261]
------------------------------
Epoch: 4
Training loss: 0.2231126084923744 | Validation loss: 0.28109800815582275
Validation loss (ends of cycles): [0.35713261]
------------------------------
Epoch: 5
Training loss: 0.18675014078617097 | Validation loss: 0.2502268999814987
Validation loss (ends of cycles): [0.35713261]
------------------------------
Epoch: 6
Training loss: 0.15313852950930595 | Validation loss: 0.2070472240447998
Validation loss (ends of cycles): [0.35713261]
------------------------------
Epoch: 7
Training loss: 0.13002245053648948 | Validation loss: 0.19130465388298035
Validation loss (ends of cycles): [0.35713261]
------------------------------
Epoch: 8
Training loss: 0.11442725360393524 | Validation loss: 0.20052699744701385
Validation loss (ends of cycles): [0.35713261]
------------------------------
Epoch: 9
Training loss: 0.10721210837364196 | Validation loss: 0.21451064944267273
Validation loss (ends of cycles): [0.35713261]
------------------------------
Epoch: 10
Training loss: 0.10336671397089958 | Validation loss: 0.21087735146284103
Validation loss (ends of cycles): [0.35713261 0.21087735]
------------------------------
Epoch: 11
Training loss: 0.0994449395686388 | Validation loss: 0.20340285822749138
Validation loss (ends of cycles): [0.35713261 0.21087735]
------------------------------
Epoch: 12
Training loss: 0.09221443757414818 | Validation loss: 0.22622137889266014
Validation loss (ends of cycles): [0.35713261 0.21087735]
------------------------------
Epoch: 13
Training loss: 0.08567270934581757 | Validation loss: 0.2773045003414154
Validation loss (ends of cycles): [0.35713261 0.21087735]
------------------------------
Epoch: 14
Training loss: 0.07911113798618316 | Validation loss: 0.43483686447143555
Validation loss (ends of cycles): [0.35713261 0.21087735]
------------------------------
Epoch: 15
Training loss: 0.07443125247955322 | Validation loss: 0.1533229500055313
Validation loss (ends of cycles): [0.35713261 0.21087735]
------------------------------
Epoch: 16
Training loss: 0.06726926937699318 | Validation loss: 0.1722557358443737
Validation loss (ends of cycles): [0.35713261 0.21087735]
------------------------------
Epoch: 17
Training loss: 0.06305484026670456 | Validation loss: 0.21744874492287636
Validation loss (ends of cycles): [0.35713261 0.21087735]
------------------------------
Epoch: 18
Training loss: 0.057345937564969064 | Validation loss: 0.2609431743621826
Validation loss (ends of cycles): [0.35713261 0.21087735]
------------------------------
Epoch: 19
Training loss: 0.05563645977526903 | Validation loss: 0.2218475341796875
Validation loss (ends of cycles): [0.35713261 0.21087735]
------------------------------
Epoch: 20
Training loss: 0.052960326336324214 | Validation loss: 0.19616811349987984
Validation loss (ends of cycles): [0.35713261 0.21087735 0.19616811]
------------------------------
Epoch: 21
Training loss: 0.05901392139494419 | Validation loss: 0.1624758467078209
Validation loss (ends of cycles): [0.35713261 0.21087735 0.19616811]
------------------------------
Epoch: 22
Training loss: 0.050959855690598486 | Validation loss: 0.22551586478948593
Validation loss (ends of cycles): [0.35713261 0.21087735 0.19616811]
------------------------------
Epoch: 23
Training loss: 0.051076297834515574 | Validation loss: 0.2302572764456272
Validation loss (ends of cycles): [0.35713261 0.21087735 0.19616811]
------------------------------
Epoch: 24
Training loss: 0.05112339500337839 | Validation loss: 0.21048244833946228
Validation loss (ends of cycles): [0.35713261 0.21087735 0.19616811]
------------------------------
Epoch: 25
Training loss: 0.05467013940215111 | Validation loss: 0.12336333841085434
Validation loss (ends of cycles): [0.35713261 0.21087735 0.19616811]
------------------------------
Epoch: 26
Training loss: 0.057834042236208916 | Validation loss: 0.2566063143312931
Validation loss (ends of cycles): [0.35713261 0.21087735 0.19616811]
------------------------------
Epoch: 27
Training loss: 0.04988208692520857 | Validation loss: 0.22633014991879463
Validation loss (ends of cycles): [0.35713261 0.21087735 0.19616811]
------------------------------
Epoch: 28
Training loss: 0.05284797567874193 | Validation loss: 0.21418332308530807
Validation loss (ends of cycles): [0.35713261 0.21087735 0.19616811]
------------------------------
Epoch: 29
Training loss: 0.045893807895481586 | Validation loss: 0.1591845452785492
Validation loss (ends of cycles): [0.35713261 0.21087735 0.19616811]
------------------------------
Epoch: 30
Training loss: 0.0433046093210578 | Validation loss: 0.14972137287259102
Validation loss (ends of cycles): [0.35713261 0.21087735 0.19616811 0.14972137]
--------------------------------------------------------------------------------
Seed: 6
--------------------------------------------------------------------------------
------------------------------
Epoch: 0
Training loss: 0.5947945535182952 | Validation loss: 0.5636097490787506
Validation loss (ends of cycles): [0.56360975]
------------------------------
Epoch: 1
Training loss: 0.5739877611398697 | Validation loss: 0.5552766025066376
Validation loss (ends of cycles): [0.56360975]
------------------------------
Epoch: 2
Training loss: 0.5444172233343124 | Validation loss: 0.5409025847911835
Validation loss (ends of cycles): [0.56360975]
------------------------------
Epoch: 3
Training loss: 0.5102303147315979 | Validation loss: 0.5223296880722046
Validation loss (ends of cycles): [0.56360975]
------------------------------
Epoch: 4
Training loss: 0.4671860933303833 | Validation loss: 0.5054818987846375
Validation loss (ends of cycles): [0.56360975]
------------------------------
Epoch: 5
Training loss: 0.4151140213012695 | Validation loss: 0.49210914969444275
Validation loss (ends of cycles): [0.56360975]
------------------------------
Epoch: 6
Training loss: 0.3648500770330429 | Validation loss: 0.4792882353067398
Validation loss (ends of cycles): [0.56360975]
------------------------------
Epoch: 7
Training loss: 0.3277033656835556 | Validation loss: 0.4582698494195938
Validation loss (ends of cycles): [0.56360975]
------------------------------
Epoch: 8
Training loss: 0.30160123109817505 | Validation loss: 0.40672582387924194
Validation loss (ends of cycles): [0.56360975]
------------------------------
Epoch: 9
Training loss: 0.28555808067321775 | Validation loss: 0.3488713800907135
Validation loss (ends of cycles): [0.56360975]
------------------------------
Epoch: 10
Training loss: 0.2787675619125366 | Validation loss: 0.31419089436531067
Validation loss (ends of cycles): [0.56360975 0.31419089]
------------------------------
Epoch: 11
Training loss: 0.27361820340156556 | Validation loss: 0.2943081259727478
Validation loss (ends of cycles): [0.56360975 0.31419089]
------------------------------
Epoch: 12
Training loss: 0.26058380156755445 | Validation loss: 0.26867610216140747
Validation loss (ends of cycles): [0.56360975 0.31419089]
------------------------------
Epoch: 13
Training loss: 0.24124586433172227 | Validation loss: 0.24037369340658188
Validation loss (ends of cycles): [0.56360975 0.31419089]
------------------------------
Epoch: 14
Training loss: 0.21548341661691667 | Validation loss: 0.22535283118486404
Validation loss (ends of cycles): [0.56360975 0.31419089]
------------------------------
Epoch: 15
Training loss: 0.18438775539398194 | Validation loss: 0.1685718446969986
Validation loss (ends of cycles): [0.56360975 0.31419089]
------------------------------
Epoch: 16
Training loss: 0.16061918511986734 | Validation loss: 0.13537318259477615
Validation loss (ends of cycles): [0.56360975 0.31419089]
------------------------------
Epoch: 17
Training loss: 0.14224363416433333 | Validation loss: 0.13322928547859192
Validation loss (ends of cycles): [0.56360975 0.31419089]
------------------------------
Epoch: 18
Training loss: 0.13033997938036918 | Validation loss: 0.12760929018259048
Validation loss (ends of cycles): [0.56360975 0.31419089]
------------------------------
Epoch: 19
Training loss: 0.12404320240020753 | Validation loss: 0.13474282249808311
Validation loss (ends of cycles): [0.56360975 0.31419089]
------------------------------
Epoch: 20
Training loss: 0.11867297813296318 | Validation loss: 0.13434157520532608
Validation loss (ends of cycles): [0.56360975 0.31419089 0.13434158]
------------------------------
Epoch: 21
Training loss: 0.11793936267495156 | Validation loss: 0.13654564321041107
Validation loss (ends of cycles): [0.56360975 0.31419089 0.13434158]
------------------------------
Epoch: 22
Training loss: 0.11286920569837093 | Validation loss: 0.12956713140010834
Validation loss (ends of cycles): [0.56360975 0.31419089 0.13434158]
------------------------------
Epoch: 23
Training loss: 0.10548075810074806 | Validation loss: 0.12283502891659737
Validation loss (ends of cycles): [0.56360975 0.31419089 0.13434158]
------------------------------
Epoch: 24
Training loss: 0.09794336780905724 | Validation loss: 0.10408468917012215
Validation loss (ends of cycles): [0.56360975 0.31419089 0.13434158]
------------------------------
Epoch: 25
Training loss: 0.0924295324832201 | Validation loss: 0.15008461475372314
Validation loss (ends of cycles): [0.56360975 0.31419089 0.13434158]
------------------------------
Epoch: 26
Training loss: 0.09017585180699825 | Validation loss: 0.07894822582602501
Validation loss (ends of cycles): [0.56360975 0.31419089 0.13434158]
------------------------------
Epoch: 27
Training loss: 0.08053090944886207 | Validation loss: 0.07444114610552788
Validation loss (ends of cycles): [0.56360975 0.31419089 0.13434158]
------------------------------
Epoch: 28
Training loss: 0.07959246821701527 | Validation loss: 0.09047586098313332
Validation loss (ends of cycles): [0.56360975 0.31419089 0.13434158]
------------------------------
Epoch: 29
Training loss: 0.0734288364648819 | Validation loss: 0.07841610908508301
Validation loss (ends of cycles): [0.56360975 0.31419089 0.13434158]
------------------------------
Epoch: 30
Training loss: 0.07281158864498138 | Validation loss: 0.07585301622748375
Validation loss (ends of cycles): [0.56360975 0.31419089 0.13434158 0.07585302]
--------------------------------------------------------------------------------
Seed: 7
--------------------------------------------------------------------------------
------------------------------
Epoch: 0
Training loss: 0.24966259002685548 | Validation loss: 0.24267390370368958
Validation loss (ends of cycles): [0.2426739]
------------------------------
Epoch: 1
Training loss: 0.235618394613266 | Validation loss: 0.2372320294380188
Validation loss (ends of cycles): [0.2426739]
------------------------------
Epoch: 2
Training loss: 0.21840053498744966 | Validation loss: 0.22739332169294357
Validation loss (ends of cycles): [0.2426739]
------------------------------
Epoch: 3
Training loss: 0.19703763723373413 | Validation loss: 0.21525921672582626
Validation loss (ends of cycles): [0.2426739]
------------------------------
Epoch: 4
Training loss: 0.1717545986175537 | Validation loss: 0.20377518981695175
Validation loss (ends of cycles): [0.2426739]
------------------------------
Epoch: 5
Training loss: 0.14265027046203613 | Validation loss: 0.19772907346487045
Validation loss (ends of cycles): [0.2426739]
------------------------------
Epoch: 6
Training loss: 0.11750846356153488 | Validation loss: 0.1927606761455536
Validation loss (ends of cycles): [0.2426739]
------------------------------
Epoch: 7
Training loss: 0.10381745249032974 | Validation loss: 0.18614127486944199
Validation loss (ends of cycles): [0.2426739]
------------------------------
Epoch: 8
Training loss: 0.0941122256219387 | Validation loss: 0.16012199968099594
Validation loss (ends of cycles): [0.2426739]
------------------------------
Epoch: 9
Training loss: 0.09131616652011872 | Validation loss: 0.15815239399671555
Validation loss (ends of cycles): [0.2426739]
------------------------------
Epoch: 10
Training loss: 0.08770758658647537 | Validation loss: 0.15310294181108475
Validation loss (ends of cycles): [0.2426739  0.15310294]
------------------------------
Epoch: 11
Training loss: 0.08528880327939987 | Validation loss: 0.14396170154213905
Validation loss (ends of cycles): [0.2426739  0.15310294]
------------------------------
Epoch: 12
Training loss: 0.08279691264033318 | Validation loss: 0.14130394160747528
Validation loss (ends of cycles): [0.2426739  0.15310294]
------------------------------
Epoch: 13
Training loss: 0.08135882690548897 | Validation loss: 0.14968570321798325
Validation loss (ends of cycles): [0.2426739  0.15310294]
------------------------------
Epoch: 14
Training loss: 0.08186716139316559 | Validation loss: 0.17318671941757202
Validation loss (ends of cycles): [0.2426739  0.15310294]
------------------------------
Epoch: 15
Training loss: 0.08467233590781689 | Validation loss: 0.18237532302737236
Validation loss (ends of cycles): [0.2426739  0.15310294]
------------------------------
Epoch: 16
Training loss: 0.0764297217130661 | Validation loss: 0.18119005113840103
Validation loss (ends of cycles): [0.2426739  0.15310294]
------------------------------
Epoch: 17
Training loss: 0.07494740672409535 | Validation loss: 0.1301913782954216
Validation loss (ends of cycles): [0.2426739  0.15310294]
------------------------------
Epoch: 18
Training loss: 0.07010909467935562 | Validation loss: 0.14423485472798347
Validation loss (ends of cycles): [0.2426739  0.15310294]
------------------------------
Epoch: 19
Training loss: 0.0687748871743679 | Validation loss: 0.15484707802534103
Validation loss (ends of cycles): [0.2426739  0.15310294]
------------------------------
Epoch: 20
Training loss: 0.0645689457654953 | Validation loss: 0.15342670306563377
Validation loss (ends of cycles): [0.2426739  0.15310294 0.1534267 ]
------------------------------
Epoch: 21
Training loss: 0.062006055191159246 | Validation loss: 0.15138789266347885
Validation loss (ends of cycles): [0.2426739  0.15310294 0.1534267 ]
------------------------------
Epoch: 22
Training loss: 0.06060033775866032 | Validation loss: 0.13369300588965416
Validation loss (ends of cycles): [0.2426739  0.15310294 0.1534267 ]
------------------------------
Epoch: 23
Training loss: 0.061561013013124465 | Validation loss: 0.13530557602643967
Validation loss (ends of cycles): [0.2426739  0.15310294 0.1534267 ]
------------------------------
Epoch: 24
Training loss: 0.06617009490728379 | Validation loss: 0.16354938223958015
Validation loss (ends of cycles): [0.2426739  0.15310294 0.1534267 ]
------------------------------
Epoch: 25
Training loss: 0.06215674802660942 | Validation loss: 0.20111528038978577
Validation loss (ends of cycles): [0.2426739  0.15310294 0.1534267 ]
------------------------------
Epoch: 26
Training loss: 0.059966203197836875 | Validation loss: 0.14292334020137787
Validation loss (ends of cycles): [0.2426739  0.15310294 0.1534267 ]
------------------------------
Epoch: 27
Training loss: 0.059527568891644476 | Validation loss: 0.1250341236591339
Validation loss (ends of cycles): [0.2426739  0.15310294 0.1534267 ]
------------------------------
Epoch: 28
Training loss: 0.05725446715950966 | Validation loss: 0.12287440150976181
Validation loss (ends of cycles): [0.2426739  0.15310294 0.1534267 ]
------------------------------
Epoch: 29
Training loss: 0.054586905613541604 | Validation loss: 0.1597551889717579
Validation loss (ends of cycles): [0.2426739  0.15310294 0.1534267 ]
------------------------------
Epoch: 30
Training loss: 0.05266690887510776 | Validation loss: 0.1473530475050211
Validation loss (ends of cycles): [0.2426739  0.15310294 0.1534267  0.14735305]
--------------------------------------------------------------------------------
Seed: 8
--------------------------------------------------------------------------------
------------------------------
Epoch: 0
Training loss: 0.41844989494843915 | Validation loss: 0.46415480971336365
Validation loss (ends of cycles): [0.46415481]
------------------------------
Epoch: 1
Training loss: 0.39472986351359973 | Validation loss: 0.4504581391811371
Validation loss (ends of cycles): [0.46415481]
------------------------------
Epoch: 2
Training loss: 0.35734637623483484 | Validation loss: 0.42483583092689514
Validation loss (ends of cycles): [0.46415481]
------------------------------
Epoch: 3
Training loss: 0.31263969296758826 | Validation loss: 0.3877894878387451
Validation loss (ends of cycles): [0.46415481]
------------------------------
Epoch: 4
Training loss: 0.2572383663871072 | Validation loss: 0.35185202956199646
Validation loss (ends of cycles): [0.46415481]
------------------------------
Epoch: 5
Training loss: 0.19959831779653375 | Validation loss: 0.32558348774909973
Validation loss (ends of cycles): [0.46415481]
------------------------------
Epoch: 6
Training loss: 0.15444783595475284 | Validation loss: 0.30798062682151794
Validation loss (ends of cycles): [0.46415481]
------------------------------
Epoch: 7
Training loss: 0.12643937360156665 | Validation loss: 0.2347472906112671
Validation loss (ends of cycles): [0.46415481]
------------------------------
Epoch: 8
Training loss: 0.1116909838535569 | Validation loss: 0.19630743563175201
Validation loss (ends of cycles): [0.46415481]
------------------------------
Epoch: 9
Training loss: 0.10401421514424411 | Validation loss: 0.1785627156496048
Validation loss (ends of cycles): [0.46415481]
------------------------------
Epoch: 10
Training loss: 0.10056576505303383 | Validation loss: 0.16850918531417847
Validation loss (ends of cycles): [0.46415481 0.16850919]
------------------------------
Epoch: 11
Training loss: 0.0980552001432939 | Validation loss: 0.16101451218128204
Validation loss (ends of cycles): [0.46415481 0.16850919]
------------------------------
Epoch: 12
Training loss: 0.09317369014024734 | Validation loss: 0.15137232840061188
Validation loss (ends of cycles): [0.46415481 0.16850919]
------------------------------
Epoch: 13
Training loss: 0.08474765955047174 | Validation loss: 0.19075323641300201
Validation loss (ends of cycles): [0.46415481 0.16850919]
------------------------------
Epoch: 14
Training loss: 0.08182131465185773 | Validation loss: 0.12207912653684616
Validation loss (ends of cycles): [0.46415481 0.16850919]
------------------------------
Epoch: 15
Training loss: 0.08076543767343868 | Validation loss: 0.16828617453575134
Validation loss (ends of cycles): [0.46415481 0.16850919]
------------------------------
Epoch: 16
Training loss: 0.07709567960013043 | Validation loss: 0.13723145425319672
Validation loss (ends of cycles): [0.46415481 0.16850919]
------------------------------
Epoch: 17
Training loss: 0.07455341958186844 | Validation loss: 0.17370207607746124
Validation loss (ends of cycles): [0.46415481 0.16850919]
------------------------------
Epoch: 18
Training loss: 0.06813155622644858 | Validation loss: 0.1326078176498413
Validation loss (ends of cycles): [0.46415481 0.16850919]
------------------------------
Epoch: 19
Training loss: 0.06662491912191565 | Validation loss: 0.1284124106168747
Validation loss (ends of cycles): [0.46415481 0.16850919]
------------------------------
Epoch: 20
Training loss: 0.06775809993798082 | Validation loss: 0.12708702683448792
Validation loss (ends of cycles): [0.46415481 0.16850919 0.12708703]
------------------------------
Epoch: 21
Training loss: 0.06268392300063913 | Validation loss: 0.1257689744234085
Validation loss (ends of cycles): [0.46415481 0.16850919 0.12708703]
------------------------------
Epoch: 22
Training loss: 0.06307559223337607 | Validation loss: 0.1285398006439209
Validation loss (ends of cycles): [0.46415481 0.16850919 0.12708703]
------------------------------
Epoch: 23
Training loss: 0.06152626160870899 | Validation loss: 0.10883551836013794
Validation loss (ends of cycles): [0.46415481 0.16850919 0.12708703]
------------------------------
Epoch: 24
Training loss: 0.06122888353737918 | Validation loss: 0.1875065714120865
Validation loss (ends of cycles): [0.46415481 0.16850919 0.12708703]
------------------------------
Epoch: 25
Training loss: 0.06612172418019989 | Validation loss: 0.20511069893836975
Validation loss (ends of cycles): [0.46415481 0.16850919 0.12708703]
------------------------------
Epoch: 26
Training loss: 0.06252996826713736 | Validation loss: 0.1681954264640808
Validation loss (ends of cycles): [0.46415481 0.16850919 0.12708703]
------------------------------
Epoch: 27
Training loss: 0.06012651967731389 | Validation loss: 0.12634092569351196
Validation loss (ends of cycles): [0.46415481 0.16850919 0.12708703]
------------------------------
Epoch: 28
Training loss: 0.052851350639354096 | Validation loss: 0.1592557728290558
Validation loss (ends of cycles): [0.46415481 0.16850919 0.12708703]
------------------------------
Epoch: 29
Training loss: 0.05158912593668157 | Validation loss: 0.12818372249603271
Validation loss (ends of cycles): [0.46415481 0.16850919 0.12708703]
------------------------------
Epoch: 30
Training loss: 0.052270400591871956 | Validation loss: 0.12273656576871872
Validation loss (ends of cycles): [0.46415481 0.16850919 0.12708703 0.12273657]
--------------------------------------------------------------------------------
Seed: 9
--------------------------------------------------------------------------------
------------------------------
Epoch: 0
Training loss: 0.33450397036292334 | Validation loss: 0.4183863401412964
Validation loss (ends of cycles): [0.41838634]
------------------------------
Epoch: 1
Training loss: 0.31508046659556305 | Validation loss: 0.4055172950029373
Validation loss (ends of cycles): [0.41838634]
------------------------------
Epoch: 2
Training loss: 0.28755233775485645 | Validation loss: 0.38339458405971527
Validation loss (ends of cycles): [0.41838634]
------------------------------
Epoch: 3
Training loss: 0.2563350403850729 | Validation loss: 0.35465146601200104
Validation loss (ends of cycles): [0.41838634]
------------------------------
Epoch: 4
Training loss: 0.21771724657578903 | Validation loss: 0.32462094724178314
Validation loss (ends of cycles): [0.41838634]
------------------------------
Epoch: 5
Training loss: 0.17677603594281457 | Validation loss: 0.2886727899312973
Validation loss (ends of cycles): [0.41838634]
------------------------------
Epoch: 6
Training loss: 0.14331864734942262 | Validation loss: 0.25660841912031174
Validation loss (ends of cycles): [0.41838634]
------------------------------
Epoch: 7
Training loss: 0.12253618409687822 | Validation loss: 0.20420953631401062
Validation loss (ends of cycles): [0.41838634]
------------------------------
Epoch: 8
Training loss: 0.10970718989318068 | Validation loss: 0.16134043782949448
Validation loss (ends of cycles): [0.41838634]
------------------------------
Epoch: 9
Training loss: 0.10511686839163303 | Validation loss: 0.136260487139225
Validation loss (ends of cycles): [0.41838634]
------------------------------
Epoch: 10
Training loss: 0.10223364101892168 | Validation loss: 0.12221920490264893
Validation loss (ends of cycles): [0.41838634 0.1222192 ]
------------------------------
Epoch: 11
Training loss: 0.09944640675728972 | Validation loss: 0.10605774819850922
Validation loss (ends of cycles): [0.41838634 0.1222192 ]
------------------------------
Epoch: 12
Training loss: 0.09476278620687398 | Validation loss: 0.09146244078874588
Validation loss (ends of cycles): [0.41838634 0.1222192 ]
------------------------------
Epoch: 13
Training loss: 0.09061784432692961 | Validation loss: 0.07169642113149166
Validation loss (ends of cycles): [0.41838634 0.1222192 ]
------------------------------
Epoch: 14
Training loss: 0.08670035394077952 | Validation loss: 0.07171276770532131
Validation loss (ends of cycles): [0.41838634 0.1222192 ]
------------------------------
Epoch: 15
Training loss: 0.08408490454100749 | Validation loss: 0.07400976028293371
Validation loss (ends of cycles): [0.41838634 0.1222192 ]
------------------------------
Epoch: 16
Training loss: 0.07897272638299248 | Validation loss: 0.07414662837982178
Validation loss (ends of cycles): [0.41838634 0.1222192 ]
------------------------------
Epoch: 17
Training loss: 0.07920655997639353 | Validation loss: 0.060708372853696346
Validation loss (ends of cycles): [0.41838634 0.1222192 ]
------------------------------
Epoch: 18
Training loss: 0.0753878229721026 | Validation loss: 0.07837508991360664
Validation loss (ends of cycles): [0.41838634 0.1222192 ]
------------------------------
Epoch: 19
Training loss: 0.07477120868861675 | Validation loss: 0.08723119460046291
Validation loss (ends of cycles): [0.41838634 0.1222192 ]
------------------------------
Epoch: 20
Training loss: 0.07195909118110483 | Validation loss: 0.07401845417916775
Validation loss (ends of cycles): [0.41838634 0.1222192  0.07401845]
------------------------------
Epoch: 21
Training loss: 0.06806200789287686 | Validation loss: 0.064169991761446
Validation loss (ends of cycles): [0.41838634 0.1222192  0.07401845]
------------------------------
Epoch: 22
Training loss: 0.07544434239918535 | Validation loss: 0.05839283112436533
Validation loss (ends of cycles): [0.41838634 0.1222192  0.07401845]
------------------------------
Epoch: 23
Training loss: 0.06904576837339184 | Validation loss: 0.052823279052972794
Validation loss (ends of cycles): [0.41838634 0.1222192  0.07401845]
------------------------------
Epoch: 24
Training loss: 0.07263032600960949 | Validation loss: 0.05023655481636524
Validation loss (ends of cycles): [0.41838634 0.1222192  0.07401845]
------------------------------
Epoch: 25
Training loss: 0.06975930573588068 | Validation loss: 0.0573732815682888
Validation loss (ends of cycles): [0.41838634 0.1222192  0.07401845]
------------------------------
Epoch: 26
Training loss: 0.07377178052609618 | Validation loss: 0.07613628916442394
Validation loss (ends of cycles): [0.41838634 0.1222192  0.07401845]
------------------------------
Epoch: 27
Training loss: 0.06863558673384515 | Validation loss: 0.06069220509380102
Validation loss (ends of cycles): [0.41838634 0.1222192  0.07401845]
------------------------------
Epoch: 28
Training loss: 0.0684154127639803 | Validation loss: 0.07957470044493675
Validation loss (ends of cycles): [0.41838634 0.1222192  0.07401845]
------------------------------
Epoch: 29
Training loss: 0.06068193370645696 | Validation loss: 0.05438768491148949
Validation loss (ends of cycles): [0.41838634 0.1222192  0.07401845]
------------------------------
Epoch: 30
Training loss: 0.056117918638681825 | Validation loss: 0.0544932559132576
Validation loss (ends of cycles): [0.41838634 0.1222192  0.07401845 0.05449326]
--------------------------------------------------------------------------------
Seed: 10
--------------------------------------------------------------------------------
------------------------------
Epoch: 0
Training loss: 0.2340872816064141 | Validation loss: 0.27015864849090576
Validation loss (ends of cycles): [0.27015865]
------------------------------
Epoch: 1
Training loss: 0.21558139405467294 | Validation loss: 0.26325294375419617
Validation loss (ends of cycles): [0.27015865]
------------------------------
Epoch: 2
Training loss: 0.19359120591120285 | Validation loss: 0.2500206530094147
Validation loss (ends of cycles): [0.27015865]
------------------------------
Epoch: 3
Training loss: 0.17328757318583402 | Validation loss: 0.23665626347064972
Validation loss (ends of cycles): [0.27015865]
------------------------------
Epoch: 4
Training loss: 0.1474518593062054 | Validation loss: 0.23030684888362885
Validation loss (ends of cycles): [0.27015865]
------------------------------
Epoch: 5
Training loss: 0.12017566642977974 | Validation loss: 0.21531414985656738
Validation loss (ends of cycles): [0.27015865]
------------------------------
Epoch: 6
Training loss: 0.10158516398885033 | Validation loss: 0.17602409422397614
Validation loss (ends of cycles): [0.27015865]
------------------------------
Epoch: 7
Training loss: 0.0888047841462222 | Validation loss: 0.15436150133609772
Validation loss (ends of cycles): [0.27015865]
------------------------------
Epoch: 8
Training loss: 0.08242148025469347 | Validation loss: 0.09863794595003128
Validation loss (ends of cycles): [0.27015865]
------------------------------
Epoch: 9
Training loss: 0.07809551805257797 | Validation loss: 0.09693825244903564
Validation loss (ends of cycles): [0.27015865]
------------------------------
Epoch: 10
Training loss: 0.0748989030041478 | Validation loss: 0.09435625374317169
Validation loss (ends of cycles): [0.27015865 0.09435625]
------------------------------
Epoch: 11
Training loss: 0.07554080574349924 | Validation loss: 0.12675826251506805
Validation loss (ends of cycles): [0.27015865 0.09435625]
------------------------------
Epoch: 12
Training loss: 0.07253992252729156 | Validation loss: 0.0890277624130249
Validation loss (ends of cycles): [0.27015865 0.09435625]
------------------------------
Epoch: 13
Training loss: 0.06745774806900458 | Validation loss: 0.09933421015739441
Validation loss (ends of cycles): [0.27015865 0.09435625]
------------------------------
Epoch: 14
Training loss: 0.06639396636323495 | Validation loss: 0.25477135181427
Validation loss (ends of cycles): [0.27015865 0.09435625]
------------------------------
Epoch: 15
Training loss: 0.07153629342263396 | Validation loss: 0.12218687683343887
Validation loss (ends of cycles): [0.27015865 0.09435625]
------------------------------
Epoch: 16
Training loss: 0.06362383981997316 | Validation loss: 0.09838857501745224
Validation loss (ends of cycles): [0.27015865 0.09435625]
------------------------------
Epoch: 17
Training loss: 0.06499294090000066 | Validation loss: 0.12082856893539429
Validation loss (ends of cycles): [0.27015865 0.09435625]
------------------------------
Epoch: 18
Training loss: 0.057603581385179 | Validation loss: 0.08688703179359436
Validation loss (ends of cycles): [0.27015865 0.09435625]
------------------------------
Epoch: 19
Training loss: 0.054477095265280114 | Validation loss: 0.07416538149118423
Validation loss (ends of cycles): [0.27015865 0.09435625]
------------------------------
Epoch: 20
Training loss: 0.053827235983176666 | Validation loss: 0.07726097851991653
Validation loss (ends of cycles): [0.27015865 0.09435625 0.07726098]
------------------------------
Epoch: 21
Training loss: 0.05040800148113207 | Validation loss: 0.12319447845220566
Validation loss (ends of cycles): [0.27015865 0.09435625 0.07726098]
------------------------------
Epoch: 22
Training loss: 0.04939531450244514 | Validation loss: 0.07437314093112946
Validation loss (ends of cycles): [0.27015865 0.09435625 0.07726098]
------------------------------
Epoch: 23
Training loss: 0.04975901036099954 | Validation loss: 0.2664039731025696
Validation loss (ends of cycles): [0.27015865 0.09435625 0.07726098]
------------------------------
Epoch: 24
Training loss: 0.05050407112999396 | Validation loss: 0.07585630565881729
Validation loss (ends of cycles): [0.27015865 0.09435625 0.07726098]
------------------------------
Epoch: 25
Training loss: 0.052467319098385895 | Validation loss: 0.07166603952646255
Validation loss (ends of cycles): [0.27015865 0.09435625 0.07726098]
------------------------------
Epoch: 26
Training loss: 0.04566661116074432 | Validation loss: 0.11543244868516922
Validation loss (ends of cycles): [0.27015865 0.09435625 0.07726098]
------------------------------
Epoch: 27
Training loss: 0.048686319454149765 | Validation loss: 0.29796749353408813
Validation loss (ends of cycles): [0.27015865 0.09435625 0.07726098]
------------------------------
Epoch: 28
Training loss: 0.04886364056305452 | Validation loss: 0.18778347969055176
Validation loss (ends of cycles): [0.27015865 0.09435625 0.07726098]
------------------------------
Epoch: 29
Training loss: 0.04836133596572009 | Validation loss: 0.06662750244140625
Validation loss (ends of cycles): [0.27015865 0.09435625 0.07726098]
------------------------------
Epoch: 30
Training loss: 0.042628409510309044 | Validation loss: 0.06421707570552826
Validation loss (ends of cycles): [0.27015865 0.09435625 0.07726098 0.06421708]
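
The within-cycle spikes visible above (e.g. epochs 14, 23 and 27 of seed 10, where the validation loss jumps to 0.25-0.30 before recovering) are a common pattern with cyclical schedules: the loss tends to degrade while the learning rate climbs toward max_lr and to recover as it anneals back toward base_lr, which is why only the end-of-cycle values are kept for comparison. For reference, here is a plain-Python sketch of the 'triangular' policy, following the formula documented for torch.optim.lr_scheduler.CyclicLR (t is the scheduler step count, s is step_size_up):

import math

def triangular_lr(t, base_lr=3e-5, max_lr=1e-3, s=5):
    cycle = math.floor(1 + t / (2 * s))  # which cycle step t falls in
    x = abs(t / s - 2 * cycle + 1)       # 0 at the peak, 1 at the base
    return base_lr + (max_lr - base_lr) * max(0.0, 1 - x)

# The LR peaks at max_lr mid-cycle (t=5) and is back at base_lr at t=0 and
# t=10, i.e. every 2*s = 10 steps, exactly where the snapshots above land.
print([round(triangular_lr(t), 6) for t in (0, 3, 5, 7, 10)])
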
--------------------------------------------------------------------------------
Seed: 11
--------------------------------------------------------------------------------
------------------------------
Epoch: 0
Training loss: 0.22647233442826706 | Validation loss: 0.23534537851810455
Validation loss (ends of cycles): [0.23534538]
------------------------------
Epoch: 1
Training loss: 0.2113880677656694 | Validation loss: 0.23379206657409668
Validation loss (ends of cycles): [0.23534538]
------------------------------
Epoch: 2
Training loss: 0.19402675059708682 | Validation loss: 0.23131398856639862
Validation loss (ends of cycles): [0.23534538]
------------------------------
Epoch: 3
Training loss: 0.1751593459736217 | Validation loss: 0.22928957641124725
Validation loss (ends of cycles): [0.23534538]
------------------------------
Epoch: 4
Training loss: 0.15052058547735214 | Validation loss: 0.22850777208805084
Validation loss (ends of cycles): [0.23534538]
------------------------------
Epoch: 5
Training loss: 0.12454998357729478 | Validation loss: 0.22395320236682892
Validation loss (ends of cycles): [0.23534538]
------------------------------
Epoch: 6
Training loss: 0.10516942563382062 | Validation loss: 0.19217549264431
Validation loss (ends of cycles): [0.23534538]
------------------------------
Epoch: 7
Training loss: 0.09278485009616072 | Validation loss: 0.15953890979290009
Validation loss (ends of cycles): [0.23534538]
------------------------------
Epoch: 8
Training loss: 0.08566174080426042 | Validation loss: 0.12811169028282166
Validation loss (ends of cycles): [0.23534538]
------------------------------
Epoch: 9
Training loss: 0.08283901891925118 | Validation loss: 0.12298424541950226
Validation loss (ends of cycles): [0.23534538]
------------------------------
Epoch: 10
Training loss: 0.08166876401413571 | Validation loss: 0.12157569825649261
Validation loss (ends of cycles): [0.23534538 0.1215757 ]
------------------------------
Epoch: 11
Training loss: 0.07987238669937308 | Validation loss: 0.12463139742612839
Validation loss (ends of cycles): [0.23534538 0.1215757 ]
------------------------------
Epoch: 12
Training loss: 0.07590313425118272 | Validation loss: 0.11578209698200226
Validation loss (ends of cycles): [0.23534538 0.1215757 ]
------------------------------
Epoch: 13
Training loss: 0.0747046714479273 | Validation loss: 0.124772809445858
Validation loss (ends of cycles): [0.23534538 0.1215757 ]
------------------------------
Epoch: 14
Training loss: 0.07252140851183371 | Validation loss: 0.11757127940654755
Validation loss (ends of cycles): [0.23534538 0.1215757 ]
------------------------------
Epoch: 15
Training loss: 0.07997047291560606 | Validation loss: 0.11393731087446213
Validation loss (ends of cycles): [0.23534538 0.1215757 ]
------------------------------
Epoch: 16
Training loss: 0.07546812854707241 | Validation loss: 0.1232791543006897
Validation loss (ends of cycles): [0.23534538 0.1215757 ]
------------------------------
Epoch: 17
Training loss: 0.07227142189036716 | Validation loss: 0.11599378287792206
Validation loss (ends of cycles): [0.23534538 0.1215757 ]
------------------------------
Epoch: 18
Training loss: 0.06743957068432462 | Validation loss: 0.10658861696720123
Validation loss (ends of cycles): [0.23534538 0.1215757 ]
------------------------------
Epoch: 19
Training loss: 0.06617797978899696 | Validation loss: 0.10020831972360611
Validation loss (ends of cycles): [0.23534538 0.1215757 ]
------------------------------
Epoch: 20
Training loss: 0.06662786921316927 | Validation loss: 0.09910339117050171
Validation loss (ends of cycles): [0.23534538 0.1215757  0.09910339]
------------------------------
Epoch: 21
Training loss: 0.0641032149168578 | Validation loss: 0.09659582376480103
Validation loss (ends of cycles): [0.23534538 0.1215757  0.09910339]
------------------------------
Epoch: 22
Training loss: 0.06450243294239044 | Validation loss: 0.09174611419439316
Validation loss (ends of cycles): [0.23534538 0.1215757  0.09910339]
------------------------------
Epoch: 23
Training loss: 0.06076653200117024 | Validation loss: 0.0868479311466217
Validation loss (ends of cycles): [0.23534538 0.1215757  0.09910339]
------------------------------
Epoch: 24
Training loss: 0.057067283344539727 | Validation loss: 0.1249217838048935
Validation loss (ends of cycles): [0.23534538 0.1215757  0.09910339]
------------------------------
Epoch: 25
Training loss: 0.06456780433654785 | Validation loss: 0.10062471777200699
Validation loss (ends of cycles): [0.23534538 0.1215757  0.09910339]
------------------------------
Epoch: 26
Training loss: 0.060377268797971985 | Validation loss: 0.07824037969112396
Validation loss (ends of cycles): [0.23534538 0.1215757  0.09910339]
------------------------------
Epoch: 27
Training loss: 0.05700255642560395 | Validation loss: 0.08699680119752884
Validation loss (ends of cycles): [0.23534538 0.1215757  0.09910339]
------------------------------
Epoch: 28
Training loss: 0.053185255182060326 | Validation loss: 0.07531373202800751
Validation loss (ends of cycles): [0.23534538 0.1215757  0.09910339]
------------------------------
Epoch: 29
Training loss: 0.05329576574943282 | Validation loss: 0.07017888128757477
Validation loss (ends of cycles): [0.23534538 0.1215757  0.09910339]
------------------------------
Epoch: 30
Training loss: 0.051533097231929954 | Validation loss: 0.06938274204730988
Validation loss (ends of cycles): [0.23534538 0.1215757  0.09910339 0.06938274]
--------------------------------------------------------------------------------
Seed: 12
--------------------------------------------------------------------------------
------------------------------
Epoch: 0
Training loss: 0.31496341824531554 | Validation loss: 0.3085462301969528
Validation loss (ends of cycles): [0.30854623]
------------------------------
Epoch: 1
Training loss: 0.296749347448349 | Validation loss: 0.3101869523525238
Validation loss (ends of cycles): [0.30854623]
------------------------------
Epoch: 2
Training loss: 0.2731067031621933 | Validation loss: 0.31405508518218994
Validation loss (ends of cycles): [0.30854623]
------------------------------
Epoch: 3
Training loss: 0.24493784904479982 | Validation loss: 0.3216366618871689
Validation loss (ends of cycles): [0.30854623]
------------------------------
Epoch: 4
Training loss: 0.21210817396640777 | Validation loss: 0.33603934943675995
Validation loss (ends of cycles): [0.30854623]
------------------------------
Epoch: 5
Training loss: 0.17602172642946243 | Validation loss: 0.3545747697353363
Validation loss (ends of cycles): [0.30854623]
------------------------------
Epoch: 6
Training loss: 0.14613406211137772 | Validation loss: 0.35567839443683624
Validation loss (ends of cycles): [0.30854623]
------------------------------
Epoch: 7
Training loss: 0.1257573790848255 | Validation loss: 0.3237410634756088
Validation loss (ends of cycles): [0.30854623]
------------------------------
Epoch: 8
Training loss: 0.11371021643280983 | Validation loss: 0.26202917098999023
Validation loss (ends of cycles): [0.30854623]
------------------------------
Epoch: 9
Training loss: 0.10723200663924218 | Validation loss: 0.22564251720905304
Validation loss (ends of cycles): [0.30854623]
------------------------------
Epoch: 10
Training loss: 0.10520545840263366 | Validation loss: 0.20360779762268066
Validation loss (ends of cycles): [0.30854623 0.2036078 ]
------------------------------
Epoch: 11
Training loss: 0.10288014262914658 | Validation loss: 0.20160969346761703
Validation loss (ends of cycles): [0.30854623 0.2036078 ]
------------------------------
Epoch: 12
Training loss: 0.09802041873335839 | Validation loss: 0.20292669162154198
Validation loss (ends of cycles): [0.30854623 0.2036078 ]
------------------------------
Epoch: 13
Training loss: 0.09413954019546508 | Validation loss: 0.17354870960116386
Validation loss (ends of cycles): [0.30854623 0.2036078 ]
------------------------------
Epoch: 14
Training loss: 0.08832933753728867 | Validation loss: 0.22394151240587234
Validation loss (ends of cycles): [0.30854623 0.2036078 ]
------------------------------
Epoch: 15
Training loss: 0.08517145812511444 | Validation loss: 0.16865815967321396
Validation loss (ends of cycles): [0.30854623 0.2036078 ]
------------------------------
Epoch: 16
Training loss: 0.08164194636046887 | Validation loss: 0.16437028348445892
Validation loss (ends of cycles): [0.30854623 0.2036078 ]
------------------------------
Epoch: 17
Training loss: 0.08001556545495987 | Validation loss: 0.20558318868279457
Validation loss (ends of cycles): [0.30854623 0.2036078 ]
------------------------------
Epoch: 18
Training loss: 0.0778819728642702 | Validation loss: 0.183094821870327
Validation loss (ends of cycles): [0.30854623 0.2036078 ]
------------------------------
Epoch: 19
Training loss: 0.07783219516277314 | Validation loss: 0.1785370595753193
Validation loss (ends of cycles): [0.30854623 0.2036078 ]
------------------------------
Epoch: 20
Training loss: 0.07695055566728115 | Validation loss: 0.17569859325885773
Validation loss (ends of cycles): [0.30854623 0.2036078  0.17569859]
------------------------------
Epoch: 21
Training loss: 0.07580824568867683 | Validation loss: 0.20672567933797836
Validation loss (ends of cycles): [0.30854623 0.2036078  0.17569859]
------------------------------
Epoch: 22
Training loss: 0.07075491286814213 | Validation loss: 0.17038242146372795
Validation loss (ends of cycles): [0.30854623 0.2036078  0.17569859]
------------------------------
Epoch: 23
Training loss: 0.07162440195679665 | Validation loss: 0.17492885142564774
Validation loss (ends of cycles): [0.30854623 0.2036078  0.17569859]
------------------------------
Epoch: 24
Training loss: 0.06862989962100982 | Validation loss: 0.1177842915058136
Validation loss (ends of cycles): [0.30854623 0.2036078  0.17569859]
------------------------------
Epoch: 25
Training loss: 0.06372749842703343 | Validation loss: 0.09121009334921837
Validation loss (ends of cycles): [0.30854623 0.2036078  0.17569859]
------------------------------
Epoch: 26
Training loss: 0.06439065597951413 | Validation loss: 0.11329861357808113
Validation loss (ends of cycles): [0.30854623 0.2036078  0.17569859]
------------------------------
Epoch: 27
Training loss: 0.05956905409693718 | Validation loss: 0.1521657519042492
Validation loss (ends of cycles): [0.30854623 0.2036078  0.17569859]
------------------------------
Epoch: 28
Training loss: 0.054637243039906025 | Validation loss: 0.065576933324337
Validation loss (ends of cycles): [0.30854623 0.2036078  0.17569859]
------------------------------
Epoch: 29
Training loss: 0.06076151393353939 | Validation loss: 0.058655871078372
Validation loss (ends of cycles): [0.30854623 0.2036078  0.17569859]
------------------------------
Epoch: 30
Training loss: 0.05368628203868866 | Validation loss: 0.06157389655709267
Validation loss (ends of cycles): [0.30854623 0.2036078  0.17569859 0.0615739 ]
--------------------------------------------------------------------------------
Seed: 13
--------------------------------------------------------------------------------
------------------------------
Epoch: 0
Training loss: 0.3849785625934601 | Validation loss: 0.45114198327064514
Validation loss (ends of cycles): [0.45114198]
------------------------------
Epoch: 1
Training loss: 0.3704269663854079 | Validation loss: 0.44007259607315063
Validation loss (ends of cycles): [0.45114198]
------------------------------
Epoch: 2
Training loss: 0.3476231748407537 | Validation loss: 0.4194194972515106
Validation loss (ends of cycles): [0.45114198]
------------------------------
Epoch: 3
Training loss: 0.31672785905274475 | Validation loss: 0.39160406589508057
Validation loss (ends of cycles): [0.45114198]
------------------------------
Epoch: 4
Training loss: 0.27650914002548566 | Validation loss: 0.35673192143440247
Validation loss (ends of cycles): [0.45114198]
------------------------------
Epoch: 5
Training loss: 0.2297362427819859 | Validation loss: 0.31353193521499634
Validation loss (ends of cycles): [0.45114198]
------------------------------
Epoch: 6
Training loss: 0.18639830432154916 | Validation loss: 0.27488814294338226
Validation loss (ends of cycles): [0.45114198]
------------------------------
Epoch: 7
Training loss: 0.15894355489449066 | Validation loss: 0.2300477847456932
Validation loss (ends of cycles): [0.45114198]
------------------------------
Epoch: 8
Training loss: 0.1421263353391127 | Validation loss: 0.18800196796655655
Validation loss (ends of cycles): [0.45114198]
------------------------------
Epoch: 9
Training loss: 0.13018270107832822 | Validation loss: 0.1685379222035408
Validation loss (ends of cycles): [0.45114198]
------------------------------
Epoch: 10
Training loss: 0.1273436485366388 | Validation loss: 0.15773064270615578
Validation loss (ends of cycles): [0.45114198 0.15773064]
------------------------------
Epoch: 11
Training loss: 0.12412191317840056 | Validation loss: 0.15657923743128777
Validation loss (ends of cycles): [0.45114198 0.15773064]
------------------------------
Epoch: 12
Training loss: 0.11569374122402885 | Validation loss: 0.14440815895795822
Validation loss (ends of cycles): [0.45114198 0.15773064]
------------------------------
Epoch: 13
Training loss: 0.10674315149133856 | Validation loss: 0.08752488531172276
Validation loss (ends of cycles): [0.45114198 0.15773064]
------------------------------
Epoch: 14
Training loss: 0.09485080702738329 | Validation loss: 0.08425725065171719
Validation loss (ends of cycles): [0.45114198 0.15773064]
------------------------------
Epoch: 15
Training loss: 0.08512867479161783 | Validation loss: 0.06908361706882715
Validation loss (ends of cycles): [0.45114198 0.15773064]
------------------------------
Epoch: 16
Training loss: 0.07970026880502701 | Validation loss: 0.12750844284892082
Validation loss (ends of cycles): [0.45114198 0.15773064]
------------------------------
Epoch: 17
Training loss: 0.07524278997020288 | Validation loss: 0.07422960735857487
Validation loss (ends of cycles): [0.45114198 0.15773064]
------------------------------
Epoch: 18
Training loss: 0.07073394175280225 | Validation loss: 0.06022882554680109
Validation loss (ends of cycles): [0.45114198 0.15773064]
------------------------------
Epoch: 19
Training loss: 0.06288122284141454 | Validation loss: 0.06339698284864426
Validation loss (ends of cycles): [0.45114198 0.15773064]
------------------------------
Epoch: 20
Training loss: 0.061651048165830696 | Validation loss: 0.06467864662408829
Validation loss (ends of cycles): [0.45114198 0.15773064 0.06467865]
------------------------------
Epoch: 21
Training loss: 0.06111889738928188 | Validation loss: 0.06646665744483471
Validation loss (ends of cycles): [0.45114198 0.15773064 0.06467865]
------------------------------
Epoch: 22
Training loss: 0.05873148346489126 | Validation loss: 0.06867950409650803
Validation loss (ends of cycles): [0.45114198 0.15773064 0.06467865]
------------------------------
Epoch: 23
Training loss: 0.060050982643257485 | Validation loss: 0.061418455094099045
Validation loss (ends of cycles): [0.45114198 0.15773064 0.06467865]
------------------------------
Epoch: 24
Training loss: 0.060610766437920655 | Validation loss: 0.05618366505950689
Validation loss (ends of cycles): [0.45114198 0.15773064 0.06467865]
------------------------------
Epoch: 25
Training loss: 0.0633573637089946 | Validation loss: 0.04861140996217728
Validation loss (ends of cycles): [0.45114198 0.15773064 0.06467865]
------------------------------
Epoch: 26
Training loss: 0.06116855178367008 | Validation loss: 0.07926485501229763
Validation loss (ends of cycles): [0.45114198 0.15773064 0.06467865]
------------------------------
Epoch: 27
Training loss: 0.05911888656291095 | Validation loss: 0.058403012342751026
Validation loss (ends of cycles): [0.45114198 0.15773064 0.06467865]
------------------------------
Epoch: 28
Training loss: 0.05666702342304317 | Validation loss: 0.042423720471560955
Validation loss (ends of cycles): [0.45114198 0.15773064 0.06467865]
------------------------------
Epoch: 29
Training loss: 0.05089438266374848 | Validation loss: 0.03783706063404679
Validation loss (ends of cycles): [0.45114198 0.15773064 0.06467865]
------------------------------
Epoch: 30
Training loss: 0.05125425891442732 | Validation loss: 0.04267655219882727
Validation loss (ends of cycles): [0.45114198 0.15773064 0.06467865 0.04267655]
--------------------------------------------------------------------------------
Seed: 14
--------------------------------------------------------------------------------
------------------------------
Epoch: 0
Training loss: 0.41509213149547575 | Validation loss: 0.38505399227142334
Validation loss (ends of cycles): [0.38505399]
------------------------------
Epoch: 1
Training loss: 0.39179774820804597 | Validation loss: 0.37304168939590454
Validation loss (ends of cycles): [0.38505399]
------------------------------
Epoch: 2
Training loss: 0.35843105912208556 | Validation loss: 0.35156017541885376
Validation loss (ends of cycles): [0.38505399]
------------------------------
Epoch: 3
Training loss: 0.31765433847904206 | Validation loss: 0.32422196865081787
Validation loss (ends of cycles): [0.38505399]
------------------------------
Epoch: 4
Training loss: 0.2673868998885155 | Validation loss: 0.2933422923088074
Validation loss (ends of cycles): [0.38505399]
------------------------------
Epoch: 5
Training loss: 0.21240467727184295 | Validation loss: 0.26028379797935486
Validation loss (ends of cycles): [0.38505399]
------------------------------
Epoch: 6
Training loss: 0.1678038567304611 | Validation loss: 0.2316894382238388
Validation loss (ends of cycles): [0.38505399]
------------------------------
Epoch: 7
Training loss: 0.1405745640397072 | Validation loss: 0.1750834584236145
Validation loss (ends of cycles): [0.38505399]
------------------------------
Epoch: 8
Training loss: 0.12251835465431213 | Validation loss: 0.1384737491607666
Validation loss (ends of cycles): [0.38505399]
------------------------------
Epoch: 9
Training loss: 0.11450750157237052 | Validation loss: 0.10572589933872223
Validation loss (ends of cycles): [0.38505399]
------------------------------
Epoch: 10
Training loss: 0.10943077206611633 | Validation loss: 0.08536231517791748
Validation loss (ends of cycles): [0.38505399 0.08536232]
------------------------------
Epoch: 11
Training loss: 0.10874983444809913 | Validation loss: 0.06326284259557724
Validation loss (ends of cycles): [0.38505399 0.08536232]
------------------------------
Epoch: 12
Training loss: 0.10184967592358589 | Validation loss: 0.05090981721878052
Validation loss (ends of cycles): [0.38505399 0.08536232]
------------------------------
Epoch: 13
Training loss: 0.09422398544847965 | Validation loss: 0.05360788851976395
Validation loss (ends of cycles): [0.38505399 0.08536232]
------------------------------
Epoch: 14
Training loss: 0.09096146337687969 | Validation loss: 0.1130673959851265
Validation loss (ends of cycles): [0.38505399 0.08536232]
------------------------------
Epoch: 15
Training loss: 0.0848462775349617 | Validation loss: 0.1005302146077156
Validation loss (ends of cycles): [0.38505399 0.08536232]
------------------------------
Epoch: 16
Training loss: 0.08346045427024365 | Validation loss: 0.04016828536987305
Validation loss (ends of cycles): [0.38505399 0.08536232]
------------------------------
Epoch: 17
Training loss: 0.08220213800668716 | Validation loss: 0.05280933529138565
Validation loss (ends of cycles): [0.38505399 0.08536232]
------------------------------
Epoch: 18
Training loss: 0.07907124496996402 | Validation loss: 0.04327217862010002
Validation loss (ends of cycles): [0.38505399 0.08536232]
------------------------------
Epoch: 19
Training loss: 0.07301834337413311 | Validation loss: 0.041828058660030365
Validation loss (ends of cycles): [0.38505399 0.08536232]
------------------------------
Epoch: 20
Training loss: 0.0685610394924879 | Validation loss: 0.04052760452032089
Validation loss (ends of cycles): [0.38505399 0.08536232 0.0405276 ]
------------------------------
Epoch: 21
Training loss: 0.06799696236848832 | Validation loss: 0.036265864968299866
Validation loss (ends of cycles): [0.38505399 0.08536232 0.0405276 ]
------------------------------
Epoch: 22
Training loss: 0.06856372691690922 | Validation loss: 0.03583543002605438
Validation loss (ends of cycles): [0.38505399 0.08536232 0.0405276 ]
------------------------------
Epoch: 23
Training loss: 0.06802004836499691 | Validation loss: 0.03715496510267258
Validation loss (ends of cycles): [0.38505399 0.08536232 0.0405276 ]
------------------------------
Epoch: 24
Training loss: 0.06912181153893471 | Validation loss: 0.04452311620116234
Validation loss (ends of cycles): [0.38505399 0.08536232 0.0405276 ]
------------------------------
Epoch: 25
Training loss: 0.0747891653329134 | Validation loss: 0.12103398144245148
Validation loss (ends of cycles): [0.38505399 0.08536232 0.0405276 ]
------------------------------
Epoch: 26
Training loss: 0.06961565390229225 | Validation loss: 0.17962095141410828
Validation loss (ends of cycles): [0.38505399 0.08536232 0.0405276 ]
------------------------------
Epoch: 27
Training loss: 0.06380176991224289 | Validation loss: 0.07026810944080353
Validation loss (ends of cycles): [0.38505399 0.08536232 0.0405276 ]
------------------------------
Epoch: 28
Training loss: 0.06431049816310405 | Validation loss: 0.0371607169508934
Validation loss (ends of cycles): [0.38505399 0.08536232 0.0405276 ]
------------------------------
Epoch: 29
Training loss: 0.06408782787621022 | Validation loss: 0.04384800046682358
Validation loss (ends of cycles): [0.38505399 0.08536232 0.0405276 ]
------------------------------
Epoch: 30
Training loss: 0.05868135690689087 | Validation loss: 0.03950566053390503
Validation loss (ends of cycles): [0.38505399 0.08536232 0.0405276  0.03950566]
--------------------------------------------------------------------------------
Seed: 15
--------------------------------------------------------------------------------
------------------------------
Epoch: 0
Training loss: 0.497423380613327 | Validation loss: 0.5236005187034607
Validation loss (ends of cycles): [0.52360052]
------------------------------
Epoch: 1
Training loss: 0.47451717203313654 | Validation loss: 0.5123611688613892
Validation loss (ends of cycles): [0.52360052]
------------------------------
Epoch: 2
Training loss: 0.4432302469556982 | Validation loss: 0.4904400110244751
Validation loss (ends of cycles): [0.52360052]
------------------------------
Epoch: 3
Training loss: 0.4015417017719962 | Validation loss: 0.4578700065612793
Validation loss (ends of cycles): [0.52360052]
------------------------------
Epoch: 4
Training loss: 0.34881053458560596 | Validation loss: 0.41512584686279297
Validation loss (ends of cycles): [0.52360052]
------------------------------
Epoch: 5
Training loss: 0.2884210998361761 | Validation loss: 0.37235116958618164
Validation loss (ends of cycles): [0.52360052]
------------------------------
Epoch: 6
Training loss: 0.23100726983763956 | Validation loss: 0.3235102891921997
Validation loss (ends of cycles): [0.52360052]
------------------------------
Epoch: 7
Training loss: 0.19382140717723154 | Validation loss: 0.24588394165039062
Validation loss (ends of cycles): [0.52360052]
------------------------------
Epoch: 8
Training loss: 0.16921459138393402 | Validation loss: 0.187604621052742
Validation loss (ends of cycles): [0.52360052]
------------------------------
Epoch: 9
Training loss: 0.15417007424614645 | Validation loss: 0.20316246151924133
Validation loss (ends of cycles): [0.52360052]
------------------------------
Epoch: 10
Training loss: 0.14732958579605276 | Validation loss: 0.17677535116672516
Validation loss (ends of cycles): [0.52360052 0.17677535]
------------------------------
Epoch: 11
Training loss: 0.1463459621776234 | Validation loss: 0.14159077405929565
Validation loss (ends of cycles): [0.52360052 0.17677535]
------------------------------
Epoch: 12
Training loss: 0.13318475064906207 | Validation loss: 0.181656152009964
Validation loss (ends of cycles): [0.52360052 0.17677535]
------------------------------
Epoch: 13
Training loss: 0.12301303310827776 | Validation loss: 0.10158068686723709
Validation loss (ends of cycles): [0.52360052 0.17677535]
------------------------------
Epoch: 14
Training loss: 0.10729351978410374 | Validation loss: 0.09514954686164856
Validation loss (ends of cycles): [0.52360052 0.17677535]
------------------------------
Epoch: 15
Training loss: 0.09595484286546707 | Validation loss: 0.08995315432548523
Validation loss (ends of cycles): [0.52360052 0.17677535]
------------------------------
Epoch: 16
Training loss: 0.08417292159389365 | Validation loss: 0.0888112485408783
Validation loss (ends of cycles): [0.52360052 0.17677535]
------------------------------
Epoch: 17
Training loss: 0.08165155740624125 | Validation loss: 0.0818200558423996
Validation loss (ends of cycles): [0.52360052 0.17677535]
------------------------------
Epoch: 18
Training loss: 0.0723276978189295 | Validation loss: 0.08610425889492035
Validation loss (ends of cycles): [0.52360052 0.17677535]
------------------------------
Epoch: 19
Training loss: 0.07191044092178345 | Validation loss: 0.08774658292531967
Validation loss (ends of cycles): [0.52360052 0.17677535]
------------------------------
Epoch: 20
Training loss: 0.06833597780628638 | Validation loss: 0.08297054469585419
Validation loss (ends of cycles): [0.52360052 0.17677535 0.08297054]
------------------------------
Epoch: 21
Training loss: 0.06698174876245586 | Validation loss: 0.07742486894130707
Validation loss (ends of cycles): [0.52360052 0.17677535 0.08297054]
------------------------------
Epoch: 22
Training loss: 0.06470589475198225 | Validation loss: 0.07569701969623566
Validation loss (ends of cycles): [0.52360052 0.17677535 0.08297054]
------------------------------
Epoch: 23
Training loss: 0.06563659706576304 | Validation loss: 0.090906523168087
Validation loss (ends of cycles): [0.52360052 0.17677535 0.08297054]
------------------------------
Epoch: 24
Training loss: 0.06569409421221777 | Validation loss: 0.08477133512496948
Validation loss (ends of cycles): [0.52360052 0.17677535 0.08297054]
------------------------------
Epoch: 25
Training loss: 0.06704503907398744 | Validation loss: 0.09006834775209427
Validation loss (ends of cycles): [0.52360052 0.17677535 0.08297054]
------------------------------
Epoch: 26
Training loss: 0.07161630249836227 | Validation loss: 0.07447994500398636
Validation loss (ends of cycles): [0.52360052 0.17677535 0.08297054]
------------------------------
Epoch: 27
Training loss: 0.06389857828617096 | Validation loss: 0.0818374902009964
Validation loss (ends of cycles): [0.52360052 0.17677535 0.08297054]
------------------------------
Epoch: 28
Training loss: 0.060700812631032684 | Validation loss: 0.06722551584243774
Validation loss (ends of cycles): [0.52360052 0.17677535 0.08297054]
------------------------------
Epoch: 29
Training loss: 0.05577064026147127 | Validation loss: 0.07123492658138275
Validation loss (ends of cycles): [0.52360052 0.17677535 0.08297054]
------------------------------
Epoch: 30
Training loss: 0.05637003786184571 | Validation loss: 0.0676247850060463
Validation loss (ends of cycles): [0.52360052 0.17677535 0.08297054 0.06762479]
--------------------------------------------------------------------------------
Seed: 16
--------------------------------------------------------------------------------
------------------------------
Epoch: 0
Training loss: 0.47987140308726917 | Validation loss: 0.4334615617990494
Validation loss (ends of cycles): [0.43346156]
------------------------------
Epoch: 1
Training loss: 0.45977337793870404 | Validation loss: 0.42151939868927
Validation loss (ends of cycles): [0.43346156]
------------------------------
Epoch: 2
Training loss: 0.42812349579551 | Validation loss: 0.39918772876262665
Validation loss (ends of cycles): [0.43346156]
------------------------------
Epoch: 3
Training loss: 0.3865386030890725 | Validation loss: 0.3691086918115616
Validation loss (ends of cycles): [0.43346156]
------------------------------
Epoch: 4
Training loss: 0.33454837040467694 | Validation loss: 0.3325386643409729
Validation loss (ends of cycles): [0.43346156]
------------------------------
Epoch: 5
Training loss: 0.27320032634518365 | Validation loss: 0.29823416471481323
Validation loss (ends of cycles): [0.43346156]
------------------------------
Epoch: 6
Training loss: 0.22226029634475708 | Validation loss: 0.2558329254388809
Validation loss (ends of cycles): [0.43346156]
------------------------------
Epoch: 7
Training loss: 0.18792442774230783 | Validation loss: 0.22173385322093964
Validation loss (ends of cycles): [0.43346156]
------------------------------
Epoch: 8
Training loss: 0.16651706397533417 | Validation loss: 0.18216048181056976
Validation loss (ends of cycles): [0.43346156]
------------------------------
Epoch: 9
Training loss: 0.1563743305477229 | Validation loss: 0.15323598682880402
Validation loss (ends of cycles): [0.43346156]
------------------------------
Epoch: 10
Training loss: 0.15220273286104202 | Validation loss: 0.14952246099710464
Validation loss (ends of cycles): [0.43346156 0.14952246]
------------------------------
Epoch: 11
Training loss: 0.14907611906528473 | Validation loss: 0.15525143593549728
Validation loss (ends of cycles): [0.43346156 0.14952246]
------------------------------
Epoch: 12
Training loss: 0.14079013602300125 | Validation loss: 0.12134148925542831
Validation loss (ends of cycles): [0.43346156 0.14952246]
------------------------------
Epoch: 13
Training loss: 0.12847822565924039 | Validation loss: 0.15095221251249313
Validation loss (ends of cycles): [0.43346156 0.14952246]
------------------------------
Epoch: 14
Training loss: 0.11500959301536734 | Validation loss: 0.09931726008653641
Validation loss (ends of cycles): [0.43346156 0.14952246]
------------------------------
Epoch: 15
Training loss: 0.10405388474464417 | Validation loss: 0.12409628182649612
Validation loss (ends of cycles): [0.43346156 0.14952246]
------------------------------
Epoch: 16
Training loss: 0.10094385120001706 | Validation loss: 0.10372162610292435
Validation loss (ends of cycles): [0.43346156 0.14952246]
------------------------------
Epoch: 17
Training loss: 0.09531495787880638 | Validation loss: 0.09211777150630951
Validation loss (ends of cycles): [0.43346156 0.14952246]
------------------------------
Epoch: 18
Training loss: 0.08857555281032216 | Validation loss: 0.09498467668890953
Validation loss (ends of cycles): [0.43346156 0.14952246]
------------------------------
Epoch: 19
Training loss: 0.0935635573484681 | Validation loss: 0.09667631611227989
Validation loss (ends of cycles): [0.43346156 0.14952246]
------------------------------
Epoch: 20
Training loss: 0.08716208182952621 | Validation loss: 0.09361663460731506
Validation loss (ends of cycles): [0.43346156 0.14952246 0.09361663]
------------------------------
Epoch: 21
Training loss: 0.086543853987347 | Validation loss: 0.0879531130194664
Validation loss (ends of cycles): [0.43346156 0.14952246 0.09361663]
------------------------------
Epoch: 22
Training loss: 0.08200569958849387 | Validation loss: 0.08755558729171753
Validation loss (ends of cycles): [0.43346156 0.14952246 0.09361663]
------------------------------
Epoch: 23
Training loss: 0.085517230020328 | Validation loss: 0.08203435316681862
Validation loss (ends of cycles): [0.43346156 0.14952246 0.09361663]
------------------------------
Epoch: 24
Training loss: 0.0809919996695085 | Validation loss: 0.07936260104179382
Validation loss (ends of cycles): [0.43346156 0.14952246 0.09361663]
------------------------------
Epoch: 25
Training loss: 0.08055772056633775 | Validation loss: 0.08800634741783142
Validation loss (ends of cycles): [0.43346156 0.14952246 0.09361663]
------------------------------
Epoch: 26
Training loss: 0.07741718976335092 | Validation loss: 0.08297253772616386
Validation loss (ends of cycles): [0.43346156 0.14952246 0.09361663]
------------------------------
Epoch: 27
Training loss: 0.07764130017974159 | Validation loss: 0.07773812860250473
Validation loss (ends of cycles): [0.43346156 0.14952246 0.09361663]
------------------------------
Epoch: 28
Training loss: 0.07542369074442169 | Validation loss: 0.0761253871023655
Validation loss (ends of cycles): [0.43346156 0.14952246 0.09361663]
------------------------------
Epoch: 29
Training loss: 0.07147530669515784 | Validation loss: 0.07653047516942024
Validation loss (ends of cycles): [0.43346156 0.14952246 0.09361663]
------------------------------
Epoch: 30
Training loss: 0.07272762432694435 | Validation loss: 0.07702647522091866
Validation loss (ends of cycles): [0.43346156 0.14952246 0.09361663 0.07702648]
--------------------------------------------------------------------------------
Seed: 17
--------------------------------------------------------------------------------
------------------------------
Epoch: 0
Training loss: 0.5610803931951522 | Validation loss: 0.5742125511169434
Validation loss (ends of cycles): [0.57421255]
------------------------------
Epoch: 1
Training loss: 0.5384890228509903 | Validation loss: 0.5626022219657898
Validation loss (ends of cycles): [0.57421255]
------------------------------
Epoch: 2
Training loss: 0.5053218245506287 | Validation loss: 0.5410920977592468
Validation loss (ends of cycles): [0.57421255]
------------------------------
Epoch: 3
Training loss: 0.4658749431371689 | Validation loss: 0.5129314661026001
Validation loss (ends of cycles): [0.57421255]
------------------------------
Epoch: 4
Training loss: 0.4172980934381485 | Validation loss: 0.4840275049209595
Validation loss (ends of cycles): [0.57421255]
------------------------------
Epoch: 5
Training loss: 0.3584092140197754 | Validation loss: 0.45695602893829346
Validation loss (ends of cycles): [0.57421255]
------------------------------
Epoch: 6
Training loss: 0.3014706075191498 | Validation loss: 0.43395182490348816
Validation loss (ends of cycles): [0.57421255]
------------------------------
Epoch: 7
Training loss: 0.26016793251037595 | Validation loss: 0.3845180869102478
Validation loss (ends of cycles): [0.57421255]
------------------------------
Epoch: 8
Training loss: 0.23394290506839752 | Validation loss: 0.3097478747367859
Validation loss (ends of cycles): [0.57421255]
------------------------------
Epoch: 9
Training loss: 0.21567025035619736 | Validation loss: 0.23616959154605865
Validation loss (ends of cycles): [0.57421255]
------------------------------
Epoch: 10
Training loss: 0.20982832312583924 | Validation loss: 0.19686532020568848
Validation loss (ends of cycles): [0.57421255 0.19686532]
------------------------------
Epoch: 11
Training loss: 0.20462629497051238 | Validation loss: 0.1726258248090744
Validation loss (ends of cycles): [0.57421255 0.19686532]
------------------------------
Epoch: 12
Training loss: 0.19207933992147447 | Validation loss: 0.17744328081607819
Validation loss (ends of cycles): [0.57421255 0.19686532]
------------------------------
Epoch: 13
Training loss: 0.17268036156892777 | Validation loss: 0.12105994671583176
Validation loss (ends of cycles): [0.57421255 0.19686532]
------------------------------
Epoch: 14
Training loss: 0.14910297393798827 | Validation loss: 0.07750903069972992
Validation loss (ends of cycles): [0.57421255 0.19686532]
------------------------------
Epoch: 15
Training loss: 0.12406309843063354 | Validation loss: 0.06648653000593185
Validation loss (ends of cycles): [0.57421255 0.19686532]
------------------------------
Epoch: 16
Training loss: 0.1091826967895031 | Validation loss: 0.051474422216415405
Validation loss (ends of cycles): [0.57421255 0.19686532]
------------------------------
Epoch: 17
Training loss: 0.09453097954392434 | Validation loss: 0.09065815806388855
Validation loss (ends of cycles): [0.57421255 0.19686532]
------------------------------
Epoch: 18
Training loss: 0.08696112185716628 | Validation loss: 0.05250278487801552
Validation loss (ends of cycles): [0.57421255 0.19686532]
------------------------------
Epoch: 19
Training loss: 0.08315690755844116 | Validation loss: 0.047353047877550125
Validation loss (ends of cycles): [0.57421255 0.19686532]
------------------------------
Epoch: 20
Training loss: 0.08215439356863499 | Validation loss: 0.04667212441563606
Validation loss (ends of cycles): [0.57421255 0.19686532 0.04667212]
------------------------------
Epoch: 21
Training loss: 0.08283475637435914 | Validation loss: 0.04465808719396591
Validation loss (ends of cycles): [0.57421255 0.19686532 0.04667212]
------------------------------
Epoch: 22
Training loss: 0.07634258382022381 | Validation loss: 0.05145259574055672
Validation loss (ends of cycles): [0.57421255 0.19686532 0.04667212]
------------------------------
Epoch: 23
Training loss: 0.07885098718106746 | Validation loss: 0.055073946714401245
Validation loss (ends of cycles): [0.57421255 0.19686532 0.04667212]
------------------------------
Epoch: 24
Training loss: 0.07556320875883102 | Validation loss: 0.0855465829372406
Validation loss (ends of cycles): [0.57421255 0.19686532 0.04667212]
------------------------------
Epoch: 25
Training loss: 0.07224686443805695 | Validation loss: 0.0590750053524971
Validation loss (ends of cycles): [0.57421255 0.19686532 0.04667212]
------------------------------
Epoch: 26
Training loss: 0.07677309662103653 | Validation loss: 0.04134538397192955
Validation loss (ends of cycles): [0.57421255 0.19686532 0.04667212]
------------------------------
Epoch: 27
Training loss: 0.06744715496897698 | Validation loss: 0.03748737648129463
Validation loss (ends of cycles): [0.57421255 0.19686532 0.04667212]
------------------------------
Epoch: 28
Training loss: 0.06501720324158669 | Validation loss: 0.03147929161787033
Validation loss (ends of cycles): [0.57421255 0.19686532 0.04667212]
------------------------------
Epoch: 29
Training loss: 0.06560679152607918 | Validation loss: 0.031396884471178055
Validation loss (ends of cycles): [0.57421255 0.19686532 0.04667212]
------------------------------
Epoch: 30
Training loss: 0.06533972807228565 | Validation loss: 0.027407711371779442
Validation loss (ends of cycles): [0.57421255 0.19686532 0.04667212 0.02740771]
--------------------------------------------------------------------------------
Seed: 18
--------------------------------------------------------------------------------
------------------------------
Epoch: 0
Training loss: 0.5691605454141443 | Validation loss: 0.608923077583313
Validation loss (ends of cycles): [0.60892308]
------------------------------
Epoch: 1
Training loss: 0.5476431304758246 | Validation loss: 0.6018098592758179
Validation loss (ends of cycles): [0.60892308]
------------------------------
Epoch: 2
Training loss: 0.5234504071148959 | Validation loss: 0.589514970779419
Validation loss (ends of cycles): [0.60892308]
------------------------------
Epoch: 3
Training loss: 0.49886292490092193 | Validation loss: 0.5730634927749634
Validation loss (ends of cycles): [0.60892308]
------------------------------
Epoch: 4
Training loss: 0.46711285276846454 | Validation loss: 0.5529573559761047
Validation loss (ends of cycles): [0.60892308]
------------------------------
Epoch: 5
Training loss: 0.42809410799633374 | Validation loss: 0.5257182121276855
Validation loss (ends of cycles): [0.60892308]
------------------------------
Epoch: 6
Training loss: 0.387255614454096 | Validation loss: 0.49099910259246826
Validation loss (ends of cycles): [0.60892308]
------------------------------
Epoch: 7
Training loss: 0.35546800223263825 | Validation loss: 0.43925637006759644
Validation loss (ends of cycles): [0.60892308]
------------------------------
Epoch: 8
Training loss: 0.3322257250547409 | Validation loss: 0.4052078127861023
Validation loss (ends of cycles): [0.60892308]
------------------------------
Epoch: 9
Training loss: 0.3180681358684193 | Validation loss: 0.38430100679397583
Validation loss (ends of cycles): [0.60892308]
------------------------------
Epoch: 10
Training loss: 0.31182124533436517 | Validation loss: 0.37427014112472534
Validation loss (ends of cycles): [0.60892308 0.37427014]
------------------------------
Epoch: 11
Training loss: 0.30706333301284094 | Validation loss: 0.35669952630996704
Validation loss (ends of cycles): [0.60892308 0.37427014]
------------------------------
Epoch: 12
Training loss: 0.29335934736511926 | Validation loss: 0.3390655219554901
Validation loss (ends of cycles): [0.60892308 0.37427014]
------------------------------
Epoch: 13
Training loss: 0.27410248057408765 | Validation loss: 0.31075388193130493
Validation loss (ends of cycles): [0.60892308 0.37427014]
------------------------------
Epoch: 14
Training loss: 0.2465149394490502 | Validation loss: 0.2842131555080414
Validation loss (ends of cycles): [0.60892308 0.37427014]
------------------------------
Epoch: 15
Training loss: 0.2152640623125163 | Validation loss: 0.22721347212791443
Validation loss (ends of cycles): [0.60892308 0.37427014]
------------------------------
Epoch: 16
Training loss: 0.18463559042323718 | Validation loss: 0.24249565601348877
Validation loss (ends of cycles): [0.60892308 0.37427014]
------------------------------
Epoch: 17
Training loss: 0.16122723370790482 | Validation loss: 0.15358808636665344
Validation loss (ends of cycles): [0.60892308 0.37427014]
------------------------------
Epoch: 18
Training loss: 0.14679778096350757 | Validation loss: 0.17089605331420898
Validation loss (ends of cycles): [0.60892308 0.37427014]
------------------------------
Epoch: 19
Training loss: 0.13611172512173653 | Validation loss: 0.15524759888648987
Validation loss (ends of cycles): [0.60892308 0.37427014]
------------------------------
Epoch: 20
Training loss: 0.13251594386317514 | Validation loss: 0.17342492938041687
Validation loss (ends of cycles): [0.60892308 0.37427014 0.17342493]
------------------------------
Epoch: 21
Training loss: 0.12898269871419127 | Validation loss: 0.16011351346969604
Validation loss (ends of cycles): [0.60892308 0.37427014 0.17342493]
------------------------------
Epoch: 22
Training loss: 0.12305541607466611 | Validation loss: 0.1949668526649475
Validation loss (ends of cycles): [0.60892308 0.37427014 0.17342493]
------------------------------
Epoch: 23
Training loss: 0.11235611580989578 | Validation loss: 0.10236149281263351
Validation loss (ends of cycles): [0.60892308 0.37427014 0.17342493]
------------------------------
Epoch: 24
Training loss: 0.10381116582588716 | Validation loss: 0.13342885673046112
Validation loss (ends of cycles): [0.60892308 0.37427014 0.17342493]
------------------------------
Epoch: 25
Training loss: 0.08670141505585476 | Validation loss: 0.09188296645879745
Validation loss (ends of cycles): [0.60892308 0.37427014 0.17342493]
------------------------------
Epoch: 26
Training loss: 0.08180331498045813 | Validation loss: 0.12372232973575592
Validation loss (ends of cycles): [0.60892308 0.37427014 0.17342493]
------------------------------
Epoch: 27
Training loss: 0.07200923278419809 | Validation loss: 0.07592625916004181
Validation loss (ends of cycles): [0.60892308 0.37427014 0.17342493]
------------------------------
Epoch: 28
Training loss: 0.066264333457432 | Validation loss: 0.0874938890337944
Validation loss (ends of cycles): [0.60892308 0.37427014 0.17342493]
------------------------------
Epoch: 29
Training loss: 0.06131140947003256 | Validation loss: 0.07372075319290161
Validation loss (ends of cycles): [0.60892308 0.37427014 0.17342493]
------------------------------
Epoch: 30
Training loss: 0.06025365598245778 | Validation loss: 0.07412300258874893
Validation loss (ends of cycles): [0.60892308 0.37427014 0.17342493 0.074123  ]
--------------------------------------------------------------------------------
Seed: 19
--------------------------------------------------------------------------------
------------------------------
Epoch: 0
Training loss: 0.5367445856332779 | Validation loss: 0.4483487606048584
Validation loss (ends of cycles): [0.44834876]
------------------------------
Epoch: 1
Training loss: 0.5206190437078476 | Validation loss: 0.44038158655166626
Validation loss (ends of cycles): [0.44834876]
------------------------------
Epoch: 2
Training loss: 0.4976606220006943 | Validation loss: 0.4257300943136215
Validation loss (ends of cycles): [0.44834876]
------------------------------
Epoch: 3
Training loss: 0.4683428943157196 | Validation loss: 0.4045237749814987
Validation loss (ends of cycles): [0.44834876]
------------------------------
Epoch: 4
Training loss: 0.43049020171165464 | Validation loss: 0.382641464471817
Validation loss (ends of cycles): [0.44834876]
------------------------------
Epoch: 5
Training loss: 0.3816151052713394 | Validation loss: 0.35842086374759674
Validation loss (ends of cycles): [0.44834876]
------------------------------
Epoch: 6
Training loss: 0.3326111823320389 | Validation loss: 0.3364621549844742
Validation loss (ends of cycles): [0.44834876]
------------------------------
Epoch: 7
Training loss: 0.29561977237463 | Validation loss: 0.318710595369339
Validation loss (ends of cycles): [0.44834876]
------------------------------
Epoch: 8
Training loss: 0.26997964084148407 | Validation loss: 0.2970084324479103
Validation loss (ends of cycles): [0.44834876]
------------------------------
Epoch: 9
Training loss: 0.2537916973233223 | Validation loss: 0.27319707721471786
Validation loss (ends of cycles): [0.44834876]
------------------------------
Epoch: 10
Training loss: 0.24661508649587632 | Validation loss: 0.25705454498529434
Validation loss (ends of cycles): [0.44834876 0.25705454]
------------------------------
Epoch: 11
Training loss: 0.2426185429096222 | Validation loss: 0.22602836042642593
Validation loss (ends of cycles): [0.44834876 0.25705454]
------------------------------
Epoch: 12
Training loss: 0.22818358689546586 | Validation loss: 0.2005477249622345
Validation loss (ends of cycles): [0.44834876 0.25705454]
------------------------------
Epoch: 13
Training loss: 0.2082375779747963 | Validation loss: 0.19975723326206207
Validation loss (ends of cycles): [0.44834876 0.25705454]
------------------------------
Epoch: 14
Training loss: 0.1813648521900177 | Validation loss: 0.1895284503698349
Validation loss (ends of cycles): [0.44834876 0.25705454]
------------------------------
Epoch: 15
Training loss: 0.15327624678611756 | Validation loss: 0.1219119057059288
Validation loss (ends of cycles): [0.44834876 0.25705454]
------------------------------
Epoch: 16
Training loss: 0.12782533541321756 | Validation loss: 0.062152622267603874
Validation loss (ends of cycles): [0.44834876 0.25705454]
------------------------------
Epoch: 17
Training loss: 0.10957009494304656 | Validation loss: 0.1485811546444893
Validation loss (ends of cycles): [0.44834876 0.25705454]
------------------------------
Epoch: 18
Training loss: 0.0981064923107624 | Validation loss: 0.06758278980851173
Validation loss (ends of cycles): [0.44834876 0.25705454]
------------------------------
Epoch: 19
Training loss: 0.0926601156592369 | Validation loss: 0.09994453191757202
Validation loss (ends of cycles): [0.44834876 0.25705454]
------------------------------
Epoch: 20
Training loss: 0.08913373351097106 | Validation loss: 0.1018301472067833
Validation loss (ends of cycles): [0.44834876 0.25705454 0.10183015]
------------------------------
Epoch: 21
Training loss: 0.086388049274683 | Validation loss: 0.08931709080934525
Validation loss (ends of cycles): [0.44834876 0.25705454 0.10183015]
------------------------------
Epoch: 22
Training loss: 0.08263651877641678 | Validation loss: 0.1043701171875
Validation loss (ends of cycles): [0.44834876 0.25705454 0.10183015]
------------------------------
Epoch: 23
Training loss: 0.07813577800989151 | Validation loss: 0.061198340728878975
Validation loss (ends of cycles): [0.44834876 0.25705454 0.10183015]
------------------------------
Epoch: 24
Training loss: 0.07279523089528084 | Validation loss: 0.0668635182082653
Validation loss (ends of cycles): [0.44834876 0.25705454 0.10183015]
------------------------------
Epoch: 25
Training loss: 0.06805610843002796 | Validation loss: 0.5645378977060318
Validation loss (ends of cycles): [0.44834876 0.25705454 0.10183015]
------------------------------
Epoch: 26
Training loss: 0.06686716079711914 | Validation loss: 0.06682540383189917
Validation loss (ends of cycles): [0.44834876 0.25705454 0.10183015]
------------------------------
Epoch: 27
Training loss: 0.059197721630334856 | Validation loss: 0.0642244964838028
Validation loss (ends of cycles): [0.44834876 0.25705454 0.10183015]
------------------------------
Epoch: 28
Training loss: 0.06171116679906845 | Validation loss: 0.2145145758986473
Validation loss (ends of cycles): [0.44834876 0.25705454 0.10183015]
------------------------------
Epoch: 29
Training loss: 0.056293224170804027 | Validation loss: 0.05585295893251896
Validation loss (ends of cycles): [0.44834876 0.25705454 0.10183015]
------------------------------
Epoch: 30
Training loss: 0.0526149595156312 | Validation loss: 0.053398482501506805
Validation loss (ends of cycles): [0.44834876 0.25705454 0.10183015 0.05339848]
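Throughout these logs, "Validation loss (ends of cycles)" grows by one entry whenever the cyclical learning rate returns to base_lr: with step_size_up=5 and a symmetric ramp-down, one triangular cycle spans 10 epochs, hence the snapshots at epochs 0, 10, 20 and 30. A minimal standalone sketch of that schedule (assuming one scheduler step per epoch, which is consistent with the printed cycle ends; the dummy parameter exists only to drive the scheduler):

import torch
from torch.optim import Adam
from torch.optim.lr_scheduler import CyclicLR

# Dummy parameter/optimizer: we only want the LR trajectory, not training
opt = Adam([torch.nn.Parameter(torch.zeros(1))], lr=3e-5)
scheduler = CyclicLR(opt, base_lr=3e-5, max_lr=1e-3, step_size_up=5,
                     mode='triangular', cycle_momentum=False)

lrs = []
for epoch in range(31):
    lrs.append(scheduler.get_last_lr()[0])  # LR in effect during this epoch
    scheduler.step()

# Epochs at which the LR sits back at base_lr, i.e. the ends of cycles
print([e for e, lr in enumerate(lrs) if lr == 3e-5])  # [0, 10, 20, 30]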
Evaluate on Gelisols

# Replace the following path with your own
src_dir_model = Path('/content/drive/MyDrive/research/predict-k-mirs-dl/dumps/cnn/train_eval/gelisols/models')

order = 12
seeds = range(20)
learners = Learners(Model, tax_lookup, seeds=seeds, device=device)
perfs_local_gelisols, _, _, _ = learners.evaluate((X, y, depth_order[:, -1]),
                                                  order=order,
                                                  src_dir_model=src_dir_model)

perfs_local_gelisols.describe()
             rpd       rpiq         r2       lccc       rmse        mse        mae       mape       bias        stb
count  18.000000  18.000000  18.000000  18.000000  18.000000  18.000000  18.000000  18.000000  18.000000  18.000000
mean    1.887493   2.754606   0.691143   0.805993   0.657956   0.461482   0.346751  51.508686  -0.009657  -0.016915
std     0.330355   0.728265   0.088094   0.057573   0.173942   0.263369   0.094363  12.377707   0.048598   0.078858
min     1.547348   1.282034   0.572637   0.714614   0.376275   0.141583   0.181809  34.942499  -0.114649  -0.215123
25%     1.642766   2.256554   0.619215   0.753152   0.574179   0.329684   0.303541  43.296409  -0.029528  -0.041320
50%     1.817055   2.608120   0.690513   0.805246   0.617902   0.381848   0.335921  47.255385  -0.001811  -0.002683
75%     2.033550   3.186851   0.752098   0.844316   0.728023   0.530550   0.386050  61.034292   0.016324   0.030791
max     2.791070   4.049294   0.867742   0.920004   1.118009   1.249944   0.565283  73.572367   0.079008   0.092876
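Each column is one of the evaluation metrics computed per train/test split (RPD: ratio of performance to deviation; RPIQ: ratio of performance to interquartile range; LCCC: Lin's concordance correlation coefficient; the rest are standard error and bias measures). To keep the per-seed metrics around for later figures, a minimal sketch using plain pandas I/O (the filenames and destination directory below are illustrative, not part of the mirzai API):

# Illustrative destination; adjust to your own dumps layout
dest_dir_perf = Path('/content/drive/MyDrive/research/predict-k-mirs-dl/dumps/cnn/train_eval/gelisols')
perfs_local_gelisols.to_csv(dest_dir_perf/'perfs.csv', index=False)
perfs_local_gelisols.describe().to_csv(dest_dir_perf/'perfs-summary.csv')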

Train and test on Vertisols

# Replace the following paths with your own
dest_dir_loss = Path('/content/drive/MyDrive/research/predict-k-mirs-dl/dumps/cnn/train_eval/vertisols/losses')
dest_dir_model = Path('/content/drive/MyDrive/research/predict-k-mirs-dl/dumps/cnn/train_eval/vertisols/models')

order = 10
seeds = range(20)
n_epochs = 31
learners = Learners(Model, tax_lookup, seeds=seeds, device=device)
learners.train((X, y, depth_order[:, -1]), 
               order=order,
               dest_dir_loss=dest_dir_loss,
               dest_dir_model=dest_dir_model,
               n_epochs=n_epochs,
               sc_kwargs=params_scheduler)
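Per-seed training logs follow. Once all seeds have finished, evaluation mirrors the Gelisols cell above, reloading the dumped models through the same Learners.evaluate call (a sketch; here order=10 selects Vertisols, and the result variable name simply mirrors the Gelisols cell):

src_dir_model = Path('/content/drive/MyDrive/research/predict-k-mirs-dl/dumps/cnn/train_eval/vertisols/models')

learners = Learners(Model, tax_lookup, seeds=seeds, device=device)
perfs_local_vertisols, _, _, _ = learners.evaluate((X, y, depth_order[:, -1]),
                                                   order=order,
                                                   src_dir_model=src_dir_model)
perfs_local_vertisols.describe()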
--------------------------------------------------------------------------------
Seed: 0
--------------------------------------------------------------------------------
------------------------------
Epoch: 0
Training loss: 0.161204216511626 | Validation loss: 0.1679159700870514
Validation loss (ends of cycles): [0.16791597]
------------------------------
Epoch: 1
Training loss: 0.15437143099935433 | Validation loss: 0.15762153267860413
Validation loss (ends of cycles): [0.16791597]
------------------------------
Epoch: 2
Training loss: 0.13889641942162262 | Validation loss: 0.13728273659944534
Validation loss (ends of cycles): [0.16791597]
------------------------------
Epoch: 3
Training loss: 0.11834369207683362 | Validation loss: 0.10279234126210213
Validation loss (ends of cycles): [0.16791597]
------------------------------
Epoch: 4
Training loss: 0.09297808573434227 | Validation loss: 0.09408992528915405
Validation loss (ends of cycles): [0.16791597]
------------------------------
Epoch: 5
Training loss: 0.07264437330396552 | Validation loss: 0.055685702711343765
Validation loss (ends of cycles): [0.16791597]
------------------------------
Epoch: 6
Training loss: 0.06562904072435279 | Validation loss: 0.08587116375565529
Validation loss (ends of cycles): [0.16791597]
------------------------------
Epoch: 7
Training loss: 0.055855809269767055 | Validation loss: 0.0615625474601984
Validation loss (ends of cycles): [0.16791597]
------------------------------
Epoch: 8
Training loss: 0.05285423092151943 | Validation loss: 0.05325787328183651
Validation loss (ends of cycles): [0.16791597]
------------------------------
Epoch: 9
Training loss: 0.05118850050003905 | Validation loss: 0.06249404326081276
Validation loss (ends of cycles): [0.16791597]
------------------------------
Epoch: 10
Training loss: 0.04790492297003144 | Validation loss: 0.05141059495508671
Validation loss (ends of cycles): [0.16791597 0.05141059]
------------------------------
Epoch: 11
Training loss: 0.04692842046681203 | Validation loss: 0.05575957149267197
Validation loss (ends of cycles): [0.16791597 0.05141059]
------------------------------
Epoch: 12
Training loss: 0.04742170605612429 | Validation loss: 0.048377299681305885
Validation loss (ends of cycles): [0.16791597 0.05141059]
------------------------------
Epoch: 13
Training loss: 0.045442073164801845 | Validation loss: 0.05056057125329971
Validation loss (ends of cycles): [0.16791597 0.05141059]
------------------------------
Epoch: 14
Training loss: 0.049288692442994365 | Validation loss: 0.045949578285217285
Validation loss (ends of cycles): [0.16791597 0.05141059]
------------------------------
Epoch: 15
Training loss: 0.04942320875431362 | Validation loss: 0.046182602643966675
Validation loss (ends of cycles): [0.16791597 0.05141059]
------------------------------
Epoch: 16
Training loss: 0.048454557790568 | Validation loss: 0.061180008575320244
Validation loss (ends of cycles): [0.16791597 0.05141059]
------------------------------
Epoch: 17
Training loss: 0.04570011774960317 | Validation loss: 0.05048673413693905
Validation loss (ends of cycles): [0.16791597 0.05141059]
------------------------------
Epoch: 18
Training loss: 0.04413507094508723 | Validation loss: 0.049398086965084076
Validation loss (ends of cycles): [0.16791597 0.05141059]
------------------------------
Epoch: 19
Training loss: 0.03897152105836492 | Validation loss: 0.04086455702781677
Validation loss (ends of cycles): [0.16791597 0.05141059]
------------------------------
Epoch: 20
Training loss: 0.03762085471106203 | Validation loss: 0.04161454364657402
Validation loss (ends of cycles): [0.16791597 0.05141059 0.04161454]
------------------------------
Epoch: 21
Training loss: 0.03897195544682051 | Validation loss: 0.04463193938136101
Validation loss (ends of cycles): [0.16791597 0.05141059 0.04161454]
------------------------------
Epoch: 22
Training loss: 0.03908969050175265 | Validation loss: 0.047162629663944244
Validation loss (ends of cycles): [0.16791597 0.05141059 0.04161454]
------------------------------
Epoch: 23
Training loss: 0.03908744160281984 | Validation loss: 0.05538894981145859
Validation loss (ends of cycles): [0.16791597 0.05141059 0.04161454]
------------------------------
Epoch: 24
Training loss: 0.03990407954705389 | Validation loss: 0.04268927872180939
Validation loss (ends of cycles): [0.16791597 0.05141059 0.04161454]
------------------------------
Epoch: 25
Training loss: 0.03997622842067167 | Validation loss: 0.045263996347784996
Validation loss (ends of cycles): [0.16791597 0.05141059 0.04161454]
------------------------------
Epoch: 26
Training loss: 0.04153041404328848 | Validation loss: 0.042628781870007515
Validation loss (ends of cycles): [0.16791597 0.05141059 0.04161454]
------------------------------
Epoch: 27
Training loss: 0.034978562671887245 | Validation loss: 0.043797941878437996
Validation loss (ends of cycles): [0.16791597 0.05141059 0.04161454]
------------------------------
Epoch: 28
Training loss: 0.03331961276891984 | Validation loss: 0.038327883929014206
Validation loss (ends of cycles): [0.16791597 0.05141059 0.04161454]
------------------------------
Epoch: 29
Training loss: 0.03273361440944044 | Validation loss: 0.03366365935653448
Validation loss (ends of cycles): [0.16791597 0.05141059 0.04161454]
------------------------------
Epoch: 30
Training loss: 0.029028360584848804 | Validation loss: 0.0335803534835577
Validation loss (ends of cycles): [0.16791597 0.05141059 0.04161454 0.03358035]
--------------------------------------------------------------------------------
Seed: 1
--------------------------------------------------------------------------------
------------------------------
Epoch: 0
Training loss: 0.08880109026243813 | Validation loss: 0.08211258985102177
Validation loss (ends of cycles): [0.08211259]
------------------------------
Epoch: 1
Training loss: 0.08559589107569895 | Validation loss: 0.07982487976551056
Validation loss (ends of cycles): [0.08211259]
------------------------------
Epoch: 2
Training loss: 0.08120929841932498 | Validation loss: 0.07661886140704155
Validation loss (ends of cycles): [0.08211259]
------------------------------
Epoch: 3
Training loss: 0.07261521545679946 | Validation loss: 0.07628549262881279
Validation loss (ends of cycles): [0.08211259]
------------------------------
Epoch: 4
Training loss: 0.06738892393676858 | Validation loss: 0.06264040432870388
Validation loss (ends of cycles): [0.08211259]
------------------------------
Epoch: 5
Training loss: 0.06283781206921528 | Validation loss: 0.05332220159471035
Validation loss (ends of cycles): [0.08211259]
------------------------------
Epoch: 6
Training loss: 0.0563666181344735 | Validation loss: 0.052990976721048355
Validation loss (ends of cycles): [0.08211259]
------------------------------
Epoch: 7
Training loss: 0.053898568314157035 | Validation loss: 0.05359513498842716
Validation loss (ends of cycles): [0.08211259]
------------------------------
Epoch: 8
Training loss: 0.0495483853707188 | Validation loss: 0.04845046065747738
Validation loss (ends of cycles): [0.08211259]
------------------------------
Epoch: 9
Training loss: 0.04790328227375683 | Validation loss: 0.04489264823496342
Validation loss (ends of cycles): [0.08211259]
------------------------------
Epoch: 10
Training loss: 0.0470738457025666 | Validation loss: 0.044814372435212135
Validation loss (ends of cycles): [0.08211259 0.04481437]
------------------------------
Epoch: 11
Training loss: 0.04557334945390099 | Validation loss: 0.045520488172769547
Validation loss (ends of cycles): [0.08211259 0.04481437]
------------------------------
Epoch: 12
Training loss: 0.04488633739712991 | Validation loss: 0.045287614688277245
Validation loss (ends of cycles): [0.08211259 0.04481437]
------------------------------
Epoch: 13
Training loss: 0.04274003678246548 | Validation loss: 0.05250200070440769
Validation loss (ends of cycles): [0.08211259 0.04481437]
------------------------------
Epoch: 14
Training loss: 0.04469038634315917 | Validation loss: 0.06699041835963726
Validation loss (ends of cycles): [0.08211259 0.04481437]
------------------------------
Epoch: 15
Training loss: 0.04659118973895123 | Validation loss: 0.0444390494376421
Validation loss (ends of cycles): [0.08211259 0.04481437]
------------------------------
Epoch: 16
Training loss: 0.045118058786580435 | Validation loss: 0.04487036541104317
Validation loss (ends of cycles): [0.08211259 0.04481437]
------------------------------
Epoch: 17
Training loss: 0.04162646614407238 | Validation loss: 0.04729745723307133
Validation loss (ends of cycles): [0.08211259 0.04481437]
------------------------------
Epoch: 18
Training loss: 0.040547048084829986 | Validation loss: 0.039926101453602314
Validation loss (ends of cycles): [0.08211259 0.04481437]
------------------------------
Epoch: 19
Training loss: 0.03623779441573118 | Validation loss: 0.03939523547887802
Validation loss (ends of cycles): [0.08211259 0.04481437]
------------------------------
Epoch: 20
Training loss: 0.03616418807130111 | Validation loss: 0.03911025729030371
Validation loss (ends of cycles): [0.08211259 0.04481437 0.03911026]
------------------------------
Epoch: 21
Training loss: 0.035900585745510305 | Validation loss: 0.03916540555655956
Validation loss (ends of cycles): [0.08211259 0.04481437 0.03911026]
------------------------------
Epoch: 22
Training loss: 0.03519123988716226 | Validation loss: 0.03917317185550928
Validation loss (ends of cycles): [0.08211259 0.04481437 0.03911026]
------------------------------
Epoch: 23
Training loss: 0.03432324999257138 | Validation loss: 0.04085913486778736
Validation loss (ends of cycles): [0.08211259 0.04481437 0.03911026]
------------------------------
Epoch: 24
Training loss: 0.036748546616811505 | Validation loss: 0.041731780394911766
Validation loss (ends of cycles): [0.08211259 0.04481437 0.03911026]
------------------------------
Epoch: 25
Training loss: 0.037517647111886425 | Validation loss: 0.09109430015087128
Validation loss (ends of cycles): [0.08211259 0.04481437 0.03911026]
------------------------------
Epoch: 26
Training loss: 0.03717169126397685 | Validation loss: 0.04643022455275059
Validation loss (ends of cycles): [0.08211259 0.04481437 0.03911026]
------------------------------
Epoch: 27
Training loss: 0.03365675114879483 | Validation loss: 0.039381884038448334
Validation loss (ends of cycles): [0.08211259 0.04481437 0.03911026]
------------------------------
Epoch: 28
Training loss: 0.03359140867465421 | Validation loss: 0.035449360497295856
Validation loss (ends of cycles): [0.08211259 0.04481437 0.03911026]
------------------------------
Epoch: 29
Training loss: 0.029328686920435804 | Validation loss: 0.03771654795855284
Validation loss (ends of cycles): [0.08211259 0.04481437 0.03911026]
------------------------------
Epoch: 30
Training loss: 0.029815007001161575 | Validation loss: 0.035608227364718914
Validation loss (ends of cycles): [0.08211259 0.04481437 0.03911026 0.03560823]
--------------------------------------------------------------------------------
Seed: 2
--------------------------------------------------------------------------------
------------------------------
Epoch: 0
Training loss: 0.08840935884250535 | Validation loss: 0.08210575208067894
Validation loss (ends of cycles): [0.08210575]
------------------------------
Epoch: 1
Training loss: 0.08507650593916576 | Validation loss: 0.08108918741345406
Validation loss (ends of cycles): [0.08210575]
------------------------------
Epoch: 2
Training loss: 0.07737381735609637 | Validation loss: 0.0804343322912852
Validation loss (ends of cycles): [0.08210575]
------------------------------
Epoch: 3
Training loss: 0.06796439374900526 | Validation loss: 0.08102053900559743
Validation loss (ends of cycles): [0.08210575]
------------------------------
Epoch: 4
Training loss: 0.05701384869300657 | Validation loss: 0.06451516598463058
Validation loss (ends of cycles): [0.08210575]
------------------------------
Epoch: 5
Training loss: 0.04790849404202567 | Validation loss: 0.04559866711497307
Validation loss (ends of cycles): [0.08210575]
------------------------------
Epoch: 6
Training loss: 0.0425061976744069 | Validation loss: 0.05457633485396703
Validation loss (ends of cycles): [0.08210575]
------------------------------
Epoch: 7
Training loss: 0.03917014981723494 | Validation loss: 0.10548477371533711
Validation loss (ends of cycles): [0.08210575]
------------------------------
Epoch: 8
Training loss: 0.03815334942191839 | Validation loss: 0.07415188476443291
Validation loss (ends of cycles): [0.08210575]
------------------------------
Epoch: 9
Training loss: 0.03634079255991512 | Validation loss: 0.03436201065778732
Validation loss (ends of cycles): [0.08210575]
------------------------------
Epoch: 10
Training loss: 0.03238243547578653 | Validation loss: 0.03496560640633106
Validation loss (ends of cycles): [0.08210575 0.03496561]
------------------------------
Epoch: 11
Training loss: 0.031512254331674844 | Validation loss: 0.03387966255346934
Validation loss (ends of cycles): [0.08210575 0.03496561]
------------------------------
Epoch: 12
Training loss: 0.03323024997694625 | Validation loss: 0.06266245618462563
Validation loss (ends of cycles): [0.08210575 0.03496561]
------------------------------
Epoch: 13
Training loss: 0.03258724557235837 | Validation loss: 0.07572312156359355
Validation loss (ends of cycles): [0.08210575 0.03496561]
------------------------------
Epoch: 14
Training loss: 0.03392490858419074 | Validation loss: 0.08760666350523631
Validation loss (ends of cycles): [0.08210575 0.03496561]
------------------------------
Epoch: 15
Training loss: 0.032957878481182784 | Validation loss: 0.06539637347062428
Validation loss (ends of cycles): [0.08210575 0.03496561]
------------------------------
Epoch: 16
Training loss: 0.03240760095003578 | Validation loss: 0.030444981530308723
Validation loss (ends of cycles): [0.08210575 0.03496561]
------------------------------
Epoch: 17
Training loss: 0.030572137277987268 | Validation loss: 0.028518366316954296
Validation loss (ends of cycles): [0.08210575 0.03496561]
------------------------------
Epoch: 18
Training loss: 0.02843097411096096 | Validation loss: 0.028718551620841026
Validation loss (ends of cycles): [0.08210575 0.03496561]
------------------------------
Epoch: 19
Training loss: 0.025377622495094936 | Validation loss: 0.03340643892685572
Validation loss (ends of cycles): [0.08210575 0.03496561]
------------------------------
Epoch: 20
Training loss: 0.02524500247091055 | Validation loss: 0.027380989864468575
Validation loss (ends of cycles): [0.08210575 0.03496561 0.02738099]
------------------------------
Epoch: 21
Training loss: 0.025558336534433894 | Validation loss: 0.026460225383440655
Validation loss (ends of cycles): [0.08210575 0.03496561 0.02738099]
------------------------------
Epoch: 22
Training loss: 0.023630426679220464 | Validation loss: 0.028433510412772495
Validation loss (ends of cycles): [0.08210575 0.03496561 0.02738099]
------------------------------
Epoch: 23
Training loss: 0.02497979895108276 | Validation loss: 0.032619635264078774
Validation loss (ends of cycles): [0.08210575 0.03496561 0.02738099]
------------------------------
Epoch: 24
Training loss: 0.026406725351181295 | Validation loss: 0.035625407472252846
Validation loss (ends of cycles): [0.08210575 0.03496561 0.02738099]
------------------------------
Epoch: 25
Training loss: 0.026628990140226152 | Validation loss: 0.12885981798171997
Validation loss (ends of cycles): [0.08210575 0.03496561 0.02738099]
------------------------------
Epoch: 26
Training loss: 0.026922656533618767 | Validation loss: 0.055682502686977386
Validation loss (ends of cycles): [0.08210575 0.03496561 0.02738099]
------------------------------
Epoch: 27
Training loss: 0.025312546226713393 | Validation loss: 0.04670518139998118
Validation loss (ends of cycles): [0.08210575 0.03496561 0.02738099]
------------------------------
Epoch: 28
Training loss: 0.023591533665441804 | Validation loss: 0.075415700674057
Validation loss (ends of cycles): [0.08210575 0.03496561 0.02738099]
------------------------------
Epoch: 29
Training loss: 0.02143574645742774 | Validation loss: 0.021575671931107838
Validation loss (ends of cycles): [0.08210575 0.03496561 0.02738099]
------------------------------
Epoch: 30
Training loss: 0.019714292811436787 | Validation loss: 0.024841646663844585
Validation loss (ends of cycles): [0.08210575 0.03496561 0.02738099 0.02484165]
--------------------------------------------------------------------------------
Seed: 3
--------------------------------------------------------------------------------
------------------------------
Epoch: 0
Training loss: 0.08366356927313302 | Validation loss: 0.06628272930781047
Validation loss (ends of cycles): [0.06628273]
------------------------------
Epoch: 1
Training loss: 0.08258866577556259 | Validation loss: 0.06598960359891255
Validation loss (ends of cycles): [0.06628273]
------------------------------
Epoch: 2
Training loss: 0.08011062364829213 | Validation loss: 0.06656766682863235
Validation loss (ends of cycles): [0.06628273]
------------------------------
Epoch: 3
Training loss: 0.07609734664622106 | Validation loss: 0.07429493218660355
Validation loss (ends of cycles): [0.06628273]
------------------------------
Epoch: 4
Training loss: 0.06887183612898777 | Validation loss: 0.08531180272499721
Validation loss (ends of cycles): [0.06628273]
------------------------------
Epoch: 5
Training loss: 0.062297113827968896 | Validation loss: 0.05507988358537356
Validation loss (ends of cycles): [0.06628273]
------------------------------
Epoch: 6
Training loss: 0.057449167300211754 | Validation loss: 0.05169703687230746
Validation loss (ends of cycles): [0.06628273]
------------------------------
Epoch: 7
Training loss: 0.05063312598749211 | Validation loss: 0.044730848322312035
Validation loss (ends of cycles): [0.06628273]
------------------------------
Epoch: 8
Training loss: 0.04662537633588439 | Validation loss: 0.05200311293204626
Validation loss (ends of cycles): [0.06628273]
------------------------------
Epoch: 9
Training loss: 0.042545238313706296 | Validation loss: 0.040968768298625946
Validation loss (ends of cycles): [0.06628273]
------------------------------
Epoch: 10
Training loss: 0.040791098518591175 | Validation loss: 0.04076941559712092
Validation loss (ends of cycles): [0.06628273 0.04076942]
------------------------------
Epoch: 11
Training loss: 0.03988300430539407 | Validation loss: 0.04062818984190623
Validation loss (ends of cycles): [0.06628273 0.04076942]
------------------------------
Epoch: 12
Training loss: 0.041134035116747805 | Validation loss: 0.06347007056077321
Validation loss (ends of cycles): [0.06628273 0.04076942]
------------------------------
Epoch: 13
Training loss: 0.040732931933904946 | Validation loss: 0.053751084953546524
Validation loss (ends of cycles): [0.06628273 0.04076942]
------------------------------
Epoch: 14
Training loss: 0.0412690176775581 | Validation loss: 0.04342729101578394
Validation loss (ends of cycles): [0.06628273 0.04076942]
------------------------------
Epoch: 15
Training loss: 0.04279994739121512 | Validation loss: 0.03779313713312149
Validation loss (ends of cycles): [0.06628273 0.04076942]
------------------------------
Epoch: 16
Training loss: 0.04107574108791979 | Validation loss: 0.05226917316516241
Validation loss (ends of cycles): [0.06628273 0.04076942]
------------------------------
Epoch: 17
Training loss: 0.03682027963039122 | Validation loss: 0.04407886415719986
Validation loss (ends of cycles): [0.06628273 0.04076942]
------------------------------
Epoch: 18
Training loss: 0.03602115907951405 | Validation loss: 0.0353589312483867
Validation loss (ends of cycles): [0.06628273 0.04076942]
------------------------------
Epoch: 19
Training loss: 0.03072228841483593 | Validation loss: 0.03361495025455952
Validation loss (ends of cycles): [0.06628273 0.04076942]
------------------------------
Epoch: 20
Training loss: 0.03127634593922841 | Validation loss: 0.03227363092203935
Validation loss (ends of cycles): [0.06628273 0.04076942 0.03227363]
------------------------------
Epoch: 21
Training loss: 0.030668180729997784 | Validation loss: 0.03339084858695666
Validation loss (ends of cycles): [0.06628273 0.04076942 0.03227363]
------------------------------
Epoch: 22
Training loss: 0.03049330334914358 | Validation loss: 0.033799403036634125
Validation loss (ends of cycles): [0.06628273 0.04076942 0.03227363]
------------------------------
Epoch: 23
Training loss: 0.030560468372545745 | Validation loss: 0.052750845750172935
Validation loss (ends of cycles): [0.06628273 0.04076942 0.03227363]
------------------------------
Epoch: 24
Training loss: 0.032240519398137144 | Validation loss: 0.11552038788795471
Validation loss (ends of cycles): [0.06628273 0.04076942 0.03227363]
------------------------------
Epoch: 25
Training loss: 0.03203717256455045 | Validation loss: 0.03324045240879059
Validation loss (ends of cycles): [0.06628273 0.04076942 0.03227363]
------------------------------
Epoch: 26
Training loss: 0.03040875906222745 | Validation loss: 0.04665656387805939
Validation loss (ends of cycles): [0.06628273 0.04076942 0.03227363]
------------------------------
Epoch: 27
Training loss: 0.029416492551957305 | Validation loss: 0.04413521351913611
Validation loss (ends of cycles): [0.06628273 0.04076942 0.03227363]
------------------------------
Epoch: 28
Training loss: 0.029010920001095848 | Validation loss: 0.03647958238919576
Validation loss (ends of cycles): [0.06628273 0.04076942 0.03227363]
------------------------------
Epoch: 29
Training loss: 0.02751604928389976 | Validation loss: 0.050870560109615326
Validation loss (ends of cycles): [0.06628273 0.04076942 0.03227363]
------------------------------
Epoch: 30
Training loss: 0.02429521475967608 | Validation loss: 0.028451986610889435
Validation loss (ends of cycles): [0.06628273 0.04076942 0.03227363 0.02845199]
--------------------------------------------------------------------------------
Seed: 4
--------------------------------------------------------------------------------
------------------------------
Epoch: 0
Training loss: 0.19473344401309364 | Validation loss: 0.15656746923923492
Validation loss (ends of cycles): [0.15656747]
------------------------------
Epoch: 1
Training loss: 0.18613588496258385 | Validation loss: 0.14746378362178802
Validation loss (ends of cycles): [0.15656747]
------------------------------
Epoch: 2
Training loss: 0.16706412207139165 | Validation loss: 0.12539705261588097
Validation loss (ends of cycles): [0.15656747]
------------------------------
Epoch: 3
Training loss: 0.13910072571352908 | Validation loss: 0.08593828231096268
Validation loss (ends of cycles): [0.15656747]
------------------------------
Epoch: 4
Training loss: 0.10771784460858295 | Validation loss: 0.06520361453294754
Validation loss (ends of cycles): [0.15656747]
------------------------------
Epoch: 5
Training loss: 0.07648744355691106 | Validation loss: 0.07435402646660805
Validation loss (ends of cycles): [0.15656747]
------------------------------
Epoch: 6
Training loss: 0.06046923661702558 | Validation loss: 0.08778238669037819
Validation loss (ends of cycles): [0.15656747]
------------------------------
Epoch: 7
Training loss: 0.05391533515955273 | Validation loss: 0.06520635634660721
Validation loss (ends of cycles): [0.15656747]
------------------------------
Epoch: 8
Training loss: 0.04810122106420366 | Validation loss: 0.06413957849144936
Validation loss (ends of cycles): [0.15656747]
------------------------------
Epoch: 9
Training loss: 0.044574140816142686 | Validation loss: 0.045684026554226875
Validation loss (ends of cycles): [0.15656747]
------------------------------
Epoch: 10
Training loss: 0.045094708470921764 | Validation loss: 0.04497688636183739
Validation loss (ends of cycles): [0.15656747 0.04497689]
------------------------------
Epoch: 11
Training loss: 0.043865959503148734 | Validation loss: 0.04489790461957455
Validation loss (ends of cycles): [0.15656747 0.04497689]
------------------------------
Epoch: 12
Training loss: 0.044169505273825245 | Validation loss: 0.04815280996263027
Validation loss (ends of cycles): [0.15656747 0.04497689]
------------------------------
Epoch: 13
Training loss: 0.04281844169293579 | Validation loss: 0.08249081298708916
Validation loss (ends of cycles): [0.15656747 0.04497689]
------------------------------
Epoch: 14
Training loss: 0.043214576024758186 | Validation loss: 0.05961386486887932
Validation loss (ends of cycles): [0.15656747 0.04497689]
------------------------------
Epoch: 15
Training loss: 0.042521203917108084 | Validation loss: 0.0713415015488863
Validation loss (ends of cycles): [0.15656747 0.04497689]
------------------------------
Epoch: 16
Training loss: 0.0419846284938486 | Validation loss: 0.07541047409176826
Validation loss (ends of cycles): [0.15656747 0.04497689]
------------------------------
Epoch: 17
Training loss: 0.03957709375964968 | Validation loss: 0.05406338535249233
Validation loss (ends of cycles): [0.15656747 0.04497689]
------------------------------
Epoch: 18
Training loss: 0.035520012049298534 | Validation loss: 0.10436020791530609
Validation loss (ends of cycles): [0.15656747 0.04497689]
------------------------------
Epoch: 19
Training loss: 0.034939819456715336 | Validation loss: 0.03864527679979801
Validation loss (ends of cycles): [0.15656747 0.04497689]
------------------------------
Epoch: 20
Training loss: 0.03136764094233513 | Validation loss: 0.03485617786645889
Validation loss (ends of cycles): [0.15656747 0.04497689 0.03485618]
------------------------------
Epoch: 21
Training loss: 0.031533444790463695 | Validation loss: 0.032069167122244835
Validation loss (ends of cycles): [0.15656747 0.04497689 0.03485618]
------------------------------
Epoch: 22
Training loss: 0.03128002505553396 | Validation loss: 0.03990967012941837
Validation loss (ends of cycles): [0.15656747 0.04497689 0.03485618]
------------------------------
Epoch: 23
Training loss: 0.032222791231776536 | Validation loss: 0.06194067373871803
Validation loss (ends of cycles): [0.15656747 0.04497689 0.03485618]
------------------------------
Epoch: 24
Training loss: 0.030879733495806392 | Validation loss: 0.04783654771745205
Validation loss (ends of cycles): [0.15656747 0.04497689 0.03485618]
------------------------------
Epoch: 25
Training loss: 0.034811057640533695 | Validation loss: 0.19700831919908524
Validation loss (ends of cycles): [0.15656747 0.04497689 0.03485618]
------------------------------
Epoch: 26
Training loss: 0.03208483088957636 | Validation loss: 0.04298360459506512
Validation loss (ends of cycles): [0.15656747 0.04497689 0.03485618]
------------------------------
Epoch: 27
Training loss: 0.02921375377397788 | Validation loss: 0.04345952346920967
Validation loss (ends of cycles): [0.15656747 0.04497689 0.03485618]
------------------------------
Epoch: 28
Training loss: 0.02759896424648009 | Validation loss: 0.030260787345468998
Validation loss (ends of cycles): [0.15656747 0.04497689 0.03485618]
------------------------------
Epoch: 29
Training loss: 0.02629519714728782 | Validation loss: 0.04085123725235462
Validation loss (ends of cycles): [0.15656747 0.04497689 0.03485618]
------------------------------
Epoch: 30
Training loss: 0.02536522258857363 | Validation loss: 0.028398994356393814
Validation loss (ends of cycles): [0.15656747 0.04497689 0.03485618 0.02839899]
--------------------------------------------------------------------------------
Seed: 5
--------------------------------------------------------------------------------
------------------------------
Epoch: 0
Training loss: 0.09135209562049972 | Validation loss: 0.06946399062871933
Validation loss (ends of cycles): [0.06946399]
------------------------------
Epoch: 1
Training loss: 0.0878267308904065 | Validation loss: 0.0675109475851059
Validation loss (ends of cycles): [0.06946399]
------------------------------
Epoch: 2
Training loss: 0.07988109977708922 | Validation loss: 0.06493373587727547
Validation loss (ends of cycles): [0.06946399]
------------------------------
Epoch: 3
Training loss: 0.0666891232960754 | Validation loss: 0.06385781802237034
Validation loss (ends of cycles): [0.06946399]
------------------------------
Epoch: 4
Training loss: 0.05818599214156469 | Validation loss: 0.06167275831103325
Validation loss (ends of cycles): [0.06946399]
------------------------------
Epoch: 5
Training loss: 0.05256716679367754 | Validation loss: 0.07637954130768776
Validation loss (ends of cycles): [0.06946399]
------------------------------
Epoch: 6
Training loss: 0.049445088331898056 | Validation loss: 0.22612106055021286
Validation loss (ends of cycles): [0.06946399]
------------------------------
Epoch: 7
Training loss: 0.045817383771969214 | Validation loss: 0.050643378868699074
Validation loss (ends of cycles): [0.06946399]
------------------------------
Epoch: 8
Training loss: 0.040001612777511276 | Validation loss: 0.0446147657930851
Validation loss (ends of cycles): [0.06946399]
------------------------------
Epoch: 9
Training loss: 0.03829288575798273 | Validation loss: 0.04114661552011967
Validation loss (ends of cycles): [0.06946399]
------------------------------
Epoch: 10
Training loss: 0.03613738570776251 | Validation loss: 0.040317755192518234
Validation loss (ends of cycles): [0.06946399 0.04031776]
------------------------------
Epoch: 11
Training loss: 0.034891982562839985 | Validation loss: 0.039226071909070015
Validation loss (ends of cycles): [0.06946399 0.04031776]
------------------------------
Epoch: 12
Training loss: 0.03366432442433304 | Validation loss: 0.046774642542004585
Validation loss (ends of cycles): [0.06946399 0.04031776]
------------------------------
Epoch: 13
Training loss: 0.03640781891428762 | Validation loss: 0.04789281450212002
Validation loss (ends of cycles): [0.06946399 0.04031776]
------------------------------
Epoch: 14
Training loss: 0.03705685358080599 | Validation loss: 0.05857366323471069
Validation loss (ends of cycles): [0.06946399 0.04031776]
------------------------------
Epoch: 15
Training loss: 0.035437823894123234 | Validation loss: 0.07393408939242363
Validation loss (ends of cycles): [0.06946399 0.04031776]
------------------------------
Epoch: 16
Training loss: 0.03706711303028795 | Validation loss: 0.06966803222894669
Validation loss (ends of cycles): [0.06946399 0.04031776]
------------------------------
Epoch: 17
Training loss: 0.03401203018923601 | Validation loss: 0.04148573614656925
Validation loss (ends of cycles): [0.06946399 0.04031776]
------------------------------
Epoch: 18
Training loss: 0.03279454085148043 | Validation loss: 0.03339186776429415
Validation loss (ends of cycles): [0.06946399 0.04031776]
------------------------------
Epoch: 19
Training loss: 0.029390734827352896 | Validation loss: 0.03046796005219221
Validation loss (ends of cycles): [0.06946399 0.04031776]
------------------------------
Epoch: 20
Training loss: 0.029600716920362577 | Validation loss: 0.03065457008779049
Validation loss (ends of cycles): [0.06946399 0.04031776 0.03065457]
------------------------------
Epoch: 21
Training loss: 0.029213293352060847 | Validation loss: 0.031029099598526955
Validation loss (ends of cycles): [0.06946399 0.04031776 0.03065457]
------------------------------
Epoch: 22
Training loss: 0.028610273264348507 | Validation loss: 0.030708318576216698
Validation loss (ends of cycles): [0.06946399 0.04031776 0.03065457]
------------------------------
Epoch: 23
Training loss: 0.028793518121043842 | Validation loss: 0.046449968591332436
Validation loss (ends of cycles): [0.06946399 0.04031776 0.03065457]
------------------------------
Epoch: 24
Training loss: 0.029298070300784376 | Validation loss: 0.04168690741062164
Validation loss (ends of cycles): [0.06946399 0.04031776 0.03065457]
------------------------------
Epoch: 25
Training loss: 0.030492768364234105 | Validation loss: 0.0755428783595562
Validation loss (ends of cycles): [0.06946399 0.04031776 0.03065457]
------------------------------
Epoch: 26
Training loss: 0.030982901031772297 | Validation loss: 0.030249490402638912
Validation loss (ends of cycles): [0.06946399 0.04031776 0.03065457]
------------------------------
Epoch: 27
Training loss: 0.028873344521141715 | Validation loss: 0.03989887796342373
Validation loss (ends of cycles): [0.06946399 0.04031776 0.03065457]
------------------------------
Epoch: 28
Training loss: 0.02943373481846518 | Validation loss: 0.029455197043716908
Validation loss (ends of cycles): [0.06946399 0.04031776 0.03065457]
------------------------------
Epoch: 29
Training loss: 0.025847461229811113 | Validation loss: 0.03161353338509798
Validation loss (ends of cycles): [0.06946399 0.04031776 0.03065457]
------------------------------
Epoch: 30
Training loss: 0.024220067593786452 | Validation loss: 0.027339047752320766
Validation loss (ends of cycles): [0.06946399 0.04031776 0.03065457 0.02733905]
--------------------------------------------------------------------------------
Seed: 6
--------------------------------------------------------------------------------
------------------------------
Epoch: 0
Training loss: 0.08758864708636936 | Validation loss: 0.09045941010117531
Validation loss (ends of cycles): [0.09045941]
------------------------------
Epoch: 1
Training loss: 0.08511891274860031 | Validation loss: 0.08673127368092537
Validation loss (ends of cycles): [0.09045941]
------------------------------
Epoch: 2
Training loss: 0.0793429758203657 | Validation loss: 0.08125056326389313
Validation loss (ends of cycles): [0.09045941]
------------------------------
Epoch: 3
Training loss: 0.07344777999739897 | Validation loss: 0.07784003764390945
Validation loss (ends of cycles): [0.09045941]
------------------------------
Epoch: 4
Training loss: 0.06621623725483292 | Validation loss: 0.08580468967556953
Validation loss (ends of cycles): [0.09045941]
------------------------------
Epoch: 5
Training loss: 0.06146987037439095 | Validation loss: 0.08310152217745781
Validation loss (ends of cycles): [0.09045941]
------------------------------
Epoch: 6
Training loss: 0.05917700419300481 | Validation loss: 0.06960192322731018
Validation loss (ends of cycles): [0.09045941]
------------------------------
Epoch: 7
Training loss: 0.054859228236110585 | Validation loss: 0.067630959674716
Validation loss (ends of cycles): [0.09045941]
------------------------------
Epoch: 8
Training loss: 0.05065024114753071 | Validation loss: 0.055472830310463905
Validation loss (ends of cycles): [0.09045941]
------------------------------
Epoch: 9
Training loss: 0.04813684611336181 | Validation loss: 0.05709882639348507
Validation loss (ends of cycles): [0.09045941]
------------------------------
Epoch: 10
Training loss: 0.04541594505702194 | Validation loss: 0.053741781041026115
Validation loss (ends of cycles): [0.09045941 0.05374178]
------------------------------
Epoch: 11
Training loss: 0.043925387784838676 | Validation loss: 0.055874619632959366
Validation loss (ends of cycles): [0.09045941 0.05374178]
------------------------------
Epoch: 12
Training loss: 0.043821888142510465 | Validation loss: 0.05342106893658638
Validation loss (ends of cycles): [0.09045941 0.05374178]
------------------------------
Epoch: 13
Training loss: 0.04354088537787136 | Validation loss: 0.05569586902856827
Validation loss (ends of cycles): [0.09045941 0.05374178]
------------------------------
Epoch: 14
Training loss: 0.04596223790002497 | Validation loss: 0.08136944100260735
Validation loss (ends of cycles): [0.09045941 0.05374178]
------------------------------
Epoch: 15
Training loss: 0.04429164842555398 | Validation loss: 0.056143974885344505
Validation loss (ends of cycles): [0.09045941 0.05374178]
------------------------------
Epoch: 16
Training loss: 0.04604476484421052 | Validation loss: 0.058117739856243134
Validation loss (ends of cycles): [0.09045941 0.05374178]
------------------------------
Epoch: 17
Training loss: 0.04353812356528483 | Validation loss: 0.06109851598739624
Validation loss (ends of cycles): [0.09045941 0.05374178]
------------------------------
Epoch: 18
Training loss: 0.03982813224980706 | Validation loss: 0.04927295260131359
Validation loss (ends of cycles): [0.09045941 0.05374178]
------------------------------
Epoch: 19
Training loss: 0.03627215433669718 | Validation loss: 0.047167809680104256
Validation loss (ends of cycles): [0.09045941 0.05374178]
------------------------------
Epoch: 20
Training loss: 0.034992945233457966 | Validation loss: 0.047399308532476425
Validation loss (ends of cycles): [0.09045941 0.05374178 0.04739931]
------------------------------
Epoch: 21
Training loss: 0.03621590147285085 | Validation loss: 0.04751015082001686
Validation loss (ends of cycles): [0.09045941 0.05374178 0.04739931]
------------------------------
Epoch: 22
Training loss: 0.033725014152495486 | Validation loss: 0.047297827899456024
Validation loss (ends of cycles): [0.09045941 0.05374178 0.04739931]
------------------------------
Epoch: 23
Training loss: 0.03643197546664037 | Validation loss: 0.045731207355856895
Validation loss (ends of cycles): [0.09045941 0.05374178 0.04739931]
------------------------------
Epoch: 24
Training loss: 0.03781851704575514 | Validation loss: 0.04651406966149807
Validation loss (ends of cycles): [0.09045941 0.05374178 0.04739931]
------------------------------
Epoch: 25
Training loss: 0.03732413926014775 | Validation loss: 0.04698087275028229
Validation loss (ends of cycles): [0.09045941 0.05374178 0.04739931]
------------------------------
Epoch: 26
Training loss: 0.037296586522930546 | Validation loss: 0.04507102258503437
Validation loss (ends of cycles): [0.09045941 0.05374178 0.04739931]
------------------------------
Epoch: 27
Training loss: 0.03556570381318268 | Validation loss: 0.042184218764305115
Validation loss (ends of cycles): [0.09045941 0.05374178 0.04739931]
------------------------------
Epoch: 28
Training loss: 0.030846391461397473 | Validation loss: 0.04002702981233597
Validation loss (ends of cycles): [0.09045941 0.05374178 0.04739931]
------------------------------
Epoch: 29
Training loss: 0.03090105475367684 | Validation loss: 0.038508640602231026
Validation loss (ends of cycles): [0.09045941 0.05374178 0.04739931]
------------------------------
Epoch: 30
Training loss: 0.028801383156525463 | Validation loss: 0.03822813369333744
Validation loss (ends of cycles): [0.09045941 0.05374178 0.04739931 0.03822813]
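A new value is appended to the ends-of-cycles array every 10 epochs (epochs 0, 10, 20, 30, ...). This matches the triangular CyclicLR schedule configured above: with step_size_up=5, a full up-and-down cycle spans 10 scheduler steps, so the learning rate returns to base_lr every 10th epoch, assuming the scheduler is stepped once per epoch. The snippet below is a minimal, self-contained sketch of that bookkeeping; the dummy model and random val_loss are placeholders, not the mirzai Learner internals.

import numpy as np
import torch
from torch import nn
from torch.optim import Adam
from torch.optim.lr_scheduler import CyclicLR

# Stand-in model/optimizer: illustrates the cycle-end bookkeeping only
model = nn.Linear(10, 1)
optimizer = Adam(model.parameters(), lr=3e-5)
scheduler = CyclicLR(optimizer, base_lr=3e-5, max_lr=1e-3,
                     step_size_up=5, mode='triangular',
                     cycle_momentum=False)  # required with Adam (no momentum)

cycle_length = 2 * 5   # step_size_up + step_size_down
losses_cycles = []     # mirrors "Validation loss (ends of cycles)"

for epoch in range(31):
    # ... training and validation passes would go here ...
    val_loss = float(torch.rand(1))  # placeholder validation loss
    scheduler.step()                 # assumption: one scheduler step per epoch
    if epoch % cycle_length == 0:    # learning rate is back at base_lr
        losses_cycles.append(val_loss)
        print(f'Epoch {epoch} | ends of cycles: {np.array(losses_cycles)}')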
--------------------------------------------------------------------------------
Seed: 7
--------------------------------------------------------------------------------
------------------------------
Epoch: 0
Training loss: 0.08331045665239033 | Validation loss: 0.06520787129799525
Validation loss (ends of cycles): [0.06520787]
------------------------------
Epoch: 1
Training loss: 0.08203087198106866 | Validation loss: 0.06532517820596695
Validation loss (ends of cycles): [0.06520787]
------------------------------
Epoch: 2
Training loss: 0.08000684745217625 | Validation loss: 0.06518079092105229
Validation loss (ends of cycles): [0.06520787]
------------------------------
Epoch: 3
Training loss: 0.0739791032515074 | Validation loss: 0.06506842374801636
Validation loss (ends of cycles): [0.06520787]
------------------------------
Epoch: 4
Training loss: 0.06385869787711847 | Validation loss: 0.05250853920976321
Validation loss (ends of cycles): [0.06520787]
------------------------------
Epoch: 5
Training loss: 0.057416989615089016 | Validation loss: 0.05340991293390592
Validation loss (ends of cycles): [0.06520787]
------------------------------
Epoch: 6
Training loss: 0.052579465861383234 | Validation loss: 0.029369194293394685
Validation loss (ends of cycles): [0.06520787]
------------------------------
Epoch: 7
Training loss: 0.04429109718062376 | Validation loss: 0.04665235554178556
Validation loss (ends of cycles): [0.06520787]
------------------------------
Epoch: 8
Training loss: 0.04031965685518164 | Validation loss: 0.06348493695259094
Validation loss (ends of cycles): [0.06520787]
------------------------------
Epoch: 9
Training loss: 0.03789979886067541 | Validation loss: 0.03177877189591527
Validation loss (ends of cycles): [0.06520787]
------------------------------
Epoch: 10
Training loss: 0.03492777098558451 | Validation loss: 0.030528849456459284
Validation loss (ends of cycles): [0.06520787 0.03052885]
------------------------------
Epoch: 11
Training loss: 0.03243052214384079 | Validation loss: 0.025340224984878052
Validation loss (ends of cycles): [0.06520787 0.03052885]
------------------------------
Epoch: 12
Training loss: 0.033256117744665394 | Validation loss: 0.030804906350870926
Validation loss (ends of cycles): [0.06520787 0.03052885]
------------------------------
Epoch: 13
Training loss: 0.03333615707723718 | Validation loss: 0.035566401512672506
Validation loss (ends of cycles): [0.06520787 0.03052885]
------------------------------
Epoch: 14
Training loss: 0.03184130630995098 | Validation loss: 0.03293635222750405
Validation loss (ends of cycles): [0.06520787 0.03052885]
------------------------------
Epoch: 15
Training loss: 0.03302473133723987 | Validation loss: 0.028592127503846616
Validation loss (ends of cycles): [0.06520787 0.03052885]
------------------------------
Epoch: 16
Training loss: 0.030961162704778344 | Validation loss: 0.02682249341160059
Validation loss (ends of cycles): [0.06520787 0.03052885]
------------------------------
Epoch: 17
Training loss: 0.028243662789463997 | Validation loss: 0.027744205047686894
Validation loss (ends of cycles): [0.06520787 0.03052885]
------------------------------
Epoch: 18
Training loss: 0.02676465332900223 | Validation loss: 0.02321198567127188
Validation loss (ends of cycles): [0.06520787 0.03052885]
------------------------------
Epoch: 19
Training loss: 0.02272553192941766 | Validation loss: 0.02984648073712985
Validation loss (ends of cycles): [0.06520787 0.03052885]
------------------------------
Epoch: 20
Training loss: 0.021555469567446334 | Validation loss: 0.023247383224467438
Validation loss (ends of cycles): [0.06520787 0.03052885 0.02324738]
------------------------------
Epoch: 21
Training loss: 0.0220749583340397 | Validation loss: 0.021610831453775365
Validation loss (ends of cycles): [0.06520787 0.03052885 0.02324738]
------------------------------
Epoch: 22
Training loss: 0.020930324485035318 | Validation loss: 0.023576893222828705
Validation loss (ends of cycles): [0.06520787 0.03052885 0.02324738]
------------------------------
Epoch: 23
Training loss: 0.021814725116679545 | Validation loss: 0.025978229319055874
Validation loss (ends of cycles): [0.06520787 0.03052885 0.02324738]
------------------------------
Epoch: 24
Training loss: 0.02283184808727942 | Validation loss: 0.025270794207851093
Validation loss (ends of cycles): [0.06520787 0.03052885 0.02324738]
------------------------------
Epoch: 25
Training loss: 0.02307317018704979 | Validation loss: 0.08468196541070938
Validation loss (ends of cycles): [0.06520787 0.03052885 0.02324738]
------------------------------
Epoch: 26
Training loss: 0.021557157663138288 | Validation loss: 0.027905408913890522
Validation loss (ends of cycles): [0.06520787 0.03052885 0.02324738]
------------------------------
Epoch: 27
Training loss: 0.021273777182949215 | Validation loss: 0.02349347559114297
Validation loss (ends of cycles): [0.06520787 0.03052885 0.02324738]
------------------------------
Epoch: 28
Training loss: 0.01986664131675896 | Validation loss: 0.0233243799302727
Validation loss (ends of cycles): [0.06520787 0.03052885 0.02324738]
------------------------------
Epoch: 29
Training loss: 0.017681540676245566 | Validation loss: 0.022845523431897163
Validation loss (ends of cycles): [0.06520787 0.03052885 0.02324738]
------------------------------
Epoch: 30
Training loss: 0.016350375782502324 | Validation loss: 0.02178852337722977
Validation loss (ends of cycles): [0.06520787 0.03052885 0.02324738 0.02178852]
--------------------------------------------------------------------------------
Seed: 8
--------------------------------------------------------------------------------
------------------------------
Epoch: 0
Training loss: 0.14074529629004628 | Validation loss: 0.13468989357352257
Validation loss (ends of cycles): [0.13468989]
------------------------------
Epoch: 1
Training loss: 0.13303319051077492 | Validation loss: 0.1283198483288288
Validation loss (ends of cycles): [0.13468989]
------------------------------
Epoch: 2
Training loss: 0.11730245461589411 | Validation loss: 0.1113440953195095
Validation loss (ends of cycles): [0.13468989]
------------------------------
Epoch: 3
Training loss: 0.09925832560187892 | Validation loss: 0.0883958488702774
Validation loss (ends of cycles): [0.13468989]
------------------------------
Epoch: 4
Training loss: 0.08102804579232868 | Validation loss: 0.0767072718590498
Validation loss (ends of cycles): [0.13468989]
------------------------------
Epoch: 5
Training loss: 0.06754080539471224 | Validation loss: 0.0630732923746109
Validation loss (ends of cycles): [0.13468989]
------------------------------
Epoch: 6
Training loss: 0.0626953479490782 | Validation loss: 0.05822800286114216
Validation loss (ends of cycles): [0.13468989]
------------------------------
Epoch: 7
Training loss: 0.05980836462817694 | Validation loss: 0.057572031393647194
Validation loss (ends of cycles): [0.13468989]
------------------------------
Epoch: 8
Training loss: 0.05771961749384278 | Validation loss: 0.06076772231608629
Validation loss (ends of cycles): [0.13468989]
------------------------------
Epoch: 9
Training loss: 0.0532009103580525 | Validation loss: 0.05623008869588375
Validation loss (ends of cycles): [0.13468989]
------------------------------
Epoch: 10
Training loss: 0.05379903512565713 | Validation loss: 0.05519082024693489
Validation loss (ends of cycles): [0.13468989 0.05519082]
------------------------------
Epoch: 11
Training loss: 0.052072555807076 | Validation loss: 0.05436007305979729
Validation loss (ends of cycles): [0.13468989 0.05519082]
------------------------------
Epoch: 12
Training loss: 0.051279104265727495 | Validation loss: 0.06033742055296898
Validation loss (ends of cycles): [0.13468989 0.05519082]
------------------------------
Epoch: 13
Training loss: 0.05221212537665116 | Validation loss: 0.05254641734063625
Validation loss (ends of cycles): [0.13468989 0.05519082]
------------------------------
Epoch: 14
Training loss: 0.052044751024559924 | Validation loss: 0.05064303055405617
Validation loss (ends of cycles): [0.13468989 0.05519082]
------------------------------
Epoch: 15
Training loss: 0.04771482081789719 | Validation loss: 0.18143466114997864
Validation loss (ends of cycles): [0.13468989 0.05519082]
------------------------------
Epoch: 16
Training loss: 0.048779759654089025 | Validation loss: 0.049892572686076164
Validation loss (ends of cycles): [0.13468989 0.05519082]
------------------------------
Epoch: 17
Training loss: 0.04602725470536634 | Validation loss: 0.0489846533164382
Validation loss (ends of cycles): [0.13468989 0.05519082]
------------------------------
Epoch: 18
Training loss: 0.043393145756501904 | Validation loss: 0.04021776653826237
Validation loss (ends of cycles): [0.13468989 0.05519082]
------------------------------
Epoch: 19
Training loss: 0.037118491863733845 | Validation loss: 0.043297613970935345
Validation loss (ends of cycles): [0.13468989 0.05519082]
------------------------------
Epoch: 20
Training loss: 0.036233729535811825 | Validation loss: 0.04204665496945381
Validation loss (ends of cycles): [0.13468989 0.05519082 0.04204665]
------------------------------
Epoch: 21
Training loss: 0.036370223486109785 | Validation loss: 0.04038366116583347
Validation loss (ends of cycles): [0.13468989 0.05519082 0.04204665]
------------------------------
Epoch: 22
Training loss: 0.03642301369262369 | Validation loss: 0.04990543611347675
Validation loss (ends of cycles): [0.13468989 0.05519082 0.04204665]
------------------------------
Epoch: 23
Training loss: 0.03510999748189198 | Validation loss: 0.035950854420661926
Validation loss (ends of cycles): [0.13468989 0.05519082 0.04204665]
------------------------------
Epoch: 24
Training loss: 0.03829076278366541 | Validation loss: 0.08307567611336708
Validation loss (ends of cycles): [0.13468989 0.05519082 0.04204665]
------------------------------
Epoch: 25
Training loss: 0.04044385832783423 | Validation loss: 0.047016918659210205
Validation loss (ends of cycles): [0.13468989 0.05519082 0.04204665]
------------------------------
Epoch: 26
Training loss: 0.037926588403551204 | Validation loss: 0.037543052807450294
Validation loss (ends of cycles): [0.13468989 0.05519082 0.04204665]
------------------------------
Epoch: 27
Training loss: 0.034744165445628916 | Validation loss: 0.05590544827282429
Validation loss (ends of cycles): [0.13468989 0.05519082 0.04204665]
------------------------------
Epoch: 28
Training loss: 0.03150788292680916 | Validation loss: 0.03907494433224201
Validation loss (ends of cycles): [0.13468989 0.05519082 0.04204665]
------------------------------
Epoch: 29
Training loss: 0.029333851839366713 | Validation loss: 0.030293073505163193
Validation loss (ends of cycles): [0.13468989 0.05519082 0.04204665]
------------------------------
Epoch: 30
Training loss: 0.027970748414334497 | Validation loss: 0.030819999054074287
Validation loss (ends of cycles): [0.13468989 0.05519082 0.04204665 0.03082   ]
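The occasional mid-cycle spikes in validation loss (e.g., 0.181 at epoch 15 above, against neighbours around 0.05) line up with the phases where the cyclical learning rate approaches max_lr; the cycle-end values collected in the array are taken when the rate has annealed back to base_lr, which is why they form the smoother, comparable series. A rough reconstruction of the triangular schedule, again assuming one scheduler step per epoch, makes the correspondence easy to check:

# Approximate triangular CyclicLR value per epoch (one step per epoch assumed)
base_lr, max_lr, step_size_up = 3e-5, 1e-3, 5

def lr_at(epoch):
    pos = epoch % (2 * step_size_up)                   # position within the cycle
    frac = 1 - abs(pos - step_size_up) / step_size_up  # 0 at cycle ends, 1 at peak
    return base_lr + (max_lr - base_lr) * frac

for epoch in (0, 5, 10, 15, 20):
    print(f'epoch {epoch:2d}: lr = {lr_at(epoch):.1e}')

Epochs 5, 15, 25, ... sit at the max_lr peak, which is where the noisy validation readings tend to occur.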
--------------------------------------------------------------------------------
Seed: 9
--------------------------------------------------------------------------------
------------------------------
Epoch: 0
Training loss: 0.12100787852939807 | Validation loss: 0.14878312995036444
Validation loss (ends of cycles): [0.14878313]
------------------------------
Epoch: 1
Training loss: 0.1156453094200084 | Validation loss: 0.14218211422363916
Validation loss (ends of cycles): [0.14878313]
------------------------------
Epoch: 2
Training loss: 0.10426287549106698 | Validation loss: 0.12800817439953485
Validation loss (ends of cycles): [0.14878313]
------------------------------
Epoch: 3
Training loss: 0.08943178112569608 | Validation loss: 0.10652916630109151
Validation loss (ends of cycles): [0.14878313]
------------------------------
Epoch: 4
Training loss: 0.07398556525769986 | Validation loss: 0.0644477941095829
Validation loss (ends of cycles): [0.14878313]
------------------------------
Epoch: 5
Training loss: 0.06363658705040028 | Validation loss: 0.05572609603404999
Validation loss (ends of cycles): [0.14878313]
------------------------------
Epoch: 6
Training loss: 0.06081610623943178 | Validation loss: 0.09725653131802876
Validation loss (ends of cycles): [0.14878313]
------------------------------
Epoch: 7
Training loss: 0.05732113220974019 | Validation loss: 0.06353205566604932
Validation loss (ends of cycles): [0.14878313]
------------------------------
Epoch: 8
Training loss: 0.053295500772564036 | Validation loss: 0.05046262095371882
Validation loss (ends of cycles): [0.14878313]
------------------------------
Epoch: 9
Training loss: 0.05417528699495291 | Validation loss: 0.047901748990019165
Validation loss (ends of cycles): [0.14878313]
------------------------------
Epoch: 10
Training loss: 0.0489277304395249 | Validation loss: 0.046974229936798416
Validation loss (ends of cycles): [0.14878313 0.04697423]
------------------------------
Epoch: 11
Training loss: 0.05104770844704226 | Validation loss: 0.046928669015566506
Validation loss (ends of cycles): [0.14878313 0.04697423]
------------------------------
Epoch: 12
Training loss: 0.04871545045783645 | Validation loss: 0.04629917008181413
Validation loss (ends of cycles): [0.14878313 0.04697423]
------------------------------
Epoch: 13
Training loss: 0.04950790577813199 | Validation loss: 0.06741906702518463
Validation loss (ends of cycles): [0.14878313 0.04697423]
------------------------------
Epoch: 14
Training loss: 0.050524057428303515 | Validation loss: 0.0722799909611543
Validation loss (ends of cycles): [0.14878313 0.04697423]
------------------------------
Epoch: 15
Training loss: 0.051111385226249695 | Validation loss: 0.08795048048098882
Validation loss (ends of cycles): [0.14878313 0.04697423]
------------------------------
Epoch: 16
Training loss: 0.049861222603603414 | Validation loss: 0.04207110404968262
Validation loss (ends of cycles): [0.14878313 0.04697423]
------------------------------
Epoch: 17
Training loss: 0.04603122537465472 | Validation loss: 0.03900467542310556
Validation loss (ends of cycles): [0.14878313 0.04697423]
------------------------------
Epoch: 18
Training loss: 0.043004892276305905 | Validation loss: 0.0376884446789821
Validation loss (ends of cycles): [0.14878313 0.04697423]
------------------------------
Epoch: 19
Training loss: 0.03993555070146134 | Validation loss: 0.03346223756670952
Validation loss (ends of cycles): [0.14878313 0.04697423]
------------------------------
Epoch: 20
Training loss: 0.038372725444404704 | Validation loss: 0.033292777525881924
Validation loss (ends of cycles): [0.14878313 0.04697423 0.03329278]
------------------------------
Epoch: 21
Training loss: 0.03876232497982288 | Validation loss: 0.03255048921952645
Validation loss (ends of cycles): [0.14878313 0.04697423 0.03329278]
------------------------------
Epoch: 22
Training loss: 0.038259138892355715 | Validation loss: 0.03326376589636008
Validation loss (ends of cycles): [0.14878313 0.04697423 0.03329278]
------------------------------
Epoch: 23
Training loss: 0.039389371773914286 | Validation loss: 0.03649425941208998
Validation loss (ends of cycles): [0.14878313 0.04697423 0.03329278]
------------------------------
Epoch: 24
Training loss: 0.037030863918756186 | Validation loss: 0.03978176477054755
Validation loss (ends of cycles): [0.14878313 0.04697423 0.03329278]
------------------------------
Epoch: 25
Training loss: 0.04088841525739745 | Validation loss: 0.04172546664873759
Validation loss (ends of cycles): [0.14878313 0.04697423 0.03329278]
------------------------------
Epoch: 26
Training loss: 0.038719205234787966 | Validation loss: 0.03826622168223063
Validation loss (ends of cycles): [0.14878313 0.04697423 0.03329278]
------------------------------
Epoch: 27
Training loss: 0.03795238160283158 | Validation loss: 0.039025234058499336
Validation loss (ends of cycles): [0.14878313 0.04697423 0.03329278]
------------------------------
Epoch: 28
Training loss: 0.0355978360105502 | Validation loss: 0.035915122057000794
Validation loss (ends of cycles): [0.14878313 0.04697423 0.03329278]
------------------------------
Epoch: 29
Training loss: 0.03182907368203527 | Validation loss: 0.03283580827216307
Validation loss (ends of cycles): [0.14878313 0.04697423 0.03329278]
------------------------------
Epoch: 30
Training loss: 0.03164514409084069 | Validation loss: 0.03219876562555631
Validation loss (ends of cycles): [0.14878313 0.04697423 0.03329278 0.03219877]
--------------------------------------------------------------------------------
Seed: 10
--------------------------------------------------------------------------------
------------------------------
Epoch: 0
Training loss: 0.09328515749228627 | Validation loss: 0.0794174075126648
Validation loss (ends of cycles): [0.07941741]
------------------------------
Epoch: 1
Training loss: 0.08947268716598812 | Validation loss: 0.07702409103512764
Validation loss (ends of cycles): [0.07941741]
------------------------------
Epoch: 2
Training loss: 0.08151930766670328 | Validation loss: 0.07422645390033722
Validation loss (ends of cycles): [0.07941741]
------------------------------
Epoch: 3
Training loss: 0.07421657756755226 | Validation loss: 0.0915425568819046
Validation loss (ends of cycles): [0.07941741]
------------------------------
Epoch: 4
Training loss: 0.06644049580944211 | Validation loss: 0.15212642401456833
Validation loss (ends of cycles): [0.07941741]
------------------------------
Epoch: 5
Training loss: 0.06289171956871685 | Validation loss: 0.06643170863389969
Validation loss (ends of cycles): [0.07941741]
------------------------------
Epoch: 6
Training loss: 0.060507005570750484 | Validation loss: 0.07660464942455292
Validation loss (ends of cycles): [0.07941741]
------------------------------
Epoch: 7
Training loss: 0.05770743657883845 | Validation loss: 0.04702441208064556
Validation loss (ends of cycles): [0.07941741]
------------------------------
Epoch: 8
Training loss: 0.055238479160164534 | Validation loss: 0.04257218353450298
Validation loss (ends of cycles): [0.07941741]
------------------------------
Epoch: 9
Training loss: 0.04844514770727409 | Validation loss: 0.038986045867204666
Validation loss (ends of cycles): [0.07941741]
------------------------------
Epoch: 10
Training loss: 0.04857376590371132 | Validation loss: 0.03860069438815117
Validation loss (ends of cycles): [0.07941741 0.03860069]
------------------------------
Epoch: 11
Training loss: 0.0471659146837498 | Validation loss: 0.03836953267455101
Validation loss (ends of cycles): [0.07941741 0.03860069]
------------------------------
Epoch: 12
Training loss: 0.048829075243127976 | Validation loss: 0.03513345122337341
Validation loss (ends of cycles): [0.07941741 0.03860069]
------------------------------
Epoch: 13
Training loss: 0.05086563390336538 | Validation loss: 0.03178618475794792
Validation loss (ends of cycles): [0.07941741 0.03860069]
------------------------------
Epoch: 14
Training loss: 0.04892818766989206 | Validation loss: 0.07313217967748642
Validation loss (ends of cycles): [0.07941741 0.03860069]
------------------------------
Epoch: 15
Training loss: 0.05062789891503359 | Validation loss: 0.04181492328643799
Validation loss (ends of cycles): [0.07941741 0.03860069]
------------------------------
Epoch: 16
Training loss: 0.04422282143250892 | Validation loss: 0.031231501139700413
Validation loss (ends of cycles): [0.07941741 0.03860069]
------------------------------
Epoch: 17
Training loss: 0.0414473704601589 | Validation loss: 0.0330012571066618
Validation loss (ends of cycles): [0.07941741 0.03860069]
------------------------------
Epoch: 18
Training loss: 0.041221494541356436 | Validation loss: 0.03016264084726572
Validation loss (ends of cycles): [0.07941741 0.03860069]
------------------------------
Epoch: 19
Training loss: 0.035920314490795135 | Validation loss: 0.024509469978511333
Validation loss (ends of cycles): [0.07941741 0.03860069]
------------------------------
Epoch: 20
Training loss: 0.03617992938349122 | Validation loss: 0.023955611512064934
Validation loss (ends of cycles): [0.07941741 0.03860069 0.02395561]
------------------------------
Epoch: 21
Training loss: 0.03439663468222869 | Validation loss: 0.024173706769943237
Validation loss (ends of cycles): [0.07941741 0.03860069 0.02395561]
------------------------------
Epoch: 22
Training loss: 0.03432707321879111 | Validation loss: 0.03172864858061075
Validation loss (ends of cycles): [0.07941741 0.03860069 0.02395561]
------------------------------
Epoch: 23
Training loss: 0.035520500748565324 | Validation loss: 0.02993414457887411
Validation loss (ends of cycles): [0.07941741 0.03860069 0.02395561]
------------------------------
Epoch: 24
Training loss: 0.03720426637875406 | Validation loss: 0.036875439807772636
Validation loss (ends of cycles): [0.07941741 0.03860069 0.02395561]
------------------------------
Epoch: 25
Training loss: 0.03747940367381824 | Validation loss: 0.055036623030900955
Validation loss (ends of cycles): [0.07941741 0.03860069 0.02395561]
------------------------------
Epoch: 26
Training loss: 0.035825867597994054 | Validation loss: 0.04328293725848198
Validation loss (ends of cycles): [0.07941741 0.03860069 0.02395561]
------------------------------
Epoch: 27
Training loss: 0.03354562186685048 | Validation loss: 0.025072680786252022
Validation loss (ends of cycles): [0.07941741 0.03860069 0.02395561]
------------------------------
Epoch: 28
Training loss: 0.03464834401874166 | Validation loss: 0.024809451773762703
Validation loss (ends of cycles): [0.07941741 0.03860069 0.02395561]
------------------------------
Epoch: 29
Training loss: 0.03116374404022568 | Validation loss: 0.033944932743906975
Validation loss (ends of cycles): [0.07941741 0.03860069 0.02395561]
------------------------------
Epoch: 30
Training loss: 0.02939593855683741 | Validation loss: 0.022330881096422672
Validation loss (ends of cycles): [0.07941741 0.03860069 0.02395561 0.02233088]
--------------------------------------------------------------------------------
Seed: 11
--------------------------------------------------------------------------------
------------------------------
Epoch: 0
Training loss: 0.12622165640718058 | Validation loss: 0.13938650488853455
Validation loss (ends of cycles): [0.1393865]
------------------------------
Epoch: 1
Training loss: 0.1200494174110262 | Validation loss: 0.13313303887844086
Validation loss (ends of cycles): [0.1393865]
------------------------------
Epoch: 2
Training loss: 0.10607607701891347 | Validation loss: 0.12224111333489418
Validation loss (ends of cycles): [0.1393865]
------------------------------
Epoch: 3
Training loss: 0.09052326138082303 | Validation loss: 0.10416682437062263
Validation loss (ends of cycles): [0.1393865]
------------------------------
Epoch: 4
Training loss: 0.07436744202124446 | Validation loss: 0.07375293970108032
Validation loss (ends of cycles): [0.1393865]
------------------------------
Epoch: 5
Training loss: 0.063938575747766 | Validation loss: 0.07426713779568672
Validation loss (ends of cycles): [0.1393865]
------------------------------
Epoch: 6
Training loss: 0.057538029590719623 | Validation loss: 0.06209025718271732
Validation loss (ends of cycles): [0.1393865]
------------------------------
Epoch: 7
Training loss: 0.05095285353691954 | Validation loss: 0.06709360145032406
Validation loss (ends of cycles): [0.1393865]
------------------------------
Epoch: 8
Training loss: 0.04912557217635607 | Validation loss: 0.0595396663993597
Validation loss (ends of cycles): [0.1393865]
------------------------------
Epoch: 9
Training loss: 0.04791054472719368 | Validation loss: 0.05402009002864361
Validation loss (ends of cycles): [0.1393865]
------------------------------
Epoch: 10
Training loss: 0.04601734866829295 | Validation loss: 0.0537868607789278
Validation loss (ends of cycles): [0.1393865  0.05378686]
------------------------------
Epoch: 11
Training loss: 0.04642667229238309 | Validation loss: 0.05364326946437359
Validation loss (ends of cycles): [0.1393865  0.05378686]
------------------------------
Epoch: 12
Training loss: 0.04651502166923724 | Validation loss: 0.05703286826610565
Validation loss (ends of cycles): [0.1393865  0.05378686]
------------------------------
Epoch: 13
Training loss: 0.04464602313543621 | Validation loss: 0.07510419934988022
Validation loss (ends of cycles): [0.1393865  0.05378686]
------------------------------
Epoch: 14
Training loss: 0.04594061886401553 | Validation loss: 0.0557715930044651
Validation loss (ends of cycles): [0.1393865  0.05378686]
------------------------------
Epoch: 15
Training loss: 0.0421954180653158 | Validation loss: 0.04985055699944496
Validation loss (ends of cycles): [0.1393865  0.05378686]
------------------------------
Epoch: 16
Training loss: 0.04223741483139364 | Validation loss: 0.04710370860993862
Validation loss (ends of cycles): [0.1393865  0.05378686]
------------------------------
Epoch: 17
Training loss: 0.038117557764053345 | Validation loss: 0.04934592917561531
Validation loss (ends of cycles): [0.1393865  0.05378686]
------------------------------
Epoch: 18
Training loss: 0.03437867579295447 | Validation loss: 0.05659086816012859
Validation loss (ends of cycles): [0.1393865  0.05378686]
------------------------------
Epoch: 19
Training loss: 0.03409294313506076 | Validation loss: 0.04514491185545921
Validation loss (ends of cycles): [0.1393865  0.05378686]
------------------------------
Epoch: 20
Training loss: 0.03242792100890687 | Validation loss: 0.04265304282307625
Validation loss (ends of cycles): [0.1393865  0.05378686 0.04265304]
------------------------------
Epoch: 21
Training loss: 0.03009496952750181 | Validation loss: 0.04248355142772198
Validation loss (ends of cycles): [0.1393865  0.05378686 0.04265304]
------------------------------
Epoch: 22
Training loss: 0.03202511644677112 | Validation loss: 0.0439732950180769
Validation loss (ends of cycles): [0.1393865  0.05378686 0.04265304]
------------------------------
Epoch: 23
Training loss: 0.03216276111963548 | Validation loss: 0.05873432569205761
Validation loss (ends of cycles): [0.1393865  0.05378686 0.04265304]
------------------------------
Epoch: 24
Training loss: 0.032698783141217734 | Validation loss: 0.04619231075048447
Validation loss (ends of cycles): [0.1393865  0.05378686 0.04265304]
------------------------------
Epoch: 25
Training loss: 0.03633682802319527 | Validation loss: 0.072358887642622
Validation loss (ends of cycles): [0.1393865  0.05378686 0.04265304]
------------------------------
Epoch: 26
Training loss: 0.033581405681999105 | Validation loss: 0.05562468618154526
Validation loss (ends of cycles): [0.1393865  0.05378686 0.04265304]
------------------------------
Epoch: 27
Training loss: 0.030268098472764616 | Validation loss: 0.059526894241571426
Validation loss (ends of cycles): [0.1393865  0.05378686 0.04265304]
------------------------------
Epoch: 28
Training loss: 0.02835421930802496 | Validation loss: 0.035784799605607986
Validation loss (ends of cycles): [0.1393865  0.05378686 0.04265304]
------------------------------
Epoch: 29
Training loss: 0.024994256367024622 | Validation loss: 0.036349328234791756
Validation loss (ends of cycles): [0.1393865  0.05378686 0.04265304]
------------------------------
Epoch: 30
Training loss: 0.023506488503986282 | Validation loss: 0.03648699168115854
Validation loss (ends of cycles): [0.1393865  0.05378686 0.04265304 0.03648699]
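Per-seed loss histories and model weights are written to dest_dir_loss and dest_dir_model as training proceeds, so an interrupted run can still be inspected offline (the load_dumps utility imported at the top of the notebook is presumably the intended reader). The sketch below assumes a simple one-pickle-per-seed layout under dest_dir_loss; the actual file naming is whatever Learners.train produced, so treat the glob pattern as a placeholder.

import pickle
from pathlib import Path

# Hypothetical layout: one pickled loss history per seed under dest_dir_loss
dest_dir_loss = Path('dumps/cnn/train_eval/all/losses')
losses_by_seed = {}
for p in sorted(dest_dir_loss.glob('*.pkl')):
    with open(p, 'rb') as f:
        losses_by_seed[p.stem] = pickle.load(f)  # keyed by file stem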
--------------------------------------------------------------------------------
Seed: 12
--------------------------------------------------------------------------------
------------------------------
Epoch: 0
Training loss: 0.12465888732358028 | Validation loss: 0.11083460599184036
Validation loss (ends of cycles): [0.11083461]
------------------------------
Epoch: 1
Training loss: 0.11687581594053068 | Validation loss: 0.1044333999355634
Validation loss (ends of cycles): [0.11083461]
------------------------------
Epoch: 2
Training loss: 0.10341314363636468 | Validation loss: 0.09221626569827397
Validation loss (ends of cycles): [0.11083461]
------------------------------
Epoch: 3
Training loss: 0.08686307386348122 | Validation loss: 0.07851269468665123
Validation loss (ends of cycles): [0.11083461]
------------------------------
Epoch: 4
Training loss: 0.07083727340949209 | Validation loss: 0.0551288320372502
Validation loss (ends of cycles): [0.11083461]
------------------------------
Epoch: 5
Training loss: 0.06321360847275508 | Validation loss: 0.05201607135434946
Validation loss (ends of cycles): [0.11083461]
------------------------------
Epoch: 6
Training loss: 0.059471154487446734 | Validation loss: 0.050572953497370086
Validation loss (ends of cycles): [0.11083461]
------------------------------
Epoch: 7
Training loss: 0.05677154197014476 | Validation loss: 0.048783741891384125
Validation loss (ends of cycles): [0.11083461]
------------------------------
Epoch: 8
Training loss: 0.05208255586483957 | Validation loss: 0.05494089797139168
Validation loss (ends of cycles): [0.11083461]
------------------------------
Epoch: 9
Training loss: 0.05054084868415406 | Validation loss: 0.042090740675727524
Validation loss (ends of cycles): [0.11083461]
------------------------------
Epoch: 10
Training loss: 0.046194194313628895 | Validation loss: 0.0418690579632918
Validation loss (ends of cycles): [0.11083461 0.04186906]
------------------------------
Epoch: 11
Training loss: 0.04912359110618893 | Validation loss: 0.03736517330010732
Validation loss (ends of cycles): [0.11083461 0.04186906]
------------------------------
Epoch: 12
Training loss: 0.050159576250926444 | Validation loss: 0.03728324609498183
Validation loss (ends of cycles): [0.11083461 0.04186906]
------------------------------
Epoch: 13
Training loss: 0.04813679552784091 | Validation loss: 0.06099647656083107
Validation loss (ends of cycles): [0.11083461 0.04186906]
------------------------------
Epoch: 14
Training loss: 0.05095356658689285 | Validation loss: 0.042035351817806564
Validation loss (ends of cycles): [0.11083461 0.04186906]
------------------------------
Epoch: 15
Training loss: 0.05249298383530818 | Validation loss: 0.08353547627727191
Validation loss (ends of cycles): [0.11083461 0.04186906]
------------------------------
Epoch: 16
Training loss: 0.04622210692124147 | Validation loss: 0.05047812437017759
Validation loss (ends of cycles): [0.11083461 0.04186906]
------------------------------
Epoch: 17
Training loss: 0.04407538248128012 | Validation loss: 0.03657640082140764
Validation loss (ends of cycles): [0.11083461 0.04186906]
------------------------------
Epoch: 18
Training loss: 0.04126803819580298 | Validation loss: 0.045112963765859604
Validation loss (ends of cycles): [0.11083461 0.04186906]
------------------------------
Epoch: 19
Training loss: 0.03931908642775134 | Validation loss: 0.03320226073265076
Validation loss (ends of cycles): [0.11083461 0.04186906]
------------------------------
Epoch: 20
Training loss: 0.040036032294952555 | Validation loss: 0.034371147553126015
Validation loss (ends of cycles): [0.11083461 0.04186906 0.03437115]
------------------------------
Epoch: 21
Training loss: 0.03779972744737997 | Validation loss: 0.034147227803866066
Validation loss (ends of cycles): [0.11083461 0.04186906 0.03437115]
------------------------------
Epoch: 22
Training loss: 0.03736349481991247 | Validation loss: 0.03392460756003857
Validation loss (ends of cycles): [0.11083461 0.04186906 0.03437115]
------------------------------
Epoch: 23
Training loss: 0.036734214554981964 | Validation loss: 0.05457255865136782
Validation loss (ends of cycles): [0.11083461 0.04186906 0.03437115]
------------------------------
Epoch: 24
Training loss: 0.03623275096699791 | Validation loss: 0.041262177750468254
Validation loss (ends of cycles): [0.11083461 0.04186906 0.03437115]
------------------------------
Epoch: 25
Training loss: 0.039135233744194635 | Validation loss: 0.06989621991912524
Validation loss (ends of cycles): [0.11083461 0.04186906 0.03437115]
------------------------------
Epoch: 26
Training loss: 0.041533306927273146 | Validation loss: 0.08880491803089778
Validation loss (ends of cycles): [0.11083461 0.04186906 0.03437115]
------------------------------
Epoch: 27
Training loss: 0.036787426638367926 | Validation loss: 0.05547218148907026
Validation loss (ends of cycles): [0.11083461 0.04186906 0.03437115]
------------------------------
Epoch: 28
Training loss: 0.037730952602271974 | Validation loss: 0.04338609303037325
Validation loss (ends of cycles): [0.11083461 0.04186906 0.03437115]
------------------------------
Epoch: 29
Training loss: 0.03741005190501088 | Validation loss: 0.03517623494068781
Validation loss (ends of cycles): [0.11083461 0.04186906 0.03437115]
------------------------------
Epoch: 30
Training loss: 0.03459033566085916 | Validation loss: 0.029467060541113217
Validation loss (ends of cycles): [0.11083461 0.04186906 0.03437115 0.02946706]
--------------------------------------------------------------------------------
Seed: 13
--------------------------------------------------------------------------------
------------------------------
Epoch: 0
Training loss: 0.14787319733908302 | Validation loss: 0.12134905159473419
Validation loss (ends of cycles): [0.12134905]
------------------------------
Epoch: 1
Training loss: 0.13971965289429614 | Validation loss: 0.1133270300924778
Validation loss (ends of cycles): [0.12134905]
------------------------------
Epoch: 2
Training loss: 0.12202531177746623 | Validation loss: 0.09741818159818649
Validation loss (ends of cycles): [0.12134905]
------------------------------
Epoch: 3
Training loss: 0.09899224105634187 | Validation loss: 0.0680413693189621
Validation loss (ends of cycles): [0.12134905]
------------------------------
Epoch: 4
Training loss: 0.07547486730312046 | Validation loss: 0.05193481966853142
Validation loss (ends of cycles): [0.12134905]
------------------------------
Epoch: 5
Training loss: 0.059965328754563084 | Validation loss: 0.1478968784213066
Validation loss (ends of cycles): [0.12134905]
------------------------------
Epoch: 6
Training loss: 0.058371061949353466 | Validation loss: 0.060820143669843674
Validation loss (ends of cycles): [0.12134905]
------------------------------
Epoch: 7
Training loss: 0.0518569956092458 | Validation loss: 0.04691869765520096
Validation loss (ends of cycles): [0.12134905]
------------------------------
Epoch: 8
Training loss: 0.04747700201053368 | Validation loss: 0.05568346567451954
Validation loss (ends of cycles): [0.12134905]
------------------------------
Epoch: 9
Training loss: 0.04481482358747407 | Validation loss: 0.044492047280073166
Validation loss (ends of cycles): [0.12134905]
------------------------------
Epoch: 10
Training loss: 0.04322212032581631 | Validation loss: 0.041569143533706665
Validation loss (ends of cycles): [0.12134905 0.04156914]
------------------------------
Epoch: 11
Training loss: 0.04246603638718003 | Validation loss: 0.04150853492319584
Validation loss (ends of cycles): [0.12134905 0.04156914]
------------------------------
Epoch: 12
Training loss: 0.04089725978280369 | Validation loss: 0.03653017058968544
Validation loss (ends of cycles): [0.12134905 0.04156914]
------------------------------
Epoch: 13
Training loss: 0.041098617801540775 | Validation loss: 0.04898947477340698
Validation loss (ends of cycles): [0.12134905 0.04156914]
------------------------------
Epoch: 14
Training loss: 0.04085124303635798 | Validation loss: 0.04935206472873688
Validation loss (ends of cycles): [0.12134905 0.04156914]
------------------------------
Epoch: 15
Training loss: 0.041323604552369365 | Validation loss: 0.04239597171545029
Validation loss (ends of cycles): [0.12134905 0.04156914]
------------------------------
Epoch: 16
Training loss: 0.039808995531577816 | Validation loss: 0.0840645469725132
Validation loss (ends of cycles): [0.12134905 0.04156914]
------------------------------
Epoch: 17
Training loss: 0.03703288322216586 | Validation loss: 0.03245018795132637
Validation loss (ends of cycles): [0.12134905 0.04156914]
------------------------------
Epoch: 18
Training loss: 0.03433209422387575 | Validation loss: 0.03348969016224146
Validation loss (ends of cycles): [0.12134905 0.04156914]
------------------------------
Epoch: 19
Training loss: 0.03210164145811608 | Validation loss: 0.030444078147411346
Validation loss (ends of cycles): [0.12134905 0.04156914]
------------------------------
Epoch: 20
Training loss: 0.03053837034263109 | Validation loss: 0.03115426003932953
Validation loss (ends of cycles): [0.12134905 0.04156914 0.03115426]
------------------------------
Epoch: 21
Training loss: 0.031159722393280583 | Validation loss: 0.03271864727139473
Validation loss (ends of cycles): [0.12134905 0.04156914 0.03115426]
------------------------------
Epoch: 22
Training loss: 0.03416347729140207 | Validation loss: 0.045318085700273514
Validation loss (ends of cycles): [0.12134905 0.04156914 0.03115426]
------------------------------
Epoch: 23
Training loss: 0.03107438314902155 | Validation loss: 0.056804386898875237
Validation loss (ends of cycles): [0.12134905 0.04156914 0.03115426]
------------------------------
Epoch: 24
Training loss: 0.033158473474414724 | Validation loss: 0.05310705862939358
Validation loss (ends of cycles): [0.12134905 0.04156914 0.03115426]
------------------------------
Epoch: 25
Training loss: 0.03283156532990305 | Validation loss: 0.05297096632421017
Validation loss (ends of cycles): [0.12134905 0.04156914 0.03115426]
------------------------------
Epoch: 26
Training loss: 0.033077974362592945 | Validation loss: 0.03136811312288046
Validation loss (ends of cycles): [0.12134905 0.04156914 0.03115426]
------------------------------
Epoch: 27
Training loss: 0.03158471576477352 | Validation loss: 0.058228276669979095
Validation loss (ends of cycles): [0.12134905 0.04156914 0.03115426]
------------------------------
Epoch: 28
Training loss: 0.02844038595886607 | Validation loss: 0.046033360064029694
Validation loss (ends of cycles): [0.12134905 0.04156914 0.03115426]
------------------------------
Epoch: 29
Training loss: 0.026794205566770153 | Validation loss: 0.02960763592272997
Validation loss (ends of cycles): [0.12134905 0.04156914 0.03115426]
------------------------------
Epoch: 30
Training loss: 0.025866988360097532 | Validation loss: 0.030549601651728153
Validation loss (ends of cycles): [0.12134905 0.04156914 0.03115426 0.0305496 ]
--------------------------------------------------------------------------------
Seed: 14
--------------------------------------------------------------------------------
------------------------------
Epoch: 0
Training loss: 0.2685273501433824 | Validation loss: 0.2683800756931305
Validation loss (ends of cycles): [0.26838008]
------------------------------
Epoch: 1
Training loss: 0.25670922037802246 | Validation loss: 0.2526206970214844
Validation loss (ends of cycles): [0.26838008]
------------------------------
Epoch: 2
Training loss: 0.22681226542121485 | Validation loss: 0.21906188130378723
Validation loss (ends of cycles): [0.26838008]
------------------------------
Epoch: 3
Training loss: 0.18514166144948258 | Validation loss: 0.16225957870483398
Validation loss (ends of cycles): [0.26838008]
------------------------------
Epoch: 4
Training loss: 0.13773585306970695 | Validation loss: 0.0982358418405056
Validation loss (ends of cycles): [0.26838008]
------------------------------
Epoch: 5
Training loss: 0.09757452673817936 | Validation loss: 0.16215666383504868
Validation loss (ends of cycles): [0.26838008]
------------------------------
Epoch: 6
Training loss: 0.0740412101149559 | Validation loss: 0.055965250357985497
Validation loss (ends of cycles): [0.26838008]
------------------------------
Epoch: 7
Training loss: 0.06161656583610334 | Validation loss: 0.062003035098314285
Validation loss (ends of cycles): [0.26838008]
------------------------------
Epoch: 8
Training loss: 0.056791181431004874 | Validation loss: 0.05057838559150696
Validation loss (ends of cycles): [0.26838008]
------------------------------
Epoch: 9
Training loss: 0.05035581047597684 | Validation loss: 0.04332583770155907
Validation loss (ends of cycles): [0.26838008]
------------------------------
Epoch: 10
Training loss: 0.047245774467132594 | Validation loss: 0.04338065907359123
Validation loss (ends of cycles): [0.26838008 0.04338066]
------------------------------
Epoch: 11
Training loss: 0.048102253459786116 | Validation loss: 0.04022406227886677
Validation loss (ends of cycles): [0.26838008 0.04338066]
------------------------------
Epoch: 12
Training loss: 0.04582173936069012 | Validation loss: 0.03690587542951107
Validation loss (ends of cycles): [0.26838008 0.04338066]
------------------------------
Epoch: 13
Training loss: 0.041917656352253335 | Validation loss: 0.04160160571336746
Validation loss (ends of cycles): [0.26838008 0.04338066]
------------------------------
Epoch: 14
Training loss: 0.04336070749712618 | Validation loss: 0.0791056714951992
Validation loss (ends of cycles): [0.26838008 0.04338066]
------------------------------
Epoch: 15
Training loss: 0.04261575992170133 | Validation loss: 0.053798090666532516
Validation loss (ends of cycles): [0.26838008 0.04338066]
------------------------------
Epoch: 16
Training loss: 0.04363253075433405 | Validation loss: 0.040439238771796227
Validation loss (ends of cycles): [0.26838008 0.04338066]
------------------------------
Epoch: 17
Training loss: 0.04248141987543357 | Validation loss: 0.03731362521648407
Validation loss (ends of cycles): [0.26838008 0.04338066]
------------------------------
Epoch: 18
Training loss: 0.038024303209232654 | Validation loss: 0.041773609817028046
Validation loss (ends of cycles): [0.26838008 0.04338066]
------------------------------
Epoch: 19
Training loss: 0.03374856283986255 | Validation loss: 0.03551979921758175
Validation loss (ends of cycles): [0.26838008 0.04338066]
------------------------------
Epoch: 20
Training loss: 0.033305737042897625 | Validation loss: 0.03380383178591728
Validation loss (ends of cycles): [0.26838008 0.04338066 0.03380383]
------------------------------
Epoch: 21
Training loss: 0.03389084388158823 | Validation loss: 0.03323566913604736
Validation loss (ends of cycles): [0.26838008 0.04338066 0.03380383]
------------------------------
Epoch: 22
Training loss: 0.0329377921788316 | Validation loss: 0.03338051959872246
Validation loss (ends of cycles): [0.26838008 0.04338066 0.03380383]
------------------------------
Epoch: 23
Training loss: 0.03327037482277343 | Validation loss: 0.032685402780771255
Validation loss (ends of cycles): [0.26838008 0.04338066 0.03380383]
------------------------------
Epoch: 24
Training loss: 0.03479252481146863 | Validation loss: 0.0644093994051218
Validation loss (ends of cycles): [0.26838008 0.04338066 0.03380383]
------------------------------
Epoch: 25
Training loss: 0.03432726771815827 | Validation loss: 0.049457062035799026
Validation loss (ends of cycles): [0.26838008 0.04338066 0.03380383]
------------------------------
Epoch: 26
Training loss: 0.03283493897240413 | Validation loss: 0.03840087540447712
Validation loss (ends of cycles): [0.26838008 0.04338066 0.03380383]
------------------------------
Epoch: 27
Training loss: 0.029340636769407673 | Validation loss: 0.036333074793219566
Validation loss (ends of cycles): [0.26838008 0.04338066 0.03380383]
------------------------------
Epoch: 28
Training loss: 0.028881219558809932 | Validation loss: 0.03344106115400791
Validation loss (ends of cycles): [0.26838008 0.04338066 0.03380383]
------------------------------
Epoch: 29
Training loss: 0.026324676251725146 | Validation loss: 0.029291590675711632
Validation loss (ends of cycles): [0.26838008 0.04338066 0.03380383]
------------------------------
Epoch: 30
Training loss: 0.02539735272722809 | Validation loss: 0.029340913519263268
Validation loss (ends of cycles): [0.26838008 0.04338066 0.03380383 0.02934091]
--------------------------------------------------------------------------------
Seed: 15
--------------------------------------------------------------------------------
------------------------------
Epoch: 0
Training loss: 0.0841492728183144 | Validation loss: 0.06075831688940525
Validation loss (ends of cycles): [0.06075832]
------------------------------
Epoch: 1
Training loss: 0.08169916311376973 | Validation loss: 0.06054982356727123
Validation loss (ends of cycles): [0.06075832]
------------------------------
Epoch: 2
Training loss: 0.07849763216156709 | Validation loss: 0.06086615286767483
Validation loss (ends of cycles): [0.06075832]
------------------------------
Epoch: 3
Training loss: 0.07423726802593783 | Validation loss: 0.06112533435225487
Validation loss (ends of cycles): [0.06075832]
------------------------------
Epoch: 4
Training loss: 0.06984295362704679 | Validation loss: 0.04825133830308914
Validation loss (ends of cycles): [0.06075832]
------------------------------
Epoch: 5
Training loss: 0.06283724523688618 | Validation loss: 0.04184510372579098
Validation loss (ends of cycles): [0.06075832]
------------------------------
Epoch: 6
Training loss: 0.058544049725720755 | Validation loss: 0.041651615872979164
Validation loss (ends of cycles): [0.06075832]
------------------------------
Epoch: 7
Training loss: 0.05212017580082542 | Validation loss: 0.04360814392566681
Validation loss (ends of cycles): [0.06075832]
------------------------------
Epoch: 8
Training loss: 0.052451391067159805 | Validation loss: 0.04157676547765732
Validation loss (ends of cycles): [0.06075832]
------------------------------
Epoch: 9
Training loss: 0.04793435246928742 | Validation loss: 0.03575167618691921
Validation loss (ends of cycles): [0.06075832]
------------------------------
Epoch: 10
Training loss: 0.04477887306558458 | Validation loss: 0.035469865426421165
Validation loss (ends of cycles): [0.06075832 0.03546987]
------------------------------
Epoch: 11
Training loss: 0.045943650466046836 | Validation loss: 0.0336600337177515
Validation loss (ends of cycles): [0.06075832 0.03546987]
------------------------------
Epoch: 12
Training loss: 0.04350362703400223 | Validation loss: 0.03219062741845846
Validation loss (ends of cycles): [0.06075832 0.03546987]
------------------------------
Epoch: 13
Training loss: 0.04283701863728071 | Validation loss: 0.03756018541753292
Validation loss (ends of cycles): [0.06075832 0.03546987]
------------------------------
Epoch: 14
Training loss: 0.04464582922427278 | Validation loss: 0.0390226636081934
Validation loss (ends of cycles): [0.06075832 0.03546987]
------------------------------
Epoch: 15
Training loss: 0.046644166406047974 | Validation loss: 0.03757801093161106
Validation loss (ends of cycles): [0.06075832 0.03546987]
------------------------------
Epoch: 16
Training loss: 0.04258477227076104 | Validation loss: 0.03985445946455002
Validation loss (ends of cycles): [0.06075832 0.03546987]
------------------------------
Epoch: 17
Training loss: 0.04088323328055834 | Validation loss: 0.026787959039211273
Validation loss (ends of cycles): [0.06075832 0.03546987]
------------------------------
Epoch: 18
Training loss: 0.03871416957362702 | Validation loss: 0.025218220427632332
Validation loss (ends of cycles): [0.06075832 0.03546987]
------------------------------
Epoch: 19
Training loss: 0.03509655153672946 | Validation loss: 0.02421511523425579
Validation loss (ends of cycles): [0.06075832 0.03546987]
------------------------------
Epoch: 20
Training loss: 0.035071449550358874 | Validation loss: 0.02311981562525034
Validation loss (ends of cycles): [0.06075832 0.03546987 0.02311982]
------------------------------
Epoch: 21
Training loss: 0.034543048394353765 | Validation loss: 0.022894551046192646
Validation loss (ends of cycles): [0.06075832 0.03546987 0.02311982]
------------------------------
Epoch: 22
Training loss: 0.03377740447850604 | Validation loss: 0.024682712741196156
Validation loss (ends of cycles): [0.06075832 0.03546987 0.02311982]
------------------------------
Epoch: 23
Training loss: 0.03353702306355301 | Validation loss: 0.029586568474769592
Validation loss (ends of cycles): [0.06075832 0.03546987 0.02311982]
------------------------------
Epoch: 24
Training loss: 0.03612418242387081 | Validation loss: 0.027507783845067024
Validation loss (ends of cycles): [0.06075832 0.03546987 0.02311982]
------------------------------
Epoch: 25
Training loss: 0.03587194492942408 | Validation loss: 0.027790222316980362
Validation loss (ends of cycles): [0.06075832 0.03546987 0.02311982]
------------------------------
Epoch: 26
Training loss: 0.03557291837703241 | Validation loss: 0.03710603527724743
Validation loss (ends of cycles): [0.06075832 0.03546987 0.02311982]
------------------------------
Epoch: 27
Training loss: 0.033871732284560016 | Validation loss: 0.03677363134920597
Validation loss (ends of cycles): [0.06075832 0.03546987 0.02311982]
------------------------------
Epoch: 28
Training loss: 0.03167560118201532 | Validation loss: 0.032423168420791626
Validation loss (ends of cycles): [0.06075832 0.03546987 0.02311982]
------------------------------
Epoch: 29
Training loss: 0.02956923759101253 | Validation loss: 0.019483156502246857
Validation loss (ends of cycles): [0.06075832 0.03546987 0.02311982]
------------------------------
Epoch: 30
Training loss: 0.02946120588795135 | Validation loss: 0.018310876563191414
Validation loss (ends of cycles): [0.06075832 0.03546987 0.02311982 0.01831088]
--------------------------------------------------------------------------------
Seed: 16
--------------------------------------------------------------------------------
------------------------------
Epoch: 0
Training loss: 0.08156146030676992 | Validation loss: 0.0705137016872565
Validation loss (ends of cycles): [0.0705137]
------------------------------
Epoch: 1
Training loss: 0.07990784237259313 | Validation loss: 0.06988636900981267
Validation loss (ends of cycles): [0.0705137]
------------------------------
Epoch: 2
Training loss: 0.07819349220708798 | Validation loss: 0.06863330366710822
Validation loss (ends of cycles): [0.0705137]
------------------------------
Epoch: 3
Training loss: 0.07480346470286972 | Validation loss: 0.0666625127196312
Validation loss (ends of cycles): [0.0705137]
------------------------------
Epoch: 4
Training loss: 0.06917970745187056 | Validation loss: 0.058160472040375076
Validation loss (ends of cycles): [0.0705137]
------------------------------
Epoch: 5
Training loss: 0.06264691466563627 | Validation loss: 0.08878007034460704
Validation loss (ends of cycles): [0.0705137]
------------------------------
Epoch: 6
Training loss: 0.06039848516842252 | Validation loss: 0.05209088449676832
Validation loss (ends of cycles): [0.0705137]
------------------------------
Epoch: 7
Training loss: 0.05595046673950396 | Validation loss: 0.04798128828406334
Validation loss (ends of cycles): [0.0705137]
------------------------------
Epoch: 8
Training loss: 0.05155527258389875 | Validation loss: 0.041437882309158645
Validation loss (ends of cycles): [0.0705137]
------------------------------
Epoch: 9
Training loss: 0.04763707479363993 | Validation loss: 0.043172294894854225
Validation loss (ends of cycles): [0.0705137]
------------------------------
Epoch: 10
Training loss: 0.04461290363810564 | Validation loss: 0.04293485110004743
Validation loss (ends of cycles): [0.0705137  0.04293485]
------------------------------
Epoch: 11
Training loss: 0.04245269229929698 | Validation loss: 0.03906371258199215
Validation loss (ends of cycles): [0.0705137  0.04293485]
------------------------------
Epoch: 12
Training loss: 0.043166923287667726 | Validation loss: 0.04080717754550278
Validation loss (ends of cycles): [0.0705137  0.04293485]
------------------------------
Epoch: 13
Training loss: 0.042604215247066396 | Validation loss: 0.03541523963212967
Validation loss (ends of cycles): [0.0705137  0.04293485]
------------------------------
Epoch: 14
Training loss: 0.04206815513929254 | Validation loss: 0.03434837299088637
Validation loss (ends of cycles): [0.0705137  0.04293485]
------------------------------
Epoch: 15
Training loss: 0.040814362760437164 | Validation loss: 0.034936813535750844
Validation loss (ends of cycles): [0.0705137  0.04293485]
------------------------------
Epoch: 16
Training loss: 0.04256990846050413 | Validation loss: 0.05865098908543587
Validation loss (ends of cycles): [0.0705137  0.04293485]
------------------------------
Epoch: 17
Training loss: 0.038062922558502146 | Validation loss: 0.032220245649417244
Validation loss (ends of cycles): [0.0705137  0.04293485]
------------------------------
Epoch: 18
Training loss: 0.03491674932210069 | Validation loss: 0.03063141368329525
Validation loss (ends of cycles): [0.0705137  0.04293485]
------------------------------
Epoch: 19
Training loss: 0.032406675727351716 | Validation loss: 0.031299490481615067
Validation loss (ends of cycles): [0.0705137  0.04293485]
------------------------------
Epoch: 20
Training loss: 0.030975149993441607 | Validation loss: 0.03276701706151167
Validation loss (ends of cycles): [0.0705137  0.04293485 0.03276702]
------------------------------
Epoch: 21
Training loss: 0.03167034911089822 | Validation loss: 0.031768561651309334
Validation loss (ends of cycles): [0.0705137  0.04293485 0.03276702]
------------------------------
Epoch: 22
Training loss: 0.02994556332889356 | Validation loss: 0.030046330144008
Validation loss (ends of cycles): [0.0705137  0.04293485 0.03276702]
------------------------------
Epoch: 23
Training loss: 0.03042360659884779 | Validation loss: 0.02618786444266637
Validation loss (ends of cycles): [0.0705137  0.04293485 0.03276702]
------------------------------
Epoch: 24
Training loss: 0.033261800380913836 | Validation loss: 0.03181804623454809
Validation loss (ends of cycles): [0.0705137  0.04293485 0.03276702]
------------------------------
Epoch: 25
Training loss: 0.03491548017451638 | Validation loss: 0.03237322314331929
Validation loss (ends of cycles): [0.0705137  0.04293485 0.03276702]
------------------------------
Epoch: 26
Training loss: 0.03453618023348482 | Validation loss: 0.030259561104079086
Validation loss (ends of cycles): [0.0705137  0.04293485 0.03276702]
------------------------------
Epoch: 27
Training loss: 0.02996809525709403 | Validation loss: 0.02521776221692562
Validation loss (ends of cycles): [0.0705137  0.04293485 0.03276702]
------------------------------
Epoch: 28
Training loss: 0.029768657135336024 | Validation loss: 0.027145131180683773
Validation loss (ends of cycles): [0.0705137  0.04293485 0.03276702]
------------------------------
Epoch: 29
Training loss: 0.02678287053774846 | Validation loss: 0.02985484277208646
Validation loss (ends of cycles): [0.0705137  0.04293485 0.03276702]
------------------------------
Epoch: 30
Training loss: 0.02542775310575962 | Validation loss: 0.029113321254650753
Validation loss (ends of cycles): [0.0705137  0.04293485 0.03276702 0.02911332]
--------------------------------------------------------------------------------
Seed: 17
--------------------------------------------------------------------------------
------------------------------
Epoch: 0
Training loss: 0.18578452575537893 | Validation loss: 0.17607577641805014
Validation loss (ends of cycles): [0.17607578]
------------------------------
Epoch: 1
Training loss: 0.17636994024117789 | Validation loss: 0.16514772176742554
Validation loss (ends of cycles): [0.17607578]
------------------------------
Epoch: 2
Training loss: 0.15567576388518015 | Validation loss: 0.14257992307345072
Validation loss (ends of cycles): [0.17607578]
------------------------------
Epoch: 3
Training loss: 0.1261256126066049 | Validation loss: 0.11006870617469151
Validation loss (ends of cycles): [0.17607578]
------------------------------
Epoch: 4
Training loss: 0.09516125255160862 | Validation loss: 0.07898629705111186
Validation loss (ends of cycles): [0.17607578]
------------------------------
Epoch: 5
Training loss: 0.07224813931518131 | Validation loss: 0.07449610034624736
Validation loss (ends of cycles): [0.17607578]
------------------------------
Epoch: 6
Training loss: 0.06180248782038689 | Validation loss: 0.059722560147444405
Validation loss (ends of cycles): [0.17607578]
------------------------------
Epoch: 7
Training loss: 0.05495261256065634 | Validation loss: 0.05743814756472906
Validation loss (ends of cycles): [0.17607578]
------------------------------
Epoch: 8
Training loss: 0.0541409340997537 | Validation loss: 0.05083008110523224
Validation loss (ends of cycles): [0.17607578]
------------------------------
Epoch: 9
Training loss: 0.0495873944212993 | Validation loss: 0.05104871218403181
Validation loss (ends of cycles): [0.17607578]
------------------------------
Epoch: 10
Training loss: 0.04791343791617288 | Validation loss: 0.049677314857641854
Validation loss (ends of cycles): [0.17607578 0.04967731]
------------------------------
Epoch: 11
Training loss: 0.048474044952955514 | Validation loss: 0.04772906253735224
Validation loss (ends of cycles): [0.17607578 0.04967731]
------------------------------
Epoch: 12
Training loss: 0.04561085191865762 | Validation loss: 0.04465216274062792
Validation loss (ends of cycles): [0.17607578 0.04967731]
------------------------------
Epoch: 13
Training loss: 0.04276795002321402 | Validation loss: 0.044079518566528954
Validation loss (ends of cycles): [0.17607578 0.04967731]
------------------------------
Epoch: 14
Training loss: 0.046003543875283666 | Validation loss: 0.05234615504741669
Validation loss (ends of cycles): [0.17607578 0.04967731]
------------------------------
Epoch: 15
Training loss: 0.04754653500599994 | Validation loss: 0.057119290033976235
Validation loss (ends of cycles): [0.17607578 0.04967731]
------------------------------
Epoch: 16
Training loss: 0.04541455095426904 | Validation loss: 0.051875809828440346
Validation loss (ends of cycles): [0.17607578 0.04967731]
------------------------------
Epoch: 17
Training loss: 0.04212984825587935 | Validation loss: 0.061386716862519584
Validation loss (ends of cycles): [0.17607578 0.04967731]
------------------------------
Epoch: 18
Training loss: 0.04025129984236426 | Validation loss: 0.0377594760308663
Validation loss (ends of cycles): [0.17607578 0.04967731]
------------------------------
Epoch: 19
Training loss: 0.037910942195190325 | Validation loss: 0.03676092314223448
Validation loss (ends of cycles): [0.17607578 0.04967731]
------------------------------
Epoch: 20
Training loss: 0.035172241946889296 | Validation loss: 0.03536786511540413
Validation loss (ends of cycles): [0.17607578 0.04967731 0.03536787]
------------------------------
Epoch: 21
Training loss: 0.03531419661723905 | Validation loss: 0.03570879126588503
Validation loss (ends of cycles): [0.17607578 0.04967731 0.03536787]
------------------------------
Epoch: 22
Training loss: 0.0355098739059435 | Validation loss: 0.033749821285406746
Validation loss (ends of cycles): [0.17607578 0.04967731 0.03536787]
------------------------------
Epoch: 23
Training loss: 0.03708219093581041 | Validation loss: 0.034131928657492004
Validation loss (ends of cycles): [0.17607578 0.04967731 0.03536787]
------------------------------
Epoch: 24
Training loss: 0.036728250690632396 | Validation loss: 0.08373745282491048
Validation loss (ends of cycles): [0.17607578 0.04967731 0.03536787]
------------------------------
Epoch: 25
Training loss: 0.037703154815567866 | Validation loss: 0.16881321867307028
Validation loss (ends of cycles): [0.17607578 0.04967731 0.03536787]
------------------------------
Epoch: 26
Training loss: 0.04052116773608658 | Validation loss: 0.035450027945140995
Validation loss (ends of cycles): [0.17607578 0.04967731 0.03536787]
------------------------------
Epoch: 27
Training loss: 0.03702345759504371 | Validation loss: 0.04713146264354388
Validation loss (ends of cycles): [0.17607578 0.04967731 0.03536787]
------------------------------
Epoch: 28
Training loss: 0.033382929033703275 | Validation loss: 0.033809199929237366
Validation loss (ends of cycles): [0.17607578 0.04967731 0.03536787]
------------------------------
Epoch: 29
Training loss: 0.030483958828780387 | Validation loss: 0.03201883099973202
Validation loss (ends of cycles): [0.17607578 0.04967731 0.03536787]
------------------------------
Epoch: 30
Training loss: 0.028836593238843813 | Validation loss: 0.029221948857108753
Validation loss (ends of cycles): [0.17607578 0.04967731 0.03536787 0.02922195]
--------------------------------------------------------------------------------
Seed: 18
--------------------------------------------------------------------------------
------------------------------
Epoch: 0
Training loss: 0.08226940329921872 | Validation loss: 0.08771190047264099
Validation loss (ends of cycles): [0.0877119]
------------------------------
Epoch: 1
Training loss: 0.08080815602290004 | Validation loss: 0.08779796585440636
Validation loss (ends of cycles): [0.0877119]
------------------------------
Epoch: 2
Training loss: 0.07893975392768257 | Validation loss: 0.08810674771666527
Validation loss (ends of cycles): [0.0877119]
------------------------------
Epoch: 3
Training loss: 0.07525817559737909 | Validation loss: 0.08791105076670647
Validation loss (ends of cycles): [0.0877119]
------------------------------
Epoch: 4
Training loss: 0.06839138034142946 | Validation loss: 0.0787418819963932
Validation loss (ends of cycles): [0.0877119]
------------------------------
Epoch: 5
Training loss: 0.06047361873482403 | Validation loss: 0.057681020349264145
Validation loss (ends of cycles): [0.0877119]
------------------------------
Epoch: 6
Training loss: 0.05439754487260392 | Validation loss: 0.10234533622860909
Validation loss (ends of cycles): [0.0877119]
------------------------------
Epoch: 7
Training loss: 0.048915984697247804 | Validation loss: 0.09288699552416801
Validation loss (ends of cycles): [0.0877119]
------------------------------
Epoch: 8
Training loss: 0.05011697858572006 | Validation loss: 0.07140225917100906
Validation loss (ends of cycles): [0.0877119]
------------------------------
Epoch: 9
Training loss: 0.04586619089700674 | Validation loss: 0.054284341633319855
Validation loss (ends of cycles): [0.0877119]
------------------------------
Epoch: 10
Training loss: 0.04105820320546627 | Validation loss: 0.05086086876690388
Validation loss (ends of cycles): [0.0877119  0.05086087]
------------------------------
Epoch: 11
Training loss: 0.040411756717060744 | Validation loss: 0.05188886821269989
Validation loss (ends of cycles): [0.0877119  0.05086087]
------------------------------
Epoch: 12
Training loss: 0.04021171529434229 | Validation loss: 0.052854619920253754
Validation loss (ends of cycles): [0.0877119  0.05086087]
------------------------------
Epoch: 13
Training loss: 0.039001153291840306 | Validation loss: 0.053517796099185944
Validation loss (ends of cycles): [0.0877119  0.05086087]
------------------------------
Epoch: 14
Training loss: 0.03992528409550065 | Validation loss: 0.11048462241888046
Validation loss (ends of cycles): [0.0877119  0.05086087]
------------------------------
Epoch: 15
Training loss: 0.0428880665843424 | Validation loss: 0.048895107582211494
Validation loss (ends of cycles): [0.0877119  0.05086087]
------------------------------
Epoch: 16
Training loss: 0.03952205651684811 | Validation loss: 0.09723616763949394
Validation loss (ends of cycles): [0.0877119  0.05086087]
------------------------------
Epoch: 17
Training loss: 0.03598170609850632 | Validation loss: 0.054928792640566826
Validation loss (ends of cycles): [0.0877119  0.05086087]
------------------------------
Epoch: 18
Training loss: 0.034703530959392846 | Validation loss: 0.04207434877753258
Validation loss (ends of cycles): [0.0877119  0.05086087]
------------------------------
Epoch: 19
Training loss: 0.03246637443570714 | Validation loss: 0.04125319607555866
Validation loss (ends of cycles): [0.0877119  0.05086087]
------------------------------
Epoch: 20
Training loss: 0.03022692215285803 | Validation loss: 0.03823002055287361
Validation loss (ends of cycles): [0.0877119  0.05086087 0.03823002]
------------------------------
Epoch: 21
Training loss: 0.031208522029613193 | Validation loss: 0.04041556641459465
Validation loss (ends of cycles): [0.0877119  0.05086087 0.03823002]
------------------------------
Epoch: 22
Training loss: 0.029294442140350218 | Validation loss: 0.0428590402007103
Validation loss (ends of cycles): [0.0877119  0.05086087 0.03823002]
------------------------------
Epoch: 23
Training loss: 0.0297195372220717 | Validation loss: 0.04256322421133518
Validation loss (ends of cycles): [0.0877119  0.05086087 0.03823002]
------------------------------
Epoch: 24
Training loss: 0.03125792741775513 | Validation loss: 0.10660433769226074
Validation loss (ends of cycles): [0.0877119  0.05086087 0.03823002]
------------------------------
Epoch: 25
Training loss: 0.03019392794292224 | Validation loss: 0.06545785069465637
Validation loss (ends of cycles): [0.0877119  0.05086087 0.03823002]
------------------------------
Epoch: 26
Training loss: 0.033481411537841746 | Validation loss: 0.03471088223159313
Validation loss (ends of cycles): [0.0877119  0.05086087 0.03823002]
------------------------------
Epoch: 27
Training loss: 0.027845038640263834 | Validation loss: 0.07449803873896599
Validation loss (ends of cycles): [0.0877119  0.05086087 0.03823002]
------------------------------
Epoch: 28
Training loss: 0.027143595397080247 | Validation loss: 0.03891387768089771
Validation loss (ends of cycles): [0.0877119  0.05086087 0.03823002]
------------------------------
Epoch: 29
Training loss: 0.025757619964056892 | Validation loss: 0.041501617059111595
Validation loss (ends of cycles): [0.0877119  0.05086087 0.03823002]
------------------------------
Epoch: 30
Training loss: 0.02374906617363817 | Validation loss: 0.04013838246464729
Validation loss (ends of cycles): [0.0877119  0.05086087 0.03823002 0.04013838]
--------------------------------------------------------------------------------
Seed: 19
--------------------------------------------------------------------------------
------------------------------
Epoch: 0
Training loss: 0.16255763173103333 | Validation loss: 0.14109364648660025
Validation loss (ends of cycles): [0.14109365]
------------------------------
Epoch: 1
Training loss: 0.15516356999675432 | Validation loss: 0.13523547103007635
Validation loss (ends of cycles): [0.14109365]
------------------------------
Epoch: 2
Training loss: 0.14172234551774132 | Validation loss: 0.12201743572950363
Validation loss (ends of cycles): [0.14109365]
------------------------------
Epoch: 3
Training loss: 0.12399974134233263 | Validation loss: 0.09632302448153496
Validation loss (ends of cycles): [0.14109365]
------------------------------
Epoch: 4
Training loss: 0.10233049053284857 | Validation loss: 0.06885181864102681
Validation loss (ends of cycles): [0.14109365]
------------------------------
Epoch: 5
Training loss: 0.0784348054892487 | Validation loss: 0.05762290582060814
Validation loss (ends of cycles): [0.14109365]
------------------------------
Epoch: 6
Training loss: 0.06391300840510263 | Validation loss: 0.0725318193435669
Validation loss (ends of cycles): [0.14109365]
------------------------------
Epoch: 7
Training loss: 0.05626908710433377 | Validation loss: 0.04346885159611702
Validation loss (ends of cycles): [0.14109365]
------------------------------
Epoch: 8
Training loss: 0.051691514543361135 | Validation loss: 0.04434716080625852
Validation loss (ends of cycles): [0.14109365]
------------------------------
Epoch: 9
Training loss: 0.04746196946750084 | Validation loss: 0.04489594325423241
Validation loss (ends of cycles): [0.14109365]
------------------------------
Epoch: 10
Training loss: 0.043907886577977076 | Validation loss: 0.04307339588801066
Validation loss (ends of cycles): [0.14109365 0.0430734 ]
------------------------------
Epoch: 11
Training loss: 0.04457749778197871 | Validation loss: 0.04406307637691498
Validation loss (ends of cycles): [0.14109365 0.0430734 ]
------------------------------
Epoch: 12
Training loss: 0.044939831313159734 | Validation loss: 0.08915746957063675
Validation loss (ends of cycles): [0.14109365 0.0430734 ]
------------------------------
Epoch: 13
Training loss: 0.044208405539393425 | Validation loss: 0.036763026068607964
Validation loss (ends of cycles): [0.14109365 0.0430734 ]
------------------------------
Epoch: 14
Training loss: 0.04433420538488361 | Validation loss: 0.04416805567840735
Validation loss (ends of cycles): [0.14109365 0.0430734 ]
------------------------------
Epoch: 15
Training loss: 0.04537590737971994 | Validation loss: 0.4176284372806549
Validation loss (ends of cycles): [0.14109365 0.0430734 ]
------------------------------
Epoch: 16
Training loss: 0.04425143709199296 | Validation loss: 0.18371537327766418
Validation loss (ends of cycles): [0.14109365 0.0430734 ]
------------------------------
Epoch: 17
Training loss: 0.03873702914764484 | Validation loss: 0.0807472715775172
Validation loss (ends of cycles): [0.14109365 0.0430734 ]
------------------------------
Epoch: 18
Training loss: 0.03545353727208243 | Validation loss: 0.042597355941931404
Validation loss (ends of cycles): [0.14109365 0.0430734 ]
------------------------------
Epoch: 19
Training loss: 0.032974150549206466 | Validation loss: 0.033211088428894676
Validation loss (ends of cycles): [0.14109365 0.0430734 ]
------------------------------
Epoch: 20
Training loss: 0.030094111027816933 | Validation loss: 0.033144605035583176
Validation loss (ends of cycles): [0.14109365 0.0430734  0.03314461]
------------------------------
Epoch: 21
Training loss: 0.03217791558967696 | Validation loss: 0.03846348077058792
Validation loss (ends of cycles): [0.14109365 0.0430734  0.03314461]
------------------------------
Epoch: 22
Training loss: 0.031263780780136585 | Validation loss: 0.030576524635155995
Validation loss (ends of cycles): [0.14109365 0.0430734  0.03314461]
------------------------------
Epoch: 23
Training loss: 0.030434809832109347 | Validation loss: 0.04343028490742048
Validation loss (ends of cycles): [0.14109365 0.0430734  0.03314461]
------------------------------
Epoch: 24
Training loss: 0.032326566986739635 | Validation loss: 0.13629954804976782
Validation loss (ends of cycles): [0.14109365 0.0430734  0.03314461]
------------------------------
Epoch: 25
Training loss: 0.03342853072616789 | Validation loss: 0.06467030942440033
Validation loss (ends of cycles): [0.14109365 0.0430734  0.03314461]
------------------------------
Epoch: 26
Training loss: 0.03306691503773133 | Validation loss: 0.041692071904738746
Validation loss (ends of cycles): [0.14109365 0.0430734  0.03314461]
------------------------------
Epoch: 27
Training loss: 0.030296926179693803 | Validation loss: 0.049537912011146545
Validation loss (ends of cycles): [0.14109365 0.0430734  0.03314461]
------------------------------
Epoch: 28
Training loss: 0.026243075573196013 | Validation loss: 0.031099140644073486
Validation loss (ends of cycles): [0.14109365 0.0430734  0.03314461]
------------------------------
Epoch: 29
Training loss: 0.024759062979784276 | Validation loss: 0.026985854531327885
Validation loss (ends of cycles): [0.14109365 0.0430734  0.03314461]
------------------------------
Epoch: 30
Training loss: 0.02487910890744792 | Validation loss: 0.029790397733449936
Validation loss (ends of cycles): [0.14109365 0.0430734  0.03314461 0.0297904 ]
Evaluate on Vertisols only

# Replace the following paths with yours
src_dir_model = Path('/content/drive/MyDrive/research/predict-k-mirs-dl/dumps/cnn/train_eval/vertisols/models')
order = 10  # taxonomic order index (Vertisols) in tax_lookup
seeds = range(20)
learners = Learners(Model, tax_lookup, seeds=seeds, device=device)
perfs_local_vertisols, _, _, _ = learners.evaluate((X, y, depth_order[:, -1]),
                                                   order=order,
                                                   src_dir_model=src_dir_model)

perfs_local_vertisols.describe()
             rpd       rpiq         r2       lccc       rmse        mse        mae       mape       bias        stb
count  20.000000  20.000000  20.000000  20.000000  20.000000  20.000000  20.000000  20.000000  20.000000  20.000000
mean    1.777816   2.448334   0.669128   0.800569   0.328906   0.110845   0.213575  30.958227  -0.001265  -0.003632
std     0.184440   0.352951   0.069418   0.041342   0.052977   0.037720   0.023262   4.237755   0.024421   0.065963
min     1.473109   1.825536   0.532598   0.716435   0.252563   0.063788   0.179316  24.304497  -0.041362  -0.115813
25%     1.615667   2.245532   0.611233   0.773600   0.292217   0.085391   0.197175  28.024661  -0.016770  -0.041078
50%     1.788574   2.443511   0.683310   0.807425   0.318792   0.101629   0.209331  29.388569   0.000355   0.000905
75%     1.925041   2.621849   0.726185   0.828116   0.349539   0.122219   0.231883  34.220246   0.018196   0.041607
max     2.113319   3.132748   0.772750   0.861943   0.458266   0.210007   0.255553  40.589359   0.043430   0.129690
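
For reference, rpd and rpiq in this table relate the spread of the observed values to the prediction error. A minimal sketch of their usual chemometrics definitions is given below (mirzai's own metric implementation may differ in detail):

import numpy as np

def rpd_rpiq(y_true, y_pred):
    # Illustrative definitions only:
    # RPD  = SD(observed) / RMSE; RPIQ = IQR(observed) / RMSE
    rmse = np.sqrt(np.mean((y_true - y_pred) ** 2))
    rpd = np.std(y_true) / rmse
    q1, q3 = np.percentile(y_true, [25, 75])
    rpiq = (q3 - q1) / rmse
    return rpd, rpiq

Higher RPD and RPIQ values indicate that the prediction error is small relative to the natural spread of the target.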

Checking losses

from mirzai.training.core import load_dumps

# Replace the following path with yours
src_dir_loss = Path('/content/drive/MyDrive/research/predict-k-mirs-dl/dumps/cnn/train_eval/vertisols/losses')
losses = load_dumps(src_dir_loss)  # load the per-seed loss dumps written during training
pd.DataFrame(losses[4]['valid']).plot()
(Figure: validation-loss curve for one training run, losses[4])
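
To compare convergence across all runs rather than a single one, the loaded dumps can be overlaid. A minimal sketch, assuming each dump is a dict with 'train' and 'valid' loss sequences as used in the cell above:

import matplotlib.pyplot as plt

# Overlay the validation-loss curve of every loaded run
fig, ax = plt.subplots(figsize=(8, 4))
for i, loss in enumerate(losses):
    ax.plot(loss['valid'], label=f'run {i}')
ax.set_xlabel('Epoch')
ax.set_ylabel('Validation loss (MSE)')
ax.legend(ncol=2, fontsize='small')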

Compile metrics for the “local vs. global” comparison (Fig. 6)

def format_metrics(*dfs):
    # Collect the mean and standard deviation of selected metrics
    # (r2, mape) across several performance DataFrames
    perfs = {'r2': {'mean': [], 'std': []},
             'mape': {'mean': [], 'std': []}}
    for df in dfs:
        stats = df.describe()
        for metric in ['r2', 'mape']:
            perfs[metric]['mean'].append(stats.loc['mean', metric])
            perfs[metric]['std'].append(stats.loc['std', metric])
    return perfs
# perfs_global_* and the remaining perfs_local_* DataFrames were computed
# in earlier evaluation cells for the other taxonomic orders
perfs = {}
perfs['global'] = format_metrics(perfs_global_mollisols, perfs_global_gelisols, perfs_global_vertisols)
perfs['local'] = format_metrics(perfs_local_mollisols, perfs_local_gelisols, perfs_local_vertisols)
perfs
{'global': {'r2': {'mean': [0.7676331517935623,
    0.7426116984444495,
    0.7447822285232857],
   'std': [0.022648288190552323, 0.0767026908137805, 0.06303329177546012]},
  'mape': {'mean': [27.362060844898224, 47.88334220647812, 26.8556547164917],
   'std': [1.1716483447190629, 10.194857908308927, 3.2722567422732105]}},
 'local': {'r2': {'mean': [0.7727136496500038,
    0.7461132281184896,
    0.7569868331639625],
   'std': [0.01908464680719597, 0.08643306096790201, 0.05496486944699407]},
  'mape': {'mean': [26.96375846862793, 43.78511905670166, 25.207100808620453],
   'std': [1.352271118990351, 10.660068154956656, 2.427168862994315]}}}
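
These nested dicts feed the “local vs. global” figure. A minimal sketch of a grouped bar chart built from them, assuming (as in the format_metrics calls above) that entries are ordered Mollisols, Gelisols, Vertisols:

import numpy as np
import matplotlib.pyplot as plt

# Grouped bars: mean R² per taxonomic order, std as error bars
orders = ['Mollisols', 'Gelisols', 'Vertisols']
x = np.arange(len(orders))
width = 0.35
fig, ax = plt.subplots()
for offset, scope in zip((-width / 2, width / 2), ('global', 'local')):
    ax.bar(x + offset, perfs[scope]['r2']['mean'], width,
           yerr=perfs[scope]['r2']['std'], label=scope)
ax.set_xticks(x)
ax.set_xticklabels(orders)
ax.set_ylabel('R²')
ax.legend()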
# Serialize the compiled metrics for later use when drawing Fig. 6
dest_dir = Path('/content/drive/MyDrive/research/predict-k-mirs-dl/dumps/cnn')
with open(dest_dir/'global_vs_local.pickle', 'wb') as f:
    pickle.dump(perfs, f)
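
The pickled dict can then be reloaded wherever the figure is drawn, e.g.:

# Reload the compiled metrics (e.g., in the plotting notebook for Fig. 6)
with open(dest_dir/'global_vs_local.pickle', 'rb') as f:
    perfs = pickle.load(f)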