```python
# For testing purpose
# src_dir = 'test'
# fnames = ['spectra-features-smp.npy', 'spectra-wavenumbers-smp.npy',
#           'depth-order-smp.npy', 'target-smp.npy',
#           'tax-order-lu-smp.pkl', 'spectra-id-smp.npy']

# or with all data
src_dir = '/content/drive/MyDrive/research/predict-k-mirs-dl/data/potassium'
fnames = ['spectra-features.npy', 'spectra-wavenumbers.npy',
          'depth-order.npy', 'target.npy',
          'tax-order-lu.pkl', 'spectra-id.npy']

X, X_names, depth_order, y, tax_lookup, X_id = load_kssl(src_dir, fnames=fnames)

data = X, y, X_id, depth_order
transforms = [select_y, select_tax_order, select_X, log_transform_y]
X, y, X_id, depth_order = compose(*transforms)(data)
```
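`compose` folds the list of transforms into a single callable applied to the `(X, y, X_id, depth_order)` tuple. A minimal sketch of such a helper, assuming each transform takes and returns that tuple and that the transforms are applied left to right (the project's own implementation may differ):

```python
from functools import reduce

def compose(*fns):
    """Chain one-argument transforms left to right: compose(f, g)(x) == g(f(x))."""
    return reduce(lambda f, g: (lambda x: g(f(x))), fns)
```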
## Experiment

### Setup
```python
# Is a GPU available?
use_cuda = torch.cuda.is_available()
device = torch.device('cuda:0' if use_cuda else 'cpu')
print(f'Runtime is: {device}')

params_scheduler = {'base_lr': 3e-5,
                    'max_lr': 1e-3,
                    'step_size_up': 5,
                    'mode': 'triangular',
                    'cycle_momentum': False}

n_epochs = 201

seeds = range(20)
seeds = range(15, 20)  # overrides the line above: this run trains seeds 15-19 only
```
Runtime is: cuda:0
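These scheduler parameters follow PyTorch's `torch.optim.lr_scheduler.CyclicLR` API: the learning rate oscillates between `base_lr` and `max_lr` in a triangular wave, rising for `step_size_up` steps and falling for the same number. A minimal sketch of how such a dict would typically be wired up; the model and optimizer below are illustrative stand-ins, not the project's:

```python
import torch

params_scheduler = {'base_lr': 3e-5, 'max_lr': 1e-3, 'step_size_up': 5,
                    'mode': 'triangular', 'cycle_momentum': False}

model = torch.nn.Linear(1764, 1)  # stand-in for the CNN (illustrative only)
optimizer = torch.optim.Adam(model.parameters(), lr=params_scheduler['base_lr'])

# cycle_momentum=False is required here: Adam exposes no 'momentum' parameter
scheduler = torch.optim.lr_scheduler.CyclicLR(optimizer, **params_scheduler)

# With one scheduler step per epoch and step_size_up=5, the LR climbs for
# 5 epochs and descends for 5, so each triangular cycle spans 10 epochs.
lrs = []
for epoch in range(20):
    # ... forward/backward/optimizer.step() would go here ...
    scheduler.step()
    lrs.append(scheduler.get_last_lr()[0])
```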
### Train on all Soil Taxonomic Orders
```python
# Replace the following paths with yours
dest_dir_loss = Path('/content/drive/MyDrive/research/predict-k-mirs-dl/dumps/cnn/train_eval/all/losses')
dest_dir_model = Path('/content/drive/MyDrive/research/predict-k-mirs-dl/dumps/cnn/train_eval/all/models')

learners = Learners(Model, tax_lookup, seeds=seeds, device=device)
learners.train((X, y, depth_order[:, -1]),
               dest_dir_loss=dest_dir_loss,
               dest_dir_model=dest_dir_model,
               n_epochs=n_epochs,
               sc_kwargs=params_scheduler)
```
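In the log below, the `Validation loss (ends of cycles)` list gains one entry every 10 epochs (epochs 0, 10, 20, ...), i.e. once per triangular cycle, when the learning rate has returned to `base_lr`. A minimal sketch of that bookkeeping, assuming a hypothetical `run_one_epoch` helper (the internals of the project's `Learners.train` are not shown in this section):

```python
import numpy as np

cycle_len = 2 * params_scheduler['step_size_up']  # 10 epochs per full cycle

def train_log(n_epochs, run_one_epoch):
    """run_one_epoch() -> (train_loss, valid_loss); hypothetical helper."""
    cycle_ends = []
    for epoch in range(n_epochs):
        train_loss, valid_loss = run_one_epoch()
        if epoch % cycle_len == 0:                # LR is back at base_lr here
            cycle_ends.append(valid_loss)
        print('-' * 30)
        print(f'Epoch: {epoch}')
        print(f'Training loss: {train_loss} | Validation loss: {valid_loss}')
        print(f'Validation loss (ends of cycles): {np.array(cycle_ends)}')
```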
--------------------------------------------------------------------------------
Seed: 15
--------------------------------------------------------------------------------
------------------------------
Epoch: 0
Training loss: 0.21006273476802928 | Validation loss: 0.16685934806555774
Validation loss (ends of cycles): [0.16685935]
------------------------------
Epoch: 1
Training loss: 0.10366790779631614 | Validation loss: 0.12193218717532875
Validation loss (ends of cycles): [0.16685935]
------------------------------
Epoch: 2
Training loss: 0.08515885396031883 | Validation loss: 0.07348759699843627
Validation loss (ends of cycles): [0.16685935]
------------------------------
Epoch: 3
Training loss: 0.07681338573754655 | Validation loss: 0.0775093322174739
Validation loss (ends of cycles): [0.16685935]
------------------------------
Epoch: 4
Training loss: 0.07012089212630385 | Validation loss: 0.06804058407392122
Validation loss (ends of cycles): [0.16685935]
------------------------------
Epoch: 5
Training loss: 0.06663906943111673 | Validation loss: 0.06383055649631847
Validation loss (ends of cycles): [0.16685935]
------------------------------
Epoch: 6
Training loss: 0.06138754077439057 | Validation loss: 0.05653325315004429
Validation loss (ends of cycles): [0.16685935]
------------------------------
Epoch: 7
Training loss: 0.057238537138034624 | Validation loss: 0.05207979377458053
Validation loss (ends of cycles): [0.16685935]
------------------------------
Epoch: 8
Training loss: 0.053663239335846595 | Validation loss: 0.04987848757774429
Validation loss (ends of cycles): [0.16685935]
------------------------------
Epoch: 9
Training loss: 0.05063598560660667 | Validation loss: 0.04656141813415869
Validation loss (ends of cycles): [0.16685935]
------------------------------
Epoch: 10
Training loss: 0.048641224010429515 | Validation loss: 0.045020436414772956
Validation loss (ends of cycles): [0.16685935 0.04502044]
------------------------------
Epoch: 11
Training loss: 0.04944960257005474 | Validation loss: 0.04620409672832595
Validation loss (ends of cycles): [0.16685935 0.04502044]
------------------------------
Epoch: 12
Training loss: 0.05068350452599739 | Validation loss: 0.0466461240322189
Validation loss (ends of cycles): [0.16685935 0.04502044]
------------------------------
Epoch: 13
Training loss: 0.05165666111700941 | Validation loss: 0.048602522936015004
Validation loss (ends of cycles): [0.16685935 0.04502044]
------------------------------
Epoch: 14
Training loss: 0.05240861789184058 | Validation loss: 0.04709551842734877
Validation loss (ends of cycles): [0.16685935 0.04502044]
------------------------------
Epoch: 15
Training loss: 0.053400266255567395 | Validation loss: 0.05373546291571275
Validation loss (ends of cycles): [0.16685935 0.04502044]
------------------------------
Epoch: 16
Training loss: 0.050950179472345654 | Validation loss: 0.046860707412778806
Validation loss (ends of cycles): [0.16685935 0.04502044]
------------------------------
Epoch: 17
Training loss: 0.04810693795888091 | Validation loss: 0.049740820958287316
Validation loss (ends of cycles): [0.16685935 0.04502044]
------------------------------
Epoch: 18
Training loss: 0.045981014085068245 | Validation loss: 0.04217774676472212
Validation loss (ends of cycles): [0.16685935 0.04502044]
------------------------------
Epoch: 19
Training loss: 0.04369535342254859 | Validation loss: 0.04023268423249236
Validation loss (ends of cycles): [0.16685935 0.04502044]
------------------------------
Epoch: 20
Training loss: 0.04157303913503768 | Validation loss: 0.03929079252303438
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079]
------------------------------
Epoch: 21
Training loss: 0.04292749061604859 | Validation loss: 0.03935050310781308
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079]
------------------------------
Epoch: 22
Training loss: 0.04409183335387859 | Validation loss: 0.04125337862302508
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079]
------------------------------
Epoch: 23
Training loss: 0.04542167893222233 | Validation loss: 0.04289548397393881
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079]
------------------------------
Epoch: 24
Training loss: 0.04644744235868236 | Validation loss: 0.04298555405101681
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079]
------------------------------
Epoch: 25
Training loss: 0.04782074668232029 | Validation loss: 0.04539380502779927
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079]
------------------------------
Epoch: 26
Training loss: 0.04569561580951348 | Validation loss: 0.04396933422679395
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079]
------------------------------
Epoch: 27
Training loss: 0.04414120492503399 | Validation loss: 0.04126848547463923
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079]
------------------------------
Epoch: 28
Training loss: 0.041882850408488076 | Validation loss: 0.03920497682637873
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079]
------------------------------
Epoch: 29
Training loss: 0.03984382633563483 | Validation loss: 0.03720222598156043
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079]
------------------------------
Epoch: 30
Training loss: 0.03824256880274849 | Validation loss: 0.03624860502252009
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861]
------------------------------
Epoch: 31
Training loss: 0.039079163438002544 | Validation loss: 0.036830263261773945
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861]
------------------------------
Epoch: 32
Training loss: 0.04015510225774881 | Validation loss: 0.037671556770471876
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861]
------------------------------
Epoch: 33
Training loss: 0.04150084875373712 | Validation loss: 0.040750166566102905
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861]
------------------------------
Epoch: 34
Training loss: 0.0429485932956853 | Validation loss: 0.04076049813127096
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861]
------------------------------
Epoch: 35
Training loss: 0.044326993446122474 | Validation loss: 0.04463050882043564
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861]
------------------------------
Epoch: 36
Training loss: 0.042592402374349886 | Validation loss: 0.04191957023489264
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861]
------------------------------
Epoch: 37
Training loss: 0.04073864440012019 | Validation loss: 0.039296620451243575
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861]
------------------------------
Epoch: 38
Training loss: 0.0388192206421339 | Validation loss: 0.040280445091492305
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861]
------------------------------
Epoch: 39
Training loss: 0.03695870364381836 | Validation loss: 0.03545188218915621
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861]
------------------------------
Epoch: 40
Training loss: 0.03567687908414839 | Validation loss: 0.03444945969643582
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946]
------------------------------
Epoch: 41
Training loss: 0.03621109440926404 | Validation loss: 0.03518754746600063
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946]
------------------------------
Epoch: 42
Training loss: 0.03740448744116923 | Validation loss: 0.035378930587486354
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946]
------------------------------
Epoch: 43
Training loss: 0.038526631041987267 | Validation loss: 0.037030073467938775
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946]
------------------------------
Epoch: 44
Training loss: 0.04034058869666031 | Validation loss: 0.03999876431230687
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946]
------------------------------
Epoch: 45
Training loss: 0.041215690155330255 | Validation loss: 0.041170561809785074
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946]
------------------------------
Epoch: 46
Training loss: 0.040143450241770566 | Validation loss: 0.03838985987765863
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946]
------------------------------
Epoch: 47
Training loss: 0.03800150607052574 | Validation loss: 0.04004549053786075
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946]
------------------------------
Epoch: 48
Training loss: 0.03671635882598971 | Validation loss: 0.03537545478449459
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946]
------------------------------
Epoch: 49
Training loss: 0.0347628424257612 | Validation loss: 0.034168427686446005
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946]
------------------------------
Epoch: 50
Training loss: 0.03355114189009586 | Validation loss: 0.03320952699379583
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953]
------------------------------
Epoch: 51
Training loss: 0.034235434844085255 | Validation loss: 0.033966143739170736
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953]
------------------------------
Epoch: 52
Training loss: 0.03527097588709343 | Validation loss: 0.03463689532889202
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953]
------------------------------
Epoch: 53
Training loss: 0.036384562783925906 | Validation loss: 0.043450314378514224
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953]
------------------------------
Epoch: 54
Training loss: 0.037796340584864946 | Validation loss: 0.037529862032527415
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953]
------------------------------
Epoch: 55
Training loss: 0.040052222718638696 | Validation loss: 0.037125598194193
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953]
------------------------------
Epoch: 56
Training loss: 0.037771799428366476 | Validation loss: 0.03799022023722661
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953]
------------------------------
Epoch: 57
Training loss: 0.03619785493654089 | Validation loss: 0.037243562148867454
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953]
------------------------------
Epoch: 58
Training loss: 0.03466686871229106 | Validation loss: 0.036433838901266585
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953]
------------------------------
Epoch: 59
Training loss: 0.03298729962181652 | Validation loss: 0.03261748232375995
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953]
------------------------------
Epoch: 60
Training loss: 0.0318513594633775 | Validation loss: 0.03203852672492508
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853]
------------------------------
Epoch: 61
Training loss: 0.03245821607750144 | Validation loss: 0.03227031828456484
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853]
------------------------------
Epoch: 62
Training loss: 0.033735571010221586 | Validation loss: 0.036995998938131124
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853]
------------------------------
Epoch: 63
Training loss: 0.03462631959838455 | Validation loss: 0.03523477534475052
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853]
------------------------------
Epoch: 64
Training loss: 0.03623737948744991 | Validation loss: 0.03887613358355201
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853]
------------------------------
Epoch: 65
Training loss: 0.037692032123761855 | Validation loss: 0.0393562819314214
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853]
------------------------------
Epoch: 66
Training loss: 0.03629782138663659 | Validation loss: 0.03982825133850617
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853]
------------------------------
Epoch: 67
Training loss: 0.034753578830525045 | Validation loss: 0.035794618621930084
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853]
------------------------------
Epoch: 68
Training loss: 0.03335795502130705 | Validation loss: 0.03471514085653873
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853]
------------------------------
Epoch: 69
Training loss: 0.031571629029097346 | Validation loss: 0.03225022736541201
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853]
------------------------------
Epoch: 70
Training loss: 0.03022792704007405 | Validation loss: 0.03138879530824128
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 ]
------------------------------
Epoch: 71
Training loss: 0.03081579728044687 | Validation loss: 0.0321810659581581
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 ]
------------------------------
Epoch: 72
Training loss: 0.031975348947540394 | Validation loss: 0.03330272281196265
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 ]
------------------------------
Epoch: 73
Training loss: 0.03338813357143186 | Validation loss: 0.03498803695614359
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 ]
------------------------------
Epoch: 74
Training loss: 0.03510964766335417 | Validation loss: 0.03675926781663325
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 ]
------------------------------
Epoch: 75
Training loss: 0.03621827504931267 | Validation loss: 0.04031967806868848
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 ]
------------------------------
Epoch: 76
Training loss: 0.03502747318872405 | Validation loss: 0.03644606344138099
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 ]
------------------------------
Epoch: 77
Training loss: 0.033388999948923394 | Validation loss: 0.03493262910638499
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 ]
------------------------------
Epoch: 78
Training loss: 0.031588033860087336 | Validation loss: 0.03346493154500438
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 ]
------------------------------
Epoch: 79
Training loss: 0.030269897492477802 | Validation loss: 0.031828868301766636
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 ]
------------------------------
Epoch: 80
Training loss: 0.029260817702536978 | Validation loss: 0.030966343737281528
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 0.03096634]
------------------------------
Epoch: 81
Training loss: 0.02962339904496637 | Validation loss: 0.03175661460686047
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 0.03096634]
------------------------------
Epoch: 82
Training loss: 0.030601339510964654 | Validation loss: 0.03278937654784032
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 0.03096634]
------------------------------
Epoch: 83
Training loss: 0.03195925210107587 | Validation loss: 0.034539599556772584
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 0.03096634]
------------------------------
Epoch: 84
Training loss: 0.03342047210885432 | Validation loss: 0.03554835073254277
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 0.03096634]
------------------------------
Epoch: 85
Training loss: 0.03485352619034128 | Validation loss: 0.03597379817866382
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 0.03096634]
------------------------------
Epoch: 86
Training loss: 0.03383251438632343 | Validation loss: 0.03613145378278156
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 0.03096634]
------------------------------
Epoch: 87
Training loss: 0.032122112861027806 | Validation loss: 0.03380987030841344
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 0.03096634]
------------------------------
Epoch: 88
Training loss: 0.030471733455018208 | Validation loss: 0.03271121671835406
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 0.03096634]
------------------------------
Epoch: 89
Training loss: 0.029372684216662125 | Validation loss: 0.031361686634476735
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 0.03096634]
------------------------------
Epoch: 90
Training loss: 0.028345677224317873 | Validation loss: 0.03078324074931113
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 0.03096634 0.03078324]
------------------------------
Epoch: 91
Training loss: 0.02855107771859717 | Validation loss: 0.03187606106223786
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 0.03096634 0.03078324]
------------------------------
Epoch: 92
Training loss: 0.029404923101652443 | Validation loss: 0.03323598527473159
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 0.03096634 0.03078324]
------------------------------
Epoch: 93
Training loss: 0.03089467578340234 | Validation loss: 0.03677618085711667
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 0.03096634 0.03078324]
------------------------------
Epoch: 94
Training loss: 0.032298622542889566 | Validation loss: 0.03654344159846021
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 0.03096634 0.03078324]
------------------------------
Epoch: 95
Training loss: 0.033855747049271065 | Validation loss: 0.03389050771144375
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 0.03096634 0.03078324]
------------------------------
Epoch: 96
Training loss: 0.032686932765013416 | Validation loss: 0.03563051058424521
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 0.03096634 0.03078324]
------------------------------
Epoch: 97
Training loss: 0.03117338846085637 | Validation loss: 0.036191234661810165
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 0.03096634 0.03078324]
------------------------------
Epoch: 98
Training loss: 0.029448336268973162 | Validation loss: 0.03330194152298227
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 0.03096634 0.03078324]
------------------------------
Epoch: 99
Training loss: 0.028292122928565002 | Validation loss: 0.03124773993560698
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 0.03096634 0.03078324]
------------------------------
Epoch: 100
Training loss: 0.027255275193235184 | Validation loss: 0.030370980245679354
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 0.03096634 0.03078324 0.03037098]
------------------------------
Epoch: 101
Training loss: 0.027862145766197874 | Validation loss: 0.031165265132038468
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 0.03096634 0.03078324 0.03037098]
------------------------------
Epoch: 102
Training loss: 0.028608309474451043 | Validation loss: 0.03325484803490407
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 0.03096634 0.03078324 0.03037098]
------------------------------
Epoch: 103
Training loss: 0.02942115707344955 | Validation loss: 0.03360139277755423
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 0.03096634 0.03078324 0.03037098]
------------------------------
Epoch: 104
Training loss: 0.031027071798535606 | Validation loss: 0.035984445478668255
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 0.03096634 0.03078324 0.03037098]
------------------------------
Epoch: 105
Training loss: 0.03268752752080941 | Validation loss: 0.04319226613218805
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 0.03096634 0.03078324 0.03037098]
------------------------------
Epoch: 106
Training loss: 0.03163052153018281 | Validation loss: 0.033679972509894754
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 0.03096634 0.03078324 0.03037098]
------------------------------
Epoch: 107
Training loss: 0.030197662804605747 | Validation loss: 0.03366826350215526
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 0.03096634 0.03078324 0.03037098]
------------------------------
Epoch: 108
Training loss: 0.028440817642414313 | Validation loss: 0.03247853203684883
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 0.03096634 0.03078324 0.03037098]
------------------------------
Epoch: 109
Training loss: 0.02726938468074118 | Validation loss: 0.031231511301830807
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 0.03096634 0.03078324 0.03037098]
------------------------------
Epoch: 110
Training loss: 0.026660842424089923 | Validation loss: 0.030472978850645302
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 0.03096634 0.03078324 0.03037098 0.03047298]
------------------------------
Epoch: 111
Training loss: 0.026930124313739225 | Validation loss: 0.030931837073799255
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 0.03096634 0.03078324 0.03037098 0.03047298]
------------------------------
Epoch: 112
Training loss: 0.027782215082313953 | Validation loss: 0.03232833159576475
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 0.03096634 0.03078324 0.03037098 0.03047298]
------------------------------
Epoch: 113
Training loss: 0.02897368960804856 | Validation loss: 0.03295310706196897
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 0.03096634 0.03078324 0.03037098 0.03047298]
------------------------------
Epoch: 114
Training loss: 0.030728193830449398 | Validation loss: 0.034547940753729994
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 0.03096634 0.03078324 0.03037098 0.03047298]
------------------------------
Epoch: 115
Training loss: 0.03180026859100028 | Validation loss: 0.03586612556096727
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 0.03096634 0.03078324 0.03037098 0.03047298]
------------------------------
Epoch: 116
Training loss: 0.030403358757305217 | Validation loss: 0.036287696990885035
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 0.03096634 0.03078324 0.03037098 0.03047298]
------------------------------
Epoch: 117
Training loss: 0.029338854772962747 | Validation loss: 0.033448813400701084
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 0.03096634 0.03078324 0.03037098 0.03047298]
------------------------------
Epoch: 118
Training loss: 0.027786362097657277 | Validation loss: 0.03349754812640954
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 0.03096634 0.03078324 0.03037098 0.03047298]
------------------------------
Epoch: 119
Training loss: 0.02651610360645843 | Validation loss: 0.03075202837982009
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 0.03096634 0.03078324 0.03037098 0.03047298]
------------------------------
Epoch: 120
Training loss: 0.025863564623644444 | Validation loss: 0.030081888875075145
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 0.03096634 0.03078324 0.03037098 0.03047298
0.03008189]
------------------------------
Epoch: 121
Training loss: 0.025841036838054013 | Validation loss: 0.030718148438737984
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 0.03096634 0.03078324 0.03037098 0.03047298
0.03008189]
------------------------------
Epoch: 122
Training loss: 0.026887093637666597 | Validation loss: 0.03189418640449247
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 0.03096634 0.03078324 0.03037098 0.03047298
0.03008189]
------------------------------
Epoch: 123
Training loss: 0.0282036214269419 | Validation loss: 0.033052585845961505
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 0.03096634 0.03078324 0.03037098 0.03047298
0.03008189]
------------------------------
Epoch: 124
Training loss: 0.02943545867190293 | Validation loss: 0.03484091803068872
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 0.03096634 0.03078324 0.03037098 0.03047298
0.03008189]
------------------------------
Epoch: 125
Training loss: 0.031106999646285622 | Validation loss: 0.03587077220127118
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 0.03096634 0.03078324 0.03037098 0.03047298
0.03008189]
------------------------------
Epoch: 126
Training loss: 0.029798714455041127 | Validation loss: 0.03478194975945274
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 0.03096634 0.03078324 0.03037098 0.03047298
0.03008189]
------------------------------
Epoch: 127
Training loss: 0.028319378263375713 | Validation loss: 0.03545742583320995
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 0.03096634 0.03078324 0.03037098 0.03047298
0.03008189]
------------------------------
Epoch: 128
Training loss: 0.02707512585783568 | Validation loss: 0.031950104305833844
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 0.03096634 0.03078324 0.03037098 0.03047298
0.03008189]
------------------------------
Epoch: 129
Training loss: 0.025894583061733468 | Validation loss: 0.03058983964960923
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 0.03096634 0.03078324 0.03037098 0.03047298
0.03008189]
------------------------------
Epoch: 130
Training loss: 0.02535024630070818 | Validation loss: 0.030067410971145188
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 0.03096634 0.03078324 0.03037098 0.03047298
0.03008189 0.03006741]
------------------------------
Epoch: 131
Training loss: 0.025085657799715454 | Validation loss: 0.03055939608278264
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 0.03096634 0.03078324 0.03037098 0.03047298
0.03008189 0.03006741]
------------------------------
Epoch: 132
Training loss: 0.026115530444580447 | Validation loss: 0.03141266045745759
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 0.03096634 0.03078324 0.03037098 0.03047298
0.03008189 0.03006741]
------------------------------
Epoch: 133
Training loss: 0.027371244338216392 | Validation loss: 0.03283754703217903
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 0.03096634 0.03078324 0.03037098 0.03047298
0.03008189 0.03006741]
------------------------------
Epoch: 134
Training loss: 0.028676321570061176 | Validation loss: 0.0335505915624378
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 0.03096634 0.03078324 0.03037098 0.03047298
0.03008189 0.03006741]
------------------------------
Epoch: 135
Training loss: 0.030171366977710716 | Validation loss: 0.036135951370264575
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 0.03096634 0.03078324 0.03037098 0.03047298
0.03008189 0.03006741]
------------------------------
Epoch: 136
Training loss: 0.028916694708791834 | Validation loss: 0.03220553534616411
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 0.03096634 0.03078324 0.03037098 0.03047298
0.03008189 0.03006741]
------------------------------
Epoch: 137
Training loss: 0.02758560871511082 | Validation loss: 0.034407987646692624
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 0.03096634 0.03078324 0.03037098 0.03047298
0.03008189 0.03006741]
------------------------------
Epoch: 138
Training loss: 0.026476486200580417 | Validation loss: 0.032068598825913085
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 0.03096634 0.03078324 0.03037098 0.03047298
0.03008189 0.03006741]
------------------------------
Epoch: 139
Training loss: 0.025373654041299962 | Validation loss: 0.03088633716930594
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 0.03096634 0.03078324 0.03037098 0.03047298
0.03008189 0.03006741]
------------------------------
Epoch: 140
Training loss: 0.02446029111412977 | Validation loss: 0.029658473261623783
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 0.03096634 0.03078324 0.03037098 0.03047298
0.03008189 0.03006741 0.02965847]
------------------------------
Epoch: 141
Training loss: 0.024613056748951454 | Validation loss: 0.030765600189302876
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 0.03096634 0.03078324 0.03037098 0.03047298
0.03008189 0.03006741 0.02965847]
------------------------------
Epoch: 142
Training loss: 0.02563061149273889 | Validation loss: 0.031055306257531706
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 0.03096634 0.03078324 0.03037098 0.03047298
0.03008189 0.03006741 0.02965847]
------------------------------
Epoch: 143
Training loss: 0.026793520601546963 | Validation loss: 0.03368187746016589
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 0.03096634 0.03078324 0.03037098 0.03047298
0.03008189 0.03006741 0.02965847]
------------------------------
Epoch: 144
Training loss: 0.028121095472455435 | Validation loss: 0.034668188031136464
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 0.03096634 0.03078324 0.03037098 0.03047298
0.03008189 0.03006741 0.02965847]
------------------------------
Epoch: 145
Training loss: 0.02954825781392095 | Validation loss: 0.036907955427217275
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 0.03096634 0.03078324 0.03037098 0.03047298
0.03008189 0.03006741 0.02965847]
------------------------------
Epoch: 146
Training loss: 0.02867951834992337 | Validation loss: 0.03384090841343972
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 0.03096634 0.03078324 0.03037098 0.03047298
0.03008189 0.03006741 0.02965847]
------------------------------
Epoch: 147
Training loss: 0.027033594139030306 | Validation loss: 0.03186083252055455
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 0.03096634 0.03078324 0.03037098 0.03047298
0.03008189 0.03006741 0.02965847]
------------------------------
Epoch: 148
Training loss: 0.02586826024298358 | Validation loss: 0.03187882715621881
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 0.03096634 0.03078324 0.03037098 0.03047298
0.03008189 0.03006741 0.02965847]
------------------------------
Epoch: 149
Training loss: 0.02469652157055038 | Validation loss: 0.030002151368663903
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 0.03096634 0.03078324 0.03037098 0.03047298
0.03008189 0.03006741 0.02965847]
------------------------------
Epoch: 150
Training loss: 0.024060468244417682 | Validation loss: 0.02966953616226669
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 0.03096634 0.03078324 0.03037098 0.03047298
0.03008189 0.03006741 0.02965847 0.02966954]
------------------------------
Epoch: 151
Training loss: 0.02407615681684862 | Validation loss: 0.030171342457817718
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 0.03096634 0.03078324 0.03037098 0.03047298
0.03008189 0.03006741 0.02965847 0.02966954]
------------------------------
Epoch: 152
Training loss: 0.025213620124942087 | Validation loss: 0.03115276776386046
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 0.03096634 0.03078324 0.03037098 0.03047298
0.03008189 0.03006741 0.02965847 0.02966954]
------------------------------
Epoch: 153
Training loss: 0.02618761574271993 | Validation loss: 0.03224841546498041
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 0.03096634 0.03078324 0.03037098 0.03047298
0.03008189 0.03006741 0.02965847 0.02966954]
------------------------------
Epoch: 154
Training loss: 0.02742909563532994 | Validation loss: 0.033071257239949386
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 0.03096634 0.03078324 0.03037098 0.03047298
0.03008189 0.03006741 0.02965847 0.02966954]
------------------------------
Epoch: 155
Training loss: 0.028960566331566438 | Validation loss: 0.03520545495294892
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 0.03096634 0.03078324 0.03037098 0.03047298
0.03008189 0.03006741 0.02965847 0.02966954]
------------------------------
Epoch: 156
Training loss: 0.027878794489578734 | Validation loss: 0.03331690206629249
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 0.03096634 0.03078324 0.03037098 0.03047298
0.03008189 0.03006741 0.02965847 0.02966954]
------------------------------
Epoch: 157
Training loss: 0.026770222055777088 | Validation loss: 0.03253524844427552
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 0.03096634 0.03078324 0.03037098 0.03047298
0.03008189 0.03006741 0.02965847 0.02966954]
------------------------------
Epoch: 158
Training loss: 0.025255439297926768 | Validation loss: 0.031415996751624396
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 0.03096634 0.03078324 0.03037098 0.03047298
0.03008189 0.03006741 0.02965847 0.02966954]
------------------------------
Epoch: 159
Training loss: 0.023955525235478274 | Validation loss: 0.030671831624236253
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 0.03096634 0.03078324 0.03037098 0.03047298
0.03008189 0.03006741 0.02965847 0.02966954]
------------------------------
Epoch: 160
Training loss: 0.023950381831936977 | Validation loss: 0.02982574759001753
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 0.03096634 0.03078324 0.03037098 0.03047298
0.03008189 0.03006741 0.02965847 0.02966954 0.02982575]
------------------------------
Epoch: 161
Training loss: 0.023566104945594694 | Validation loss: 0.030525477438242035
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 0.03096634 0.03078324 0.03037098 0.03047298
0.03008189 0.03006741 0.02965847 0.02966954 0.02982575]
------------------------------
Epoch: 162
Training loss: 0.02436839387015124 | Validation loss: 0.03077274331806508
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 0.03096634 0.03078324 0.03037098 0.03047298
0.03008189 0.03006741 0.02965847 0.02966954 0.02982575]
------------------------------
Epoch: 163
Training loss: 0.025670240620356492 | Validation loss: 0.03336958942390912
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 0.03096634 0.03078324 0.03037098 0.03047298
0.03008189 0.03006741 0.02965847 0.02966954 0.02982575]
------------------------------
Epoch: 164
Training loss: 0.026950350978162403 | Validation loss: 0.03207134384563012
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 0.03096634 0.03078324 0.03037098 0.03047298
0.03008189 0.03006741 0.02965847 0.02966954 0.02982575]
------------------------------
Epoch: 165
Training loss: 0.02825807590371247 | Validation loss: 0.03577499762507139
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 0.03096634 0.03078324 0.03037098 0.03047298
0.03008189 0.03006741 0.02965847 0.02966954 0.02982575]
------------------------------
Epoch: 166
Training loss: 0.027466312799727412 | Validation loss: 0.034949956346402127
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 0.03096634 0.03078324 0.03037098 0.03047298
0.03008189 0.03006741 0.02965847 0.02966954 0.02982575]
------------------------------
Epoch: 167
Training loss: 0.025565710374315483 | Validation loss: 0.03352450120336978
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 0.03096634 0.03078324 0.03037098 0.03047298
0.03008189 0.03006741 0.02965847 0.02966954 0.02982575]
------------------------------
Epoch: 168
Training loss: 0.024536826447987065 | Validation loss: 0.030833024449184933
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 0.03096634 0.03078324 0.03037098 0.03047298
0.03008189 0.03006741 0.02965847 0.02966954 0.02982575]
------------------------------
Epoch: 169
Training loss: 0.023544609255164745 | Validation loss: 0.030002048099001425
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 0.03096634 0.03078324 0.03037098 0.03047298
0.03008189 0.03006741 0.02965847 0.02966954 0.02982575]
------------------------------
Epoch: 170
Training loss: 0.02321960965692117 | Validation loss: 0.02953936348347801
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 0.03096634 0.03078324 0.03037098 0.03047298
0.03008189 0.03006741 0.02965847 0.02966954 0.02982575 0.02953936]
------------------------------
Epoch: 171
Training loss: 0.02307436841917701 | Validation loss: 0.030252535151630903
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 0.03096634 0.03078324 0.03037098 0.03047298
0.03008189 0.03006741 0.02965847 0.02966954 0.02982575 0.02953936]
------------------------------
Epoch: 172
Training loss: 0.02397868466677188 | Validation loss: 0.031158533524227355
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 0.03096634 0.03078324 0.03037098 0.03047298
0.03008189 0.03006741 0.02965847 0.02966954 0.02982575 0.02953936]
------------------------------
Epoch: 173
Training loss: 0.025020400550781096 | Validation loss: 0.032842995162097224
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 0.03096634 0.03078324 0.03037098 0.03047298
0.03008189 0.03006741 0.02965847 0.02966954 0.02982575 0.02953936]
------------------------------
Epoch: 174
Training loss: 0.026263095045693248 | Validation loss: 0.033730272541597356
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 0.03096634 0.03078324 0.03037098 0.03047298
0.03008189 0.03006741 0.02965847 0.02966954 0.02982575 0.02953936]
------------------------------
Epoch: 175
Training loss: 0.02800633968862404 | Validation loss: 0.03604198408733427
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 0.03096634 0.03078324 0.03037098 0.03047298
0.03008189 0.03006741 0.02965847 0.02966954 0.02982575 0.02953936]
------------------------------
Epoch: 176
Training loss: 0.02651089855550429 | Validation loss: 0.03513341484410045
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 0.03096634 0.03078324 0.03037098 0.03047298
0.03008189 0.03006741 0.02965847 0.02966954 0.02982575 0.02953936]
------------------------------
Epoch: 177
Training loss: 0.02560398147412114 | Validation loss: 0.0321963859095642
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 0.03096634 0.03078324 0.03037098 0.03047298
0.03008189 0.03006741 0.02965847 0.02966954 0.02982575 0.02953936]
------------------------------
Epoch: 178
Training loss: 0.02422986511053063 | Validation loss: 0.03157056310106959
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 0.03096634 0.03078324 0.03037098 0.03047298
0.03008189 0.03006741 0.02965847 0.02966954 0.02982575 0.02953936]
------------------------------
Epoch: 179
Training loss: 0.023086803899271282 | Validation loss: 0.030515983684268673
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 0.03096634 0.03078324 0.03037098 0.03047298
0.03008189 0.03006741 0.02965847 0.02966954 0.02982575 0.02953936]
------------------------------
Epoch: 180
Training loss: 0.022762703376558176 | Validation loss: 0.02935167956701686
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 0.03096634 0.03078324 0.03037098 0.03047298
0.03008189 0.03006741 0.02965847 0.02966954 0.02982575 0.02953936
0.02935168]
------------------------------
Epoch: 181
Training loss: 0.022699793172343892 | Validation loss: 0.030329733221602123
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 0.03096634 0.03078324 0.03037098 0.03047298
0.03008189 0.03006741 0.02965847 0.02966954 0.02982575 0.02953936
0.02935168]
------------------------------
Epoch: 182
Training loss: 0.023518949007344116 | Validation loss: 0.030502196112894907
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 0.03096634 0.03078324 0.03037098 0.03047298
0.03008189 0.03006741 0.02965847 0.02966954 0.02982575 0.02953936
0.02935168]
------------------------------
Epoch: 183
Training loss: 0.024388733594935007 | Validation loss: 0.03174259513616562
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 0.03096634 0.03078324 0.03037098 0.03047298
0.03008189 0.03006741 0.02965847 0.02966954 0.02982575 0.02953936
0.02935168]
------------------------------
Epoch: 184
Training loss: 0.025955424753135056 | Validation loss: 0.03257236281096672
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 0.03096634 0.03078324 0.03037098 0.03047298
0.03008189 0.03006741 0.02965847 0.02966954 0.02982575 0.02953936
0.02935168]
------------------------------
Epoch: 185
Training loss: 0.027296889776833297 | Validation loss: 0.03400973833899582
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 0.03096634 0.03078324 0.03037098 0.03047298
0.03008189 0.03006741 0.02965847 0.02966954 0.02982575 0.02953936
0.02935168]
------------------------------
Epoch: 186
Training loss: 0.026026336777786627 | Validation loss: 0.03210558142282267
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 0.03096634 0.03078324 0.03037098 0.03047298
0.03008189 0.03006741 0.02965847 0.02966954 0.02982575 0.02953936
0.02935168]
------------------------------
Epoch: 187
Training loss: 0.025013451092311834 | Validation loss: 0.03260836849170449
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 0.03096634 0.03078324 0.03037098 0.03047298
0.03008189 0.03006741 0.02965847 0.02966954 0.02982575 0.02953936
0.02935168]
------------------------------
Epoch: 188
Training loss: 0.02365047061141199 | Validation loss: 0.03127120152544395
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 0.03096634 0.03078324 0.03037098 0.03047298
0.03008189 0.03006741 0.02965847 0.02966954 0.02982575 0.02953936
0.02935168]
------------------------------
Epoch: 189
Training loss: 0.022748118945705432 | Validation loss: 0.030008847141160373
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 0.03096634 0.03078324 0.03037098 0.03047298
0.03008189 0.03006741 0.02965847 0.02966954 0.02982575 0.02953936
0.02935168]
------------------------------
Epoch: 190
Training loss: 0.022331331235972623 | Validation loss: 0.029065387826248082
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 0.03096634 0.03078324 0.03037098 0.03047298
0.03008189 0.03006741 0.02965847 0.02966954 0.02982575 0.02953936
0.02935168 0.02906539]
------------------------------
Epoch: 191
Training loss: 0.022117540770114994 | Validation loss: 0.029946955860452314
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 0.03096634 0.03078324 0.03037098 0.03047298
0.03008189 0.03006741 0.02965847 0.02966954 0.02982575 0.02953936
0.02935168 0.02906539]
------------------------------
Epoch: 192
Training loss: 0.02293831821417392 | Validation loss: 0.030338736745504153
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 0.03096634 0.03078324 0.03037098 0.03047298
0.03008189 0.03006741 0.02965847 0.02966954 0.02982575 0.02953936
0.02935168 0.02906539]
------------------------------
Epoch: 193
Training loss: 0.024056396900408526 | Validation loss: 0.03252197858935173
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 0.03096634 0.03078324 0.03037098 0.03047298
0.03008189 0.03006741 0.02965847 0.02966954 0.02982575 0.02953936
0.02935168 0.02906539]
------------------------------
Epoch: 194
Training loss: 0.02526241676986408 | Validation loss: 0.032178846088988064
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 0.03096634 0.03078324 0.03037098 0.03047298
0.03008189 0.03006741 0.02965847 0.02966954 0.02982575 0.02953936
0.02935168 0.02906539]
------------------------------
Epoch: 195
Training loss: 0.026862020887559148 | Validation loss: 0.0375595488214651
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 0.03096634 0.03078324 0.03037098 0.03047298
0.03008189 0.03006741 0.02965847 0.02966954 0.02982575 0.02953936
0.02935168 0.02906539]
------------------------------
Epoch: 196
Training loss: 0.025642202743585772 | Validation loss: 0.03363440044850639
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 0.03096634 0.03078324 0.03037098 0.03047298
0.03008189 0.03006741 0.02965847 0.02966954 0.02982575 0.02953936
0.02935168 0.02906539]
------------------------------
Epoch: 197
Training loss: 0.024161111101907804 | Validation loss: 0.032072389045054404
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 0.03096634 0.03078324 0.03037098 0.03047298
0.03008189 0.03006741 0.02965847 0.02966954 0.02982575 0.02953936
0.02935168 0.02906539]
------------------------------
Epoch: 198
Training loss: 0.02327819517813623 | Validation loss: 0.030471804131448798
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 0.03096634 0.03078324 0.03037098 0.03047298
0.03008189 0.03006741 0.02965847 0.02966954 0.02982575 0.02953936
0.02935168 0.02906539]
------------------------------
Epoch: 199
Training loss: 0.022183483882667863 | Validation loss: 0.02956509131905252
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 0.03096634 0.03078324 0.03037098 0.03047298
0.03008189 0.03006741 0.02965847 0.02966954 0.02982575 0.02953936
0.02935168 0.02906539]
------------------------------
Epoch: 200
Training loss: 0.022008079754951313 | Validation loss: 0.029146107779074032
Validation loss (ends of cycles): [0.16685935 0.04502044 0.03929079 0.03624861 0.03444946 0.03320953
0.03203853 0.0313888 0.03096634 0.03078324 0.03037098 0.03047298
0.03008189 0.03006741 0.02965847 0.02966954 0.02982575 0.02953936
0.02935168 0.02906539 0.02914611]
--------------------------------------------------------------------------------
Seed: 16
--------------------------------------------------------------------------------
------------------------------
Epoch: 0
Training loss: 0.2737787661369978 | Validation loss: 0.20023219089592453
Validation loss (ends of cycles): [0.20023219]
------------------------------
Epoch: 1
Training loss: 0.10422682451216254 | Validation loss: 0.09987394712799419
Validation loss (ends of cycles): [0.20023219]
------------------------------
Epoch: 2
Training loss: 0.08268818595468647 | Validation loss: 0.07145754689663912
Validation loss (ends of cycles): [0.20023219]
------------------------------
Epoch: 3
Training loss: 0.07512161015041584 | Validation loss: 0.06633957702897292
Validation loss (ends of cycles): [0.20023219]
------------------------------
Epoch: 4
Training loss: 0.07036189057893581 | Validation loss: 0.06764761305752054
Validation loss (ends of cycles): [0.20023219]
------------------------------
Epoch: 5
Training loss: 0.06704740458366087 | Validation loss: 0.06609862109860487
Validation loss (ends of cycles): [0.20023219]
------------------------------
Epoch: 6
Training loss: 0.06204092073965612 | Validation loss: 0.05567961194412371
Validation loss (ends of cycles): [0.20023219]
------------------------------
Epoch: 7
Training loss: 0.05805403113504505 | Validation loss: 0.05421012262525284
Validation loss (ends of cycles): [0.20023219]
------------------------------
Epoch: 8
Training loss: 0.054467795774208634 | Validation loss: 0.05056867451794379
Validation loss (ends of cycles): [0.20023219]
------------------------------
Epoch: 9
Training loss: 0.05140738620965441 | Validation loss: 0.04763612818731144
Validation loss (ends of cycles): [0.20023219]
------------------------------
Epoch: 10
Training loss: 0.049082192368658746 | Validation loss: 0.04559225025298321
Validation loss (ends of cycles): [0.20023219 0.04559225]
------------------------------
Epoch: 11
Training loss: 0.04975575218380495 | Validation loss: 0.046683357748310124
Validation loss (ends of cycles): [0.20023219 0.04559225]
------------------------------
Epoch: 12
Training loss: 0.05101741386364823 | Validation loss: 0.047293581431154655
Validation loss (ends of cycles): [0.20023219 0.04559225]
------------------------------
Epoch: 13
Training loss: 0.05192214983339444 | Validation loss: 0.0476417343206374
Validation loss (ends of cycles): [0.20023219 0.04559225]
------------------------------
Epoch: 14
Training loss: 0.053264485360453216 | Validation loss: 0.05344951939068537
Validation loss (ends of cycles): [0.20023219 0.04559225]
------------------------------
Epoch: 15
Training loss: 0.05388570320975827 | Validation loss: 0.05068364296009583
Validation loss (ends of cycles): [0.20023219 0.04559225]
------------------------------
Epoch: 16
Training loss: 0.051310861563882024 | Validation loss: 0.04789036223912133
Validation loss (ends of cycles): [0.20023219 0.04559225]
------------------------------
Epoch: 17
Training loss: 0.04877522815290222 | Validation loss: 0.051986201707504495
Validation loss (ends of cycles): [0.20023219 0.04559225]
------------------------------
Epoch: 18
Training loss: 0.04632410562347885 | Validation loss: 0.043588970103754406
Validation loss (ends of cycles): [0.20023219 0.04559225]
------------------------------
Epoch: 19
Training loss: 0.04381964350573894 | Validation loss: 0.042146987932841336
Validation loss (ends of cycles): [0.20023219 0.04559225]
------------------------------
Epoch: 20
Training loss: 0.04200194686500898 | Validation loss: 0.03990887502719343
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888]
------------------------------
Epoch: 21
Training loss: 0.04261076600862828 | Validation loss: 0.041349465662069554
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888]
------------------------------
Epoch: 22
Training loss: 0.044018229102994515 | Validation loss: 0.044328473145719124
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888]
------------------------------
Epoch: 23
Training loss: 0.04533218726970431 | Validation loss: 0.04635683298770305
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888]
------------------------------
Epoch: 24
Training loss: 0.04664624337194942 | Validation loss: 0.05748243245864864
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888]
------------------------------
Epoch: 25
Training loss: 0.04778335881456146 | Validation loss: 0.04696861471552237
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888]
------------------------------
Epoch: 26
Training loss: 0.045971616663544726 | Validation loss: 0.04174737498757586
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888]
------------------------------
Epoch: 27
Training loss: 0.04344583072655607 | Validation loss: 0.04394390559301967
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888]
------------------------------
Epoch: 28
Training loss: 0.04164245962000298 | Validation loss: 0.041117376507779135
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888]
------------------------------
Epoch: 29
Training loss: 0.0392682946013932 | Validation loss: 0.03982357207718676
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888]
------------------------------
Epoch: 30
Training loss: 0.037741293180110595 | Validation loss: 0.036664327384030394
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433]
------------------------------
Epoch: 31
Training loss: 0.038508784161572614 | Validation loss: 0.03875069599598646
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433]
------------------------------
Epoch: 32
Training loss: 0.03946622626353642 | Validation loss: 0.03949111793190241
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433]
------------------------------
Epoch: 33
Training loss: 0.04122776931929072 | Validation loss: 0.043168582302411045
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433]
------------------------------
Epoch: 34
Training loss: 0.04259670557505561 | Validation loss: 0.042725293262474304
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433]
------------------------------
Epoch: 35
Training loss: 0.04403580553411675 | Validation loss: 0.04463229184986743
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433]
------------------------------
Epoch: 36
Training loss: 0.04248637171447512 | Validation loss: 0.040492692583166394
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433]
------------------------------
Epoch: 37
Training loss: 0.040044852228116566 | Validation loss: 0.038837947511831214
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433]
------------------------------
Epoch: 38
Training loss: 0.0383974302965253 | Validation loss: 0.03937832881754215
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433]
------------------------------
Epoch: 39
Training loss: 0.03638932610473295 | Validation loss: 0.03644921438587186
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433]
------------------------------
Epoch: 40
Training loss: 0.03480823439413931 | Validation loss: 0.03473438378588288
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438]
------------------------------
Epoch: 41
Training loss: 0.03557032718246172 | Validation loss: 0.036001175196956746
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438]
------------------------------
Epoch: 42
Training loss: 0.036576151781840115 | Validation loss: 0.037461821863477206
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438]
------------------------------
Epoch: 43
Training loss: 0.038131684358195056 | Validation loss: 0.03910830907062092
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438]
------------------------------
Epoch: 44
Training loss: 0.03957182635905177 | Validation loss: 0.04135864841199554
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438]
------------------------------
Epoch: 45
Training loss: 0.04120923336925293 | Validation loss: 0.04687123353370523
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438]
------------------------------
Epoch: 46
Training loss: 0.03937091541890876 | Validation loss: 0.04366569883659878
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438]
------------------------------
Epoch: 47
Training loss: 0.03759949713465061 | Validation loss: 0.03793287259913915
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438]
------------------------------
Epoch: 48
Training loss: 0.035793748596228483 | Validation loss: 0.036690053769049394
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438]
------------------------------
Epoch: 49
Training loss: 0.034185784380318315 | Validation loss: 0.0347253294348453
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438]
------------------------------
Epoch: 50
Training loss: 0.03280941256061636 | Validation loss: 0.033420166544682155
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017]
------------------------------
Epoch: 51
Training loss: 0.03306701497768792 | Validation loss: 0.03450813103236456
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017]
------------------------------
Epoch: 52
Training loss: 0.03430615315089134 | Validation loss: 0.03600950290209952
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017]
------------------------------
Epoch: 53
Training loss: 0.03589855856720475 | Validation loss: 0.037268991961218086
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017]
------------------------------
Epoch: 54
Training loss: 0.037301842755762725 | Validation loss: 0.04395043955440015
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017]
------------------------------
Epoch: 55
Training loss: 0.038821042116592076 | Validation loss: 0.043963905704100574
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017]
------------------------------
Epoch: 56
Training loss: 0.03712832113071601 | Validation loss: 0.040459541430845194
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017]
------------------------------
Epoch: 57
Training loss: 0.035522612095920475 | Validation loss: 0.038693558564821706
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017]
------------------------------
Epoch: 58
Training loss: 0.03354250567352883 | Validation loss: 0.03509865964935944
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017]
------------------------------
Epoch: 59
Training loss: 0.03192239735774168 | Validation loss: 0.03395114307481367
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017]
------------------------------
Epoch: 60
Training loss: 0.030853263973295103 | Validation loss: 0.03248945271711698
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945]
------------------------------
Epoch: 61
Training loss: 0.03124623723130116 | Validation loss: 0.03348119809926875
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945]
------------------------------
Epoch: 62
Training loss: 0.032311980573159794 | Validation loss: 0.035267103158465
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945]
------------------------------
Epoch: 63
Training loss: 0.033898110482218406 | Validation loss: 0.03626691218696337
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945]
------------------------------
Epoch: 64
Training loss: 0.03541915445023869 | Validation loss: 0.03732870159819063
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945]
------------------------------
Epoch: 65
Training loss: 0.03680497418546418 | Validation loss: 0.04455761301570234
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945]
------------------------------
Epoch: 66
Training loss: 0.03531477025592714 | Validation loss: 0.0376130314873515
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945]
------------------------------
Epoch: 67
Training loss: 0.033475336373057656 | Validation loss: 0.03557608191715141
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945]
------------------------------
Epoch: 68
Training loss: 0.03201586188103153 | Validation loss: 0.03565459449536505
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945]
------------------------------
Epoch: 69
Training loss: 0.030342980738047247 | Validation loss: 0.033357611354605285
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945]
------------------------------
Epoch: 70
Training loss: 0.029202395655933445 | Validation loss: 0.03172996544600588
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997]
------------------------------
Epoch: 71
Training loss: 0.029675779442530213 | Validation loss: 0.03339542720499819
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997]
------------------------------
Epoch: 72
Training loss: 0.030540388210412613 | Validation loss: 0.03487979933059057
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997]
------------------------------
Epoch: 73
Training loss: 0.03213733445218669 | Validation loss: 0.03967391237187966
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997]
------------------------------
Epoch: 74
Training loss: 0.03372008166198919 | Validation loss: 0.04293370014468653
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997]
------------------------------
Epoch: 75
Training loss: 0.03520092460983176 | Validation loss: 0.04376732475594082
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997]
------------------------------
Epoch: 76
Training loss: 0.033938281122580405 | Validation loss: 0.03703245200810179
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997]
------------------------------
Epoch: 77
Training loss: 0.032296309855355934 | Validation loss: 0.03492208946067675
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997]
------------------------------
Epoch: 78
Training loss: 0.030508256989428554 | Validation loss: 0.035185562314844765
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997]
------------------------------
Epoch: 79
Training loss: 0.02909078923380369 | Validation loss: 0.03294597795249614
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997]
------------------------------
Epoch: 80
Training loss: 0.02803419754514104 | Validation loss: 0.031490433136973764
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997 0.03149043]
------------------------------
Epoch: 81
Training loss: 0.028328925535251483 | Validation loss: 0.032534939574852456
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997 0.03149043]
------------------------------
Epoch: 82
Training loss: 0.02926808329002096 | Validation loss: 0.03435256992326637
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997 0.03149043]
------------------------------
Epoch: 83
Training loss: 0.03089131600513145 | Validation loss: 0.035728449348063594
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997 0.03149043]
------------------------------
Epoch: 84
Training loss: 0.032449012219825185 | Validation loss: 0.037415493403322404
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997 0.03149043]
------------------------------
Epoch: 85
Training loss: 0.03403688801679848 | Validation loss: 0.04564265021642225
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997 0.03149043]
------------------------------
Epoch: 86
Training loss: 0.03234556234463697 | Validation loss: 0.041697077766324565
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997 0.03149043]
------------------------------
Epoch: 87
Training loss: 0.03099816353665298 | Validation loss: 0.03541089097384067
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997 0.03149043]
------------------------------
Epoch: 88
Training loss: 0.029276077776751296 | Validation loss: 0.03348477607576457
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997 0.03149043]
------------------------------
Epoch: 89
Training loss: 0.027866539251110628 | Validation loss: 0.032401630176907094
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997 0.03149043]
------------------------------
Epoch: 90
Training loss: 0.026741057555521214 | Validation loss: 0.030844259166480165
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997 0.03149043 0.03084426]
------------------------------
Epoch: 91
Training loss: 0.02716905342902636 | Validation loss: 0.03190479325377835
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997 0.03149043 0.03084426]
------------------------------
Epoch: 92
Training loss: 0.02814049190863615 | Validation loss: 0.03378076414552938
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997 0.03149043 0.03084426]
------------------------------
Epoch: 93
Training loss: 0.02952301814257512 | Validation loss: 0.034746238747529225
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997 0.03149043 0.03084426]
------------------------------
Epoch: 94
Training loss: 0.031075982355850008 | Validation loss: 0.03731691609837313
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997 0.03149043 0.03084426]
------------------------------
Epoch: 95
Training loss: 0.03271141126256905 | Validation loss: 0.060955908380251014
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997 0.03149043 0.03084426]
------------------------------
Epoch: 96
Training loss: 0.03134089423614869 | Validation loss: 0.03622718771045978
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997 0.03149043 0.03084426]
------------------------------
Epoch: 97
Training loss: 0.029479217560646514 | Validation loss: 0.033980404232851175
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997 0.03149043 0.03084426]
------------------------------
Epoch: 98
Training loss: 0.028381742420606315 | Validation loss: 0.03344887570865386
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997 0.03149043 0.03084426]
------------------------------
Epoch: 99
Training loss: 0.02674665158214854 | Validation loss: 0.03224219867250824
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997 0.03149043 0.03084426]
------------------------------
Epoch: 100
Training loss: 0.0260751176383493 | Validation loss: 0.030910984786078993
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997 0.03149043 0.03084426 0.03091098]
------------------------------
Epoch: 101
Training loss: 0.02635237188665123 | Validation loss: 0.03226050084242515
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997 0.03149043 0.03084426 0.03091098]
------------------------------
Epoch: 102
Training loss: 0.027261614670251004 | Validation loss: 0.033868492984798104
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997 0.03149043 0.03084426 0.03091098]
------------------------------
Epoch: 103
Training loss: 0.028737410438232358 | Validation loss: 0.03410222200033939
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997 0.03149043 0.03084426 0.03091098]
------------------------------
Epoch: 104
Training loss: 0.03006675193823258 | Validation loss: 0.056256531152577526
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997 0.03149043 0.03084426 0.03091098]
------------------------------
Epoch: 105
Training loss: 0.03191802492016912 | Validation loss: 0.048147553182413094
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997 0.03149043 0.03084426 0.03091098]
------------------------------
Epoch: 106
Training loss: 0.030185575500739314 | Validation loss: 0.03620794531683215
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997 0.03149043 0.03084426 0.03091098]
------------------------------
Epoch: 107
Training loss: 0.029039505783901146 | Validation loss: 0.03526321702958208
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997 0.03149043 0.03084426 0.03091098]
------------------------------
Epoch: 108
Training loss: 0.027365574273134134 | Validation loss: 0.03291359628690819
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997 0.03149043 0.03084426 0.03091098]
------------------------------
Epoch: 109
Training loss: 0.026011773122027224 | Validation loss: 0.03202740053555607
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997 0.03149043 0.03084426 0.03091098]
------------------------------
Epoch: 110
Training loss: 0.02524538153294826 | Validation loss: 0.030587680316406542
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997 0.03149043 0.03084426 0.03091098 0.03058768]
------------------------------
Epoch: 111
Training loss: 0.02533113995959293 | Validation loss: 0.032261734877803684
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997 0.03149043 0.03084426 0.03091098 0.03058768]
------------------------------
Epoch: 112
Training loss: 0.02640439592041515 | Validation loss: 0.03358038018696603
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997 0.03149043 0.03084426 0.03091098 0.03058768]
------------------------------
Epoch: 113
Training loss: 0.027467445776555834 | Validation loss: 0.03410669124429732
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997 0.03149043 0.03084426 0.03091098 0.03058768]
------------------------------
Epoch: 114
Training loss: 0.029203762646808518 | Validation loss: 0.03419871860703008
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997 0.03149043 0.03084426 0.03091098 0.03058768]
------------------------------
Epoch: 115
Training loss: 0.030872044258010316 | Validation loss: 0.035971489265165496
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997 0.03149043 0.03084426 0.03091098 0.03058768]
------------------------------
Epoch: 116
Training loss: 0.029417167569869968 | Validation loss: 0.03570391744605999
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997 0.03149043 0.03084426 0.03091098 0.03058768]
------------------------------
Epoch: 117
Training loss: 0.0279873248426185 | Validation loss: 0.03376021398544575
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997 0.03149043 0.03084426 0.03091098 0.03058768]
------------------------------
Epoch: 118
Training loss: 0.02645933153804421 | Validation loss: 0.032196724564826064
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997 0.03149043 0.03084426 0.03091098 0.03058768]
------------------------------
Epoch: 119
Training loss: 0.025299409457563062 | Validation loss: 0.03130617110569656
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997 0.03149043 0.03084426 0.03091098 0.03058768]
------------------------------
Epoch: 120
Training loss: 0.024414063379989834 | Validation loss: 0.030250687490060792
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997 0.03149043 0.03084426 0.03091098 0.03058768
0.03025069]
------------------------------
Epoch: 121
Training loss: 0.024591018528747188 | Validation loss: 0.03162372223538371
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997 0.03149043 0.03084426 0.03091098 0.03058768
0.03025069]
------------------------------
Epoch: 122
Training loss: 0.025364326536802092 | Validation loss: 0.03282762849621013
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997 0.03149043 0.03084426 0.03091098 0.03058768
0.03025069]
------------------------------
Epoch: 123
Training loss: 0.026572306790777784 | Validation loss: 0.03422165076058786
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997 0.03149043 0.03084426 0.03091098 0.03058768
0.03025069]
------------------------------
Epoch: 124
Training loss: 0.028306469948702 | Validation loss: 0.035084132006210564
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997 0.03149043 0.03084426 0.03091098 0.03058768
0.03025069]
------------------------------
Epoch: 125
Training loss: 0.030099466125898824 | Validation loss: 0.04039957774061281
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997 0.03149043 0.03084426 0.03091098 0.03058768
0.03025069]
------------------------------
Epoch: 126
Training loss: 0.028633888242546264 | Validation loss: 0.037205871458338424
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997 0.03149043 0.03084426 0.03091098 0.03058768
0.03025069]
------------------------------
Epoch: 127
Training loss: 0.027315277815246442 | Validation loss: 0.03901227530652443
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997 0.03149043 0.03084426 0.03091098 0.03058768
0.03025069]
------------------------------
Epoch: 128
Training loss: 0.025800537531113794 | Validation loss: 0.03346189024108174
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997 0.03149043 0.03084426 0.03091098 0.03058768
0.03025069]
------------------------------
Epoch: 129
Training loss: 0.024668992970021456 | Validation loss: 0.031561887114251085
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997 0.03149043 0.03084426 0.03091098 0.03058768
0.03025069]
------------------------------
Epoch: 130
Training loss: 0.02404350608699097 | Validation loss: 0.030343229462087683
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997 0.03149043 0.03084426 0.03091098 0.03058768
0.03025069 0.03034323]
------------------------------
Epoch: 131
Training loss: 0.02407458088888721 | Validation loss: 0.031308728204298336
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997 0.03149043 0.03084426 0.03091098 0.03058768
0.03025069 0.03034323]
------------------------------
Epoch: 132
Training loss: 0.024918176032104125 | Validation loss: 0.03360111465825971
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997 0.03149043 0.03084426 0.03091098 0.03058768
0.03025069 0.03034323]
------------------------------
Epoch: 133
Training loss: 0.026058742077104513 | Validation loss: 0.03443116572710265
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997 0.03149043 0.03084426 0.03091098 0.03058768
0.03025069 0.03034323]
------------------------------
Epoch: 134
Training loss: 0.027624107491203004 | Validation loss: 0.03449264597312539
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997 0.03149043 0.03084426 0.03091098 0.03058768
0.03025069 0.03034323]
------------------------------
Epoch: 135
Training loss: 0.02928206292494369 | Validation loss: 0.03776389098338849
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997 0.03149043 0.03084426 0.03091098 0.03058768
0.03025069 0.03034323]
------------------------------
Epoch: 136
Training loss: 0.028131398687681813 | Validation loss: 0.03479092995083965
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997 0.03149043 0.03084426 0.03091098 0.03058768
0.03025069 0.03034323]
------------------------------
Epoch: 137
Training loss: 0.026286100760637598 | Validation loss: 0.0348371911167571
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997 0.03149043 0.03084426 0.03091098 0.03058768
0.03025069 0.03034323]
------------------------------
Epoch: 138
Training loss: 0.0250626021893475 | Validation loss: 0.03248374907514163
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997 0.03149043 0.03084426 0.03091098 0.03058768
0.03025069 0.03034323]
------------------------------
Epoch: 139
Training loss: 0.024081416701252654 | Validation loss: 0.03174565414345897
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997 0.03149043 0.03084426 0.03091098 0.03058768
0.03025069 0.03034323]
------------------------------
Epoch: 140
Training loss: 0.02352582935544508 | Validation loss: 0.030051779883823036
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997 0.03149043 0.03084426 0.03091098 0.03058768
0.03025069 0.03034323 0.03005178]
------------------------------
Epoch: 141
Training loss: 0.023359468449677127 | Validation loss: 0.0312756019174657
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997 0.03149043 0.03084426 0.03091098 0.03058768
0.03025069 0.03034323 0.03005178]
------------------------------
Epoch: 142
Training loss: 0.02415206788408416 | Validation loss: 0.03251146805365529
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997 0.03149043 0.03084426 0.03091098 0.03058768
0.03025069 0.03034323 0.03005178]
------------------------------
Epoch: 143
Training loss: 0.02550879377490423 | Validation loss: 0.03490573369724825
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997 0.03149043 0.03084426 0.03091098 0.03058768
0.03025069 0.03034323 0.03005178]
------------------------------
Epoch: 144
Training loss: 0.026806120814821557 | Validation loss: 0.0345633335873089
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997 0.03149043 0.03084426 0.03091098 0.03058768
0.03025069 0.03034323 0.03005178]
------------------------------
Epoch: 145
Training loss: 0.0286723116860087 | Validation loss: 0.03890983096892591
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997 0.03149043 0.03084426 0.03091098 0.03058768
0.03025069 0.03034323 0.03005178]
------------------------------
Epoch: 146
Training loss: 0.027140730675713227 | Validation loss: 0.03415699658371442
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997 0.03149043 0.03084426 0.03091098 0.03058768
0.03025069 0.03034323 0.03005178]
------------------------------
Epoch: 147
Training loss: 0.02588838122154432 | Validation loss: 0.03404608176015647
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997 0.03149043 0.03084426 0.03091098 0.03058768
0.03025069 0.03034323 0.03005178]
------------------------------
Epoch: 148
Training loss: 0.024426499718074195 | Validation loss: 0.03193692797053177
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997 0.03149043 0.03084426 0.03091098 0.03058768
0.03025069 0.03034323 0.03005178]
------------------------------
Epoch: 149
Training loss: 0.023166143655314983 | Validation loss: 0.03149128182733481
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997 0.03149043 0.03084426 0.03091098 0.03058768
0.03025069 0.03034323 0.03005178]
------------------------------
Epoch: 150
Training loss: 0.0229603738123441 | Validation loss: 0.029910476128282272
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997 0.03149043 0.03084426 0.03091098 0.03058768
0.03025069 0.03034323 0.03005178 0.02991048]
------------------------------
Epoch: 151
Training loss: 0.02284284436792927 | Validation loss: 0.030855711153914445
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997 0.03149043 0.03084426 0.03091098 0.03058768
0.03025069 0.03034323 0.03005178 0.02991048]
------------------------------
Epoch: 152
Training loss: 0.02365900123194887 | Validation loss: 0.03222047771632144
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997 0.03149043 0.03084426 0.03091098 0.03058768
0.03025069 0.03034323 0.03005178 0.02991048]
------------------------------
Epoch: 153
Training loss: 0.024800017015003843 | Validation loss: 0.03301934850921409
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997 0.03149043 0.03084426 0.03091098 0.03058768
0.03025069 0.03034323 0.03005178 0.02991048]
------------------------------
Epoch: 154
Training loss: 0.026273147880437043 | Validation loss: 0.03420751300368425
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997 0.03149043 0.03084426 0.03091098 0.03058768
0.03025069 0.03034323 0.03005178 0.02991048]
------------------------------
Epoch: 155
Training loss: 0.027664226583410494 | Validation loss: 0.0349949984110694
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997 0.03149043 0.03084426 0.03091098 0.03058768
0.03025069 0.03034323 0.03005178 0.02991048]
------------------------------
Epoch: 156
Training loss: 0.026663722442444707 | Validation loss: 0.033630230348656136
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997 0.03149043 0.03084426 0.03091098 0.03058768
0.03025069 0.03034323 0.03005178 0.02991048]
------------------------------
Epoch: 157
Training loss: 0.025194934456550525 | Validation loss: 0.03472642444766465
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997 0.03149043 0.03084426 0.03091098 0.03058768
0.03025069 0.03034323 0.03005178 0.02991048]
------------------------------
Epoch: 158
Training loss: 0.023837915450283215 | Validation loss: 0.032156074790498326
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997 0.03149043 0.03084426 0.03091098 0.03058768
0.03025069 0.03034323 0.03005178 0.02991048]
------------------------------
Epoch: 159
Training loss: 0.023005821732950845 | Validation loss: 0.031500728331110645
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997 0.03149043 0.03084426 0.03091098 0.03058768
0.03025069 0.03034323 0.03005178 0.02991048]
------------------------------
Epoch: 160
Training loss: 0.02235974420061863 | Validation loss: 0.0296877749115888
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997 0.03149043 0.03084426 0.03091098 0.03058768
0.03025069 0.03034323 0.03005178 0.02991048 0.02968777]
------------------------------
Epoch: 161
Training loss: 0.022434936120911818 | Validation loss: 0.030912983049927033
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997 0.03149043 0.03084426 0.03091098 0.03058768
0.03025069 0.03034323 0.03005178 0.02991048 0.02968777]
------------------------------
Epoch: 162
Training loss: 0.02321441728148727 | Validation loss: 0.03159520993487234
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997 0.03149043 0.03084426 0.03091098 0.03058768
0.03025069 0.03034323 0.03005178 0.02991048 0.02968777]
------------------------------
Epoch: 163
Training loss: 0.024087414784378952 | Validation loss: 0.03426515600289655
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997 0.03149043 0.03084426 0.03091098 0.03058768
0.03025069 0.03034323 0.03005178 0.02991048 0.02968777]
------------------------------
Epoch: 164
Training loss: 0.02584073524720468 | Validation loss: 0.03549671795351052
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997 0.03149043 0.03084426 0.03091098 0.03058768
0.03025069 0.03034323 0.03005178 0.02991048 0.02968777]
------------------------------
Epoch: 165
Training loss: 0.027481816246130275 | Validation loss: 0.04569036421258893
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997 0.03149043 0.03084426 0.03091098 0.03058768
0.03025069 0.03034323 0.03005178 0.02991048 0.02968777]
------------------------------
Epoch: 166
Training loss: 0.025911257378610334 | Validation loss: 0.03599856464208755
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997 0.03149043 0.03084426 0.03091098 0.03058768
0.03025069 0.03034323 0.03005178 0.02991048 0.02968777]
------------------------------
Epoch: 167
Training loss: 0.0245359625546335 | Validation loss: 0.03235892685278591
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997 0.03149043 0.03084426 0.03091098 0.03058768
0.03025069 0.03034323 0.03005178 0.02991048 0.02968777]
------------------------------
Epoch: 168
Training loss: 0.02326479062573676 | Validation loss: 0.03178735901560403
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997 0.03149043 0.03084426 0.03091098 0.03058768
0.03025069 0.03034323 0.03005178 0.02991048 0.02968777]
------------------------------
Epoch: 169
Training loss: 0.02218883088652382 | Validation loss: 0.03116621243544912
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997 0.03149043 0.03084426 0.03091098 0.03058768
0.03025069 0.03034323 0.03005178 0.02991048 0.02968777]
------------------------------
Epoch: 170
Training loss: 0.021824004793609308 | Validation loss: 0.029547290860024174
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997 0.03149043 0.03084426 0.03091098 0.03058768
0.03025069 0.03034323 0.03005178 0.02991048 0.02968777 0.02954729]
------------------------------
Epoch: 171
Training loss: 0.021793495756885637 | Validation loss: 0.0311219496570066
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997 0.03149043 0.03084426 0.03091098 0.03058768
0.03025069 0.03034323 0.03005178 0.02991048 0.02968777 0.02954729]
------------------------------
Epoch: 172
Training loss: 0.022444999180954155 | Validation loss: 0.03314602608273251
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997 0.03149043 0.03084426 0.03091098 0.03058768
0.03025069 0.03034323 0.03005178 0.02991048 0.02968777 0.02954729]
------------------------------
Epoch: 173
Training loss: 0.023874418106018088 | Validation loss: 0.033780165544125884
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997 0.03149043 0.03084426 0.03091098 0.03058768
0.03025069 0.03034323 0.03005178 0.02991048 0.02968777 0.02954729]
------------------------------
Epoch: 174
Training loss: 0.025131154348927102 | Validation loss: 0.03473723620440053
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997 0.03149043 0.03084426 0.03091098 0.03058768
0.03025069 0.03034323 0.03005178 0.02991048 0.02968777 0.02954729]
------------------------------
Epoch: 175
Training loss: 0.0267372918269399 | Validation loss: 0.03518312112881546
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997 0.03149043 0.03084426 0.03091098 0.03058768
0.03025069 0.03034323 0.03005178 0.02991048 0.02968777 0.02954729]
------------------------------
Epoch: 176
Training loss: 0.02542390567012219 | Validation loss: 0.03591320638965189
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997 0.03149043 0.03084426 0.03091098 0.03058768
0.03025069 0.03034323 0.03005178 0.02991048 0.02968777 0.02954729]
------------------------------
Epoch: 177
Training loss: 0.02414658128038254 | Validation loss: 0.03431402006705778
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997 0.03149043 0.03084426 0.03091098 0.03058768
0.03025069 0.03034323 0.03005178 0.02991048 0.02968777 0.02954729]
------------------------------
Epoch: 178
Training loss: 0.02274914544788886 | Validation loss: 0.03193719099910386
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997 0.03149043 0.03084426 0.03091098 0.03058768
0.03025069 0.03034323 0.03005178 0.02991048 0.02968777 0.02954729]
------------------------------
Epoch: 179
Training loss: 0.021763978673577836 | Validation loss: 0.030923344120712935
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997 0.03149043 0.03084426 0.03091098 0.03058768
0.03025069 0.03034323 0.03005178 0.02991048 0.02968777 0.02954729]
------------------------------
Epoch: 180
Training loss: 0.02152854327009771 | Validation loss: 0.029603901574701334
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997 0.03149043 0.03084426 0.03091098 0.03058768
0.03025069 0.03034323 0.03005178 0.02991048 0.02968777 0.02954729
0.0296039 ]
------------------------------
Epoch: 181
Training loss: 0.021365951617912014 | Validation loss: 0.03074364638598883
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997 0.03149043 0.03084426 0.03091098 0.03058768
0.03025069 0.03034323 0.03005178 0.02991048 0.02968777 0.02954729
0.0296039 ]
------------------------------
Epoch: 182
Training loss: 0.021945311958300257 | Validation loss: 0.035094125188095904
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997 0.03149043 0.03084426 0.03091098 0.03058768
0.03025069 0.03034323 0.03005178 0.02991048 0.02968777 0.02954729
0.0296039 ]
------------------------------
Epoch: 183
Training loss: 0.02318344924848263 | Validation loss: 0.03233091902416364
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997 0.03149043 0.03084426 0.03091098 0.03058768
0.03025069 0.03034323 0.03005178 0.02991048 0.02968777 0.02954729
0.0296039 ]
------------------------------
Epoch: 184
Training loss: 0.0248239246356543 | Validation loss: 0.03261617678258799
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997 0.03149043 0.03084426 0.03091098 0.03058768
0.03025069 0.03034323 0.03005178 0.02991048 0.02968777 0.02954729
0.0296039 ]
------------------------------
Epoch: 185
Training loss: 0.026298621777577077 | Validation loss: 0.03375745768271453
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997 0.03149043 0.03084426 0.03091098 0.03058768
0.03025069 0.03034323 0.03005178 0.02991048 0.02968777 0.02954729
0.0296039 ]
------------------------------
Epoch: 186
Training loss: 0.024853772596514775 | Validation loss: 0.03509717840965079
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997 0.03149043 0.03084426 0.03091098 0.03058768
0.03025069 0.03034323 0.03005178 0.02991048 0.02968777 0.02954729
0.0296039 ]
------------------------------
Epoch: 187
Training loss: 0.023714199431211225 | Validation loss: 0.03329228342766256
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997 0.03149043 0.03084426 0.03091098 0.03058768
0.03025069 0.03034323 0.03005178 0.02991048 0.02968777 0.02954729
0.0296039 ]
------------------------------
Epoch: 188
Training loss: 0.022584474363224947 | Validation loss: 0.032880636723299994
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997 0.03149043 0.03084426 0.03091098 0.03058768
0.03025069 0.03034323 0.03005178 0.02991048 0.02968777 0.02954729
0.0296039 ]
------------------------------
Epoch: 189
Training loss: 0.02153710593188944 | Validation loss: 0.03080118345343961
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997 0.03149043 0.03084426 0.03091098 0.03058768
0.03025069 0.03034323 0.03005178 0.02991048 0.02968777 0.02954729
0.0296039 ]
------------------------------
Epoch: 190
Training loss: 0.02122366759563085 | Validation loss: 0.029605589848242502
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997 0.03149043 0.03084426 0.03091098 0.03058768
0.03025069 0.03034323 0.03005178 0.02991048 0.02968777 0.02954729
0.0296039 0.02960559]
------------------------------
Epoch: 191
Training loss: 0.020808289958692088 | Validation loss: 0.030839143395094217
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997 0.03149043 0.03084426 0.03091098 0.03058768
0.03025069 0.03034323 0.03005178 0.02991048 0.02968777 0.02954729
0.0296039 0.02960559]
------------------------------
Epoch: 192
Training loss: 0.021608949259407526 | Validation loss: 0.03275987589979066
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997 0.03149043 0.03084426 0.03091098 0.03058768
0.03025069 0.03034323 0.03005178 0.02991048 0.02968777 0.02954729
0.0296039 0.02960559]
------------------------------
Epoch: 193
Training loss: 0.022862843535730513 | Validation loss: 0.03243703589634558
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997 0.03149043 0.03084426 0.03091098 0.03058768
0.03025069 0.03034323 0.03005178 0.02991048 0.02968777 0.02954729
0.0296039 0.02960559]
------------------------------
Epoch: 194
Training loss: 0.024137301308168903 | Validation loss: 0.034653909075840386
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997 0.03149043 0.03084426 0.03091098 0.03058768
0.03025069 0.03034323 0.03005178 0.02991048 0.02968777 0.02954729
0.0296039 0.02960559]
------------------------------
Epoch: 195
Training loss: 0.02555413278989406 | Validation loss: 0.03679713180733729
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997 0.03149043 0.03084426 0.03091098 0.03058768
0.03025069 0.03034323 0.03005178 0.02991048 0.02968777 0.02954729
0.0296039 0.02960559]
------------------------------
Epoch: 196
Training loss: 0.024435840169422504 | Validation loss: 0.03782723306686477
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997 0.03149043 0.03084426 0.03091098 0.03058768
0.03025069 0.03034323 0.03005178 0.02991048 0.02968777 0.02954729
0.0296039 0.02960559]
------------------------------
Epoch: 197
Training loss: 0.02327967907932741 | Validation loss: 0.03321316239910316
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997 0.03149043 0.03084426 0.03091098 0.03058768
0.03025069 0.03034323 0.03005178 0.02991048 0.02968777 0.02954729
0.0296039 0.02960559]
------------------------------
Epoch: 198
Training loss: 0.022098688396815884 | Validation loss: 0.03147691657222741
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997 0.03149043 0.03084426 0.03091098 0.03058768
0.03025069 0.03034323 0.03005178 0.02991048 0.02968777 0.02954729
0.0296039 0.02960559]
------------------------------
Epoch: 199
Training loss: 0.02113150341131937 | Validation loss: 0.03040765568745875
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997 0.03149043 0.03084426 0.03091098 0.03058768
0.03025069 0.03034323 0.03005178 0.02991048 0.02968777 0.02954729
0.0296039 0.02960559]
------------------------------
Epoch: 200
Training loss: 0.020685331293704413 | Validation loss: 0.029657005612631286
Validation loss (ends of cycles): [0.20023219 0.04559225 0.03990888 0.03666433 0.03473438 0.03342017
0.03248945 0.03172997 0.03149043 0.03084426 0.03091098 0.03058768
0.03025069 0.03034323 0.03005178 0.02991048 0.02968777 0.02954729
0.0296039 0.02960559 0.02965701]
Early stopping!
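The seed-16 run halts here because its last three end-of-cycle validation losses (0.0296039, 0.02960559, 0.02965701) all fail to improve on the best value reached so far (0.02954729, at epoch 170). The sketch below shows a stopping rule consistent with these logs, with a hypothetical patience of three cycles; the actual criterion lives inside the Learners class and may differ.

def should_stop(cycle_losses, patience=3):
    # Stop once `patience` consecutive end-of-cycle validation losses
    # fail to improve on the best end-of-cycle loss seen before them.
    if len(cycle_losses) <= patience:
        return False
    best_before = min(cycle_losses[:-patience])
    return min(cycle_losses[-patience:]) >= best_before

Inside the epoch loop this would be checked after each end-of-cycle append, e.g.:

if should_stop(cycle_losses):
    print('Early stopping!')
    break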
--------------------------------------------------------------------------------
Seed: 17
--------------------------------------------------------------------------------
------------------------------
Epoch: 0
Training loss: 0.19528153301839987 | Validation loss: 0.15108045474090406
Validation loss (ends of cycles): [0.15108045]
------------------------------
Epoch: 1
Training loss: 0.09630404248481661 | Validation loss: 0.07499229344250881
Validation loss (ends of cycles): [0.15108045]
------------------------------
Epoch: 2
Training loss: 0.0770322958220119 | Validation loss: 0.0657210370476267
Validation loss (ends of cycles): [0.15108045]
------------------------------
Epoch: 3
Training loss: 0.07153833540721144 | Validation loss: 0.06682331098523815
Validation loss (ends of cycles): [0.15108045]
------------------------------
Epoch: 4
Training loss: 0.06794721161301329 | Validation loss: 0.058392948561669454
Validation loss (ends of cycles): [0.15108045]
------------------------------
Epoch: 5
Training loss: 0.06519884280823758 | Validation loss: 0.05638599817731739
Validation loss (ends of cycles): [0.15108045]
------------------------------
Epoch: 6
Training loss: 0.06049888590204082 | Validation loss: 0.062485484785474506
Validation loss (ends of cycles): [0.15108045]
------------------------------
Epoch: 7
Training loss: 0.056857929563179144 | Validation loss: 0.04876981789361587
Validation loss (ends of cycles): [0.15108045]
------------------------------
Epoch: 8
Training loss: 0.053296953524044885 | Validation loss: 0.04630405498685035
Validation loss (ends of cycles): [0.15108045]
------------------------------
Epoch: 9
Training loss: 0.05026454213588667 | Validation loss: 0.04334998397832423
Validation loss (ends of cycles): [0.15108045]
------------------------------
Epoch: 10
Training loss: 0.04759277868282607 | Validation loss: 0.04201392273921355
Validation loss (ends of cycles): [0.15108045 0.04201392]
------------------------------
Epoch: 11
Training loss: 0.04893287182528907 | Validation loss: 0.042212111397390874
Validation loss (ends of cycles): [0.15108045 0.04201392]
------------------------------
Epoch: 12
Training loss: 0.0500142074952738 | Validation loss: 0.04350175510729309
Validation loss (ends of cycles): [0.15108045 0.04201392]
------------------------------
Epoch: 13
Training loss: 0.05130318975384076 | Validation loss: 0.04694888262753993
Validation loss (ends of cycles): [0.15108045 0.04201392]
------------------------------
Epoch: 14
Training loss: 0.05227278390583971 | Validation loss: 0.048200364991099434
Validation loss (ends of cycles): [0.15108045 0.04201392]
------------------------------
Epoch: 15
Training loss: 0.05314655587678467 | Validation loss: 0.05896381667889325
Validation loss (ends of cycles): [0.15108045 0.04201392]
------------------------------
Epoch: 16
Training loss: 0.05050081228030714 | Validation loss: 0.04554605564778357
Validation loss (ends of cycles): [0.15108045 0.04201392]
------------------------------
Epoch: 17
Training loss: 0.048094492702649566 | Validation loss: 0.0431616822735662
Validation loss (ends of cycles): [0.15108045 0.04201392]
------------------------------
Epoch: 18
Training loss: 0.04548985188584921 | Validation loss: 0.04048775626033281
Validation loss (ends of cycles): [0.15108045 0.04201392]
------------------------------
Epoch: 19
Training loss: 0.04332584545280286 | Validation loss: 0.03895747074774936
Validation loss (ends of cycles): [0.15108045 0.04201392]
------------------------------
Epoch: 20
Training loss: 0.04135821033525831 | Validation loss: 0.037356803407975
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 ]
------------------------------
Epoch: 21
Training loss: 0.04213623469237121 | Validation loss: 0.038663210462679905
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 ]
------------------------------
Epoch: 22
Training loss: 0.04348462626730334 | Validation loss: 0.04059584674516083
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 ]
------------------------------
Epoch: 23
Training loss: 0.044896001835889 | Validation loss: 0.0408192587175728
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 ]
------------------------------
Epoch: 24
Training loss: 0.04613425673396275 | Validation loss: 0.040819176052392055
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 ]
------------------------------
Epoch: 25
Training loss: 0.04764102466081775 | Validation loss: 0.04785382432813665
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 ]
------------------------------
Epoch: 26
Training loss: 0.04551267926581204 | Validation loss: 0.04141203267911894
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 ]
------------------------------
Epoch: 27
Training loss: 0.04350365237809542 | Validation loss: 0.038893470408774056
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 ]
------------------------------
Epoch: 28
Training loss: 0.041088982838147325 | Validation loss: 0.03846450380783165
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 ]
------------------------------
Epoch: 29
Training loss: 0.039170486238367094 | Validation loss: 0.035727420409696294
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 ]
------------------------------
Epoch: 30
Training loss: 0.03741839054172609 | Validation loss: 0.03534357691496874
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358]
------------------------------
Epoch: 31
Training loss: 0.038331123063428785 | Validation loss: 0.03712475580056157
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358]
------------------------------
Epoch: 32
Training loss: 0.03958883121752686 | Validation loss: 0.03590556875332794
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358]
------------------------------
Epoch: 33
Training loss: 0.04092563144536322 | Validation loss: 0.03797215149136244
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358]
------------------------------
Epoch: 34
Training loss: 0.042431336150167316 | Validation loss: 0.03862651510048756
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358]
------------------------------
Epoch: 35
Training loss: 0.04385021059612531 | Validation loss: 0.04650221689216859
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358]
------------------------------
Epoch: 36
Training loss: 0.042333089369789176 | Validation loss: 0.04138898033549828
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358]
------------------------------
Epoch: 37
Training loss: 0.040143472958341475 | Validation loss: 0.04083230859080775
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358]
------------------------------
Epoch: 38
Training loss: 0.038212460662810296 | Validation loss: 0.03604175413307627
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358]
------------------------------
Epoch: 39
Training loss: 0.03621550740795137 | Validation loss: 0.03490787063750018
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358]
------------------------------
Epoch: 40
Training loss: 0.0347418464531418 | Validation loss: 0.033902818650270984
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282]
------------------------------
Epoch: 41
Training loss: 0.03551816663844144 | Validation loss: 0.0339503469918154
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282]
------------------------------
Epoch: 42
Training loss: 0.03664073076623484 | Validation loss: 0.036111429589182405
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282]
------------------------------
Epoch: 43
Training loss: 0.03794130406940106 | Validation loss: 0.038581867809448625
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282]
------------------------------
Epoch: 44
Training loss: 0.039544879462511284 | Validation loss: 0.03753844731017551
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282]
------------------------------
Epoch: 45
Training loss: 0.04120968023817399 | Validation loss: 0.040164445156017234
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282]
------------------------------
Epoch: 46
Training loss: 0.03963290285832417 | Validation loss: 0.03767492653455882
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282]
------------------------------
Epoch: 47
Training loss: 0.037655649592139295 | Validation loss: 0.03940686863739934
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282]
------------------------------
Epoch: 48
Training loss: 0.0360215248523544 | Validation loss: 0.03606964662605155
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282]
------------------------------
Epoch: 49
Training loss: 0.03379709184294435 | Validation loss: 0.03300323327426362
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282]
------------------------------
Epoch: 50
Training loss: 0.032713453326645624 | Validation loss: 0.032592986331247124
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299]
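The within-cycle oscillation of both losses tracks the triangular schedule: the learning rate climbs toward `max_lr` at mid-cycle (validation loss 0.0402 at epoch 45 above) and anneals back to `base_lr` by the cycle's end (0.0326 at epoch 50). A sketch of that schedule, assuming one scheduler step per epoch, using the standard triangular formula that `CyclicLR` implements:

```python
import math

def triangular_lr(epoch, base_lr=3e-5, max_lr=1e-3, step_size=5):
    """Triangular cyclic learning rate (Smith, 2017) at a given epoch."""
    cycle = math.floor(1 + epoch / (2 * step_size))
    x = abs(epoch / step_size - 2 * cycle + 1)
    return base_lr + (max_lr - base_lr) * max(0.0, 1 - x)

print(triangular_lr(45))  # mid-cycle: 1e-3 (peak)
print(triangular_lr(50))  # cycle end: 3e-5 (base)
```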
------------------------------
Epoch: 51
Training loss: 0.03327064899779035 | Validation loss: 0.0332814001074407
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299]
------------------------------
Epoch: 52
Training loss: 0.03456818787784382 | Validation loss: 0.03814794470976412
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299]
------------------------------
Epoch: 53
Training loss: 0.035899267839011186 | Validation loss: 0.03423762608286554
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299]
------------------------------
Epoch: 54
Training loss: 0.03718895989829513 | Validation loss: 0.037633575235320404
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299]
------------------------------
Epoch: 55
Training loss: 0.038761256454241146 | Validation loss: 0.0388596230145313
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299]
------------------------------
Epoch: 56
Training loss: 0.03721329200770852 | Validation loss: 0.03893185608023036
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299]
------------------------------
Epoch: 57
Training loss: 0.035715234900392065 | Validation loss: 0.03628284927556472
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299]
------------------------------
Epoch: 58
Training loss: 0.03370676121645145 | Validation loss: 0.033211706354554775
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299]
------------------------------
Epoch: 59
Training loss: 0.0323047479999611 | Validation loss: 0.03253601488978725
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299]
------------------------------
Epoch: 60
Training loss: 0.030942986885225034 | Validation loss: 0.03169571242369382
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571]
------------------------------
Epoch: 61
Training loss: 0.03151171489281389 | Validation loss: 0.03195847763754098
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571]
------------------------------
Epoch: 62
Training loss: 0.0326535613743559 | Validation loss: 0.03367336982020498
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571]
------------------------------
Epoch: 63
Training loss: 0.03397060671223577 | Validation loss: 0.0392299928265599
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571]
------------------------------
Epoch: 64
Training loss: 0.03557851326406208 | Validation loss: 0.036114410315928734
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571]
------------------------------
Epoch: 65
Training loss: 0.036995837769258445 | Validation loss: 0.046228273697527106
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571]
------------------------------
Epoch: 66
Training loss: 0.03573583279219346 | Validation loss: 0.035999696652314304
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571]
------------------------------
Epoch: 67
Training loss: 0.03398162927240221 | Validation loss: 0.033870757285472564
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571]
------------------------------
Epoch: 68
Training loss: 0.032125317916919395 | Validation loss: 0.03275708983594601
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571]
------------------------------
Epoch: 69
Training loss: 0.030659903030600545 | Validation loss: 0.030907584725162095
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571]
------------------------------
Epoch: 70
Training loss: 0.029677720091445006 | Validation loss: 0.030892113479167486
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571 0.03089211]
------------------------------
Epoch: 71
Training loss: 0.030018180326419317 | Validation loss: 0.030626152551411528
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571 0.03089211]
------------------------------
Epoch: 72
Training loss: 0.03106435915953883 | Validation loss: 0.033222900274976166
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571 0.03089211]
------------------------------
Epoch: 73
Training loss: 0.032380191656929534 | Validation loss: 0.03328017588803726
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571 0.03089211]
------------------------------
Epoch: 74
Training loss: 0.03383174836525591 | Validation loss: 0.03539824518745979
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571 0.03089211]
------------------------------
Epoch: 75
Training loss: 0.03549576116777135 | Validation loss: 0.03629769839807949
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571 0.03089211]
------------------------------
Epoch: 76
Training loss: 0.034087463657877695 | Validation loss: 0.03376856591321726
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571 0.03089211]
------------------------------
Epoch: 77
Training loss: 0.03234960998346164 | Validation loss: 0.03851411953172852
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571 0.03089211]
------------------------------
Epoch: 78
Training loss: 0.03068069219369236 | Validation loss: 0.0314846167884833
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571 0.03089211]
------------------------------
Epoch: 79
Training loss: 0.029369526592222608 | Validation loss: 0.030833860068062765
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571 0.03089211]
------------------------------
Epoch: 80
Training loss: 0.02836777383780591 | Validation loss: 0.030553860598103662
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571 0.03089211 0.03055386]
------------------------------
Epoch: 81
Training loss: 0.028889779986879662 | Validation loss: 0.031743324707896835
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571 0.03089211 0.03055386]
------------------------------
Epoch: 82
Training loss: 0.029690888467406137 | Validation loss: 0.031142891442353746
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571 0.03089211 0.03055386]
------------------------------
Epoch: 83
Training loss: 0.031053277377889852 | Validation loss: 0.03351418073165469
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571 0.03089211 0.03055386]
------------------------------
Epoch: 84
Training loss: 0.03267707209850801 | Validation loss: 0.042135526879435094
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571 0.03089211 0.03055386]
------------------------------
Epoch: 85
Training loss: 0.03411234047458192 | Validation loss: 0.0370925584132165
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571 0.03089211 0.03055386]
------------------------------
Epoch: 86
Training loss: 0.03266451155994175 | Validation loss: 0.03274107290909881
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571 0.03089211 0.03055386]
------------------------------
Epoch: 87
Training loss: 0.031198181013034027 | Validation loss: 0.0340254512013851
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571 0.03089211 0.03055386]
------------------------------
Epoch: 88
Training loss: 0.029595627138706466 | Validation loss: 0.031916605308651924
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571 0.03089211 0.03055386]
------------------------------
Epoch: 89
Training loss: 0.028043366407638225 | Validation loss: 0.030369010976458017
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571 0.03089211 0.03055386]
------------------------------
Epoch: 90
Training loss: 0.027346802133824823 | Validation loss: 0.030075445589897908
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571 0.03089211 0.03055386 0.03007545]
------------------------------
Epoch: 91
Training loss: 0.027608434050013935 | Validation loss: 0.030171571629105415
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571 0.03089211 0.03055386 0.03007545]
------------------------------
Epoch: 92
Training loss: 0.028327987452338294 | Validation loss: 0.03024076547605538
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571 0.03089211 0.03055386 0.03007545]
------------------------------
Epoch: 93
Training loss: 0.029624219871984107 | Validation loss: 0.03404002234471583
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571 0.03089211 0.03055386 0.03007545]
------------------------------
Epoch: 94
Training loss: 0.03148827917679528 | Validation loss: 0.03371854062167417
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571 0.03089211 0.03055386 0.03007545]
------------------------------
Epoch: 95
Training loss: 0.03307377927529618 | Validation loss: 0.04036632132411531
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571 0.03089211 0.03055386 0.03007545]
------------------------------
Epoch: 96
Training loss: 0.031799388683665046 | Validation loss: 0.03298156123311646
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571 0.03089211 0.03055386 0.03007545]
------------------------------
Epoch: 97
Training loss: 0.03007377838939575 | Validation loss: 0.033205109755549814
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571 0.03089211 0.03055386 0.03007545]
------------------------------
Epoch: 98
Training loss: 0.028580836875650183 | Validation loss: 0.030641627497971058
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571 0.03089211 0.03055386 0.03007545]
------------------------------
Epoch: 99
Training loss: 0.027427082016008106 | Validation loss: 0.0313515687625097
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571 0.03089211 0.03055386 0.03007545]
------------------------------
Epoch: 100
Training loss: 0.026483976918338672 | Validation loss: 0.030348778084184215
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571 0.03089211 0.03055386 0.03007545 0.03034878]
------------------------------
Epoch: 101
Training loss: 0.026674966022599576 | Validation loss: 0.03149610185023166
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571 0.03089211 0.03055386 0.03007545 0.03034878]
------------------------------
Epoch: 102
Training loss: 0.02751217969953574 | Validation loss: 0.03084385859887157
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571 0.03089211 0.03055386 0.03007545 0.03034878]
------------------------------
Epoch: 103
Training loss: 0.028731230297125876 | Validation loss: 0.031040501080255592
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571 0.03089211 0.03055386 0.03007545 0.03034878]
------------------------------
Epoch: 104
Training loss: 0.030062061833496405 | Validation loss: 0.03604654427123281
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571 0.03089211 0.03055386 0.03007545 0.03034878]
------------------------------
Epoch: 105
Training loss: 0.0318882070439611 | Validation loss: 0.039417957194742906
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571 0.03089211 0.03055386 0.03007545 0.03034878]
------------------------------
Epoch: 106
Training loss: 0.030514564989391334 | Validation loss: 0.03316041232144411
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571 0.03089211 0.03055386 0.03007545 0.03034878]
------------------------------
Epoch: 107
Training loss: 0.028916336958091267 | Validation loss: 0.035398528771062865
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571 0.03089211 0.03055386 0.03007545 0.03034878]
------------------------------
Epoch: 108
Training loss: 0.02764455142969955 | Validation loss: 0.031008046623154553
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571 0.03089211 0.03055386 0.03007545 0.03034878]
------------------------------
Epoch: 109
Training loss: 0.02622884780894525 | Validation loss: 0.031359491927499795
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571 0.03089211 0.03055386 0.03007545 0.03034878]
------------------------------
Epoch: 110
Training loss: 0.02562284080117325 | Validation loss: 0.030416654530022524
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665]
------------------------------
Epoch: 111
Training loss: 0.02577647100859065 | Validation loss: 0.03090014162811294
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665]
------------------------------
Epoch: 112
Training loss: 0.026568011741894555 | Validation loss: 0.030865917913615704
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665]
------------------------------
Epoch: 113
Training loss: 0.028003387864473768 | Validation loss: 0.03234168109937315
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665]
------------------------------
Epoch: 114
Training loss: 0.029407237495999695 | Validation loss: 0.032332020902396306
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665]
------------------------------
Epoch: 115
Training loss: 0.03110336661650469 | Validation loss: 0.03286148103333152
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665]
------------------------------
Epoch: 116
Training loss: 0.02993302761877733 | Validation loss: 0.035776853462499855
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665]
------------------------------
Epoch: 117
Training loss: 0.02837571760045555 | Validation loss: 0.03131979187966975
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665]
------------------------------
Epoch: 118
Training loss: 0.02665195077214038 | Validation loss: 0.031021781488264028
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665]
------------------------------
Epoch: 119
Training loss: 0.02563444459370858 | Validation loss: 0.030956934082560835
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665]
------------------------------
Epoch: 120
Training loss: 0.024944440375529522 | Validation loss: 0.030078876598746376
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
0.03007888]
------------------------------
Epoch: 121
Training loss: 0.02519254589051844 | Validation loss: 0.03037429168259939
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
0.03007888]
------------------------------
Epoch: 122
Training loss: 0.02574926438213392 | Validation loss: 0.030591894485122336
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
0.03007888]
------------------------------
Epoch: 123
Training loss: 0.027375541950070012 | Validation loss: 0.03259252623316988
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
0.03007888]
------------------------------
Epoch: 124
Training loss: 0.028651660226659454 | Validation loss: 0.03243902798709089
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
0.03007888]
------------------------------
Epoch: 125
Training loss: 0.030220652338278694 | Validation loss: 0.03472108380720679
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
0.03007888]
------------------------------
Epoch: 126
Training loss: 0.02893120789346762 | Validation loss: 0.0334128510635511
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
0.03007888]
------------------------------
Epoch: 127
Training loss: 0.02742837137619664 | Validation loss: 0.03295834002340526
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
0.03007888]
------------------------------
Epoch: 128
Training loss: 0.02586745542719755 | Validation loss: 0.03359922730421598
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
0.03007888]
------------------------------
Epoch: 129
Training loss: 0.02518866923601022 | Validation loss: 0.030972652326840742
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
0.03007888]
------------------------------
Epoch: 130
Training loss: 0.024370399135069585 | Validation loss: 0.029321946928986407
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
0.03007888 0.02932195]
------------------------------
Epoch: 131
Training loss: 0.024452643489142455 | Validation loss: 0.03170669283223363
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
0.03007888 0.02932195]
------------------------------
Epoch: 132
Training loss: 0.02514903414646149 | Validation loss: 0.02934865020545183
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
0.03007888 0.02932195]
------------------------------
Epoch: 133
Training loss: 0.026473972576076355 | Validation loss: 0.03256165103541803
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
0.03007888 0.02932195]
------------------------------
Epoch: 134
Training loss: 0.027980016625126985 | Validation loss: 0.037934470789886154
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
0.03007888 0.02932195]
------------------------------
Epoch: 135
Training loss: 0.02950241145545866 | Validation loss: 0.033272428256748
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
0.03007888 0.02932195]
------------------------------
Epoch: 136
Training loss: 0.02817217055726503 | Validation loss: 0.03544298366569312
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
0.03007888 0.02932195]
------------------------------
Epoch: 137
Training loss: 0.02676052859971505 | Validation loss: 0.03308902620708784
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
0.03007888 0.02932195]
------------------------------
Epoch: 138
Training loss: 0.025434287322692456 | Validation loss: 0.0314219071554531
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
0.03007888 0.02932195]
------------------------------
Epoch: 139
Training loss: 0.024347204041271286 | Validation loss: 0.03068882338208171
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
0.03007888 0.02932195]
------------------------------
Epoch: 140
Training loss: 0.023864611201391623 | Validation loss: 0.029601984030970956
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
0.03007888 0.02932195 0.02960198]
------------------------------
Epoch: 141
Training loss: 0.023795595388918175 | Validation loss: 0.032649262017051206
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
0.03007888 0.02932195 0.02960198]
------------------------------
Epoch: 142
Training loss: 0.0245595584966345 | Validation loss: 0.03269626537288448
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
0.03007888 0.02932195 0.02960198]
------------------------------
Epoch: 143
Training loss: 0.025921545380095796 | Validation loss: 0.03559081084136151
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
0.03007888 0.02932195 0.02960198]
------------------------------
Epoch: 144
Training loss: 0.02733462275735535 | Validation loss: 0.033264496363699436
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
0.03007888 0.02932195 0.02960198]
------------------------------
Epoch: 145
Training loss: 0.029037256825964634 | Validation loss: 0.0364198326833744
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
0.03007888 0.02932195 0.02960198]
------------------------------
Epoch: 146
Training loss: 0.027521122688808897 | Validation loss: 0.03114709215222207
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
0.03007888 0.02932195 0.02960198]
------------------------------
Epoch: 147
Training loss: 0.026036638752777334 | Validation loss: 0.03610302006776354
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
0.03007888 0.02932195 0.02960198]
------------------------------
Epoch: 148
Training loss: 0.024805288894769302 | Validation loss: 0.030778389053204947
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
0.03007888 0.02932195 0.02960198]
------------------------------
Epoch: 149
Training loss: 0.023793241487610646 | Validation loss: 0.030905219281207673
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
0.03007888 0.02932195 0.02960198]
------------------------------
Epoch: 150
Training loss: 0.02341242203824442 | Validation loss: 0.029345920012719864
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
0.03007888 0.02932195 0.02960198 0.02934592]
------------------------------
Epoch: 151
Training loss: 0.023260569254919067 | Validation loss: 0.030717732623812898
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
0.03007888 0.02932195 0.02960198 0.02934592]
------------------------------
Epoch: 152
Training loss: 0.023884564654637626 | Validation loss: 0.03010071575872402
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
0.03007888 0.02932195 0.02960198 0.02934592]
------------------------------
Epoch: 153
Training loss: 0.025170819309547426 | Validation loss: 0.029870342207759356
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
0.03007888 0.02932195 0.02960198 0.02934592]
------------------------------
Epoch: 154
Training loss: 0.026484384465463987 | Validation loss: 0.04114546122408546
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
0.03007888 0.02932195 0.02960198 0.02934592]
------------------------------
Epoch: 155
Training loss: 0.028278595922830125 | Validation loss: 0.037228376563407675
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
0.03007888 0.02932195 0.02960198 0.02934592]
------------------------------
Epoch: 156
Training loss: 0.026891657869023543 | Validation loss: 0.035947980639417614
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
0.03007888 0.02932195 0.02960198 0.02934592]
------------------------------
Epoch: 157
Training loss: 0.025646816195707446 | Validation loss: 0.03072655048485087
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
0.03007888 0.02932195 0.02960198 0.02934592]
------------------------------
Epoch: 158
Training loss: 0.024305647449003254 | Validation loss: 0.03094838262922996
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
0.03007888 0.02932195 0.02960198 0.02934592]
------------------------------
Epoch: 159
Training loss: 0.0235144635589104 | Validation loss: 0.03033747435011695
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
0.03007888 0.02932195 0.02960198 0.02934592]
------------------------------
Epoch: 160
Training loss: 0.02274166181863205 | Validation loss: 0.028855146320981788
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
0.03007888 0.02932195 0.02960198 0.02934592 0.02885515]
------------------------------
Epoch: 161
Training loss: 0.022850188836418678 | Validation loss: 0.030827936905938966
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
0.03007888 0.02932195 0.02960198 0.02934592 0.02885515]
------------------------------
Epoch: 162
Training loss: 0.023490403991056914 | Validation loss: 0.03056321562622237
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
0.03007888 0.02932195 0.02960198 0.02934592 0.02885515]
------------------------------
Epoch: 163
Training loss: 0.024536157205634877 | Validation loss: 0.030726340179026656
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
0.03007888 0.02932195 0.02960198 0.02934592 0.02885515]
------------------------------
Epoch: 164
Training loss: 0.025968177275233498 | Validation loss: 0.03182310807164264
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
0.03007888 0.02932195 0.02960198 0.02934592 0.02885515]
------------------------------
Epoch: 165
Training loss: 0.02745464058156587 | Validation loss: 0.03240670689043745
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
0.03007888 0.02932195 0.02960198 0.02934592 0.02885515]
------------------------------
Epoch: 166
Training loss: 0.02638791023944248 | Validation loss: 0.03207409606867396
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
0.03007888 0.02932195 0.02960198 0.02934592 0.02885515]
------------------------------
Epoch: 167
Training loss: 0.025040658959004237 | Validation loss: 0.03346892037486608
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
0.03007888 0.02932195 0.02960198 0.02934592 0.02885515]
------------------------------
Epoch: 168
Training loss: 0.023840111028586373 | Validation loss: 0.03250710043924308
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
0.03007888 0.02932195 0.02960198 0.02934592 0.02885515]
------------------------------
Epoch: 169
Training loss: 0.022957657927676567 | Validation loss: 0.03261066463866592
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
0.03007888 0.02932195 0.02960198 0.02934592 0.02885515]
------------------------------
Epoch: 170
Training loss: 0.02253573403536071 | Validation loss: 0.028775880929181534
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
0.03007888 0.02932195 0.02960198 0.02934592 0.02885515 0.02877588]
------------------------------
Epoch: 171
Training loss: 0.022440487484990317 | Validation loss: 0.030062347749429466
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
0.03007888 0.02932195 0.02960198 0.02934592 0.02885515 0.02877588]
------------------------------
Epoch: 172
Training loss: 0.022867488988315848 | Validation loss: 0.03976280561986223
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
0.03007888 0.02932195 0.02960198 0.02934592 0.02885515 0.02877588]
------------------------------
Epoch: 173
Training loss: 0.02382546374629303 | Validation loss: 0.03142198853491418
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
0.03007888 0.02932195 0.02960198 0.02934592 0.02885515 0.02877588]
------------------------------
Epoch: 174
Training loss: 0.025262574162004208 | Validation loss: 0.03314230741059358
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
0.03007888 0.02932195 0.02960198 0.02934592 0.02885515 0.02877588]
------------------------------
Epoch: 175
Training loss: 0.02710123380965106 | Validation loss: 0.03711360927044818
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
0.03007888 0.02932195 0.02960198 0.02934592 0.02885515 0.02877588]
------------------------------
Epoch: 176
Training loss: 0.025813494960220135 | Validation loss: 0.034733611777161076
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
0.03007888 0.02932195 0.02960198 0.02934592 0.02885515 0.02877588]
------------------------------
Epoch: 177
Training loss: 0.0244471754999758 | Validation loss: 0.030967152215936017
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
0.03007888 0.02932195 0.02960198 0.02934592 0.02885515 0.02877588]
------------------------------
Epoch: 178
Training loss: 0.023251139880598 | Validation loss: 0.03004695099275724
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
0.03007888 0.02932195 0.02960198 0.02934592 0.02885515 0.02877588]
------------------------------
Epoch: 179
Training loss: 0.0224692894349344 | Validation loss: 0.03162622818542001
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
0.03007888 0.02932195 0.02960198 0.02934592 0.02885515 0.02877588]
------------------------------
Epoch: 180
Training loss: 0.021959586049738068 | Validation loss: 0.0287618160874179
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
0.03007888 0.02932195 0.02960198 0.02934592 0.02885515 0.02877588
0.02876182]
------------------------------
Epoch: 181
Training loss: 0.021884380696766723 | Validation loss: 0.03256968013216964
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
0.03007888 0.02932195 0.02960198 0.02934592 0.02885515 0.02877588
0.02876182]
------------------------------
Epoch: 182
Training loss: 0.02237591364670281 | Validation loss: 0.03341920127416343
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
0.03007888 0.02932195 0.02960198 0.02934592 0.02885515 0.02877588
0.02876182]
------------------------------
Epoch: 183
Training loss: 0.023713711671179204 | Validation loss: 0.029729280975210454
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
0.03007888 0.02932195 0.02960198 0.02934592 0.02885515 0.02877588
0.02876182]
------------------------------
Epoch: 184
Training loss: 0.025079944019763194 | Validation loss: 0.03119673509051842
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
0.03007888 0.02932195 0.02960198 0.02934592 0.02885515 0.02877588
0.02876182]
------------------------------
Epoch: 185
Training loss: 0.02635635260866559 | Validation loss: 0.03225741138170778
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
0.03007888 0.02932195 0.02960198 0.02934592 0.02885515 0.02877588
0.02876182]
------------------------------
Epoch: 186
Training loss: 0.025441277809771085 | Validation loss: 0.03134904677394481
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
0.03007888 0.02932195 0.02960198 0.02934592 0.02885515 0.02877588
0.02876182]
------------------------------
Epoch: 187
Training loss: 0.024010141896208616 | Validation loss: 0.031274090043013604
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
0.03007888 0.02932195 0.02960198 0.02934592 0.02885515 0.02877588
0.02876182]
------------------------------
Epoch: 188
Training loss: 0.023025057314552264 | Validation loss: 0.03273896375193005
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
0.03007888 0.02932195 0.02960198 0.02934592 0.02885515 0.02877588
0.02876182]
------------------------------
Epoch: 189
Training loss: 0.022124788933800255 | Validation loss: 0.03352637900517578
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
0.03007888 0.02932195 0.02960198 0.02934592 0.02885515 0.02877588
0.02876182]
------------------------------
Epoch: 190
Training loss: 0.02186805220626984 | Validation loss: 0.029281317882767294
Validation loss (ends of cycles): [0.15108045 0.04201392 0.0373568 0.03534358 0.03390282 0.03259299
0.03169571 0.03089211 0.03055386 0.03007545 0.03034878 0.03041665
0.03007888 0.02932195 0.02960198 0.02934592 0.02885515 0.02877588
0.02876182 0.02928132]
Early stopping!
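Training for this seed halts at epoch 190 instead of running the full 201 epochs: the final end-of-cycle validation loss (0.02928) fails to improve on the best recorded so far (0.02876 at epoch 180). A minimal sketch of early stopping keyed to the end-of-cycle losses follows; the exact criterion (patience, tolerance) used by `Learners` is not shown in the log, so the rule below is an assumption:

```python
import numpy as np

def should_stop(cycle_losses, patience=1, min_delta=0.0):
    """Stop once the last `patience` end-of-cycle validation losses
    all fail to beat the best earlier value by `min_delta`."""
    losses = np.asarray(cycle_losses)
    if len(losses) <= patience:
        return False
    best = losses[:-patience].min()
    return bool(np.all(losses[-patience:] >= best - min_delta))

# End-of-cycle losses printed above for this seed (epochs 0, 10, ..., 190):
cycle_losses = [0.15108045, 0.04201392, 0.0373568, 0.03534358, 0.03390282,
                0.03259299, 0.03169571, 0.03089211, 0.03055386, 0.03007545,
                0.03034878, 0.03041665, 0.03007888, 0.02932195, 0.02960198,
                0.02934592, 0.02885515, 0.02877588, 0.02876182, 0.02928132]
print(should_stop(cycle_losses))  # True: 0.02928 never beats 0.02876
```

Note that this run survived a similar transient rise around epoch 100 (0.03035 after a best of 0.03008), so the notebook's actual rule presumably uses a larger patience or tolerance than this sketch.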
--------------------------------------------------------------------------------
Seed: 18
--------------------------------------------------------------------------------
------------------------------
Epoch: 0
Training loss: 0.14745687956987755 | Validation loss: 0.11181736810017476
Validation loss (ends of cycles): [0.11181737]
------------------------------
Epoch: 1
Training loss: 0.09623743342162352 | Validation loss: 0.08580441937773628
Validation loss (ends of cycles): [0.11181737]
------------------------------
Epoch: 2
Training loss: 0.08750657575556964 | Validation loss: 0.08224849567740364
Validation loss (ends of cycles): [0.11181737]
------------------------------
Epoch: 3
Training loss: 0.08265806757294991 | Validation loss: 0.07812433627195063
Validation loss (ends of cycles): [0.11181737]
------------------------------
Epoch: 4
Training loss: 0.07909638743011618 | Validation loss: 0.10523036336608693
Validation loss (ends of cycles): [0.11181737]
------------------------------
Epoch: 5
Training loss: 0.07519952726997728 | Validation loss: 0.07433472433646696
Validation loss (ends of cycles): [0.11181737]
------------------------------
Epoch: 6
Training loss: 0.06963092798191145 | Validation loss: 0.0681168861621249
Validation loss (ends of cycles): [0.11181737]
------------------------------
Epoch: 7
Training loss: 0.0647309234413487 | Validation loss: 0.061909659196977065
Validation loss (ends of cycles): [0.11181737]
------------------------------
Epoch: 8
Training loss: 0.0603061709351339 | Validation loss: 0.06635272014985043
Validation loss (ends of cycles): [0.11181737]
------------------------------
Epoch: 9
Training loss: 0.056970927845980006 | Validation loss: 0.053171325606846176
Validation loss (ends of cycles): [0.11181737]
------------------------------
Epoch: 10
Training loss: 0.054451444431934067 | Validation loss: 0.050428496311064316
Validation loss (ends of cycles): [0.11181737 0.0504285 ]
------------------------------
Epoch: 11
Training loss: 0.05530370387989353 | Validation loss: 0.05998936325179792
Validation loss (ends of cycles): [0.11181737 0.0504285 ]
------------------------------
Epoch: 12
Training loss: 0.0562868577292497 | Validation loss: 0.08638219957330585
Validation loss (ends of cycles): [0.11181737 0.0504285 ]
------------------------------
Epoch: 13
Training loss: 0.0572645927277049 | Validation loss: 0.06002994229506075
Validation loss (ends of cycles): [0.11181737 0.0504285 ]
------------------------------
Epoch: 14
Training loss: 0.05791082459604588 | Validation loss: 0.07916613048420543
Validation loss (ends of cycles): [0.11181737 0.0504285 ]
------------------------------
Epoch: 15
Training loss: 0.058337017147633154 | Validation loss: 0.06136379625021884
Validation loss (ends of cycles): [0.11181737 0.0504285 ]
------------------------------
Epoch: 16
Training loss: 0.0556387303206395 | Validation loss: 0.05869993445488204
Validation loss (ends of cycles): [0.11181737 0.0504285 ]
------------------------------
Epoch: 17
Training loss: 0.05294741382895727 | Validation loss: 0.05184250018369835
Validation loss (ends of cycles): [0.11181737 0.0504285 ]
------------------------------
Epoch: 18
Training loss: 0.05023024163912894 | Validation loss: 0.05074060479162541
Validation loss (ends of cycles): [0.11181737 0.0504285 ]
------------------------------
Epoch: 19
Training loss: 0.047944747648517215 | Validation loss: 0.04994288424804675
Validation loss (ends of cycles): [0.11181737 0.0504285 ]
------------------------------
Epoch: 20
Training loss: 0.045913187161777315 | Validation loss: 0.04389677417621148
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677]
------------------------------
Epoch: 21
Training loss: 0.047116746218493724 | Validation loss: 0.04913257856943966
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677]
------------------------------
Epoch: 22
Training loss: 0.048278160490403614 | Validation loss: 0.04979463033707796
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677]
------------------------------
Epoch: 23
Training loss: 0.04970042983906006 | Validation loss: 0.05419276432191904
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677]
------------------------------
Epoch: 24
Training loss: 0.05063182357918677 | Validation loss: 0.05380100643146882
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677]
------------------------------
Epoch: 25
Training loss: 0.05186005225991757 | Validation loss: 0.05329234860178116
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677]
------------------------------
Epoch: 26
Training loss: 0.04985330287572436 | Validation loss: 0.054778525721182865
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677]
------------------------------
Epoch: 27
Training loss: 0.047700814294122804 | Validation loss: 0.04927220066959879
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677]
------------------------------
Epoch: 28
Training loss: 0.04578730350316275 | Validation loss: 0.04668996552318598
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677]
------------------------------
Epoch: 29
Training loss: 0.04332482842838494 | Validation loss: 0.04906922478261774
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677]
------------------------------
Epoch: 30
Training loss: 0.04196088215130873 | Validation loss: 0.04077261508302351
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262]
------------------------------
Epoch: 31
Training loss: 0.042725179914575744 | Validation loss: 0.04192748965810886
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262]
------------------------------
Epoch: 32
Training loss: 0.043890829696723324 | Validation loss: 0.0423654904506639
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262]
------------------------------
Epoch: 33
Training loss: 0.04479648191188499 | Validation loss: 0.045411624986909135
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262]
------------------------------
Epoch: 34
Training loss: 0.046417209704765885 | Validation loss: 0.04752046215982564
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262]
------------------------------
Epoch: 35
Training loss: 0.04789944863548194 | Validation loss: 0.04866404211626644
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262]
------------------------------
Epoch: 36
Training loss: 0.04611892869886686 | Validation loss: 0.049108236747901
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262]
------------------------------
Epoch: 37
Training loss: 0.044292686737186504 | Validation loss: 0.049626477540726154
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262]
------------------------------
Epoch: 38
Training loss: 0.042089813981043896 | Validation loss: 0.05028616577650594
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262]
------------------------------
Epoch: 39
Training loss: 0.040471516651336074 | Validation loss: 0.04539883885104044
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262]
------------------------------
Epoch: 40
Training loss: 0.038953275320772814 | Validation loss: 0.038260011332093086
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001]
------------------------------
Epoch: 41
Training loss: 0.039753880176700535 | Validation loss: 0.03953419919166945
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001]
------------------------------
Epoch: 42
Training loss: 0.04075378145267233 | Validation loss: 0.04775480268341777
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001]
------------------------------
Epoch: 43
Training loss: 0.04191550611716321 | Validation loss: 0.05315274405664047
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001]
------------------------------
Epoch: 44
Training loss: 0.04337615864333059 | Validation loss: 0.04678723331441922
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001]
------------------------------
Epoch: 45
Training loss: 0.0448561115373718 | Validation loss: 0.04518167654761171
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001]
------------------------------
Epoch: 46
Training loss: 0.04328001322276069 | Validation loss: 0.04366327006270928
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001]
------------------------------
Epoch: 47
Training loss: 0.04148955256348168 | Validation loss: 0.04564844665274156
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001]
------------------------------
Epoch: 48
Training loss: 0.03969575352251794 | Validation loss: 0.04270295615810736
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001]
------------------------------
Epoch: 49
Training loss: 0.03778248318328016 | Validation loss: 0.045216823161571426
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001]
------------------------------
Epoch: 50
Training loss: 0.036728900827020114 | Validation loss: 0.036400728900216325
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073]
------------------------------
Epoch: 51
Training loss: 0.037182638656318656 | Validation loss: 0.039920183492049705
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073]
------------------------------
Epoch: 52
Training loss: 0.03852524513958066 | Validation loss: 0.04021004818182076
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073]
------------------------------
Epoch: 53
Training loss: 0.039626261542045224 | Validation loss: 0.039501024509030105
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073]
------------------------------
Epoch: 54
Training loss: 0.04119911792644072 | Validation loss: 0.04457332704842618
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073]
------------------------------
Epoch: 55
Training loss: 0.042616045866968245 | Validation loss: 0.045998089163836124
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073]
------------------------------
Epoch: 56
Training loss: 0.04113453015039756 | Validation loss: 0.050729175304285196
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073]
------------------------------
Epoch: 57
Training loss: 0.03976550421266868 | Validation loss: 0.04352686356390472
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073]
------------------------------
Epoch: 58
Training loss: 0.03769710026296428 | Validation loss: 0.0416220211283823
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073]
------------------------------
Epoch: 59
Training loss: 0.03594528460933819 | Validation loss: 0.041781470377360824
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073]
------------------------------
Epoch: 60
Training loss: 0.034993118661507144 | Validation loss: 0.03542220634531922
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221]
------------------------------
Epoch: 61
Training loss: 0.03528238928649487 | Validation loss: 0.041782269765317966
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221]
------------------------------
Epoch: 62
Training loss: 0.036277946592565245 | Validation loss: 0.048255273973387955
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221]
------------------------------
Epoch: 63
Training loss: 0.037810007194570436 | Validation loss: 0.04529560640849899
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221]
------------------------------
Epoch: 64
Training loss: 0.039236139712349224 | Validation loss: 0.046266291455357475
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221]
------------------------------
Epoch: 65
Training loss: 0.040906800351318294 | Validation loss: 0.06313739067553419
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221]
------------------------------
Epoch: 66
Training loss: 0.03917124048407065 | Validation loss: 0.04793259138818336
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221]
------------------------------
Epoch: 67
Training loss: 0.037500780060205345 | Validation loss: 0.04579635729304457
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221]
------------------------------
Epoch: 68
Training loss: 0.0361412501221112 | Validation loss: 0.04605197491107789
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221]
------------------------------
Epoch: 69
Training loss: 0.03438679540280517 | Validation loss: 0.0456059823983011
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221]
------------------------------
Epoch: 70
Training loss: 0.033415974244314035 | Validation loss: 0.03462613776194311
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614]
------------------------------
Epoch: 71
Training loss: 0.03395316714611579 | Validation loss: 0.038469126250232216
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614]
------------------------------
Epoch: 72
Training loss: 0.03479183258353109 | Validation loss: 0.03594474876876426
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614]
------------------------------
Epoch: 73
Training loss: 0.03633535791401143 | Validation loss: 0.04774723141000862
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614]
------------------------------
Epoch: 74
Training loss: 0.0374829898621359 | Validation loss: 0.04101652198726625
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614]
------------------------------
Epoch: 75
Training loss: 0.0391945250370855 | Validation loss: 0.0401327273915563
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614]
------------------------------
Epoch: 76
Training loss: 0.03788902773430175 | Validation loss: 0.03995021490330717
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614]
------------------------------
Epoch: 77
Training loss: 0.03639131400176859 | Validation loss: 0.03722566730482916
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614]
------------------------------
Epoch: 78
Training loss: 0.0348304618996986 | Validation loss: 0.04006522394453002
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614]
------------------------------
Epoch: 79
Training loss: 0.03319978419834323 | Validation loss: 0.04412269613713817
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614]
------------------------------
Epoch: 80
Training loss: 0.03219999324468031 | Validation loss: 0.03391640984445019
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614 0.03391641]
------------------------------
Epoch: 81
Training loss: 0.032688427642244465 | Validation loss: 0.037923850531203555
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614 0.03391641]
------------------------------
Epoch: 82
Training loss: 0.03355343060890638 | Validation loss: 0.04907491460310674
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614 0.03391641]
------------------------------
Epoch: 83
Training loss: 0.03503366941531137 | Validation loss: 0.040808099231361285
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614 0.03391641]
------------------------------
Epoch: 84
Training loss: 0.036361888898023234 | Validation loss: 0.04570449341451172
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614 0.03391641]
------------------------------
Epoch: 85
Training loss: 0.037688679410159354 | Validation loss: 0.03851071792959639
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614 0.03391641]
------------------------------
Epoch: 86
Training loss: 0.036509045416225246 | Validation loss: 0.05178985225481797
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614 0.03391641]
------------------------------
Epoch: 87
Training loss: 0.03476176010243096 | Validation loss: 0.04965500327535963
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614 0.03391641]
------------------------------
Epoch: 88
Training loss: 0.033466790973274846 | Validation loss: 0.04419837694252487
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614 0.03391641]
------------------------------
Epoch: 89
Training loss: 0.03203875968768078 | Validation loss: 0.037252725571789574
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614 0.03391641]
------------------------------
Epoch: 90
Training loss: 0.0312534848193264 | Validation loss: 0.03305692382288718
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614 0.03391641 0.03305692]
------------------------------
Epoch: 91
Training loss: 0.031425257888072586 | Validation loss: 0.035670075104036164
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614 0.03391641 0.03305692]
------------------------------
Epoch: 92
Training loss: 0.032341907633189844 | Validation loss: 0.05314765029908281
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614 0.03391641 0.03305692]
------------------------------
Epoch: 93
Training loss: 0.03363316069544095 | Validation loss: 0.05042708106338978
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614 0.03391641 0.03305692]
------------------------------
Epoch: 94
Training loss: 0.03508961931472336 | Validation loss: 0.04901893657788766
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614 0.03391641 0.03305692]
------------------------------
Epoch: 95
Training loss: 0.036707183874146204 | Validation loss: 0.06835057373793252
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614 0.03391641 0.03305692]
------------------------------
Epoch: 96
Training loss: 0.0352418556319931 | Validation loss: 0.05205297728885064
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614 0.03391641 0.03305692]
------------------------------
Epoch: 97
Training loss: 0.03360738364271966 | Validation loss: 0.03979173940565206
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614 0.03391641 0.03305692]
------------------------------
Epoch: 98
Training loss: 0.032393003445426664 | Validation loss: 0.04202516406642652
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614 0.03391641 0.03305692]
------------------------------
Epoch: 99
Training loss: 0.030928450876729578 | Validation loss: 0.03461811653610352
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614 0.03391641 0.03305692]
------------------------------
Epoch: 100
Training loss: 0.030302010686503445 | Validation loss: 0.032624518037237954
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614 0.03391641 0.03305692 0.03262452]
------------------------------
Epoch: 101
Training loss: 0.030482625337610914 | Validation loss: 0.03657771112908304
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614 0.03391641 0.03305692 0.03262452]
------------------------------
Epoch: 102
Training loss: 0.031544560573877785 | Validation loss: 0.04379735982655424
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614 0.03391641 0.03305692 0.03262452]
------------------------------
Epoch: 103
Training loss: 0.03267700951289767 | Validation loss: 0.05044605840096431
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614 0.03391641 0.03305692 0.03262452]
------------------------------
Epoch: 104
Training loss: 0.033956850249151606 | Validation loss: 0.039384911364290565
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614 0.03391641 0.03305692 0.03262452]
------------------------------
Epoch: 105
Training loss: 0.035652619702228175 | Validation loss: 0.040526895561313205
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614 0.03391641 0.03305692 0.03262452]
------------------------------
Epoch: 106
Training loss: 0.0343283357183031 | Validation loss: 0.037462938774739746
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614 0.03391641 0.03305692 0.03262452]
------------------------------
Epoch: 107
Training loss: 0.0329232818115224 | Validation loss: 0.04072877669097048
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614 0.03391641 0.03305692 0.03262452]
------------------------------
Epoch: 108
Training loss: 0.03152846630963343 | Validation loss: 0.039494824257835875
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614 0.03391641 0.03305692 0.03262452]
------------------------------
Epoch: 109
Training loss: 0.030297402225574113 | Validation loss: 0.035708916085088145
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614 0.03391641 0.03305692 0.03262452]
------------------------------
Epoch: 110
Training loss: 0.02962929017907815 | Validation loss: 0.032125119894611094
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512]
------------------------------
Epoch: 111
Training loss: 0.029658222022855025 | Validation loss: 0.03462422362207312
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512]
------------------------------
Epoch: 112
Training loss: 0.030390083743992637 | Validation loss: 0.037167447627381944
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512]
------------------------------
Epoch: 113
Training loss: 0.03176091816716307 | Validation loss: 0.035366525972443344
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512]
------------------------------
Epoch: 114
Training loss: 0.0330082182680792 | Validation loss: 0.03707461589864925
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512]
------------------------------
Epoch: 115
Training loss: 0.03469608666642943 | Validation loss: 0.05098501968700274
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512]
------------------------------
Epoch: 116
Training loss: 0.033389072871664316 | Validation loss: 0.05167623942272853
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512]
------------------------------
Epoch: 117
Training loss: 0.0319534340987026 | Validation loss: 0.03803503140807152
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512]
------------------------------
Epoch: 118
Training loss: 0.03054207461651444 | Validation loss: 0.037278614163530614
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512]
------------------------------
Epoch: 119
Training loss: 0.029319434328642712 | Validation loss: 0.03323572590551545
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512]
------------------------------
Epoch: 120
Training loss: 0.028277252811762528 | Validation loss: 0.031856293105973606
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
0.03185629]
------------------------------
Epoch: 121
Training loss: 0.028927232044396966 | Validation loss: 0.03381007744938926
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
0.03185629]
------------------------------
Epoch: 122
Training loss: 0.029823541352488686 | Validation loss: 0.036562808286563483
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
0.03185629]
------------------------------
Epoch: 123
Training loss: 0.030995890572410457 | Validation loss: 0.03735244338425387
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
0.03185629]
------------------------------
Epoch: 124
Training loss: 0.03232222717754015 | Validation loss: 0.0393577231737101
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
0.03185629]
------------------------------
Epoch: 125
Training loss: 0.033686123715024295 | Validation loss: 0.039191240626098835
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
0.03185629]
------------------------------
Epoch: 126
Training loss: 0.0328642097200949 | Validation loss: 0.04119740207308689
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
0.03185629]
------------------------------
Epoch: 127
Training loss: 0.03124009068563991 | Validation loss: 0.045967824147206494
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
0.03185629]
------------------------------
Epoch: 128
Training loss: 0.029692287028833166 | Validation loss: 0.03530660837855751
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
0.03185629]
------------------------------
Epoch: 129
Training loss: 0.028460362538380007 | Validation loss: 0.034659543915330306
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
0.03185629]
------------------------------
Epoch: 130
Training loss: 0.02782241098296748 | Validation loss: 0.03151542620848766
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
0.03185629 0.03151543]
------------------------------
Epoch: 131
Training loss: 0.028115404061203105 | Validation loss: 0.0364043337779235
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
0.03185629 0.03151543]
------------------------------
Epoch: 132
Training loss: 0.028829176158563594 | Validation loss: 0.0347792377069065
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
0.03185629 0.03151543]
------------------------------
Epoch: 133
Training loss: 0.030203928205727298 | Validation loss: 0.03419680051109959
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
0.03185629 0.03151543]
------------------------------
Epoch: 134
Training loss: 0.03136087996760515 | Validation loss: 0.04011434100700163
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
0.03185629 0.03151543]
------------------------------
Epoch: 135
Training loss: 0.033034575681429444 | Validation loss: 0.040510393481338974
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
0.03185629 0.03151543]
------------------------------
Epoch: 136
Training loss: 0.03180332440336594 | Validation loss: 0.03888363813140751
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
0.03185629 0.03151543]
------------------------------
Epoch: 137
Training loss: 0.030477852163667694 | Validation loss: 0.03862581931186461
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
0.03185629 0.03151543]
------------------------------
Epoch: 138
Training loss: 0.0292505194876695 | Validation loss: 0.03288874167751158
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
0.03185629 0.03151543]
------------------------------
Epoch: 139
Training loss: 0.027981118863727165 | Validation loss: 0.03388794129961623
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
0.03185629 0.03151543]
------------------------------
Epoch: 140
Training loss: 0.027372297242386486 | Validation loss: 0.031212307529243748
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
0.03185629 0.03151543 0.03121231]
------------------------------
Epoch: 141
Training loss: 0.0276586958958848 | Validation loss: 0.03246931614667441
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
0.03185629 0.03151543 0.03121231]
------------------------------
Epoch: 142
Training loss: 0.028360963867808595 | Validation loss: 0.034810532453115536
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
0.03185629 0.03151543 0.03121231]
------------------------------
Epoch: 143
Training loss: 0.029655160085393453 | Validation loss: 0.03974294517420034
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
0.03185629 0.03151543 0.03121231]
------------------------------
Epoch: 144
Training loss: 0.030910991771168655 | Validation loss: 0.04399756938878414
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
0.03185629 0.03151543 0.03121231]
------------------------------
Epoch: 145
Training loss: 0.0325270962652083 | Validation loss: 0.061640194658420785
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
0.03185629 0.03151543 0.03121231]
------------------------------
Epoch: 146
Training loss: 0.03149095214683357 | Validation loss: 0.03760540243073375
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
0.03185629 0.03151543 0.03121231]
------------------------------
Epoch: 147
Training loss: 0.029875266428039535 | Validation loss: 0.035697247552792584
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
0.03185629 0.03151543 0.03121231]
------------------------------
Epoch: 148
Training loss: 0.028500897502783305 | Validation loss: 0.03381003710581402
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
0.03185629 0.03151543 0.03121231]
------------------------------
Epoch: 149
Training loss: 0.027681679243045883 | Validation loss: 0.032980115103444695
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
0.03185629 0.03151543 0.03121231]
------------------------------
Epoch: 150
Training loss: 0.026877311056509146 | Validation loss: 0.031143162226452765
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
0.03185629 0.03151543 0.03121231 0.03114316]
------------------------------
Epoch: 151
Training loss: 0.026977618478832576 | Validation loss: 0.03266228110719044
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
0.03185629 0.03151543 0.03121231 0.03114316]
------------------------------
Epoch: 152
Training loss: 0.027789666411818893 | Validation loss: 0.034961970028492204
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
0.03185629 0.03151543 0.03121231 0.03114316]
------------------------------
Epoch: 153
Training loss: 0.02894545074526631 | Validation loss: 0.03532901065077929
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
0.03185629 0.03151543 0.03121231 0.03114316]
------------------------------
Epoch: 154
Training loss: 0.030454562234464945 | Validation loss: 0.040225627527168364
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
0.03185629 0.03151543 0.03121231 0.03114316]
------------------------------
Epoch: 155
Training loss: 0.03164946059806375 | Validation loss: 0.037975336828854234
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
0.03185629 0.03151543 0.03121231 0.03114316]
------------------------------
Epoch: 156
Training loss: 0.03068030412000875 | Validation loss: 0.04220050962361614
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
0.03185629 0.03151543 0.03121231 0.03114316]
------------------------------
Epoch: 157
Training loss: 0.029348198092527922 | Validation loss: 0.03511155945603299
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
0.03185629 0.03151543 0.03121231 0.03114316]
------------------------------
Epoch: 158
Training loss: 0.028144483990769277 | Validation loss: 0.03515485296668732
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
0.03185629 0.03151543 0.03121231 0.03114316]
------------------------------
Epoch: 159
Training loss: 0.027031416854432482 | Validation loss: 0.034209251535677276
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
0.03185629 0.03151543 0.03121231 0.03114316]
------------------------------
Epoch: 160
Training loss: 0.026445907158373787 | Validation loss: 0.030806712303475467
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
0.03185629 0.03151543 0.03121231 0.03114316 0.03080671]
------------------------------
Epoch: 161
Training loss: 0.026552394952801033 | Validation loss: 0.03321779652836576
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
0.03185629 0.03151543 0.03121231 0.03114316 0.03080671]
------------------------------
Epoch: 162
Training loss: 0.027329204999108424 | Validation loss: 0.035172222956883166
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
0.03185629 0.03151543 0.03121231 0.03114316 0.03080671]
------------------------------
Epoch: 163
Training loss: 0.028377699224813657 | Validation loss: 0.03415004107936294
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
0.03185629 0.03151543 0.03121231 0.03114316 0.03080671]
------------------------------
Epoch: 164
Training loss: 0.02962424848998684 | Validation loss: 0.0380124523576382
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
0.03185629 0.03151543 0.03121231 0.03114316 0.03080671]
------------------------------
Epoch: 165
Training loss: 0.031245342592386104 | Validation loss: 0.03870395737477636
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
0.03185629 0.03151543 0.03121231 0.03114316 0.03080671]
------------------------------
Epoch: 166
Training loss: 0.03022429422295733 | Validation loss: 0.03497945312080921
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
0.03185629 0.03151543 0.03121231 0.03114316 0.03080671]
------------------------------
Epoch: 167
Training loss: 0.02867190224232196 | Validation loss: 0.034943901974938615
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
0.03185629 0.03151543 0.03121231 0.03114316 0.03080671]
------------------------------
Epoch: 168
Training loss: 0.02769064278008167 | Validation loss: 0.0328868226667421
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
0.03185629 0.03151543 0.03121231 0.03114316 0.03080671]
------------------------------
Epoch: 169
Training loss: 0.026110646025255674 | Validation loss: 0.032032206835868084
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
0.03185629 0.03151543 0.03121231 0.03114316 0.03080671]
------------------------------
Epoch: 170
Training loss: 0.025901495435964463 | Validation loss: 0.03086405956244047
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
0.03185629 0.03151543 0.03121231 0.03114316 0.03080671 0.03086406]
------------------------------
Epoch: 171
Training loss: 0.025832738417959734 | Validation loss: 0.03191902321161686
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
0.03185629 0.03151543 0.03121231 0.03114316 0.03080671 0.03086406]
------------------------------
Epoch: 172
Training loss: 0.02658675519811855 | Validation loss: 0.032392649120131956
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
0.03185629 0.03151543 0.03121231 0.03114316 0.03080671 0.03086406]
------------------------------
Epoch: 173
Training loss: 0.02785930638549864 | Validation loss: 0.04055770080272866
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
0.03185629 0.03151543 0.03121231 0.03114316 0.03080671 0.03086406]
------------------------------
Epoch: 174
Training loss: 0.029304441305676724 | Validation loss: 0.03938528715707032
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
0.03185629 0.03151543 0.03121231 0.03114316 0.03080671 0.03086406]
------------------------------
Epoch: 175
Training loss: 0.030693826810126817 | Validation loss: 0.042542108787899525
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
0.03185629 0.03151543 0.03121231 0.03114316 0.03080671 0.03086406]
------------------------------
Epoch: 176
Training loss: 0.02968439479967154 | Validation loss: 0.03487302249182114
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
0.03185629 0.03151543 0.03121231 0.03114316 0.03080671 0.03086406]
------------------------------
Epoch: 177
Training loss: 0.028509433252010934 | Validation loss: 0.035466368128834046
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
0.03185629 0.03151543 0.03121231 0.03114316 0.03080671 0.03086406]
------------------------------
Epoch: 178
Training loss: 0.027170329408786133 | Validation loss: 0.03241105497180097
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
0.03185629 0.03151543 0.03121231 0.03114316 0.03080671 0.03086406]
------------------------------
Epoch: 179
Training loss: 0.026051353732871964 | Validation loss: 0.031891154834127
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
0.03185629 0.03151543 0.03121231 0.03114316 0.03080671 0.03086406]
------------------------------
Epoch: 180
Training loss: 0.025475371672082546 | Validation loss: 0.030502695054541118
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
0.03185629 0.03151543 0.03121231 0.03114316 0.03080671 0.03086406
0.0305027 ]
------------------------------
Epoch: 181
Training loss: 0.02571635264591644 | Validation loss: 0.03273352388852993
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
0.03185629 0.03151543 0.03121231 0.03114316 0.03080671 0.03086406
0.0305027 ]
------------------------------
Epoch: 182
Training loss: 0.02636262705855761 | Validation loss: 0.03367737308968749
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
0.03185629 0.03151543 0.03121231 0.03114316 0.03080671 0.03086406
0.0305027 ]
------------------------------
Epoch: 183
Training loss: 0.02711716690467392 | Validation loss: 0.03316663689416858
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
0.03185629 0.03151543 0.03121231 0.03114316 0.03080671 0.03086406
0.0305027 ]
------------------------------
Epoch: 184
Training loss: 0.028923610236319737 | Validation loss: 0.03617072011451278
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
0.03185629 0.03151543 0.03121231 0.03114316 0.03080671 0.03086406
0.0305027 ]
------------------------------
Epoch: 185
Training loss: 0.029695096961955915 | Validation loss: 0.03914791269771821
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
0.03185629 0.03151543 0.03121231 0.03114316 0.03080671 0.03086406
0.0305027 ]
------------------------------
Epoch: 186
Training loss: 0.02894029467949629 | Validation loss: 0.033959697874312376
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
0.03185629 0.03151543 0.03121231 0.03114316 0.03080671 0.03086406
0.0305027 ]
------------------------------
Epoch: 187
Training loss: 0.02806392555055392 | Validation loss: 0.032832851015295074
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
0.03185629 0.03151543 0.03121231 0.03114316 0.03080671 0.03086406
0.0305027 ]
------------------------------
Epoch: 188
Training loss: 0.026699587195840348 | Validation loss: 0.03351602898663388
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
0.03185629 0.03151543 0.03121231 0.03114316 0.03080671 0.03086406
0.0305027 ]
------------------------------
Epoch: 189
Training loss: 0.02575078903052105 | Validation loss: 0.032145418597599576
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
0.03185629 0.03151543 0.03121231 0.03114316 0.03080671 0.03086406
0.0305027 ]
------------------------------
Epoch: 190
Training loss: 0.025277233614740293 | Validation loss: 0.030475220247377866
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
0.03185629 0.03151543 0.03121231 0.03114316 0.03080671 0.03086406
0.0305027 0.03047522]
------------------------------
Epoch: 191
Training loss: 0.02529354150293351 | Validation loss: 0.031475040253944105
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
0.03185629 0.03151543 0.03121231 0.03114316 0.03080671 0.03086406
0.0305027 0.03047522]
------------------------------
Epoch: 192
Training loss: 0.02586796419332024 | Validation loss: 0.03231935241811835
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
0.03185629 0.03151543 0.03121231 0.03114316 0.03080671 0.03086406
0.0305027 0.03047522]
------------------------------
Epoch: 193
Training loss: 0.026974914326904503 | Validation loss: 0.03522281436068294
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
0.03185629 0.03151543 0.03121231 0.03114316 0.03080671 0.03086406
0.0305027 0.03047522]
------------------------------
Epoch: 194
Training loss: 0.028522872728090294 | Validation loss: 0.03937122993896493
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
0.03185629 0.03151543 0.03121231 0.03114316 0.03080671 0.03086406
0.0305027 0.03047522]
------------------------------
Epoch: 195
Training loss: 0.029662396922876224 | Validation loss: 0.04080926054940287
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
0.03185629 0.03151543 0.03121231 0.03114316 0.03080671 0.03086406
0.0305027 0.03047522]
------------------------------
Epoch: 196
Training loss: 0.02874099444799624 | Validation loss: 0.03661016435815697
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
0.03185629 0.03151543 0.03121231 0.03114316 0.03080671 0.03086406
0.0305027 0.03047522]
------------------------------
Epoch: 197
Training loss: 0.027630861122601144 | Validation loss: 0.03896229999321225
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
0.03185629 0.03151543 0.03121231 0.03114316 0.03080671 0.03086406
0.0305027 0.03047522]
------------------------------
Epoch: 198
Training loss: 0.026198979643151515 | Validation loss: 0.03245780790900498
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
0.03185629 0.03151543 0.03121231 0.03114316 0.03080671 0.03086406
0.0305027 0.03047522]
------------------------------
Epoch: 199
Training loss: 0.025587171060399806 | Validation loss: 0.0329771255332548
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
0.03185629 0.03151543 0.03121231 0.03114316 0.03080671 0.03086406
0.0305027 0.03047522]
------------------------------
Epoch: 200
Training loss: 0.02489157276609399 | Validation loss: 0.030435980206965346
Validation loss (ends of cycles): [0.11181737 0.0504285 0.04389677 0.04077262 0.03826001 0.03640073
0.03542221 0.03462614 0.03391641 0.03305692 0.03262452 0.03212512
0.03185629 0.03151543 0.03121231 0.03114316 0.03080671 0.03086406
0.0305027 0.03047522 0.03043598]
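The run above records validation loss at the ends of the triangular learning-rate cycles. With `step_size_up=5` and (assuming) one scheduler step per epoch, each cycle spans 10 epochs, which is why the `Validation loss (ends of cycles)` array grows at epochs 0, 10, 20, … and reaches 21 entries by epoch 200. Below is a minimal, self-contained sketch of that bookkeeping with `torch.optim.lr_scheduler.CyclicLR`; the `nn.Linear` model and the random tensors are dummy stand-ins for illustration, not the repository's `Learners`/`Model` code.

```python
import numpy as np
import torch
import torch.nn as nn

torch.manual_seed(0)
model = nn.Linear(10, 1)  # dummy stand-in for the spectra CNN
opt = torch.optim.Adam(model.parameters(), lr=3e-5)
sched = torch.optim.lr_scheduler.CyclicLR(
    opt, base_lr=3e-5, max_lr=1e-3, step_size_up=5,
    mode='triangular', cycle_momentum=False)

X_tr, y_tr = torch.randn(64, 10), torch.randn(64, 1)  # dummy data
X_va, y_va = torch.randn(32, 10), torch.randn(32, 1)
loss_fn = nn.MSELoss()

cycle_len = 10  # 2 * step_size_up, in scheduler steps (here: epochs)
cycle_end_losses = []
for epoch in range(21):  # shortened from the n_epochs=201 used above
    model.train()
    opt.zero_grad()
    train_loss = loss_fn(model(X_tr), y_tr)
    train_loss.backward()
    opt.step()
    sched.step()  # assumption: one CyclicLR step per epoch

    model.eval()
    with torch.no_grad():
        val_loss = loss_fn(model(X_va), y_va).item()
    if epoch % cycle_len == 0:  # learning rate is back at base_lr
        cycle_end_losses.append(val_loss)

    print('-' * 30)
    print(f'Epoch: {epoch}')
    print(f'Training loss: {train_loss.item()} | Validation loss: {val_loss}')
    print(f'Validation loss (ends of cycles): {np.array(cycle_end_losses)}')
```

Reading the end-of-cycle sequence for this seed, the loss falls steadily from 0.1118 after the first cycle to 0.0304 after the last, with only a marginal uptick at epoch 170 (0.03086 vs. 0.03081 at epoch 160). The within-cycle oscillations visible in the per-epoch lines are expected under a triangular schedule, which is why cycle ends, where the learning rate returns to its base value, are the fairer points at which to compare checkpoints.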
--------------------------------------------------------------------------------
Seed: 19
--------------------------------------------------------------------------------
------------------------------
Epoch: 0
Training loss: 0.13013751572364662 | Validation loss: 0.10866411221502102
Validation loss (ends of cycles): [0.10866411]
------------------------------
Epoch: 1
Training loss: 0.0962619811872123 | Validation loss: 0.08431469047425595
Validation loss (ends of cycles): [0.10866411]
------------------------------
Epoch: 2
Training loss: 0.08808117919403502 | Validation loss: 0.08335764244594406
Validation loss (ends of cycles): [0.10866411]
------------------------------
Epoch: 3
Training loss: 0.08320651522573583 | Validation loss: 0.07264648778446481
Validation loss (ends of cycles): [0.10866411]
------------------------------
Epoch: 4
Training loss: 0.078821630328967 | Validation loss: 0.086400652452644
Validation loss (ends of cycles): [0.10866411]
------------------------------
Epoch: 5
Training loss: 0.07551912005668081 | Validation loss: 0.07294148150666625
Validation loss (ends of cycles): [0.10866411]
------------------------------
Epoch: 6
Training loss: 0.06994353534147789 | Validation loss: 0.07389080859061363
Validation loss (ends of cycles): [0.10866411]
------------------------------
Epoch: 7
Training loss: 0.06581751633926815 | Validation loss: 0.05924121888799477
Validation loss (ends of cycles): [0.10866411]
------------------------------
Epoch: 8
Training loss: 0.061769033043359324 | Validation loss: 0.05550177810730132
Validation loss (ends of cycles): [0.10866411]
------------------------------
Epoch: 9
Training loss: 0.05849388224901411 | Validation loss: 0.05526111535398306
Validation loss (ends of cycles): [0.10866411]
------------------------------
Epoch: 10
Training loss: 0.05599109683640477 | Validation loss: 0.05028300414240993
Validation loss (ends of cycles): [0.10866411 0.050283 ]
------------------------------
Epoch: 11
Training loss: 0.05651184237070792 | Validation loss: 0.05119481994317169
Validation loss (ends of cycles): [0.10866411 0.050283 ]
------------------------------
Epoch: 12
Training loss: 0.05796342328608799 | Validation loss: 0.05256845900970223
Validation loss (ends of cycles): [0.10866411 0.050283 ]
------------------------------
Epoch: 13
Training loss: 0.05864124310452227 | Validation loss: 0.05311432339053238
Validation loss (ends of cycles): [0.10866411 0.050283 ]
------------------------------
Epoch: 14
Training loss: 0.05971920895060216 | Validation loss: 0.053835257805422344
Validation loss (ends of cycles): [0.10866411 0.050283 ]
------------------------------
Epoch: 15
Training loss: 0.0600251086206564 | Validation loss: 0.05795030089804029
Validation loss (ends of cycles): [0.10866411 0.050283 ]
------------------------------
Epoch: 16
Training loss: 0.05733736497020041 | Validation loss: 0.05345632078175524
Validation loss (ends of cycles): [0.10866411 0.050283 ]
------------------------------
Epoch: 17
Training loss: 0.05464613109489712 | Validation loss: 0.05284101431942092
Validation loss (ends of cycles): [0.10866411 0.050283 ]
------------------------------
Epoch: 18
Training loss: 0.05230115667557505 | Validation loss: 0.04781449882857568
Validation loss (ends of cycles): [0.10866411 0.050283 ]
------------------------------
Epoch: 19
Training loss: 0.04979933290119012 | Validation loss: 0.04415781805868697
Validation loss (ends of cycles): [0.10866411 0.050283 ]
------------------------------
Epoch: 20
Training loss: 0.048008861920363674 | Validation loss: 0.04327868562257659
Validation loss (ends of cycles): [0.10866411 0.050283 0.04327869]
------------------------------
Epoch: 21
Training loss: 0.04882487160425017 | Validation loss: 0.043836381352317016
Validation loss (ends of cycles): [0.10866411 0.050283 0.04327869]
------------------------------
Epoch: 22
Training loss: 0.049844463726168305 | Validation loss: 0.044874199962787395
Validation loss (ends of cycles): [0.10866411 0.050283 0.04327869]
------------------------------
Epoch: 23
Training loss: 0.05121253936104183 | Validation loss: 0.04625493370458088
Validation loss (ends of cycles): [0.10866411 0.050283 0.04327869]
------------------------------
Epoch: 24
Training loss: 0.05227855082021982 | Validation loss: 0.06580113441543242
Validation loss (ends of cycles): [0.10866411 0.050283 0.04327869]
------------------------------
Epoch: 25
Training loss: 0.05329341503725512 | Validation loss: 0.04736212479461611
Validation loss (ends of cycles): [0.10866411 0.050283 0.04327869]
------------------------------
Epoch: 26
Training loss: 0.05157633041174657 | Validation loss: 0.04704422052059554
Validation loss (ends of cycles): [0.10866411 0.050283 0.04327869]
------------------------------
Epoch: 27
Training loss: 0.04926364007405937 | Validation loss: 0.05678830411186261
Validation loss (ends of cycles): [0.10866411 0.050283 0.04327869]
------------------------------
Epoch: 28
Training loss: 0.047098851822294646 | Validation loss: 0.043971751379755744
Validation loss (ends of cycles): [0.10866411 0.050283 0.04327869]
------------------------------
Epoch: 29
Training loss: 0.044588426964936825 | Validation loss: 0.041111332207786296
Validation loss (ends of cycles): [0.10866411 0.050283 0.04327869]
------------------------------
Epoch: 30
Training loss: 0.043297484392432244 | Validation loss: 0.04007098248508652
Validation loss (ends of cycles): [0.10866411 0.050283 0.04327869 0.04007098]
------------------------------
Epoch: 31
Training loss: 0.044140496467218154 | Validation loss: 0.041449001322673484
Validation loss (ends of cycles): [0.10866411 0.050283 0.04327869 0.04007098]
------------------------------
Epoch: 32
Training loss: 0.04528621606360094 | Validation loss: 0.043515847155214414
Validation loss (ends of cycles): [0.10866411 0.050283 0.04327869 0.04007098]
------------------------------
Epoch: 33
Training loss: 0.046532035345123623 | Validation loss: 0.04557813516100951
Validation loss (ends of cycles): [0.10866411 0.050283 0.04327869 0.04007098]
------------------------------
Epoch: 34
Training loss: 0.04797187640779014 | Validation loss: 0.04983178259129018
Validation loss (ends of cycles): [0.10866411 0.050283 0.04327869 0.04007098]
------------------------------
Epoch: 35
Training loss: 0.04931510559569194 | Validation loss: 0.047406337331090356
Validation loss (ends of cycles): [0.10866411 0.050283 0.04327869 0.04007098]
------------------------------
Epoch: 36
Training loss: 0.04766238858651986 | Validation loss: 0.045556718137411946
Validation loss (ends of cycles): [0.10866411 0.050283 0.04327869 0.04007098]
------------------------------
Epoch: 37
Training loss: 0.045729071468364184 | Validation loss: 0.043208298611298074
Validation loss (ends of cycles): [0.10866411 0.050283 0.04327869 0.04007098]
------------------------------
Epoch: 38
Training loss: 0.04352147483237557 | Validation loss: 0.04185216669487742
Validation loss (ends of cycles): [0.10866411 0.050283 0.04327869 0.04007098]
------------------------------
Epoch: 39
Training loss: 0.041625120196726144 | Validation loss: 0.03896412177555329
Validation loss (ends of cycles): [0.10866411 0.050283 0.04327869 0.04007098]
------------------------------
Epoch: 40
Training loss: 0.04038160263494713 | Validation loss: 0.03796320309681175
Validation loss (ends of cycles): [0.10866411 0.050283 0.04327869 0.04007098 0.0379632 ]
------------------------------
Epoch: 41
Training loss: 0.04075280056100368 | Validation loss: 0.03851696993924875
Validation loss (ends of cycles): [0.10866411 0.050283 0.04327869 0.04007098 0.0379632 ]
------------------------------
Epoch: 42
Training loss: 0.04215318926217724 | Validation loss: 0.03938400522863443
Validation loss (ends of cycles): [0.10866411 0.050283 0.04327869 0.04007098 0.0379632 ]
------------------------------
Epoch: 43
Training loss: 0.04349201472703455 | Validation loss: 0.04077490941679056
Validation loss (ends of cycles): [0.10866411 0.050283 0.04327869 0.04007098 0.0379632 ]
------------------------------
Epoch: 44
Training loss: 0.04480456488250982 | Validation loss: 0.04196629445769091
Validation loss (ends of cycles): [0.10866411 0.050283 0.04327869 0.04007098 0.0379632 ]
------------------------------
Epoch: 45
Training loss: 0.046417191071897804 | Validation loss: 0.04734774543780141
Validation loss (ends of cycles): [0.10866411 0.050283 0.04327869 0.04007098 0.0379632 ]
------------------------------
Epoch: 46
Training loss: 0.044565415281186425 | Validation loss: 0.042343570760129824
Validation loss (ends of cycles): [0.10866411 0.050283 0.04327869 0.04007098 0.0379632 ]
------------------------------
Epoch: 47
Training loss: 0.04292275310736003 | Validation loss: 0.046334737252477
Validation loss (ends of cycles): [0.10866411 0.050283 0.04327869 0.04007098 0.0379632 ]
------------------------------
Epoch: 48
Training loss: 0.04080903969382442 | Validation loss: 0.039976048242069975
Validation loss (ends of cycles): [0.10866411 0.050283 0.04327869 0.04007098 0.0379632 ]
------------------------------
Epoch: 49
Training loss: 0.038945722352442015 | Validation loss: 0.03901533678635559
Validation loss (ends of cycles): [0.10866411 0.050283 0.04327869 0.04007098 0.0379632 ]
------------------------------
Epoch: 50
Training loss: 0.03775392111123165 | Validation loss: 0.03635692800832006
Validation loss (ends of cycles): [0.10866411 0.050283 0.04327869 0.04007098 0.0379632 0.03635693]
------------------------------
Epoch: 51
Training loss: 0.03847538466898974 | Validation loss: 0.037682123169039204
Validation loss (ends of cycles): [0.10866411 0.050283 0.04327869 0.04007098 0.0379632 0.03635693]
------------------------------
Epoch: 52
Training loss: 0.03952817849660894 | Validation loss: 0.038835352138344166
Validation loss (ends of cycles): [0.10866411 0.050283 0.04327869 0.04007098 0.0379632 0.03635693]
------------------------------
Epoch: 53
Training loss: 0.040770487166853164 | Validation loss: 0.04136987426112183
Validation loss (ends of cycles): [0.10866411 0.050283 0.04327869 0.04007098 0.0379632 0.03635693]
------------------------------
Epoch: 54
Training loss: 0.04249348099743141 | Validation loss: 0.04505128795857978
Validation loss (ends of cycles): [0.10866411 0.050283 0.04327869 0.04007098 0.0379632 0.03635693]
------------------------------
Epoch: 55
Training loss: 0.04351341669029725 | Validation loss: 0.054758314850979144
Validation loss (ends of cycles): [0.10866411 0.050283 0.04327869 0.04007098 0.0379632 0.03635693]
------------------------------
Epoch: 56
Training loss: 0.04219748325440593 | Validation loss: 0.044231936352047245
Validation loss (ends of cycles): [0.10866411 0.050283 0.04327869 0.04007098 0.0379632 0.03635693]
------------------------------
Epoch: 57
Training loss: 0.040449189506177825 | Validation loss: 0.04580309190911002
Validation loss (ends of cycles): [0.10866411 0.050283 0.04327869 0.04007098 0.0379632 0.03635693]
------------------------------
Epoch: 58
Training loss: 0.03866173452184367 | Validation loss: 0.0378917996994162
Validation loss (ends of cycles): [0.10866411 0.050283 0.04327869 0.04007098 0.0379632 0.03635693]
------------------------------
Epoch: 59
Training loss: 0.037179108895178094 | Validation loss: 0.03695562399462261
Validation loss (ends of cycles): [0.10866411 0.050283 0.04327869 0.04007098 0.0379632 0.03635693]
------------------------------
Epoch: 60
Training loss: 0.035941219553131405 | Validation loss: 0.034984574340020134
Validation loss (ends of cycles): [0.10866411 0.050283 0.04327869 0.04007098 0.0379632 0.03635693
0.03498457]
------------------------------
Epoch: 61
Training loss: 0.03639172680041657 | Validation loss: 0.03606705074157335
Validation loss (ends of cycles): [0.10866411 0.050283 0.04327869 0.04007098 0.0379632 0.03635693
0.03498457]
------------------------------
Epoch: 62
Training loss: 0.0375638076628551 | Validation loss: 0.03725699788635283
Validation loss (ends of cycles): [0.10866411 0.050283 0.04327869 0.04007098 0.0379632 0.03635693
0.03498457]
------------------------------
Epoch: 63
Training loss: 0.039039047017847515 | Validation loss: 0.038210892637746526
Validation loss (ends of cycles): [0.10866411 0.050283 0.04327869 0.04007098 0.0379632 0.03635693
0.03498457]
------------------------------
Epoch: 64
Training loss: 0.04020949403707701 | Validation loss: 0.04285801162498187
Validation loss (ends of cycles): [0.10866411 0.050283 0.04327869 0.04007098 0.0379632 0.03635693
0.03498457]
------------------------------
Epoch: 65
Training loss: 0.041740124291689024 | Validation loss: 0.041410352474292825
Validation loss (ends of cycles): [0.10866411 0.050283 0.04327869 0.04007098 0.0379632 0.03635693
0.03498457]
------------------------------
Epoch: 66
Training loss: 0.040310035649445054 | Validation loss: 0.05140106689877215
Validation loss (ends of cycles): [0.10866411 0.050283 0.04327869 0.04007098 0.0379632 0.03635693
0.03498457]
------------------------------
Epoch: 67
Training loss: 0.03865856288173273 | Validation loss: 0.04588074636010997
Validation loss (ends of cycles): [0.10866411 0.050283 0.04327869 0.04007098 0.0379632 0.03635693
0.03498457]
------------------------------
Epoch: 68
Training loss: 0.036848236220030806 | Validation loss: 0.039373264755163576
Validation loss (ends of cycles): [0.10866411 0.050283 0.04327869 0.04007098 0.0379632 0.03635693
0.03498457]
------------------------------
Epoch: 69
Training loss: 0.03542020992562908 | Validation loss: 0.036005012377832844
Validation loss (ends of cycles): [0.10866411 0.050283 0.04327869 0.04007098 0.0379632 0.03635693
0.03498457]
------------------------------
Epoch: 70
Training loss: 0.034385565286098915 | Validation loss: 0.03407285567702709
Validation loss (ends of cycles): [0.10866411 0.050283 0.04327869 0.04007098 0.0379632 0.03635693
0.03498457 0.03407286]
------------------------------
Epoch: 71
Training loss: 0.034998325296442985 | Validation loss: 0.03499888496852554
Validation loss (ends of cycles): [0.10866411 0.050283 0.04327869 0.04007098 0.0379632 0.03635693
0.03498457 0.03407286]
------------------------------
Epoch: 72
Training loss: 0.03608989844824679 | Validation loss: 0.037975290641847965
Validation loss (ends of cycles): [0.10866411 0.050283 0.04327869 0.04007098 0.0379632 0.03635693
0.03498457 0.03407286]
------------------------------
Epoch: 73
Training loss: 0.037150994481291534 | Validation loss: 0.03802902005876588
Validation loss (ends of cycles): [0.10866411 0.050283 0.04327869 0.04007098 0.0379632 0.03635693
0.03498457 0.03407286]
------------------------------
Epoch: 74
Training loss: 0.03874208623778046 | Validation loss: 0.041800051696796334
Validation loss (ends of cycles): [0.10866411 0.050283 0.04327869 0.04007098 0.0379632 0.03635693
0.03498457 0.03407286]
------------------------------
Epoch: 75
Training loss: 0.04030366619135891 | Validation loss: 0.04402384821292574
Validation loss (ends of cycles): [0.10866411 0.050283 0.04327869 0.04007098 0.0379632 0.03635693
0.03498457 0.03407286]
------------------------------
Epoch: 76
Training loss: 0.03897080734265807 | Validation loss: 0.038405700957616876
Validation loss (ends of cycles): [0.10866411 0.050283 0.04327869 0.04007098 0.0379632 0.03635693
0.03498457 0.03407286]
------------------------------
Epoch: 77
Training loss: 0.03700368836206773 | Validation loss: 0.03706241132839855
Validation loss (ends of cycles): [0.10866411 0.050283 0.04327869 0.04007098 0.0379632 0.03635693
0.03498457 0.03407286]
------------------------------
Epoch: 78
Training loss: 0.03561326385912023 | Validation loss: 0.03773653503935949
Validation loss (ends of cycles): [0.10866411 0.050283 0.04327869 0.04007098 0.0379632 0.03635693
0.03498457 0.03407286]
------------------------------
Epoch: 79
Training loss: 0.03413904608890971 | Validation loss: 0.034595715953449235
Validation loss (ends of cycles): [0.10866411 0.050283 0.04327869 0.04007098 0.0379632 0.03635693
0.03498457 0.03407286]
------------------------------
Epoch: 80
Training loss: 0.033077710618880964 | Validation loss: 0.03346761355621625
Validation loss (ends of cycles): [0.10866411 0.050283 0.04327869 0.04007098 0.0379632 0.03635693
0.03498457 0.03407286 0.03346761]
------------------------------
Epoch: 81
Training loss: 0.03339501076990255 | Validation loss: 0.03464310675595714
Validation loss (ends of cycles): [0.10866411 0.050283 0.04327869 0.04007098 0.0379632 0.03635693
0.03498457 0.03407286 0.03346761]
------------------------------
Epoch: 82
Training loss: 0.03449362341413553 | Validation loss: 0.03528704326105329
Validation loss (ends of cycles): [0.10866411 0.050283 0.04327869 0.04007098 0.0379632 0.03635693
0.03498457 0.03407286 0.03346761]
------------------------------
Epoch: 83
Training loss: 0.03574517990213152 | Validation loss: 0.03956749140227263
Validation loss (ends of cycles): [0.10866411 0.050283 0.04327869 0.04007098 0.0379632 0.03635693
0.03498457 0.03407286 0.03346761]
------------------------------
Epoch: 84
Training loss: 0.03712140995547117 | Validation loss: 0.04075488192647432
Validation loss (ends of cycles): [0.10866411 0.050283 0.04327869 0.04007098 0.0379632 0.03635693
0.03498457 0.03407286 0.03346761]
------------------------------
Epoch: 85
Training loss: 0.03851752373111236 | Validation loss: 0.04053122321127263
Validation loss (ends of cycles): [0.10866411 0.050283 0.04327869 0.04007098 0.0379632 0.03635693
0.03498457 0.03407286 0.03346761]
------------------------------
Epoch: 86
Training loss: 0.037289504063550354 | Validation loss: 0.038621348683285504
Validation loss (ends of cycles): [0.10866411 0.050283 0.04327869 0.04007098 0.0379632 0.03635693
0.03498457 0.03407286 0.03346761]
------------------------------
Epoch: 87
Training loss: 0.03606580819487278 | Validation loss: 0.03755889262641426
Validation loss (ends of cycles): [0.10866411 0.050283 0.04327869 0.04007098 0.0379632 0.03635693
0.03498457 0.03407286 0.03346761]
------------------------------
Epoch: 88
Training loss: 0.03403875287750545 | Validation loss: 0.03664958490207132
Validation loss (ends of cycles): [0.10866411 0.050283 0.04327869 0.04007098 0.0379632 0.03635693
0.03498457 0.03407286 0.03346761]
------------------------------
Epoch: 89
Training loss: 0.032725286617779764 | Validation loss: 0.03518285962497502
Validation loss (ends of cycles): [0.10866411 0.050283 0.04327869 0.04007098 0.0379632 0.03635693
0.03498457 0.03407286 0.03346761]
------------------------------
Epoch: 90
Training loss: 0.03179220572630985 | Validation loss: 0.03308977194097454
Validation loss (ends of cycles): [0.10866411 0.050283 0.04327869 0.04007098 0.0379632 0.03635693
0.03498457 0.03407286 0.03346761 0.03308977]
------------------------------
Epoch: 91
Training loss: 0.03204954804313611 | Validation loss: 0.03412141737453969
Validation loss (ends of cycles): [0.10866411 0.050283 0.04327869 0.04007098 0.0379632 0.03635693
0.03498457 0.03407286 0.03346761 0.03308977]
------------------------------
Epoch: 92
Training loss: 0.033113976240187415 | Validation loss: 0.03650290599768668
Validation loss (ends of cycles): [0.10866411 0.050283 0.04327869 0.04007098 0.0379632 0.03635693
0.03498457 0.03407286 0.03346761 0.03308977]
------------------------------
Epoch: 93
Training loss: 0.03438642540282944 | Validation loss: 0.03640461193842698
Validation loss (ends of cycles): [0.10866411 0.050283 0.04327869 0.04007098 0.0379632 0.03635693
0.03498457 0.03407286 0.03346761 0.03308977]
------------------------------
Epoch: 94
Training loss: 0.035784698719356765 | Validation loss: 0.03660603991783826
Validation loss (ends of cycles): [0.10866411 0.050283 0.04327869 0.04007098 0.0379632 0.03635693
0.03498457 0.03407286 0.03346761 0.03308977]
------------------------------
Epoch: 95
Training loss: 0.03738688092631119 | Validation loss: 0.04603489715836744
Validation loss (ends of cycles): [0.10866411 0.050283 0.04327869 0.04007098 0.0379632 0.03635693
0.03498457 0.03407286 0.03346761 0.03308977]
------------------------------
Epoch: 96
Training loss: 0.0360823278718694 | Validation loss: 0.03907236244232781
Validation loss (ends of cycles): [0.10866411 0.050283 0.04327869 0.04007098 0.0379632 0.03635693
0.03498457 0.03407286 0.03346761 0.03308977]
------------------------------
Epoch: 97
Training loss: 0.03440698718571463 | Validation loss: 0.03614757999579991
Validation loss (ends of cycles): [0.10866411 0.050283 0.04327869 0.04007098 0.0379632 0.03635693
0.03498457 0.03407286 0.03346761 0.03308977]
------------------------------
Epoch: 98
Training loss: 0.032923828401624805 | Validation loss: 0.03688841782905887
Validation loss (ends of cycles): [0.10866411 0.050283 0.04327869 0.04007098 0.0379632 0.03635693
0.03498457 0.03407286 0.03346761 0.03308977]
------------------------------
Epoch: 99
Training loss: 0.03173442275582365 | Validation loss: 0.034847187158544506
Validation loss (ends of cycles): [0.10866411 0.050283 0.04327869 0.04007098 0.0379632 0.03635693
0.03498457 0.03407286 0.03346761 0.03308977]
------------------------------
Epoch: 100
Training loss: 0.030615978687268307 | Validation loss: 0.03256259926190946
Validation loss (ends of cycles): [0.10866411 0.050283 0.04327869 0.04007098 0.0379632 0.03635693
0.03498457 0.03407286 0.03346761 0.03308977 0.0325626 ]
------------------------------
Epoch: 101
Training loss: 0.030597679183165742 | Validation loss: 0.03352231219971338
Validation loss (ends of cycles): [0.10866411 0.050283 0.04327869 0.04007098 0.0379632 0.03635693
0.03498457 0.03407286 0.03346761 0.03308977 0.0325626 ]
------------------------------
Epoch: 102
Training loss: 0.03192661127614547 | Validation loss: 0.034362965502438295
Validation loss (ends of cycles): [0.10866411 0.050283 0.04327869 0.04007098 0.0379632 0.03635693
0.03498457 0.03407286 0.03346761 0.03308977 0.0325626 ]
------------------------------
Epoch: 103
Training loss: 0.03307850291681161 | Validation loss: 0.035471899154703175
Validation loss (ends of cycles): [0.10866411 0.050283 0.04327869 0.04007098 0.0379632 0.03635693
0.03498457 0.03407286 0.03346761 0.03308977 0.0325626 ]
------------------------------
Epoch: 104
Training loss: 0.03460319688741675 | Validation loss: 0.04395314779099638
Validation loss (ends of cycles): [0.10866411 0.050283 0.04327869 0.04007098 0.0379632 0.03635693
0.03498457 0.03407286 0.03346761 0.03308977 0.0325626 ]
------------------------------
Epoch: 105
Training loss: 0.03603993922812996 | Validation loss: 0.03989762382631281
Validation loss (ends of cycles): [0.10866411 0.050283 0.04327869 0.04007098 0.0379632 0.03635693
0.03498457 0.03407286 0.03346761 0.03308977 0.0325626 ]
------------------------------
Epoch: 106
Training loss: 0.03485340225568965 | Validation loss: 0.03633215443222924
Validation loss (ends of cycles): [0.10866411 0.050283 0.04327869 0.04007098 0.0379632 0.03635693
0.03498457 0.03407286 0.03346761 0.03308977 0.0325626 ]
------------------------------
Epoch: 107
Training loss: 0.03364605744420661 | Validation loss: 0.03456024982167029
Validation loss (ends of cycles): [0.10866411 0.050283 0.04327869 0.04007098 0.0379632 0.03635693
0.03498457 0.03407286 0.03346761 0.03308977 0.0325626 ]
------------------------------
Epoch: 108
Training loss: 0.03178448307852253 | Validation loss: 0.03427424061779691
Validation loss (ends of cycles): [0.10866411 0.050283 0.04327869 0.04007098 0.0379632 0.03635693
0.03498457 0.03407286 0.03346761 0.03308977 0.0325626 ]
------------------------------
Epoch: 109
Training loss: 0.03050562876870665 | Validation loss: 0.03418495161541268
Validation loss (ends of cycles): [0.10866411 0.050283 0.04327869 0.04007098 0.0379632 0.03635693
0.03498457 0.03407286 0.03346761 0.03308977 0.0325626 ]
------------------------------
Epoch: 110
Training loss: 0.029790386914158255 | Validation loss: 0.03241037907886558
Validation loss (ends of cycles): [0.10866411 0.050283 0.04327869 0.04007098 0.0379632 0.03635693
0.03498457 0.03407286 0.03346761 0.03308977 0.0325626 0.03241038]
------------------------------
Epoch: 111
Training loss: 0.03013055137904933 | Validation loss: 0.03372434017339111
Validation loss (ends of cycles): [0.10866411 0.050283 0.04327869 0.04007098 0.0379632 0.03635693
0.03498457 0.03407286 0.03346761 0.03308977 0.0325626 0.03241038]
------------------------------
Epoch: 112
Training loss: 0.030869591665842872 | Validation loss: 0.03451865892527641
Validation loss (ends of cycles): [0.10866411 0.050283 0.04327869 0.04007098 0.0379632 0.03635693
0.03498457 0.03407286 0.03346761 0.03308977 0.0325626 0.03241038]
------------------------------
Epoch: 113
Training loss: 0.032268728440312124 | Validation loss: 0.036281384115593625
Validation loss (ends of cycles): [0.10866411 0.050283 0.04327869 0.04007098 0.0379632 0.03635693
0.03498457 0.03407286 0.03346761 0.03308977 0.0325626 0.03241038]
------------------------------
Epoch: 114
Training loss: 0.03397129182373797 | Validation loss: 0.037896803192860255
Validation loss (ends of cycles): [0.10866411 0.050283 0.04327869 0.04007098 0.0379632 0.03635693
0.03498457 0.03407286 0.03346761 0.03308977 0.0325626 0.03241038]
------------------------------
Epoch: 115
Training loss: 0.03532382988418621 | Validation loss: 0.04804187614174016
Validation loss (ends of cycles): [0.10866411 0.050283 0.04327869 0.04007098 0.0379632 0.03635693
0.03498457 0.03407286 0.03346761 0.03308977 0.0325626 0.03241038]
------------------------------
Epoch: 116
Training loss: 0.03400072505730608 | Validation loss: 0.03747494302821898
Validation loss (ends of cycles): [0.10866411 0.050283 0.04327869 0.04007098 0.0379632 0.03635693
0.03498457 0.03407286 0.03346761 0.03308977 0.0325626 0.03241038]
------------------------------
Epoch: 117
Training loss: 0.032401689846163956 | Validation loss: 0.03554237289436623
Validation loss (ends of cycles): [0.10866411 0.050283 0.04327869 0.04007098 0.0379632 0.03635693
0.03498457 0.03407286 0.03346761 0.03308977 0.0325626 0.03241038]
------------------------------
Epoch: 118
Training loss: 0.03092662235776945 | Validation loss: 0.03437768438814488
Validation loss (ends of cycles): [0.10866411 0.050283 0.04327869 0.04007098 0.0379632 0.03635693
0.03498457 0.03407286 0.03346761 0.03308977 0.0325626 0.03241038]
------------------------------
Epoch: 119
Training loss: 0.029526078819634583 | Validation loss: 0.03346088269840827
Validation loss (ends of cycles): [0.10866411 0.050283 0.04327869 0.04007098 0.0379632 0.03635693
0.03498457 0.03407286 0.03346761 0.03308977 0.0325626 0.03241038]
------------------------------
Epoch: 120
Training loss: 0.028847131215395245 | Validation loss: 0.03171695443282349
Validation loss (ends of cycles): [0.10866411 0.050283 0.04327869 0.04007098 0.0379632 0.03635693
0.03498457 0.03407286 0.03346761 0.03308977 0.0325626 0.03241038
0.03171695]
------------------------------
Epoch: 121
Training loss: 0.029099270120248433 | Validation loss: 0.03273795564057289
Validation loss (ends of cycles): [0.10866411 0.050283 0.04327869 0.04007098 0.0379632 0.03635693
0.03498457 0.03407286 0.03346761 0.03308977 0.0325626 0.03241038
0.03171695]
------------------------------
Epoch: 122
Training loss: 0.03011367364243905 | Validation loss: 0.033902542846920215
Validation loss (ends of cycles): [0.10866411 0.050283 0.04327869 0.04007098 0.0379632 0.03635693
0.03498457 0.03407286 0.03346761 0.03308977 0.0325626 0.03241038
0.03171695]
------------------------------
Epoch: 123
Training loss: 0.03149592533238291 | Validation loss: 0.04488189828343096
Validation loss (ends of cycles): [0.10866411 0.050283 0.04327869 0.04007098 0.0379632 0.03635693
0.03498457 0.03407286 0.03346761 0.03308977 0.0325626 0.03241038
0.03171695]
------------------------------
Epoch: 124
Training loss: 0.03292691715219329 | Validation loss: 0.03982129518305306
Validation loss (ends of cycles): [0.10866411 0.050283 0.04327869 0.04007098 0.0379632 0.03635693
0.03498457 0.03407286 0.03346761 0.03308977 0.0325626 0.03241038
0.03171695]
------------------------------
Epoch: 125
Training loss: 0.03405568103294995 | Validation loss: 0.049960068979226384
Validation loss (ends of cycles): [0.10866411 0.050283 0.04327869 0.04007098 0.0379632 0.03635693
0.03498457 0.03407286 0.03346761 0.03308977 0.0325626 0.03241038
0.03171695]
------------------------------
Epoch: 126
Training loss: 0.0330583095444205 | Validation loss: 0.03734910000214534
Validation loss (ends of cycles): [0.10866411 0.050283 0.04327869 0.04007098 0.0379632 0.03635693
0.03498457 0.03407286 0.03346761 0.03308977 0.0325626 0.03241038
0.03171695]
------------------------------
Epoch: 127
Training loss: 0.03171656856324348 | Validation loss: 0.04288117178773458
Validation loss (ends of cycles): [0.10866411 0.050283 0.04327869 0.04007098 0.0379632 0.03635693
0.03498457 0.03407286 0.03346761 0.03308977 0.0325626 0.03241038
0.03171695]
------------------------------
Epoch: 128
Training loss: 0.03015985843582504 | Validation loss: 0.03478364519511176
Validation loss (ends of cycles): [0.10866411 0.050283 0.04327869 0.04007098 0.0379632 0.03635693
0.03498457 0.03407286 0.03346761 0.03308977 0.0325626 0.03241038
0.03171695]
------------------------------
Epoch: 129
Training loss: 0.02886176664715882 | Validation loss: 0.033183983189210425
Validation loss (ends of cycles): [0.10866411 0.050283 0.04327869 0.04007098 0.0379632 0.03635693
0.03498457 0.03407286 0.03346761 0.03308977 0.0325626 0.03241038
0.03171695]
------------------------------
Epoch: 130
Training loss: 0.02814806290759199 | Validation loss: 0.03104466441359404
Validation loss (ends of cycles): [0.10866411 0.050283 0.04327869 0.04007098 0.0379632 0.03635693
0.03498457 0.03407286 0.03346761 0.03308977 0.0325626 0.03241038
0.03171695 0.03104466]
------------------------------
Epoch: 131
Training loss: 0.028255866756149398 | Validation loss: 0.0327843960325143
Validation loss (ends of cycles): [0.10866411 0.050283 0.04327869 0.04007098 0.0379632 0.03635693
0.03498457 0.03407286 0.03346761 0.03308977 0.0325626 0.03241038
0.03171695 0.03104466]
------------------------------
Epoch: 132
Training loss: 0.02914011667071893 | Validation loss: 0.033506754794017934
Validation loss (ends of cycles): [0.10866411 0.050283 0.04327869 0.04007098 0.0379632 0.03635693
0.03498457 0.03407286 0.03346761 0.03308977 0.0325626 0.03241038
0.03171695 0.03104466]
------------------------------
Epoch: 133
Training loss: 0.030517671123425676 | Validation loss: 0.034617729789981275
Validation loss (ends of cycles): [0.10866411 0.050283 0.04327869 0.04007098 0.0379632 0.03635693
0.03498457 0.03407286 0.03346761 0.03308977 0.0325626 0.03241038
0.03171695 0.03104466]
------------------------------
Epoch: 134
Training loss: 0.03195116536941115 | Validation loss: 0.03802656727653425
Validation loss (ends of cycles): [0.10866411 0.050283 0.04327869 0.04007098 0.0379632 0.03635693
0.03498457 0.03407286 0.03346761 0.03308977 0.0325626 0.03241038
0.03171695 0.03104466]
------------------------------
Epoch: 135
[... remaining training output truncated ...]
Evaluate on all
# Replace following Paths with yours
src_dir_model = Path('/content/drive/MyDrive/research/predict-k-mirs-dl/dumps/cnn/train_eval/all/models')
seeds = range(20)

learners = Learners(Model, tax_lookup, seeds=seeds, device=device)
perfs_global_all, y_hats_all, y_trues_all, ns_all = learners.evaluate(
    (X, y, depth_order[:, -1]), src_dir_model=src_dir_model)
print(f'# of test samples: {ns_all.mean().item()}')
# of test samples: 4032.0
# Save a specific seed's y_hat, y_true to plot "Observed vs. predicted" scatterplots
# Replace following Paths with yours
dest_dir_predicted = Path('/content/drive/MyDrive/research/predict-k-mirs-dl/dumps/')

seed = 1
with open(dest_dir_predicted/f'predicted-true-cnn-seed-{seed}.pickle', 'wb') as f:
    pickle.dump((y_hats_all[seed].to_numpy(), y_trues_all[seed].to_numpy()), f)
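A minimal sketch of reading that pickle back and drawing the "Observed vs. predicted" scatterplot it is saved for. This is not part of the original notebook: it assumes matplotlib is available and that dest_dir_predicted and seed are still in scope.

# Sketch only: reload one seed's predictions and plot observed vs. predicted.
import pickle
import matplotlib.pyplot as plt

with open(dest_dir_predicted/f'predicted-true-cnn-seed-{seed}.pickle', 'rb') as f:
    y_hat, y_true = pickle.load(f)

fig, ax = plt.subplots(figsize=(5, 5))
ax.scatter(y_true, y_hat, s=4, alpha=0.3)
lo, hi = min(y_true.min(), y_hat.min()), max(y_true.max(), y_hat.max())
ax.plot([lo, hi], [lo, hi], 'k--', lw=1)  # 1:1 line
ax.set_xlabel('Observed')
ax.set_ylabel('Predicted')
plt.show()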
perfs_global_all.describe()
             rpd       rpiq         r2       lccc       rmse        mse        mae       mape       bias        stb
count  20.000000  20.000000  20.000000  20.000000  20.000000  20.000000  20.000000  20.000000  20.000000  20.000000
mean    2.184830   2.992808   0.790243   0.884580   0.595624   0.377839   0.237318  30.946025   0.004749   0.009024
std     0.041494   0.067646   0.007929   0.004986   0.155836   0.259428   0.012061   0.932127   0.012705   0.024443
min     2.114635   2.889447   0.776315   0.874575   0.459073   0.210748   0.221478  29.163000  -0.012504  -0.024802
25%     2.159791   2.930840   0.785559   0.881369   0.504516   0.254547   0.225294  30.303971  -0.005159  -0.010186
50%     2.183314   3.015281   0.790165   0.884643   0.571964   0.327175   0.236342  30.827668   0.004057   0.007958
75%     2.205628   3.028623   0.794389   0.887016   0.617017   0.380868   0.246811  31.663001   0.016258   0.031279
max     2.262492   3.141478   0.804596   0.895479   1.197708   1.434504   0.260846  32.642108   0.032962   0.062948
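For reading the table: rpd is the ratio of performance to deviation (standard deviation of the observations over the RMSE), rpiq the ratio of performance to interquartile distance (IQR over RMSE), and lccc Lin's concordance correlation coefficient. A minimal sketch of the two ratio metrics, with hypothetical helper names (the notebook's own implementations live elsewhere in the repo):

# Illustrative helpers only; not the repo's own evaluation functions.
import numpy as np

def rpd(y_true, y_hat):
    # Ratio of performance to deviation: SD of observations over RMSE.
    rmse = np.sqrt(np.mean((y_true - y_hat) ** 2))
    return np.std(y_true, ddof=1) / rmse

def rpiq(y_true, y_hat):
    # Ratio of performance to interquartile distance: IQR over RMSE.
    q1, q3 = np.percentile(y_true, [25, 75])
    rmse = np.sqrt(np.mean((y_true - y_hat) ** 2))
    return (q3 - q1) / rmse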
Evaluate on Soil Tax. Orders
# Replace following Paths with yours
src_dir_model = Path('/content/drive/MyDrive/research/predict-k-mirs-dl/dumps/cnn/train_eval/all/models')
seeds = range(20)

for k, v in tax_lookup.items():
    print(80*'-')
    print(f'Test metrics on {k}')
    print(80*'-')
    learners = Learners(Model, tax_lookup, seeds=seeds, device=device)
    perfs_global, _, _, ns = learners.evaluate((X, y, depth_order[:, -1]),
                                               order=v, src_dir_model=src_dir_model)
    print(f'# of test samples: {ns.mean().item()}')
    print(perfs_global.describe())
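The loop above prints one describe() table per order. If a single comparison table is preferred, the per-seed means could be collected into one DataFrame; a sketch under the same assumptions as the loop (pandas available; mean_perfs is a hypothetical name):

# Sketch: one row of mean test metrics per Soil Taxonomic Order.
import pandas as pd

mean_perfs = {}
for k, v in tax_lookup.items():
    learners = Learners(Model, tax_lookup, seeds=seeds, device=device)
    perfs, _, _, _ = learners.evaluate((X, y, depth_order[:, -1]),
                                       order=v, src_dir_model=src_dir_model)
    mean_perfs[k] = perfs.mean()

pd.DataFrame(mean_perfs).T  # orders as rows, metric columns as above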
Train on Vertisols

# Replace following Paths with yours
dest_dir_loss = Path('/content/drive/MyDrive/research/predict-k-mirs-dl/dumps/cnn/train_eval/vertisols/losses')
dest_dir_model = Path('/content/drive/MyDrive/research/predict-k-mirs-dl/dumps/cnn/train_eval/vertisols/models')

order = 10
seeds = range(20)
n_epochs = 31

learners = Learners(Model, tax_lookup, seeds=seeds, device=device)
learners.train((X, y, depth_order[:, -1]), order=order,
               dest_dir_loss=dest_dir_loss, dest_dir_model=dest_dir_model,
               n_epochs=n_epochs, sc_kwargs=params_scheduler)
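In the log that follows, the "Validation loss (ends of cycles)" array gains a new entry every 10 epochs: the triangular schedule completes one full cycle every 2 x step_size_up epochs. A minimal sketch of the underlying PyTorch scheduler, assuming it is stepped once per epoch and reusing the notebook's scheduler settings (the model, loop body, and list names are illustrative stand-ins, not the Learners internals):

# Sketch only: the kind of cyclical schedule behind the logs below.
import torch

model = torch.nn.Linear(10, 1)  # stand-in model
opt = torch.optim.Adam(model.parameters(), lr=3e-5)
sched = torch.optim.lr_scheduler.CyclicLR(
    opt, base_lr=3e-5, max_lr=1e-3, step_size_up=5,
    mode='triangular', cycle_momentum=False)  # False: Adam has no momentum param

end_of_cycle_losses = []
for epoch in range(31):
    # ... one pass over the training and validation sets would go here ...
    sched.step()             # assumption: one scheduler step per epoch
    if epoch % 10 == 0:      # a full triangle = 2 * step_size_up epochs
        end_of_cycle_losses.append(epoch)  # placeholder for the val loss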
--------------------------------------------------------------------------------
Seed: 0
--------------------------------------------------------------------------------
------------------------------
Epoch: 0
Training loss: 0.161204216511626 | Validation loss: 0.1679159700870514
Validation loss (ends of cycles): [0.16791597]
------------------------------
Epoch: 1
Training loss: 0.15437143099935433 | Validation loss: 0.15762153267860413
Validation loss (ends of cycles): [0.16791597]
------------------------------
Epoch: 2
Training loss: 0.13889641942162262 | Validation loss: 0.13728273659944534
Validation loss (ends of cycles): [0.16791597]
------------------------------
Epoch: 3
Training loss: 0.11834369207683362 | Validation loss: 0.10279234126210213
Validation loss (ends of cycles): [0.16791597]
------------------------------
Epoch: 4
Training loss: 0.09297808573434227 | Validation loss: 0.09408992528915405
Validation loss (ends of cycles): [0.16791597]
------------------------------
Epoch: 5
Training loss: 0.07264437330396552 | Validation loss: 0.055685702711343765
Validation loss (ends of cycles): [0.16791597]
------------------------------
Epoch: 6
Training loss: 0.06562904072435279 | Validation loss: 0.08587116375565529
Validation loss (ends of cycles): [0.16791597]
------------------------------
Epoch: 7
Training loss: 0.055855809269767055 | Validation loss: 0.0615625474601984
Validation loss (ends of cycles): [0.16791597]
------------------------------
Epoch: 8
Training loss: 0.05285423092151943 | Validation loss: 0.05325787328183651
Validation loss (ends of cycles): [0.16791597]
------------------------------
Epoch: 9
Training loss: 0.05118850050003905 | Validation loss: 0.06249404326081276
Validation loss (ends of cycles): [0.16791597]
------------------------------
Epoch: 10
Training loss: 0.04790492297003144 | Validation loss: 0.05141059495508671
Validation loss (ends of cycles): [0.16791597 0.05141059]
------------------------------
Epoch: 11
Training loss: 0.04692842046681203 | Validation loss: 0.05575957149267197
Validation loss (ends of cycles): [0.16791597 0.05141059]
------------------------------
Epoch: 12
Training loss: 0.04742170605612429 | Validation loss: 0.048377299681305885
Validation loss (ends of cycles): [0.16791597 0.05141059]
------------------------------
Epoch: 13
Training loss: 0.045442073164801845 | Validation loss: 0.05056057125329971
Validation loss (ends of cycles): [0.16791597 0.05141059]
------------------------------
Epoch: 14
Training loss: 0.049288692442994365 | Validation loss: 0.045949578285217285
Validation loss (ends of cycles): [0.16791597 0.05141059]
------------------------------
Epoch: 15
Training loss: 0.04942320875431362 | Validation loss: 0.046182602643966675
Validation loss (ends of cycles): [0.16791597 0.05141059]
------------------------------
Epoch: 16
Training loss: 0.048454557790568 | Validation loss: 0.061180008575320244
Validation loss (ends of cycles): [0.16791597 0.05141059]
------------------------------
Epoch: 17
Training loss: 0.04570011774960317 | Validation loss: 0.05048673413693905
Validation loss (ends of cycles): [0.16791597 0.05141059]
------------------------------
Epoch: 18
Training loss: 0.04413507094508723 | Validation loss: 0.049398086965084076
Validation loss (ends of cycles): [0.16791597 0.05141059]
------------------------------
Epoch: 19
Training loss: 0.03897152105836492 | Validation loss: 0.04086455702781677
Validation loss (ends of cycles): [0.16791597 0.05141059]
------------------------------
Epoch: 20
Training loss: 0.03762085471106203 | Validation loss: 0.04161454364657402
Validation loss (ends of cycles): [0.16791597 0.05141059 0.04161454]
------------------------------
Epoch: 21
Training loss: 0.03897195544682051 | Validation loss: 0.04463193938136101
Validation loss (ends of cycles): [0.16791597 0.05141059 0.04161454]
------------------------------
Epoch: 22
Training loss: 0.03908969050175265 | Validation loss: 0.047162629663944244
Validation loss (ends of cycles): [0.16791597 0.05141059 0.04161454]
------------------------------
Epoch: 23
Training loss: 0.03908744160281984 | Validation loss: 0.05538894981145859
Validation loss (ends of cycles): [0.16791597 0.05141059 0.04161454]
------------------------------
Epoch: 24
Training loss: 0.03990407954705389 | Validation loss: 0.04268927872180939
Validation loss (ends of cycles): [0.16791597 0.05141059 0.04161454]
------------------------------
Epoch: 25
Training loss: 0.03997622842067167 | Validation loss: 0.045263996347784996
Validation loss (ends of cycles): [0.16791597 0.05141059 0.04161454]
------------------------------
Epoch: 26
Training loss: 0.04153041404328848 | Validation loss: 0.042628781870007515
Validation loss (ends of cycles): [0.16791597 0.05141059 0.04161454]
------------------------------
Epoch: 27
Training loss: 0.034978562671887245 | Validation loss: 0.043797941878437996
Validation loss (ends of cycles): [0.16791597 0.05141059 0.04161454]
------------------------------
Epoch: 28
Training loss: 0.03331961276891984 | Validation loss: 0.038327883929014206
Validation loss (ends of cycles): [0.16791597 0.05141059 0.04161454]
------------------------------
Epoch: 29
Training loss: 0.03273361440944044 | Validation loss: 0.03366365935653448
Validation loss (ends of cycles): [0.16791597 0.05141059 0.04161454]
------------------------------
Epoch: 30
Training loss: 0.029028360584848804 | Validation loss: 0.0335803534835577
Validation loss (ends of cycles): [0.16791597 0.05141059 0.04161454 0.03358035]
--------------------------------------------------------------------------------
Seed: 1
--------------------------------------------------------------------------------
------------------------------
Epoch: 0
Training loss: 0.08880109026243813 | Validation loss: 0.08211258985102177
Validation loss (ends of cycles): [0.08211259]
------------------------------
Epoch: 1
Training loss: 0.08559589107569895 | Validation loss: 0.07982487976551056
Validation loss (ends of cycles): [0.08211259]
------------------------------
Epoch: 2
Training loss: 0.08120929841932498 | Validation loss: 0.07661886140704155
Validation loss (ends of cycles): [0.08211259]
------------------------------
Epoch: 3
Training loss: 0.07261521545679946 | Validation loss: 0.07628549262881279
Validation loss (ends of cycles): [0.08211259]
------------------------------
Epoch: 4
Training loss: 0.06738892393676858 | Validation loss: 0.06264040432870388
Validation loss (ends of cycles): [0.08211259]
------------------------------
Epoch: 5
Training loss: 0.06283781206921528 | Validation loss: 0.05332220159471035
Validation loss (ends of cycles): [0.08211259]
------------------------------
Epoch: 6
Training loss: 0.0563666181344735 | Validation loss: 0.052990976721048355
Validation loss (ends of cycles): [0.08211259]
------------------------------
Epoch: 7
Training loss: 0.053898568314157035 | Validation loss: 0.05359513498842716
Validation loss (ends of cycles): [0.08211259]
------------------------------
Epoch: 8
Training loss: 0.0495483853707188 | Validation loss: 0.04845046065747738
Validation loss (ends of cycles): [0.08211259]
------------------------------
Epoch: 9
Training loss: 0.04790328227375683 | Validation loss: 0.04489264823496342
Validation loss (ends of cycles): [0.08211259]
------------------------------
Epoch: 10
Training loss: 0.0470738457025666 | Validation loss: 0.044814372435212135
Validation loss (ends of cycles): [0.08211259 0.04481437]
------------------------------
Epoch: 11
Training loss: 0.04557334945390099 | Validation loss: 0.045520488172769547
Validation loss (ends of cycles): [0.08211259 0.04481437]
------------------------------
Epoch: 12
Training loss: 0.04488633739712991 | Validation loss: 0.045287614688277245
Validation loss (ends of cycles): [0.08211259 0.04481437]
------------------------------
Epoch: 13
Training loss: 0.04274003678246548 | Validation loss: 0.05250200070440769
Validation loss (ends of cycles): [0.08211259 0.04481437]
------------------------------
Epoch: 14
Training loss: 0.04469038634315917 | Validation loss: 0.06699041835963726
Validation loss (ends of cycles): [0.08211259 0.04481437]
------------------------------
Epoch: 15
Training loss: 0.04659118973895123 | Validation loss: 0.0444390494376421
Validation loss (ends of cycles): [0.08211259 0.04481437]
------------------------------
Epoch: 16
Training loss: 0.045118058786580435 | Validation loss: 0.04487036541104317
Validation loss (ends of cycles): [0.08211259 0.04481437]
------------------------------
Epoch: 17
Training loss: 0.04162646614407238 | Validation loss: 0.04729745723307133
Validation loss (ends of cycles): [0.08211259 0.04481437]
------------------------------
Epoch: 18
Training loss: 0.040547048084829986 | Validation loss: 0.039926101453602314
Validation loss (ends of cycles): [0.08211259 0.04481437]
------------------------------
Epoch: 19
Training loss: 0.03623779441573118 | Validation loss: 0.03939523547887802
Validation loss (ends of cycles): [0.08211259 0.04481437]
------------------------------
Epoch: 20
Training loss: 0.03616418807130111 | Validation loss: 0.03911025729030371
Validation loss (ends of cycles): [0.08211259 0.04481437 0.03911026]
------------------------------
Epoch: 21
Training loss: 0.035900585745510305 | Validation loss: 0.03916540555655956
Validation loss (ends of cycles): [0.08211259 0.04481437 0.03911026]
------------------------------
Epoch: 22
Training loss: 0.03519123988716226 | Validation loss: 0.03917317185550928
Validation loss (ends of cycles): [0.08211259 0.04481437 0.03911026]
------------------------------
Epoch: 23
Training loss: 0.03432324999257138 | Validation loss: 0.04085913486778736
Validation loss (ends of cycles): [0.08211259 0.04481437 0.03911026]
------------------------------
Epoch: 24
Training loss: 0.036748546616811505 | Validation loss: 0.041731780394911766
Validation loss (ends of cycles): [0.08211259 0.04481437 0.03911026]
------------------------------
Epoch: 25
Training loss: 0.037517647111886425 | Validation loss: 0.09109430015087128
Validation loss (ends of cycles): [0.08211259 0.04481437 0.03911026]
------------------------------
Epoch: 26
Training loss: 0.03717169126397685 | Validation loss: 0.04643022455275059
Validation loss (ends of cycles): [0.08211259 0.04481437 0.03911026]
------------------------------
Epoch: 27
Training loss: 0.03365675114879483 | Validation loss: 0.039381884038448334
Validation loss (ends of cycles): [0.08211259 0.04481437 0.03911026]
------------------------------
Epoch: 28
Training loss: 0.03359140867465421 | Validation loss: 0.035449360497295856
Validation loss (ends of cycles): [0.08211259 0.04481437 0.03911026]
------------------------------
Epoch: 29
Training loss: 0.029328686920435804 | Validation loss: 0.03771654795855284
Validation loss (ends of cycles): [0.08211259 0.04481437 0.03911026]
------------------------------
Epoch: 30
Training loss: 0.029815007001161575 | Validation loss: 0.035608227364718914
Validation loss (ends of cycles): [0.08211259 0.04481437 0.03911026 0.03560823]
--------------------------------------------------------------------------------
Seed: 2
--------------------------------------------------------------------------------
------------------------------
Epoch: 0
Training loss: 0.08840935884250535 | Validation loss: 0.08210575208067894
Validation loss (ends of cycles): [0.08210575]
------------------------------
Epoch: 1
Training loss: 0.08507650593916576 | Validation loss: 0.08108918741345406
Validation loss (ends of cycles): [0.08210575]
------------------------------
Epoch: 2
Training loss: 0.07737381735609637 | Validation loss: 0.0804343322912852
Validation loss (ends of cycles): [0.08210575]
------------------------------
Epoch: 3
Training loss: 0.06796439374900526 | Validation loss: 0.08102053900559743
Validation loss (ends of cycles): [0.08210575]
------------------------------
Epoch: 4
Training loss: 0.05701384869300657 | Validation loss: 0.06451516598463058
Validation loss (ends of cycles): [0.08210575]
------------------------------
Epoch: 5
Training loss: 0.04790849404202567 | Validation loss: 0.04559866711497307
Validation loss (ends of cycles): [0.08210575]
------------------------------
Epoch: 6
Training loss: 0.0425061976744069 | Validation loss: 0.05457633485396703
Validation loss (ends of cycles): [0.08210575]
------------------------------
Epoch: 7
Training loss: 0.03917014981723494 | Validation loss: 0.10548477371533711
Validation loss (ends of cycles): [0.08210575]
------------------------------
Epoch: 8
Training loss: 0.03815334942191839 | Validation loss: 0.07415188476443291
Validation loss (ends of cycles): [0.08210575]
------------------------------
Epoch: 9
Training loss: 0.03634079255991512 | Validation loss: 0.03436201065778732
Validation loss (ends of cycles): [0.08210575]
------------------------------
Epoch: 10
Training loss: 0.03238243547578653 | Validation loss: 0.03496560640633106
Validation loss (ends of cycles): [0.08210575 0.03496561]
------------------------------
Epoch: 11
Training loss: 0.031512254331674844 | Validation loss: 0.03387966255346934
Validation loss (ends of cycles): [0.08210575 0.03496561]
------------------------------
Epoch: 12
Training loss: 0.03323024997694625 | Validation loss: 0.06266245618462563
Validation loss (ends of cycles): [0.08210575 0.03496561]
------------------------------
Epoch: 13
Training loss: 0.03258724557235837 | Validation loss: 0.07572312156359355
Validation loss (ends of cycles): [0.08210575 0.03496561]
------------------------------
Epoch: 14
Training loss: 0.03392490858419074 | Validation loss: 0.08760666350523631
Validation loss (ends of cycles): [0.08210575 0.03496561]
------------------------------
Epoch: 15
Training loss: 0.032957878481182784 | Validation loss: 0.06539637347062428
Validation loss (ends of cycles): [0.08210575 0.03496561]
------------------------------
Epoch: 16
Training loss: 0.03240760095003578 | Validation loss: 0.030444981530308723
Validation loss (ends of cycles): [0.08210575 0.03496561]
------------------------------
Epoch: 17
Training loss: 0.030572137277987268 | Validation loss: 0.028518366316954296
Validation loss (ends of cycles): [0.08210575 0.03496561]
------------------------------
Epoch: 18
Training loss: 0.02843097411096096 | Validation loss: 0.028718551620841026
Validation loss (ends of cycles): [0.08210575 0.03496561]
------------------------------
Epoch: 19
Training loss: 0.025377622495094936 | Validation loss: 0.03340643892685572
Validation loss (ends of cycles): [0.08210575 0.03496561]
------------------------------
Epoch: 20
Training loss: 0.02524500247091055 | Validation loss: 0.027380989864468575
Validation loss (ends of cycles): [0.08210575 0.03496561 0.02738099]
------------------------------
Epoch: 21
Training loss: 0.025558336534433894 | Validation loss: 0.026460225383440655
Validation loss (ends of cycles): [0.08210575 0.03496561 0.02738099]
------------------------------
Epoch: 22
Training loss: 0.023630426679220464 | Validation loss: 0.028433510412772495
Validation loss (ends of cycles): [0.08210575 0.03496561 0.02738099]
------------------------------
Epoch: 23
Training loss: 0.02497979895108276 | Validation loss: 0.032619635264078774
Validation loss (ends of cycles): [0.08210575 0.03496561 0.02738099]
------------------------------
Epoch: 24
Training loss: 0.026406725351181295 | Validation loss: 0.035625407472252846
Validation loss (ends of cycles): [0.08210575 0.03496561 0.02738099]
------------------------------
Epoch: 25
Training loss: 0.026628990140226152 | Validation loss: 0.12885981798171997
Validation loss (ends of cycles): [0.08210575 0.03496561 0.02738099]
------------------------------
Epoch: 26
Training loss: 0.026922656533618767 | Validation loss: 0.055682502686977386
Validation loss (ends of cycles): [0.08210575 0.03496561 0.02738099]
------------------------------
Epoch: 27
Training loss: 0.025312546226713393 | Validation loss: 0.04670518139998118
Validation loss (ends of cycles): [0.08210575 0.03496561 0.02738099]
------------------------------
Epoch: 28
Training loss: 0.023591533665441804 | Validation loss: 0.075415700674057
Validation loss (ends of cycles): [0.08210575 0.03496561 0.02738099]
------------------------------
Epoch: 29
Training loss: 0.02143574645742774 | Validation loss: 0.021575671931107838
Validation loss (ends of cycles): [0.08210575 0.03496561 0.02738099]
------------------------------
Epoch: 30
Training loss: 0.019714292811436787 | Validation loss: 0.024841646663844585
Validation loss (ends of cycles): [0.08210575 0.03496561 0.02738099 0.02484165]
--------------------------------------------------------------------------------
Seed: 3
--------------------------------------------------------------------------------
------------------------------
Epoch: 0
Training loss: 0.08366356927313302 | Validation loss: 0.06628272930781047
Validation loss (ends of cycles): [0.06628273]
------------------------------
Epoch: 1
Training loss: 0.08258866577556259 | Validation loss: 0.06598960359891255
Validation loss (ends of cycles): [0.06628273]
------------------------------
Epoch: 2
Training loss: 0.08011062364829213 | Validation loss: 0.06656766682863235
Validation loss (ends of cycles): [0.06628273]
------------------------------
Epoch: 3
Training loss: 0.07609734664622106 | Validation loss: 0.07429493218660355
Validation loss (ends of cycles): [0.06628273]
------------------------------
Epoch: 4
Training loss: 0.06887183612898777 | Validation loss: 0.08531180272499721
Validation loss (ends of cycles): [0.06628273]
------------------------------
Epoch: 5
Training loss: 0.062297113827968896 | Validation loss: 0.05507988358537356
Validation loss (ends of cycles): [0.06628273]
------------------------------
Epoch: 6
Training loss: 0.057449167300211754 | Validation loss: 0.05169703687230746
Validation loss (ends of cycles): [0.06628273]
------------------------------
Epoch: 7
Training loss: 0.05063312598749211 | Validation loss: 0.044730848322312035
Validation loss (ends of cycles): [0.06628273]
------------------------------
Epoch: 8
Training loss: 0.04662537633588439 | Validation loss: 0.05200311293204626
Validation loss (ends of cycles): [0.06628273]
------------------------------
Epoch: 9
Training loss: 0.042545238313706296 | Validation loss: 0.040968768298625946
Validation loss (ends of cycles): [0.06628273]
------------------------------
Epoch: 10
Training loss: 0.040791098518591175 | Validation loss: 0.04076941559712092
Validation loss (ends of cycles): [0.06628273 0.04076942]
------------------------------
Epoch: 11
Training loss: 0.03988300430539407 | Validation loss: 0.04062818984190623
Validation loss (ends of cycles): [0.06628273 0.04076942]
------------------------------
Epoch: 12
Training loss: 0.041134035116747805 | Validation loss: 0.06347007056077321
Validation loss (ends of cycles): [0.06628273 0.04076942]
------------------------------
Epoch: 13
Training loss: 0.040732931933904946 | Validation loss: 0.053751084953546524
Validation loss (ends of cycles): [0.06628273 0.04076942]
------------------------------
Epoch: 14
Training loss: 0.0412690176775581 | Validation loss: 0.04342729101578394
Validation loss (ends of cycles): [0.06628273 0.04076942]
------------------------------
Epoch: 15
Training loss: 0.04279994739121512 | Validation loss: 0.03779313713312149
Validation loss (ends of cycles): [0.06628273 0.04076942]
------------------------------
Epoch: 16
Training loss: 0.04107574108791979 | Validation loss: 0.05226917316516241
Validation loss (ends of cycles): [0.06628273 0.04076942]
------------------------------
Epoch: 17
Training loss: 0.03682027963039122 | Validation loss: 0.04407886415719986
Validation loss (ends of cycles): [0.06628273 0.04076942]
------------------------------
Epoch: 18
Training loss: 0.03602115907951405 | Validation loss: 0.0353589312483867
Validation loss (ends of cycles): [0.06628273 0.04076942]
------------------------------
Epoch: 19
Training loss: 0.03072228841483593 | Validation loss: 0.03361495025455952
Validation loss (ends of cycles): [0.06628273 0.04076942]
------------------------------
Epoch: 20
Training loss: 0.03127634593922841 | Validation loss: 0.03227363092203935
Validation loss (ends of cycles): [0.06628273 0.04076942 0.03227363]
------------------------------
Epoch: 21
Training loss: 0.030668180729997784 | Validation loss: 0.03339084858695666
Validation loss (ends of cycles): [0.06628273 0.04076942 0.03227363]
------------------------------
Epoch: 22
Training loss: 0.03049330334914358 | Validation loss: 0.033799403036634125
Validation loss (ends of cycles): [0.06628273 0.04076942 0.03227363]
------------------------------
Epoch: 23
Training loss: 0.030560468372545745 | Validation loss: 0.052750845750172935
Validation loss (ends of cycles): [0.06628273 0.04076942 0.03227363]
------------------------------
Epoch: 24
Training loss: 0.032240519398137144 | Validation loss: 0.11552038788795471
Validation loss (ends of cycles): [0.06628273 0.04076942 0.03227363]
------------------------------
Epoch: 25
Training loss: 0.03203717256455045 | Validation loss: 0.03324045240879059
Validation loss (ends of cycles): [0.06628273 0.04076942 0.03227363]
------------------------------
Epoch: 26
Training loss: 0.03040875906222745 | Validation loss: 0.04665656387805939
Validation loss (ends of cycles): [0.06628273 0.04076942 0.03227363]
------------------------------
Epoch: 27
Training loss: 0.029416492551957305 | Validation loss: 0.04413521351913611
Validation loss (ends of cycles): [0.06628273 0.04076942 0.03227363]
------------------------------
Epoch: 28
Training loss: 0.029010920001095848 | Validation loss: 0.03647958238919576
Validation loss (ends of cycles): [0.06628273 0.04076942 0.03227363]
------------------------------
Epoch: 29
Training loss: 0.02751604928389976 | Validation loss: 0.050870560109615326
Validation loss (ends of cycles): [0.06628273 0.04076942 0.03227363]
------------------------------
Epoch: 30
Training loss: 0.02429521475967608 | Validation loss: 0.028451986610889435
Validation loss (ends of cycles): [0.06628273 0.04076942 0.03227363 0.02845199]
--------------------------------------------------------------------------------
Seed: 4
--------------------------------------------------------------------------------
------------------------------
Epoch: 0
Training loss: 0.19473344401309364 | Validation loss: 0.15656746923923492
Validation loss (ends of cycles): [0.15656747]
------------------------------
Epoch: 1
Training loss: 0.18613588496258385 | Validation loss: 0.14746378362178802
Validation loss (ends of cycles): [0.15656747]
------------------------------
Epoch: 2
Training loss: 0.16706412207139165 | Validation loss: 0.12539705261588097
Validation loss (ends of cycles): [0.15656747]
------------------------------
Epoch: 3
Training loss: 0.13910072571352908 | Validation loss: 0.08593828231096268
Validation loss (ends of cycles): [0.15656747]
------------------------------
Epoch: 4
Training loss: 0.10771784460858295 | Validation loss: 0.06520361453294754
Validation loss (ends of cycles): [0.15656747]
------------------------------
Epoch: 5
Training loss: 0.07648744355691106 | Validation loss: 0.07435402646660805
Validation loss (ends of cycles): [0.15656747]
------------------------------
Epoch: 6
Training loss: 0.06046923661702558 | Validation loss: 0.08778238669037819
Validation loss (ends of cycles): [0.15656747]
------------------------------
Epoch: 7
Training loss: 0.05391533515955273 | Validation loss: 0.06520635634660721
Validation loss (ends of cycles): [0.15656747]
------------------------------
Epoch: 8
Training loss: 0.04810122106420366 | Validation loss: 0.06413957849144936
Validation loss (ends of cycles): [0.15656747]
------------------------------
Epoch: 9
Training loss: 0.044574140816142686 | Validation loss: 0.045684026554226875
Validation loss (ends of cycles): [0.15656747]
------------------------------
Epoch: 10
Training loss: 0.045094708470921764 | Validation loss: 0.04497688636183739
Validation loss (ends of cycles): [0.15656747 0.04497689]
------------------------------
Epoch: 11
Training loss: 0.043865959503148734 | Validation loss: 0.04489790461957455
Validation loss (ends of cycles): [0.15656747 0.04497689]
------------------------------
Epoch: 12
Training loss: 0.044169505273825245 | Validation loss: 0.04815280996263027
Validation loss (ends of cycles): [0.15656747 0.04497689]
------------------------------
Epoch: 13
Training loss: 0.04281844169293579 | Validation loss: 0.08249081298708916
Validation loss (ends of cycles): [0.15656747 0.04497689]
------------------------------
Epoch: 14
Training loss: 0.043214576024758186 | Validation loss: 0.05961386486887932
Validation loss (ends of cycles): [0.15656747 0.04497689]
------------------------------
Epoch: 15
Training loss: 0.042521203917108084 | Validation loss: 0.0713415015488863
Validation loss (ends of cycles): [0.15656747 0.04497689]
------------------------------
Epoch: 16
Training loss: 0.0419846284938486 | Validation loss: 0.07541047409176826
Validation loss (ends of cycles): [0.15656747 0.04497689]
------------------------------
Epoch: 17
Training loss: 0.03957709375964968 | Validation loss: 0.05406338535249233
Validation loss (ends of cycles): [0.15656747 0.04497689]
------------------------------
Epoch: 18
Training loss: 0.035520012049298534 | Validation loss: 0.10436020791530609
Validation loss (ends of cycles): [0.15656747 0.04497689]
------------------------------
Epoch: 19
Training loss: 0.034939819456715336 | Validation loss: 0.03864527679979801
Validation loss (ends of cycles): [0.15656747 0.04497689]
------------------------------
Epoch: 20
Training loss: 0.03136764094233513 | Validation loss: 0.03485617786645889
Validation loss (ends of cycles): [0.15656747 0.04497689 0.03485618]
------------------------------
Epoch: 21
Training loss: 0.031533444790463695 | Validation loss: 0.032069167122244835
Validation loss (ends of cycles): [0.15656747 0.04497689 0.03485618]
------------------------------
Epoch: 22
Training loss: 0.03128002505553396 | Validation loss: 0.03990967012941837
Validation loss (ends of cycles): [0.15656747 0.04497689 0.03485618]
------------------------------
Epoch: 23
Training loss: 0.032222791231776536 | Validation loss: 0.06194067373871803
Validation loss (ends of cycles): [0.15656747 0.04497689 0.03485618]
------------------------------
Epoch: 24
Training loss: 0.030879733495806392 | Validation loss: 0.04783654771745205
Validation loss (ends of cycles): [0.15656747 0.04497689 0.03485618]
------------------------------
Epoch: 25
Training loss: 0.034811057640533695 | Validation loss: 0.19700831919908524
Validation loss (ends of cycles): [0.15656747 0.04497689 0.03485618]
------------------------------
Epoch: 26
Training loss: 0.03208483088957636 | Validation loss: 0.04298360459506512
Validation loss (ends of cycles): [0.15656747 0.04497689 0.03485618]
------------------------------
Epoch: 27
Training loss: 0.02921375377397788 | Validation loss: 0.04345952346920967
Validation loss (ends of cycles): [0.15656747 0.04497689 0.03485618]
------------------------------
Epoch: 28
Training loss: 0.02759896424648009 | Validation loss: 0.030260787345468998
Validation loss (ends of cycles): [0.15656747 0.04497689 0.03485618]
------------------------------
Epoch: 29
Training loss: 0.02629519714728782 | Validation loss: 0.04085123725235462
Validation loss (ends of cycles): [0.15656747 0.04497689 0.03485618]
------------------------------
Epoch: 30
Training loss: 0.02536522258857363 | Validation loss: 0.028398994356393814
Validation loss (ends of cycles): [0.15656747 0.04497689 0.03485618 0.02839899]
--------------------------------------------------------------------------------
Seed: 5
--------------------------------------------------------------------------------
------------------------------
Epoch: 0
Training loss: 0.09135209562049972 | Validation loss: 0.06946399062871933
Validation loss (ends of cycles): [0.06946399]
------------------------------
Epoch: 1
Training loss: 0.0878267308904065 | Validation loss: 0.0675109475851059
Validation loss (ends of cycles): [0.06946399]
------------------------------
Epoch: 2
Training loss: 0.07988109977708922 | Validation loss: 0.06493373587727547
Validation loss (ends of cycles): [0.06946399]
------------------------------
Epoch: 3
Training loss: 0.0666891232960754 | Validation loss: 0.06385781802237034
Validation loss (ends of cycles): [0.06946399]
------------------------------
Epoch: 4
Training loss: 0.05818599214156469 | Validation loss: 0.06167275831103325
Validation loss (ends of cycles): [0.06946399]
------------------------------
Epoch: 5
Training loss: 0.05256716679367754 | Validation loss: 0.07637954130768776
Validation loss (ends of cycles): [0.06946399]
------------------------------
Epoch: 6
Training loss: 0.049445088331898056 | Validation loss: 0.22612106055021286
Validation loss (ends of cycles): [0.06946399]
------------------------------
Epoch: 7
Training loss: 0.045817383771969214 | Validation loss: 0.050643378868699074
Validation loss (ends of cycles): [0.06946399]
------------------------------
Epoch: 8
Training loss: 0.040001612777511276 | Validation loss: 0.0446147657930851
Validation loss (ends of cycles): [0.06946399]
------------------------------
Epoch: 9
Training loss: 0.03829288575798273 | Validation loss: 0.04114661552011967
Validation loss (ends of cycles): [0.06946399]
------------------------------
Epoch: 10
Training loss: 0.03613738570776251 | Validation loss: 0.040317755192518234
Validation loss (ends of cycles): [0.06946399 0.04031776]
------------------------------
Epoch: 11
Training loss: 0.034891982562839985 | Validation loss: 0.039226071909070015
Validation loss (ends of cycles): [0.06946399 0.04031776]
------------------------------
Epoch: 12
Training loss: 0.03366432442433304 | Validation loss: 0.046774642542004585
Validation loss (ends of cycles): [0.06946399 0.04031776]
------------------------------
Epoch: 13
Training loss: 0.03640781891428762 | Validation loss: 0.04789281450212002
Validation loss (ends of cycles): [0.06946399 0.04031776]
------------------------------
Epoch: 14
Training loss: 0.03705685358080599 | Validation loss: 0.05857366323471069
Validation loss (ends of cycles): [0.06946399 0.04031776]
------------------------------
Epoch: 15
Training loss: 0.035437823894123234 | Validation loss: 0.07393408939242363
Validation loss (ends of cycles): [0.06946399 0.04031776]
------------------------------
Epoch: 16
Training loss: 0.03706711303028795 | Validation loss: 0.06966803222894669
Validation loss (ends of cycles): [0.06946399 0.04031776]
------------------------------
Epoch: 17
Training loss: 0.03401203018923601 | Validation loss: 0.04148573614656925
Validation loss (ends of cycles): [0.06946399 0.04031776]
------------------------------
Epoch: 18
Training loss: 0.03279454085148043 | Validation loss: 0.03339186776429415
Validation loss (ends of cycles): [0.06946399 0.04031776]
------------------------------
Epoch: 19
Training loss: 0.029390734827352896 | Validation loss: 0.03046796005219221
Validation loss (ends of cycles): [0.06946399 0.04031776]
------------------------------
Epoch: 20
Training loss: 0.029600716920362577 | Validation loss: 0.03065457008779049
Validation loss (ends of cycles): [0.06946399 0.04031776 0.03065457]
------------------------------
Epoch: 21
Training loss: 0.029213293352060847 | Validation loss: 0.031029099598526955
Validation loss (ends of cycles): [0.06946399 0.04031776 0.03065457]
------------------------------
Epoch: 22
Training loss: 0.028610273264348507 | Validation loss: 0.030708318576216698
Validation loss (ends of cycles): [0.06946399 0.04031776 0.03065457]
------------------------------
Epoch: 23
Training loss: 0.028793518121043842 | Validation loss: 0.046449968591332436
Validation loss (ends of cycles): [0.06946399 0.04031776 0.03065457]
------------------------------
Epoch: 24
Training loss: 0.029298070300784376 | Validation loss: 0.04168690741062164
Validation loss (ends of cycles): [0.06946399 0.04031776 0.03065457]
------------------------------
Epoch: 25
Training loss: 0.030492768364234105 | Validation loss: 0.0755428783595562
Validation loss (ends of cycles): [0.06946399 0.04031776 0.03065457]
------------------------------
Epoch: 26
Training loss: 0.030982901031772297 | Validation loss: 0.030249490402638912
Validation loss (ends of cycles): [0.06946399 0.04031776 0.03065457]
------------------------------
Epoch: 27
Training loss: 0.028873344521141715 | Validation loss: 0.03989887796342373
Validation loss (ends of cycles): [0.06946399 0.04031776 0.03065457]
------------------------------
Epoch: 28
Training loss: 0.02943373481846518 | Validation loss: 0.029455197043716908
Validation loss (ends of cycles): [0.06946399 0.04031776 0.03065457]
------------------------------
Epoch: 29
Training loss: 0.025847461229811113 | Validation loss: 0.03161353338509798
Validation loss (ends of cycles): [0.06946399 0.04031776 0.03065457]
------------------------------
Epoch: 30
Training loss: 0.024220067593786452 | Validation loss: 0.027339047752320766
Validation loss (ends of cycles): [0.06946399 0.04031776 0.03065457 0.02733905]
--------------------------------------------------------------------------------
Seed: 6
--------------------------------------------------------------------------------
------------------------------
Epoch: 0
Training loss: 0.08758864708636936 | Validation loss: 0.09045941010117531
Validation loss (ends of cycles): [0.09045941]
------------------------------
Epoch: 1
Training loss: 0.08511891274860031 | Validation loss: 0.08673127368092537
Validation loss (ends of cycles): [0.09045941]
------------------------------
Epoch: 2
Training loss: 0.0793429758203657 | Validation loss: 0.08125056326389313
Validation loss (ends of cycles): [0.09045941]
------------------------------
Epoch: 3
Training loss: 0.07344777999739897 | Validation loss: 0.07784003764390945
Validation loss (ends of cycles): [0.09045941]
------------------------------
Epoch: 4
Training loss: 0.06621623725483292 | Validation loss: 0.08580468967556953
Validation loss (ends of cycles): [0.09045941]
------------------------------
Epoch: 5
Training loss: 0.06146987037439095 | Validation loss: 0.08310152217745781
Validation loss (ends of cycles): [0.09045941]
------------------------------
Epoch: 6
Training loss: 0.05917700419300481 | Validation loss: 0.06960192322731018
Validation loss (ends of cycles): [0.09045941]
------------------------------
Epoch: 7
Training loss: 0.054859228236110585 | Validation loss: 0.067630959674716
Validation loss (ends of cycles): [0.09045941]
------------------------------
Epoch: 8
Training loss: 0.05065024114753071 | Validation loss: 0.055472830310463905
Validation loss (ends of cycles): [0.09045941]
------------------------------
Epoch: 9
Training loss: 0.04813684611336181 | Validation loss: 0.05709882639348507
Validation loss (ends of cycles): [0.09045941]
------------------------------
Epoch: 10
Training loss: 0.04541594505702194 | Validation loss: 0.053741781041026115
Validation loss (ends of cycles): [0.09045941 0.05374178]
------------------------------
Epoch: 11
Training loss: 0.043925387784838676 | Validation loss: 0.055874619632959366
Validation loss (ends of cycles): [0.09045941 0.05374178]
------------------------------
Epoch: 12
Training loss: 0.043821888142510465 | Validation loss: 0.05342106893658638
Validation loss (ends of cycles): [0.09045941 0.05374178]
------------------------------
Epoch: 13
Training loss: 0.04354088537787136 | Validation loss: 0.05569586902856827
Validation loss (ends of cycles): [0.09045941 0.05374178]
------------------------------
Epoch: 14
Training loss: 0.04596223790002497 | Validation loss: 0.08136944100260735
Validation loss (ends of cycles): [0.09045941 0.05374178]
------------------------------
Epoch: 15
Training loss: 0.04429164842555398 | Validation loss: 0.056143974885344505
Validation loss (ends of cycles): [0.09045941 0.05374178]
------------------------------
Epoch: 16
Training loss: 0.04604476484421052 | Validation loss: 0.058117739856243134
Validation loss (ends of cycles): [0.09045941 0.05374178]
------------------------------
Epoch: 17
Training loss: 0.04353812356528483 | Validation loss: 0.06109851598739624
Validation loss (ends of cycles): [0.09045941 0.05374178]
------------------------------
Epoch: 18
Training loss: 0.03982813224980706 | Validation loss: 0.04927295260131359
Validation loss (ends of cycles): [0.09045941 0.05374178]
------------------------------
Epoch: 19
Training loss: 0.03627215433669718 | Validation loss: 0.047167809680104256
Validation loss (ends of cycles): [0.09045941 0.05374178]
------------------------------
Epoch: 20
Training loss: 0.034992945233457966 | Validation loss: 0.047399308532476425
Validation loss (ends of cycles): [0.09045941 0.05374178 0.04739931]
------------------------------
Epoch: 21
Training loss: 0.03621590147285085 | Validation loss: 0.04751015082001686
Validation loss (ends of cycles): [0.09045941 0.05374178 0.04739931]
------------------------------
Epoch: 22
Training loss: 0.033725014152495486 | Validation loss: 0.047297827899456024
Validation loss (ends of cycles): [0.09045941 0.05374178 0.04739931]
------------------------------
Epoch: 23
Training loss: 0.03643197546664037 | Validation loss: 0.045731207355856895
Validation loss (ends of cycles): [0.09045941 0.05374178 0.04739931]
------------------------------
Epoch: 24
Training loss: 0.03781851704575514 | Validation loss: 0.04651406966149807
Validation loss (ends of cycles): [0.09045941 0.05374178 0.04739931]
------------------------------
Epoch: 25
Training loss: 0.03732413926014775 | Validation loss: 0.04698087275028229
Validation loss (ends of cycles): [0.09045941 0.05374178 0.04739931]
------------------------------
Epoch: 26
Training loss: 0.037296586522930546 | Validation loss: 0.04507102258503437
Validation loss (ends of cycles): [0.09045941 0.05374178 0.04739931]
------------------------------
Epoch: 27
Training loss: 0.03556570381318268 | Validation loss: 0.042184218764305115
Validation loss (ends of cycles): [0.09045941 0.05374178 0.04739931]
------------------------------
Epoch: 28
Training loss: 0.030846391461397473 | Validation loss: 0.04002702981233597
Validation loss (ends of cycles): [0.09045941 0.05374178 0.04739931]
------------------------------
Epoch: 29
Training loss: 0.03090105475367684 | Validation loss: 0.038508640602231026
Validation loss (ends of cycles): [0.09045941 0.05374178 0.04739931]
------------------------------
Epoch: 30
Training loss: 0.028801383156525463 | Validation loss: 0.03822813369333744
Validation loss (ends of cycles): [0.09045941 0.05374178 0.04739931 0.03822813]
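Note how the "Validation loss (ends of cycles)" array above grows by one entry every 10 epochs (epochs 0, 10, 20, 30), i.e. once per full triangular cycle of the cyclical learning-rate schedule (5 epochs up, 5 down, one scheduler step per epoch). A minimal sketch of that bookkeeping with PyTorch's CyclicLR follows; model, train_dl, valid_dl, the Adam optimizer, and the MSE loss are illustrative assumptions, not the notebook's Learners internals.

import torch
from torch import nn

def train_with_cycle_log(model, train_dl, valid_dl, n_epochs=31,
                         cycle_len=10, device='cpu'):
    # step_size_up = cycle_len // 2 with one scheduler step per epoch gives
    # a triangular cycle of `cycle_len` epochs, so cycle ends fall on
    # epochs 0, 10, 20, ... exactly as in the log above.
    opt = torch.optim.Adam(model.parameters())
    sched = torch.optim.lr_scheduler.CyclicLR(
        opt, base_lr=3e-5, max_lr=1e-3, step_size_up=cycle_len // 2,
        mode='triangular', cycle_momentum=False)
    loss_fn = nn.MSELoss()
    cycle_end_losses = []
    model.to(device)
    for epoch in range(n_epochs):
        model.train()
        for xb, yb in train_dl:
            xb, yb = xb.to(device), yb.to(device)
            opt.zero_grad()
            loss_fn(model(xb), yb).backward()
            opt.step()
        sched.step()
        # Evaluate once per epoch; record only at the end of each cycle.
        model.eval()
        total, n = 0.0, 0
        with torch.no_grad():
            for xb, yb in valid_dl:
                xb, yb = xb.to(device), yb.to(device)
                total += loss_fn(model(xb), yb).item() * len(xb)
                n += len(xb)
        valid_loss = total / n
        if epoch % cycle_len == 0:
            cycle_end_losses.append(valid_loss)
        print(f'Epoch: {epoch} | Validation loss: {valid_loss}')
    return cycle_end_losses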
--------------------------------------------------------------------------------
Seed: 7
--------------------------------------------------------------------------------
------------------------------
Epoch: 0
Training loss: 0.08331045665239033 | Validation loss: 0.06520787129799525
Validation loss (ends of cycles): [0.06520787]
------------------------------
Epoch: 1
Training loss: 0.08203087198106866 | Validation loss: 0.06532517820596695
Validation loss (ends of cycles): [0.06520787]
------------------------------
Epoch: 2
Training loss: 0.08000684745217625 | Validation loss: 0.06518079092105229
Validation loss (ends of cycles): [0.06520787]
------------------------------
Epoch: 3
Training loss: 0.0739791032515074 | Validation loss: 0.06506842374801636
Validation loss (ends of cycles): [0.06520787]
------------------------------
Epoch: 4
Training loss: 0.06385869787711847 | Validation loss: 0.05250853920976321
Validation loss (ends of cycles): [0.06520787]
------------------------------
Epoch: 5
Training loss: 0.057416989615089016 | Validation loss: 0.05340991293390592
Validation loss (ends of cycles): [0.06520787]
------------------------------
Epoch: 6
Training loss: 0.052579465861383234 | Validation loss: 0.029369194293394685
Validation loss (ends of cycles): [0.06520787]
------------------------------
Epoch: 7
Training loss: 0.04429109718062376 | Validation loss: 0.04665235554178556
Validation loss (ends of cycles): [0.06520787]
------------------------------
Epoch: 8
Training loss: 0.04031965685518164 | Validation loss: 0.06348493695259094
Validation loss (ends of cycles): [0.06520787]
------------------------------
Epoch: 9
Training loss: 0.03789979886067541 | Validation loss: 0.03177877189591527
Validation loss (ends of cycles): [0.06520787]
------------------------------
Epoch: 10
Training loss: 0.03492777098558451 | Validation loss: 0.030528849456459284
Validation loss (ends of cycles): [0.06520787 0.03052885]
------------------------------
Epoch: 11
Training loss: 0.03243052214384079 | Validation loss: 0.025340224984878052
Validation loss (ends of cycles): [0.06520787 0.03052885]
------------------------------
Epoch: 12
Training loss: 0.033256117744665394 | Validation loss: 0.030804906350870926
Validation loss (ends of cycles): [0.06520787 0.03052885]
------------------------------
Epoch: 13
Training loss: 0.03333615707723718 | Validation loss: 0.035566401512672506
Validation loss (ends of cycles): [0.06520787 0.03052885]
------------------------------
Epoch: 14
Training loss: 0.03184130630995098 | Validation loss: 0.03293635222750405
Validation loss (ends of cycles): [0.06520787 0.03052885]
------------------------------
Epoch: 15
Training loss: 0.03302473133723987 | Validation loss: 0.028592127503846616
Validation loss (ends of cycles): [0.06520787 0.03052885]
------------------------------
Epoch: 16
Training loss: 0.030961162704778344 | Validation loss: 0.02682249341160059
Validation loss (ends of cycles): [0.06520787 0.03052885]
------------------------------
Epoch: 17
Training loss: 0.028243662789463997 | Validation loss: 0.027744205047686894
Validation loss (ends of cycles): [0.06520787 0.03052885]
------------------------------
Epoch: 18
Training loss: 0.02676465332900223 | Validation loss: 0.02321198567127188
Validation loss (ends of cycles): [0.06520787 0.03052885]
------------------------------
Epoch: 19
Training loss: 0.02272553192941766 | Validation loss: 0.02984648073712985
Validation loss (ends of cycles): [0.06520787 0.03052885]
------------------------------
Epoch: 20
Training loss: 0.021555469567446334 | Validation loss: 0.023247383224467438
Validation loss (ends of cycles): [0.06520787 0.03052885 0.02324738]
------------------------------
Epoch: 21
Training loss: 0.0220749583340397 | Validation loss: 0.021610831453775365
Validation loss (ends of cycles): [0.06520787 0.03052885 0.02324738]
------------------------------
Epoch: 22
Training loss: 0.020930324485035318 | Validation loss: 0.023576893222828705
Validation loss (ends of cycles): [0.06520787 0.03052885 0.02324738]
------------------------------
Epoch: 23
Training loss: 0.021814725116679545 | Validation loss: 0.025978229319055874
Validation loss (ends of cycles): [0.06520787 0.03052885 0.02324738]
------------------------------
Epoch: 24
Training loss: 0.02283184808727942 | Validation loss: 0.025270794207851093
Validation loss (ends of cycles): [0.06520787 0.03052885 0.02324738]
------------------------------
Epoch: 25
Training loss: 0.02307317018704979 | Validation loss: 0.08468196541070938
Validation loss (ends of cycles): [0.06520787 0.03052885 0.02324738]
------------------------------
Epoch: 26
Training loss: 0.021557157663138288 | Validation loss: 0.027905408913890522
Validation loss (ends of cycles): [0.06520787 0.03052885 0.02324738]
------------------------------
Epoch: 27
Training loss: 0.021273777182949215 | Validation loss: 0.02349347559114297
Validation loss (ends of cycles): [0.06520787 0.03052885 0.02324738]
------------------------------
Epoch: 28
Training loss: 0.01986664131675896 | Validation loss: 0.0233243799302727
Validation loss (ends of cycles): [0.06520787 0.03052885 0.02324738]
------------------------------
Epoch: 29
Training loss: 0.017681540676245566 | Validation loss: 0.022845523431897163
Validation loss (ends of cycles): [0.06520787 0.03052885 0.02324738]
------------------------------
Epoch: 30
Training loss: 0.016350375782502324 | Validation loss: 0.02178852337722977
Validation loss (ends of cycles): [0.06520787 0.03052885 0.02324738 0.02178852]
--------------------------------------------------------------------------------
Seed: 8
--------------------------------------------------------------------------------
------------------------------
Epoch: 0
Training loss: 0.14074529629004628 | Validation loss: 0.13468989357352257
Validation loss (ends of cycles): [0.13468989]
------------------------------
Epoch: 1
Training loss: 0.13303319051077492 | Validation loss: 0.1283198483288288
Validation loss (ends of cycles): [0.13468989]
------------------------------
Epoch: 2
Training loss: 0.11730245461589411 | Validation loss: 0.1113440953195095
Validation loss (ends of cycles): [0.13468989]
------------------------------
Epoch: 3
Training loss: 0.09925832560187892 | Validation loss: 0.0883958488702774
Validation loss (ends of cycles): [0.13468989]
------------------------------
Epoch: 4
Training loss: 0.08102804579232868 | Validation loss: 0.0767072718590498
Validation loss (ends of cycles): [0.13468989]
------------------------------
Epoch: 5
Training loss: 0.06754080539471224 | Validation loss: 0.0630732923746109
Validation loss (ends of cycles): [0.13468989]
------------------------------
Epoch: 6
Training loss: 0.0626953479490782 | Validation loss: 0.05822800286114216
Validation loss (ends of cycles): [0.13468989]
------------------------------
Epoch: 7
Training loss: 0.05980836462817694 | Validation loss: 0.057572031393647194
Validation loss (ends of cycles): [0.13468989]
------------------------------
Epoch: 8
Training loss: 0.05771961749384278 | Validation loss: 0.06076772231608629
Validation loss (ends of cycles): [0.13468989]
------------------------------
Epoch: 9
Training loss: 0.0532009103580525 | Validation loss: 0.05623008869588375
Validation loss (ends of cycles): [0.13468989]
------------------------------
Epoch: 10
Training loss: 0.05379903512565713 | Validation loss: 0.05519082024693489
Validation loss (ends of cycles): [0.13468989 0.05519082]
------------------------------
Epoch: 11
Training loss: 0.052072555807076 | Validation loss: 0.05436007305979729
Validation loss (ends of cycles): [0.13468989 0.05519082]
------------------------------
Epoch: 12
Training loss: 0.051279104265727495 | Validation loss: 0.06033742055296898
Validation loss (ends of cycles): [0.13468989 0.05519082]
------------------------------
Epoch: 13
Training loss: 0.05221212537665116 | Validation loss: 0.05254641734063625
Validation loss (ends of cycles): [0.13468989 0.05519082]
------------------------------
Epoch: 14
Training loss: 0.052044751024559924 | Validation loss: 0.05064303055405617
Validation loss (ends of cycles): [0.13468989 0.05519082]
------------------------------
Epoch: 15
Training loss: 0.04771482081789719 | Validation loss: 0.18143466114997864
Validation loss (ends of cycles): [0.13468989 0.05519082]
------------------------------
Epoch: 16
Training loss: 0.048779759654089025 | Validation loss: 0.049892572686076164
Validation loss (ends of cycles): [0.13468989 0.05519082]
------------------------------
Epoch: 17
Training loss: 0.04602725470536634 | Validation loss: 0.0489846533164382
Validation loss (ends of cycles): [0.13468989 0.05519082]
------------------------------
Epoch: 18
Training loss: 0.043393145756501904 | Validation loss: 0.04021776653826237
Validation loss (ends of cycles): [0.13468989 0.05519082]
------------------------------
Epoch: 19
Training loss: 0.037118491863733845 | Validation loss: 0.043297613970935345
Validation loss (ends of cycles): [0.13468989 0.05519082]
------------------------------
Epoch: 20
Training loss: 0.036233729535811825 | Validation loss: 0.04204665496945381
Validation loss (ends of cycles): [0.13468989 0.05519082 0.04204665]
------------------------------
Epoch: 21
Training loss: 0.036370223486109785 | Validation loss: 0.04038366116583347
Validation loss (ends of cycles): [0.13468989 0.05519082 0.04204665]
------------------------------
Epoch: 22
Training loss: 0.03642301369262369 | Validation loss: 0.04990543611347675
Validation loss (ends of cycles): [0.13468989 0.05519082 0.04204665]
------------------------------
Epoch: 23
Training loss: 0.03510999748189198 | Validation loss: 0.035950854420661926
Validation loss (ends of cycles): [0.13468989 0.05519082 0.04204665]
------------------------------
Epoch: 24
Training loss: 0.03829076278366541 | Validation loss: 0.08307567611336708
Validation loss (ends of cycles): [0.13468989 0.05519082 0.04204665]
------------------------------
Epoch: 25
Training loss: 0.04044385832783423 | Validation loss: 0.047016918659210205
Validation loss (ends of cycles): [0.13468989 0.05519082 0.04204665]
------------------------------
Epoch: 26
Training loss: 0.037926588403551204 | Validation loss: 0.037543052807450294
Validation loss (ends of cycles): [0.13468989 0.05519082 0.04204665]
------------------------------
Epoch: 27
Training loss: 0.034744165445628916 | Validation loss: 0.05590544827282429
Validation loss (ends of cycles): [0.13468989 0.05519082 0.04204665]
------------------------------
Epoch: 28
Training loss: 0.03150788292680916 | Validation loss: 0.03907494433224201
Validation loss (ends of cycles): [0.13468989 0.05519082 0.04204665]
------------------------------
Epoch: 29
Training loss: 0.029333851839366713 | Validation loss: 0.030293073505163193
Validation loss (ends of cycles): [0.13468989 0.05519082 0.04204665]
------------------------------
Epoch: 30
Training loss: 0.027970748414334497 | Validation loss: 0.030819999054074287
Validation loss (ends of cycles): [0.13468989 0.05519082 0.04204665 0.03082 ]
--------------------------------------------------------------------------------
Seed: 9
--------------------------------------------------------------------------------
------------------------------
Epoch: 0
Training loss: 0.12100787852939807 | Validation loss: 0.14878312995036444
Validation loss (ends of cycles): [0.14878313]
------------------------------
Epoch: 1
Training loss: 0.1156453094200084 | Validation loss: 0.14218211422363916
Validation loss (ends of cycles): [0.14878313]
------------------------------
Epoch: 2
Training loss: 0.10426287549106698 | Validation loss: 0.12800817439953485
Validation loss (ends of cycles): [0.14878313]
------------------------------
Epoch: 3
Training loss: 0.08943178112569608 | Validation loss: 0.10652916630109151
Validation loss (ends of cycles): [0.14878313]
------------------------------
Epoch: 4
Training loss: 0.07398556525769986 | Validation loss: 0.0644477941095829
Validation loss (ends of cycles): [0.14878313]
------------------------------
Epoch: 5
Training loss: 0.06363658705040028 | Validation loss: 0.05572609603404999
Validation loss (ends of cycles): [0.14878313]
------------------------------
Epoch: 6
Training loss: 0.06081610623943178 | Validation loss: 0.09725653131802876
Validation loss (ends of cycles): [0.14878313]
------------------------------
Epoch: 7
Training loss: 0.05732113220974019 | Validation loss: 0.06353205566604932
Validation loss (ends of cycles): [0.14878313]
------------------------------
Epoch: 8
Training loss: 0.053295500772564036 | Validation loss: 0.05046262095371882
Validation loss (ends of cycles): [0.14878313]
------------------------------
Epoch: 9
Training loss: 0.05417528699495291 | Validation loss: 0.047901748990019165
Validation loss (ends of cycles): [0.14878313]
------------------------------
Epoch: 10
Training loss: 0.0489277304395249 | Validation loss: 0.046974229936798416
Validation loss (ends of cycles): [0.14878313 0.04697423]
------------------------------
Epoch: 11
Training loss: 0.05104770844704226 | Validation loss: 0.046928669015566506
Validation loss (ends of cycles): [0.14878313 0.04697423]
------------------------------
Epoch: 12
Training loss: 0.04871545045783645 | Validation loss: 0.04629917008181413
Validation loss (ends of cycles): [0.14878313 0.04697423]
------------------------------
Epoch: 13
Training loss: 0.04950790577813199 | Validation loss: 0.06741906702518463
Validation loss (ends of cycles): [0.14878313 0.04697423]
------------------------------
Epoch: 14
Training loss: 0.050524057428303515 | Validation loss: 0.0722799909611543
Validation loss (ends of cycles): [0.14878313 0.04697423]
------------------------------
Epoch: 15
Training loss: 0.051111385226249695 | Validation loss: 0.08795048048098882
Validation loss (ends of cycles): [0.14878313 0.04697423]
------------------------------
Epoch: 16
Training loss: 0.049861222603603414 | Validation loss: 0.04207110404968262
Validation loss (ends of cycles): [0.14878313 0.04697423]
------------------------------
Epoch: 17
Training loss: 0.04603122537465472 | Validation loss: 0.03900467542310556
Validation loss (ends of cycles): [0.14878313 0.04697423]
------------------------------
Epoch: 18
Training loss: 0.043004892276305905 | Validation loss: 0.0376884446789821
Validation loss (ends of cycles): [0.14878313 0.04697423]
------------------------------
Epoch: 19
Training loss: 0.03993555070146134 | Validation loss: 0.03346223756670952
Validation loss (ends of cycles): [0.14878313 0.04697423]
------------------------------
Epoch: 20
Training loss: 0.038372725444404704 | Validation loss: 0.033292777525881924
Validation loss (ends of cycles): [0.14878313 0.04697423 0.03329278]
------------------------------
Epoch: 21
Training loss: 0.03876232497982288 | Validation loss: 0.03255048921952645
Validation loss (ends of cycles): [0.14878313 0.04697423 0.03329278]
------------------------------
Epoch: 22
Training loss: 0.038259138892355715 | Validation loss: 0.03326376589636008
Validation loss (ends of cycles): [0.14878313 0.04697423 0.03329278]
------------------------------
Epoch: 23
Training loss: 0.039389371773914286 | Validation loss: 0.03649425941208998
Validation loss (ends of cycles): [0.14878313 0.04697423 0.03329278]
------------------------------
Epoch: 24
Training loss: 0.037030863918756186 | Validation loss: 0.03978176477054755
Validation loss (ends of cycles): [0.14878313 0.04697423 0.03329278]
------------------------------
Epoch: 25
Training loss: 0.04088841525739745 | Validation loss: 0.04172546664873759
Validation loss (ends of cycles): [0.14878313 0.04697423 0.03329278]
------------------------------
Epoch: 26
Training loss: 0.038719205234787966 | Validation loss: 0.03826622168223063
Validation loss (ends of cycles): [0.14878313 0.04697423 0.03329278]
------------------------------
Epoch: 27
Training loss: 0.03795238160283158 | Validation loss: 0.039025234058499336
Validation loss (ends of cycles): [0.14878313 0.04697423 0.03329278]
------------------------------
Epoch: 28
Training loss: 0.0355978360105502 | Validation loss: 0.035915122057000794
Validation loss (ends of cycles): [0.14878313 0.04697423 0.03329278]
------------------------------
Epoch: 29
Training loss: 0.03182907368203527 | Validation loss: 0.03283580827216307
Validation loss (ends of cycles): [0.14878313 0.04697423 0.03329278]
------------------------------
Epoch: 30
Training loss: 0.03164514409084069 | Validation loss: 0.03219876562555631
Validation loss (ends of cycles): [0.14878313 0.04697423 0.03329278 0.03219877]
--------------------------------------------------------------------------------
Seed: 10
--------------------------------------------------------------------------------
------------------------------
Epoch: 0
Training loss: 0.09328515749228627 | Validation loss: 0.0794174075126648
Validation loss (ends of cycles): [0.07941741]
------------------------------
Epoch: 1
Training loss: 0.08947268716598812 | Validation loss: 0.07702409103512764
Validation loss (ends of cycles): [0.07941741]
------------------------------
Epoch: 2
Training loss: 0.08151930766670328 | Validation loss: 0.07422645390033722
Validation loss (ends of cycles): [0.07941741]
------------------------------
Epoch: 3
Training loss: 0.07421657756755226 | Validation loss: 0.0915425568819046
Validation loss (ends of cycles): [0.07941741]
------------------------------
Epoch: 4
Training loss: 0.06644049580944211 | Validation loss: 0.15212642401456833
Validation loss (ends of cycles): [0.07941741]
------------------------------
Epoch: 5
Training loss: 0.06289171956871685 | Validation loss: 0.06643170863389969
Validation loss (ends of cycles): [0.07941741]
------------------------------
Epoch: 6
Training loss: 0.060507005570750484 | Validation loss: 0.07660464942455292
Validation loss (ends of cycles): [0.07941741]
------------------------------
Epoch: 7
Training loss: 0.05770743657883845 | Validation loss: 0.04702441208064556
Validation loss (ends of cycles): [0.07941741]
------------------------------
Epoch: 8
Training loss: 0.055238479160164534 | Validation loss: 0.04257218353450298
Validation loss (ends of cycles): [0.07941741]
------------------------------
Epoch: 9
Training loss: 0.04844514770727409 | Validation loss: 0.038986045867204666
Validation loss (ends of cycles): [0.07941741]
------------------------------
Epoch: 10
Training loss: 0.04857376590371132 | Validation loss: 0.03860069438815117
Validation loss (ends of cycles): [0.07941741 0.03860069]
------------------------------
Epoch: 11
Training loss: 0.0471659146837498 | Validation loss: 0.03836953267455101
Validation loss (ends of cycles): [0.07941741 0.03860069]
------------------------------
Epoch: 12
Training loss: 0.048829075243127976 | Validation loss: 0.03513345122337341
Validation loss (ends of cycles): [0.07941741 0.03860069]
------------------------------
Epoch: 13
Training loss: 0.05086563390336538 | Validation loss: 0.03178618475794792
Validation loss (ends of cycles): [0.07941741 0.03860069]
------------------------------
Epoch: 14
Training loss: 0.04892818766989206 | Validation loss: 0.07313217967748642
Validation loss (ends of cycles): [0.07941741 0.03860069]
------------------------------
Epoch: 15
Training loss: 0.05062789891503359 | Validation loss: 0.04181492328643799
Validation loss (ends of cycles): [0.07941741 0.03860069]
------------------------------
Epoch: 16
Training loss: 0.04422282143250892 | Validation loss: 0.031231501139700413
Validation loss (ends of cycles): [0.07941741 0.03860069]
------------------------------
Epoch: 17
Training loss: 0.0414473704601589 | Validation loss: 0.0330012571066618
Validation loss (ends of cycles): [0.07941741 0.03860069]
------------------------------
Epoch: 18
Training loss: 0.041221494541356436 | Validation loss: 0.03016264084726572
Validation loss (ends of cycles): [0.07941741 0.03860069]
------------------------------
Epoch: 19
Training loss: 0.035920314490795135 | Validation loss: 0.024509469978511333
Validation loss (ends of cycles): [0.07941741 0.03860069]
------------------------------
Epoch: 20
Training loss: 0.03617992938349122 | Validation loss: 0.023955611512064934
Validation loss (ends of cycles): [0.07941741 0.03860069 0.02395561]
------------------------------
Epoch: 21
Training loss: 0.03439663468222869 | Validation loss: 0.024173706769943237
Validation loss (ends of cycles): [0.07941741 0.03860069 0.02395561]
------------------------------
Epoch: 22
Training loss: 0.03432707321879111 | Validation loss: 0.03172864858061075
Validation loss (ends of cycles): [0.07941741 0.03860069 0.02395561]
------------------------------
Epoch: 23
Training loss: 0.035520500748565324 | Validation loss: 0.02993414457887411
Validation loss (ends of cycles): [0.07941741 0.03860069 0.02395561]
------------------------------
Epoch: 24
Training loss: 0.03720426637875406 | Validation loss: 0.036875439807772636
Validation loss (ends of cycles): [0.07941741 0.03860069 0.02395561]
------------------------------
Epoch: 25
Training loss: 0.03747940367381824 | Validation loss: 0.055036623030900955
Validation loss (ends of cycles): [0.07941741 0.03860069 0.02395561]
------------------------------
Epoch: 26
Training loss: 0.035825867597994054 | Validation loss: 0.04328293725848198
Validation loss (ends of cycles): [0.07941741 0.03860069 0.02395561]
------------------------------
Epoch: 27
Training loss: 0.03354562186685048 | Validation loss: 0.025072680786252022
Validation loss (ends of cycles): [0.07941741 0.03860069 0.02395561]
------------------------------
Epoch: 28
Training loss: 0.03464834401874166 | Validation loss: 0.024809451773762703
Validation loss (ends of cycles): [0.07941741 0.03860069 0.02395561]
------------------------------
Epoch: 29
Training loss: 0.03116374404022568 | Validation loss: 0.033944932743906975
Validation loss (ends of cycles): [0.07941741 0.03860069 0.02395561]
------------------------------
Epoch: 30
Training loss: 0.02939593855683741 | Validation loss: 0.022330881096422672
Validation loss (ends of cycles): [0.07941741 0.03860069 0.02395561 0.02233088]
--------------------------------------------------------------------------------
Seed: 11
--------------------------------------------------------------------------------
------------------------------
Epoch: 0
Training loss: 0.12622165640718058 | Validation loss: 0.13938650488853455
Validation loss (ends of cycles): [0.1393865]
------------------------------
Epoch: 1
Training loss: 0.1200494174110262 | Validation loss: 0.13313303887844086
Validation loss (ends of cycles): [0.1393865]
------------------------------
Epoch: 2
Training loss: 0.10607607701891347 | Validation loss: 0.12224111333489418
Validation loss (ends of cycles): [0.1393865]
------------------------------
Epoch: 3
Training loss: 0.09052326138082303 | Validation loss: 0.10416682437062263
Validation loss (ends of cycles): [0.1393865]
------------------------------
Epoch: 4
Training loss: 0.07436744202124446 | Validation loss: 0.07375293970108032
Validation loss (ends of cycles): [0.1393865]
------------------------------
Epoch: 5
Training loss: 0.063938575747766 | Validation loss: 0.07426713779568672
Validation loss (ends of cycles): [0.1393865]
------------------------------
Epoch: 6
Training loss: 0.057538029590719623 | Validation loss: 0.06209025718271732
Validation loss (ends of cycles): [0.1393865]
------------------------------
Epoch: 7
Training loss: 0.05095285353691954 | Validation loss: 0.06709360145032406
Validation loss (ends of cycles): [0.1393865]
------------------------------
Epoch: 8
Training loss: 0.04912557217635607 | Validation loss: 0.0595396663993597
Validation loss (ends of cycles): [0.1393865]
------------------------------
Epoch: 9
Training loss: 0.04791054472719368 | Validation loss: 0.05402009002864361
Validation loss (ends of cycles): [0.1393865]
------------------------------
Epoch: 10
Training loss: 0.04601734866829295 | Validation loss: 0.0537868607789278
Validation loss (ends of cycles): [0.1393865 0.05378686]
------------------------------
Epoch: 11
Training loss: 0.04642667229238309 | Validation loss: 0.05364326946437359
Validation loss (ends of cycles): [0.1393865 0.05378686]
------------------------------
Epoch: 12
Training loss: 0.04651502166923724 | Validation loss: 0.05703286826610565
Validation loss (ends of cycles): [0.1393865 0.05378686]
------------------------------
Epoch: 13
Training loss: 0.04464602313543621 | Validation loss: 0.07510419934988022
Validation loss (ends of cycles): [0.1393865 0.05378686]
------------------------------
Epoch: 14
Training loss: 0.04594061886401553 | Validation loss: 0.0557715930044651
Validation loss (ends of cycles): [0.1393865 0.05378686]
------------------------------
Epoch: 15
Training loss: 0.0421954180653158 | Validation loss: 0.04985055699944496
Validation loss (ends of cycles): [0.1393865 0.05378686]
------------------------------
Epoch: 16
Training loss: 0.04223741483139364 | Validation loss: 0.04710370860993862
Validation loss (ends of cycles): [0.1393865 0.05378686]
------------------------------
Epoch: 17
Training loss: 0.038117557764053345 | Validation loss: 0.04934592917561531
Validation loss (ends of cycles): [0.1393865 0.05378686]
------------------------------
Epoch: 18
Training loss: 0.03437867579295447 | Validation loss: 0.05659086816012859
Validation loss (ends of cycles): [0.1393865 0.05378686]
------------------------------
Epoch: 19
Training loss: 0.03409294313506076 | Validation loss: 0.04514491185545921
Validation loss (ends of cycles): [0.1393865 0.05378686]
------------------------------
Epoch: 20
Training loss: 0.03242792100890687 | Validation loss: 0.04265304282307625
Validation loss (ends of cycles): [0.1393865 0.05378686 0.04265304]
------------------------------
Epoch: 21
Training loss: 0.03009496952750181 | Validation loss: 0.04248355142772198
Validation loss (ends of cycles): [0.1393865 0.05378686 0.04265304]
------------------------------
Epoch: 22
Training loss: 0.03202511644677112 | Validation loss: 0.0439732950180769
Validation loss (ends of cycles): [0.1393865 0.05378686 0.04265304]
------------------------------
Epoch: 23
Training loss: 0.03216276111963548 | Validation loss: 0.05873432569205761
Validation loss (ends of cycles): [0.1393865 0.05378686 0.04265304]
------------------------------
Epoch: 24
Training loss: 0.032698783141217734 | Validation loss: 0.04619231075048447
Validation loss (ends of cycles): [0.1393865 0.05378686 0.04265304]
------------------------------
Epoch: 25
Training loss: 0.03633682802319527 | Validation loss: 0.072358887642622
Validation loss (ends of cycles): [0.1393865 0.05378686 0.04265304]
------------------------------
Epoch: 26
Training loss: 0.033581405681999105 | Validation loss: 0.05562468618154526
Validation loss (ends of cycles): [0.1393865 0.05378686 0.04265304]
------------------------------
Epoch: 27
Training loss: 0.030268098472764616 | Validation loss: 0.059526894241571426
Validation loss (ends of cycles): [0.1393865 0.05378686 0.04265304]
------------------------------
Epoch: 28
Training loss: 0.02835421930802496 | Validation loss: 0.035784799605607986
Validation loss (ends of cycles): [0.1393865 0.05378686 0.04265304]
------------------------------
Epoch: 29
Training loss: 0.024994256367024622 | Validation loss: 0.036349328234791756
Validation loss (ends of cycles): [0.1393865 0.05378686 0.04265304]
------------------------------
Epoch: 30
Training loss: 0.023506488503986282 | Validation loss: 0.03648699168115854
Validation loss (ends of cycles): [0.1393865 0.05378686 0.04265304 0.03648699]
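Because every seed finishes with a four-entry cycle-end array, runs are easiest to compare on those values rather than on the noisier per-epoch losses (which occasionally spike mid-cycle). A small sketch below aggregates the cycle-end losses transcribed verbatim from seeds 7-11 above; the dict layout and the summary printed are hypothetical conveniences, not notebook output.

import numpy as np

# Cycle-end validation losses copied from the log above (keys are seeds).
cycle_end = {
    7:  [0.06520787, 0.03052885, 0.02324738, 0.02178852],
    8:  [0.13468989, 0.05519082, 0.04204665, 0.03082],
    9:  [0.14878313, 0.04697423, 0.03329278, 0.03219877],
    10: [0.07941741, 0.03860069, 0.02395561, 0.02233088],
    11: [0.1393865,  0.05378686, 0.04265304, 0.03648699],
}
finals = np.array([v[-1] for v in cycle_end.values()])
print(f'final cycle-end loss: mean={finals.mean():.4f}, std={finals.std():.4f}')
print('best seed:', min(cycle_end, key=lambda s: cycle_end[s][-1]))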
--------------------------------------------------------------------------------
Seed: 12
--------------------------------------------------------------------------------
------------------------------
Epoch: 0
Training loss: 0.12465888732358028 | Validation loss: 0.11083460599184036
Validation loss (ends of cycles): [0.11083461]
------------------------------
Epoch: 1
Training loss: 0.11687581594053068 | Validation loss: 0.1044333999355634
Validation loss (ends of cycles): [0.11083461]
------------------------------
Epoch: 2
Training loss: 0.10341314363636468 | Validation loss: 0.09221626569827397
Validation loss (ends of cycles): [0.11083461]
------------------------------
Epoch: 3
Training loss: 0.08686307386348122 | Validation loss: 0.07851269468665123
Validation loss (ends of cycles): [0.11083461]
------------------------------
Epoch: 4
Training loss: 0.07083727340949209 | Validation loss: 0.0551288320372502
Validation loss (ends of cycles): [0.11083461]
------------------------------
Epoch: 5
Training loss: 0.06321360847275508 | Validation loss: 0.05201607135434946
Validation loss (ends of cycles): [0.11083461]
------------------------------
Epoch: 6
Training loss: 0.059471154487446734 | Validation loss: 0.050572953497370086
Validation loss (ends of cycles): [0.11083461]
------------------------------
Epoch: 7
Training loss: 0.05677154197014476 | Validation loss: 0.048783741891384125
Validation loss (ends of cycles): [0.11083461]
------------------------------
Epoch: 8
Training loss: 0.05208255586483957 | Validation loss: 0.05494089797139168
Validation loss (ends of cycles): [0.11083461]
------------------------------
Epoch: 9
Training loss: 0.05054084868415406 | Validation loss: 0.042090740675727524
Validation loss (ends of cycles): [0.11083461]
------------------------------
Epoch: 10
Training loss: 0.046194194313628895 | Validation loss: 0.0418690579632918
Validation loss (ends of cycles): [0.11083461 0.04186906]
------------------------------
Epoch: 11
Training loss: 0.04912359110618893 | Validation loss: 0.03736517330010732
Validation loss (ends of cycles): [0.11083461 0.04186906]
------------------------------
Epoch: 12
Training loss: 0.050159576250926444 | Validation loss: 0.03728324609498183
Validation loss (ends of cycles): [0.11083461 0.04186906]
------------------------------
Epoch: 13
Training loss: 0.04813679552784091 | Validation loss: 0.06099647656083107
Validation loss (ends of cycles): [0.11083461 0.04186906]
------------------------------
Epoch: 14
Training loss: 0.05095356658689285 | Validation loss: 0.042035351817806564
Validation loss (ends of cycles): [0.11083461 0.04186906]
------------------------------
Epoch: 15
Training loss: 0.05249298383530818 | Validation loss: 0.08353547627727191
Validation loss (ends of cycles): [0.11083461 0.04186906]
------------------------------
Epoch: 16
Training loss: 0.04622210692124147 | Validation loss: 0.05047812437017759
Validation loss (ends of cycles): [0.11083461 0.04186906]
------------------------------
Epoch: 17
Training loss: 0.04407538248128012 | Validation loss: 0.03657640082140764
Validation loss (ends of cycles): [0.11083461 0.04186906]
------------------------------
Epoch: 18
Training loss: 0.04126803819580298 | Validation loss: 0.045112963765859604
Validation loss (ends of cycles): [0.11083461 0.04186906]
------------------------------
Epoch: 19
Training loss: 0.03931908642775134 | Validation loss: 0.03320226073265076
Validation loss (ends of cycles): [0.11083461 0.04186906]
------------------------------
Epoch: 20
Training loss: 0.040036032294952555 | Validation loss: 0.034371147553126015
Validation loss (ends of cycles): [0.11083461 0.04186906 0.03437115]
------------------------------
Epoch: 21
Training loss: 0.03779972744737997 | Validation loss: 0.034147227803866066
Validation loss (ends of cycles): [0.11083461 0.04186906 0.03437115]
------------------------------
Epoch: 22
Training loss: 0.03736349481991247 | Validation loss: 0.03392460756003857
Validation loss (ends of cycles): [0.11083461 0.04186906 0.03437115]
------------------------------
Epoch: 23
Training loss: 0.036734214554981964 | Validation loss: 0.05457255865136782
Validation loss (ends of cycles): [0.11083461 0.04186906 0.03437115]
------------------------------
Epoch: 24
Training loss: 0.03623275096699791 | Validation loss: 0.041262177750468254
Validation loss (ends of cycles): [0.11083461 0.04186906 0.03437115]
------------------------------
Epoch: 25
Training loss: 0.039135233744194635 | Validation loss: 0.06989621991912524
Validation loss (ends of cycles): [0.11083461 0.04186906 0.03437115]
------------------------------
Epoch: 26
Training loss: 0.041533306927273146 | Validation loss: 0.08880491803089778
Validation loss (ends of cycles): [0.11083461 0.04186906 0.03437115]
------------------------------
Epoch: 27
Training loss: 0.036787426638367926 | Validation loss: 0.05547218148907026
Validation loss (ends of cycles): [0.11083461 0.04186906 0.03437115]
------------------------------
Epoch: 28
Training loss: 0.037730952602271974 | Validation loss: 0.04338609303037325
Validation loss (ends of cycles): [0.11083461 0.04186906 0.03437115]
------------------------------
Epoch: 29
Training loss: 0.03741005190501088 | Validation loss: 0.03517623494068781
Validation loss (ends of cycles): [0.11083461 0.04186906 0.03437115]
------------------------------
Epoch: 30
Training loss: 0.03459033566085916 | Validation loss: 0.029467060541113217
Validation loss (ends of cycles): [0.11083461 0.04186906 0.03437115 0.02946706]
--------------------------------------------------------------------------------
Seed: 13
--------------------------------------------------------------------------------
------------------------------
Epoch: 0
Training loss: 0.14787319733908302 | Validation loss: 0.12134905159473419
Validation loss (ends of cycles): [0.12134905]
------------------------------
Epoch: 1
Training loss: 0.13971965289429614 | Validation loss: 0.1133270300924778
Validation loss (ends of cycles): [0.12134905]
------------------------------
Epoch: 2
Training loss: 0.12202531177746623 | Validation loss: 0.09741818159818649
Validation loss (ends of cycles): [0.12134905]
------------------------------
Epoch: 3
Training loss: 0.09899224105634187 | Validation loss: 0.0680413693189621
Validation loss (ends of cycles): [0.12134905]
------------------------------
Epoch: 4
Training loss: 0.07547486730312046 | Validation loss: 0.05193481966853142
Validation loss (ends of cycles): [0.12134905]
------------------------------
Epoch: 5
Training loss: 0.059965328754563084 | Validation loss: 0.1478968784213066
Validation loss (ends of cycles): [0.12134905]
------------------------------
Epoch: 6
Training loss: 0.058371061949353466 | Validation loss: 0.060820143669843674
Validation loss (ends of cycles): [0.12134905]
------------------------------
Epoch: 7
Training loss: 0.0518569956092458 | Validation loss: 0.04691869765520096
Validation loss (ends of cycles): [0.12134905]
------------------------------
Epoch: 8
Training loss: 0.04747700201053368 | Validation loss: 0.05568346567451954
Validation loss (ends of cycles): [0.12134905]
------------------------------
Epoch: 9
Training loss: 0.04481482358747407 | Validation loss: 0.044492047280073166
Validation loss (ends of cycles): [0.12134905]
------------------------------
Epoch: 10
Training loss: 0.04322212032581631 | Validation loss: 0.041569143533706665
Validation loss (ends of cycles): [0.12134905 0.04156914]
------------------------------
Epoch: 11
Training loss: 0.04246603638718003 | Validation loss: 0.04150853492319584
Validation loss (ends of cycles): [0.12134905 0.04156914]
------------------------------
Epoch: 12
Training loss: 0.04089725978280369 | Validation loss: 0.03653017058968544
Validation loss (ends of cycles): [0.12134905 0.04156914]
------------------------------
Epoch: 13
Training loss: 0.041098617801540775 | Validation loss: 0.04898947477340698
Validation loss (ends of cycles): [0.12134905 0.04156914]
------------------------------
Epoch: 14
Training loss: 0.04085124303635798 | Validation loss: 0.04935206472873688
Validation loss (ends of cycles): [0.12134905 0.04156914]
------------------------------
Epoch: 15
Training loss: 0.041323604552369365 | Validation loss: 0.04239597171545029
Validation loss (ends of cycles): [0.12134905 0.04156914]
------------------------------
Epoch: 16
Training loss: 0.039808995531577816 | Validation loss: 0.0840645469725132
Validation loss (ends of cycles): [0.12134905 0.04156914]
------------------------------
Epoch: 17
Training loss: 0.03703288322216586 | Validation loss: 0.03245018795132637
Validation loss (ends of cycles): [0.12134905 0.04156914]
------------------------------
Epoch: 18
Training loss: 0.03433209422387575 | Validation loss: 0.03348969016224146
Validation loss (ends of cycles): [0.12134905 0.04156914]
------------------------------
Epoch: 19
Training loss: 0.03210164145811608 | Validation loss: 0.030444078147411346
Validation loss (ends of cycles): [0.12134905 0.04156914]
------------------------------
Epoch: 20
Training loss: 0.03053837034263109 | Validation loss: 0.03115426003932953
Validation loss (ends of cycles): [0.12134905 0.04156914 0.03115426]
------------------------------
Epoch: 21
Training loss: 0.031159722393280583 | Validation loss: 0.03271864727139473
Validation loss (ends of cycles): [0.12134905 0.04156914 0.03115426]
------------------------------
Epoch: 22
Training loss: 0.03416347729140207 | Validation loss: 0.045318085700273514
Validation loss (ends of cycles): [0.12134905 0.04156914 0.03115426]
------------------------------
Epoch: 23
Training loss: 0.03107438314902155 | Validation loss: 0.056804386898875237
Validation loss (ends of cycles): [0.12134905 0.04156914 0.03115426]
------------------------------
Epoch: 24
Training loss: 0.033158473474414724 | Validation loss: 0.05310705862939358
Validation loss (ends of cycles): [0.12134905 0.04156914 0.03115426]
------------------------------
Epoch: 25
Training loss: 0.03283156532990305 | Validation loss: 0.05297096632421017
Validation loss (ends of cycles): [0.12134905 0.04156914 0.03115426]
------------------------------
Epoch: 26
Training loss: 0.033077974362592945 | Validation loss: 0.03136811312288046
Validation loss (ends of cycles): [0.12134905 0.04156914 0.03115426]
------------------------------
Epoch: 27
Training loss: 0.03158471576477352 | Validation loss: 0.058228276669979095
Validation loss (ends of cycles): [0.12134905 0.04156914 0.03115426]
------------------------------
Epoch: 28
Training loss: 0.02844038595886607 | Validation loss: 0.046033360064029694
Validation loss (ends of cycles): [0.12134905 0.04156914 0.03115426]
------------------------------
Epoch: 29
Training loss: 0.026794205566770153 | Validation loss: 0.02960763592272997
Validation loss (ends of cycles): [0.12134905 0.04156914 0.03115426]
------------------------------
Epoch: 30
Training loss: 0.025866988360097532 | Validation loss: 0.030549601651728153
Validation loss (ends of cycles): [0.12134905 0.04156914 0.03115426 0.0305496 ]
--------------------------------------------------------------------------------
Seed: 14
--------------------------------------------------------------------------------
------------------------------
Epoch: 0
Training loss: 0.2685273501433824 | Validation loss: 0.2683800756931305
Validation loss (ends of cycles): [0.26838008]
------------------------------
Epoch: 1
Training loss: 0.25670922037802246 | Validation loss: 0.2526206970214844
Validation loss (ends of cycles): [0.26838008]
------------------------------
Epoch: 2
Training loss: 0.22681226542121485 | Validation loss: 0.21906188130378723
Validation loss (ends of cycles): [0.26838008]
------------------------------
Epoch: 3
Training loss: 0.18514166144948258 | Validation loss: 0.16225957870483398
Validation loss (ends of cycles): [0.26838008]
------------------------------
Epoch: 4
Training loss: 0.13773585306970695 | Validation loss: 0.0982358418405056
Validation loss (ends of cycles): [0.26838008]
------------------------------
Epoch: 5
Training loss: 0.09757452673817936 | Validation loss: 0.16215666383504868
Validation loss (ends of cycles): [0.26838008]
------------------------------
Epoch: 6
Training loss: 0.0740412101149559 | Validation loss: 0.055965250357985497
Validation loss (ends of cycles): [0.26838008]
------------------------------
Epoch: 7
Training loss: 0.06161656583610334 | Validation loss: 0.062003035098314285
Validation loss (ends of cycles): [0.26838008]
------------------------------
Epoch: 8
Training loss: 0.056791181431004874 | Validation loss: 0.05057838559150696
Validation loss (ends of cycles): [0.26838008]
------------------------------
Epoch: 9
Training loss: 0.05035581047597684 | Validation loss: 0.04332583770155907
Validation loss (ends of cycles): [0.26838008]
------------------------------
Epoch: 10
Training loss: 0.047245774467132594 | Validation loss: 0.04338065907359123
Validation loss (ends of cycles): [0.26838008 0.04338066]
------------------------------
Epoch: 11
Training loss: 0.048102253459786116 | Validation loss: 0.04022406227886677
Validation loss (ends of cycles): [0.26838008 0.04338066]
------------------------------
Epoch: 12
Training loss: 0.04582173936069012 | Validation loss: 0.03690587542951107
Validation loss (ends of cycles): [0.26838008 0.04338066]
------------------------------
Epoch: 13
Training loss: 0.041917656352253335 | Validation loss: 0.04160160571336746
Validation loss (ends of cycles): [0.26838008 0.04338066]
------------------------------
Epoch: 14
Training loss: 0.04336070749712618 | Validation loss: 0.0791056714951992
Validation loss (ends of cycles): [0.26838008 0.04338066]
------------------------------
Epoch: 15
Training loss: 0.04261575992170133 | Validation loss: 0.053798090666532516
Validation loss (ends of cycles): [0.26838008 0.04338066]
------------------------------
Epoch: 16
Training loss: 0.04363253075433405 | Validation loss: 0.040439238771796227
Validation loss (ends of cycles): [0.26838008 0.04338066]
------------------------------
Epoch: 17
Training loss: 0.04248141987543357 | Validation loss: 0.03731362521648407
Validation loss (ends of cycles): [0.26838008 0.04338066]
------------------------------
Epoch: 18
Training loss: 0.038024303209232654 | Validation loss: 0.041773609817028046
Validation loss (ends of cycles): [0.26838008 0.04338066]
------------------------------
Epoch: 19
Training loss: 0.03374856283986255 | Validation loss: 0.03551979921758175
Validation loss (ends of cycles): [0.26838008 0.04338066]
------------------------------
Epoch: 20
Training loss: 0.033305737042897625 | Validation loss: 0.03380383178591728
Validation loss (ends of cycles): [0.26838008 0.04338066 0.03380383]
------------------------------
Epoch: 21
Training loss: 0.03389084388158823 | Validation loss: 0.03323566913604736
Validation loss (ends of cycles): [0.26838008 0.04338066 0.03380383]
------------------------------
Epoch: 22
Training loss: 0.0329377921788316 | Validation loss: 0.03338051959872246
Validation loss (ends of cycles): [0.26838008 0.04338066 0.03380383]
------------------------------
Epoch: 23
Training loss: 0.03327037482277343 | Validation loss: 0.032685402780771255
Validation loss (ends of cycles): [0.26838008 0.04338066 0.03380383]
------------------------------
Epoch: 24
Training loss: 0.03479252481146863 | Validation loss: 0.0644093994051218
Validation loss (ends of cycles): [0.26838008 0.04338066 0.03380383]
------------------------------
Epoch: 25
Training loss: 0.03432726771815827 | Validation loss: 0.049457062035799026
Validation loss (ends of cycles): [0.26838008 0.04338066 0.03380383]
------------------------------
Epoch: 26
Training loss: 0.03283493897240413 | Validation loss: 0.03840087540447712
Validation loss (ends of cycles): [0.26838008 0.04338066 0.03380383]
------------------------------
Epoch: 27
Training loss: 0.029340636769407673 | Validation loss: 0.036333074793219566
Validation loss (ends of cycles): [0.26838008 0.04338066 0.03380383]
------------------------------
Epoch: 28
Training loss: 0.028881219558809932 | Validation loss: 0.03344106115400791
Validation loss (ends of cycles): [0.26838008 0.04338066 0.03380383]
------------------------------
Epoch: 29
Training loss: 0.026324676251725146 | Validation loss: 0.029291590675711632
Validation loss (ends of cycles): [0.26838008 0.04338066 0.03380383]
------------------------------
Epoch: 30
Training loss: 0.02539735272722809 | Validation loss: 0.029340913519263268
Validation loss (ends of cycles): [0.26838008 0.04338066 0.03380383 0.02934091]
--------------------------------------------------------------------------------
Seed: 15
--------------------------------------------------------------------------------
------------------------------
Epoch: 0
Training loss: 0.0841492728183144 | Validation loss: 0.06075831688940525
Validation loss (ends of cycles): [0.06075832]
------------------------------
Epoch: 1
Training loss: 0.08169916311376973 | Validation loss: 0.06054982356727123
Validation loss (ends of cycles): [0.06075832]
------------------------------
Epoch: 2
Training loss: 0.07849763216156709 | Validation loss: 0.06086615286767483
Validation loss (ends of cycles): [0.06075832]
------------------------------
Epoch: 3
Training loss: 0.07423726802593783 | Validation loss: 0.06112533435225487
Validation loss (ends of cycles): [0.06075832]
------------------------------
Epoch: 4
Training loss: 0.06984295362704679 | Validation loss: 0.04825133830308914
Validation loss (ends of cycles): [0.06075832]
------------------------------
Epoch: 5
Training loss: 0.06283724523688618 | Validation loss: 0.04184510372579098
Validation loss (ends of cycles): [0.06075832]
------------------------------
Epoch: 6
Training loss: 0.058544049725720755 | Validation loss: 0.041651615872979164
Validation loss (ends of cycles): [0.06075832]
------------------------------
Epoch: 7
Training loss: 0.05212017580082542 | Validation loss: 0.04360814392566681
Validation loss (ends of cycles): [0.06075832]
------------------------------
Epoch: 8
Training loss: 0.052451391067159805 | Validation loss: 0.04157676547765732
Validation loss (ends of cycles): [0.06075832]
------------------------------
Epoch: 9
Training loss: 0.04793435246928742 | Validation loss: 0.03575167618691921
Validation loss (ends of cycles): [0.06075832]
------------------------------
Epoch: 10
Training loss: 0.04477887306558458 | Validation loss: 0.035469865426421165
Validation loss (ends of cycles): [0.06075832 0.03546987]
------------------------------
Epoch: 11
Training loss: 0.045943650466046836 | Validation loss: 0.0336600337177515
Validation loss (ends of cycles): [0.06075832 0.03546987]
------------------------------
Epoch: 12
Training loss: 0.04350362703400223 | Validation loss: 0.03219062741845846
Validation loss (ends of cycles): [0.06075832 0.03546987]
------------------------------
Epoch: 13
Training loss: 0.04283701863728071 | Validation loss: 0.03756018541753292
Validation loss (ends of cycles): [0.06075832 0.03546987]
------------------------------
Epoch: 14
Training loss: 0.04464582922427278 | Validation loss: 0.0390226636081934
Validation loss (ends of cycles): [0.06075832 0.03546987]
------------------------------
Epoch: 15
Training loss: 0.046644166406047974 | Validation loss: 0.03757801093161106
Validation loss (ends of cycles): [0.06075832 0.03546987]
------------------------------
Epoch: 16
Training loss: 0.04258477227076104 | Validation loss: 0.03985445946455002
Validation loss (ends of cycles): [0.06075832 0.03546987]
------------------------------
Epoch: 17
Training loss: 0.04088323328055834 | Validation loss: 0.026787959039211273
Validation loss (ends of cycles): [0.06075832 0.03546987]
------------------------------
Epoch: 18
Training loss: 0.03871416957362702 | Validation loss: 0.025218220427632332
Validation loss (ends of cycles): [0.06075832 0.03546987]
------------------------------
Epoch: 19
Training loss: 0.03509655153672946 | Validation loss: 0.02421511523425579
Validation loss (ends of cycles): [0.06075832 0.03546987]
------------------------------
Epoch: 20
Training loss: 0.035071449550358874 | Validation loss: 0.02311981562525034
Validation loss (ends of cycles): [0.06075832 0.03546987 0.02311982]
------------------------------
Epoch: 21
Training loss: 0.034543048394353765 | Validation loss: 0.022894551046192646
Validation loss (ends of cycles): [0.06075832 0.03546987 0.02311982]
------------------------------
Epoch: 22
Training loss: 0.03377740447850604 | Validation loss: 0.024682712741196156
Validation loss (ends of cycles): [0.06075832 0.03546987 0.02311982]
------------------------------
Epoch: 23
Training loss: 0.03353702306355301 | Validation loss: 0.029586568474769592
Validation loss (ends of cycles): [0.06075832 0.03546987 0.02311982]
------------------------------
Epoch: 24
Training loss: 0.03612418242387081 | Validation loss: 0.027507783845067024
Validation loss (ends of cycles): [0.06075832 0.03546987 0.02311982]
------------------------------
Epoch: 25
Training loss: 0.03587194492942408 | Validation loss: 0.027790222316980362
Validation loss (ends of cycles): [0.06075832 0.03546987 0.02311982]
------------------------------
Epoch: 26
Training loss: 0.03557291837703241 | Validation loss: 0.03710603527724743
Validation loss (ends of cycles): [0.06075832 0.03546987 0.02311982]
------------------------------
Epoch: 27
Training loss: 0.033871732284560016 | Validation loss: 0.03677363134920597
Validation loss (ends of cycles): [0.06075832 0.03546987 0.02311982]
------------------------------
Epoch: 28
Training loss: 0.03167560118201532 | Validation loss: 0.032423168420791626
Validation loss (ends of cycles): [0.06075832 0.03546987 0.02311982]
------------------------------
Epoch: 29
Training loss: 0.02956923759101253 | Validation loss: 0.019483156502246857
Validation loss (ends of cycles): [0.06075832 0.03546987 0.02311982]
------------------------------
Epoch: 30
Training loss: 0.02946120588795135 | Validation loss: 0.018310876563191414
Validation loss (ends of cycles): [0.06075832 0.03546987 0.02311982 0.01831088]
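For post-hoc analysis it can be handier to parse this printed log back into arrays than to re-run training. A hypothetical parser for the exact format above (the "Seed:" headers and the "Training loss: ... | Validation loss: ..." lines); 'train_log.txt' stands in for a dump of this cell's output.

import re

SEED_RE = re.compile(r'^Seed: (\d+)')
LOSS_RE = re.compile(r'Training loss: ([0-9.e+-]+) \| Validation loss: ([0-9.e+-]+)')

def parse_log(text):
    """Return {seed: {'train': [...], 'valid': [...]}} from the printed log."""
    runs, seed = {}, None
    for line in text.splitlines():
        m = SEED_RE.match(line)
        if m:
            seed = int(m.group(1))
            runs[seed] = {'train': [], 'valid': []}
            continue
        m = LOSS_RE.search(line)
        if m and seed is not None:
            runs[seed]['train'].append(float(m.group(1)))
            runs[seed]['valid'].append(float(m.group(2)))
    return runs

# Example: runs = parse_log(open('train_log.txt').read())
# then plot runs[15]['valid'] against epoch number to inspect a single seed.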
--------------------------------------------------------------------------------
Seed: 16
--------------------------------------------------------------------------------
------------------------------
Epoch: 0
Training loss: 0.08156146030676992 | Validation loss: 0.0705137016872565
Validation loss (ends of cycles): [0.0705137]
------------------------------
Epoch: 1
Training loss: 0.07990784237259313 | Validation loss: 0.06988636900981267
Validation loss (ends of cycles): [0.0705137]
------------------------------
Epoch: 2
Training loss: 0.07819349220708798 | Validation loss: 0.06863330366710822
Validation loss (ends of cycles): [0.0705137]
------------------------------
Epoch: 3
Training loss: 0.07480346470286972 | Validation loss: 0.0666625127196312
Validation loss (ends of cycles): [0.0705137]
------------------------------
Epoch: 4
Training loss: 0.06917970745187056 | Validation loss: 0.058160472040375076
Validation loss (ends of cycles): [0.0705137]
------------------------------
Epoch: 5
Training loss: 0.06264691466563627 | Validation loss: 0.08878007034460704
Validation loss (ends of cycles): [0.0705137]
------------------------------
Epoch: 6
Training loss: 0.06039848516842252 | Validation loss: 0.05209088449676832
Validation loss (ends of cycles): [0.0705137]
------------------------------
Epoch: 7
Training loss: 0.05595046673950396 | Validation loss: 0.04798128828406334
Validation loss (ends of cycles): [0.0705137]
------------------------------
Epoch: 8
Training loss: 0.05155527258389875 | Validation loss: 0.041437882309158645
Validation loss (ends of cycles): [0.0705137]
------------------------------
Epoch: 9
Training loss: 0.04763707479363993 | Validation loss: 0.043172294894854225
Validation loss (ends of cycles): [0.0705137]
------------------------------
Epoch: 10
Training loss: 0.04461290363810564 | Validation loss: 0.04293485110004743
Validation loss (ends of cycles): [0.0705137 0.04293485]
------------------------------
Epoch: 11
Training loss: 0.04245269229929698 | Validation loss: 0.03906371258199215
Validation loss (ends of cycles): [0.0705137 0.04293485]
------------------------------
Epoch: 12
Training loss: 0.043166923287667726 | Validation loss: 0.04080717754550278
Validation loss (ends of cycles): [0.0705137 0.04293485]
------------------------------
Epoch: 13
Training loss: 0.042604215247066396 | Validation loss: 0.03541523963212967
Validation loss (ends of cycles): [0.0705137 0.04293485]
------------------------------
Epoch: 14
Training loss: 0.04206815513929254 | Validation loss: 0.03434837299088637
Validation loss (ends of cycles): [0.0705137 0.04293485]
------------------------------
Epoch: 15
Training loss: 0.040814362760437164 | Validation loss: 0.034936813535750844
Validation loss (ends of cycles): [0.0705137 0.04293485]
------------------------------
Epoch: 16
Training loss: 0.04256990846050413 | Validation loss: 0.05865098908543587
Validation loss (ends of cycles): [0.0705137 0.04293485]
------------------------------
Epoch: 17
Training loss: 0.038062922558502146 | Validation loss: 0.032220245649417244
Validation loss (ends of cycles): [0.0705137 0.04293485]
------------------------------
Epoch: 18
Training loss: 0.03491674932210069 | Validation loss: 0.03063141368329525
Validation loss (ends of cycles): [0.0705137 0.04293485]
------------------------------
Epoch: 19
Training loss: 0.032406675727351716 | Validation loss: 0.031299490481615067
Validation loss (ends of cycles): [0.0705137 0.04293485]
------------------------------
Epoch: 20
Training loss: 0.030975149993441607 | Validation loss: 0.03276701706151167
Validation loss (ends of cycles): [0.0705137 0.04293485 0.03276702]
------------------------------
Epoch: 21
Training loss: 0.03167034911089822 | Validation loss: 0.031768561651309334
Validation loss (ends of cycles): [0.0705137 0.04293485 0.03276702]
------------------------------
Epoch: 22
Training loss: 0.02994556332889356 | Validation loss: 0.030046330144008
Validation loss (ends of cycles): [0.0705137 0.04293485 0.03276702]
------------------------------
Epoch: 23
Training loss: 0.03042360659884779 | Validation loss: 0.02618786444266637
Validation loss (ends of cycles): [0.0705137 0.04293485 0.03276702]
------------------------------
Epoch: 24
Training loss: 0.033261800380913836 | Validation loss: 0.03181804623454809
Validation loss (ends of cycles): [0.0705137 0.04293485 0.03276702]
------------------------------
Epoch: 25
Training loss: 0.03491548017451638 | Validation loss: 0.03237322314331929
Validation loss (ends of cycles): [0.0705137 0.04293485 0.03276702]
------------------------------
Epoch: 26
Training loss: 0.03453618023348482 | Validation loss: 0.030259561104079086
Validation loss (ends of cycles): [0.0705137 0.04293485 0.03276702]
------------------------------
Epoch: 27
Training loss: 0.02996809525709403 | Validation loss: 0.02521776221692562
Validation loss (ends of cycles): [0.0705137 0.04293485 0.03276702]
------------------------------
Epoch: 28
Training loss: 0.029768657135336024 | Validation loss: 0.027145131180683773
Validation loss (ends of cycles): [0.0705137 0.04293485 0.03276702]
------------------------------
Epoch: 29
Training loss: 0.02678287053774846 | Validation loss: 0.02985484277208646
Validation loss (ends of cycles): [0.0705137 0.04293485 0.03276702]
------------------------------
Epoch: 30
Training loss: 0.02542775310575962 | Validation loss: 0.029113321254650753
Validation loss (ends of cycles): [0.0705137 0.04293485 0.03276702 0.02911332]
--------------------------------------------------------------------------------
Seed: 17
--------------------------------------------------------------------------------
------------------------------
Epoch: 0
Training loss: 0.18578452575537893 | Validation loss: 0.17607577641805014
Validation loss (ends of cycles): [0.17607578]
------------------------------
Epoch: 1
Training loss: 0.17636994024117789 | Validation loss: 0.16514772176742554
Validation loss (ends of cycles): [0.17607578]
------------------------------
Epoch: 2
Training loss: 0.15567576388518015 | Validation loss: 0.14257992307345072
Validation loss (ends of cycles): [0.17607578]
------------------------------
Epoch: 3
Training loss: 0.1261256126066049 | Validation loss: 0.11006870617469151
Validation loss (ends of cycles): [0.17607578]
------------------------------
Epoch: 4
Training loss: 0.09516125255160862 | Validation loss: 0.07898629705111186
Validation loss (ends of cycles): [0.17607578]
------------------------------
Epoch: 5
Training loss: 0.07224813931518131 | Validation loss: 0.07449610034624736
Validation loss (ends of cycles): [0.17607578]
------------------------------
Epoch: 6
Training loss: 0.06180248782038689 | Validation loss: 0.059722560147444405
Validation loss (ends of cycles): [0.17607578]
------------------------------
Epoch: 7
Training loss: 0.05495261256065634 | Validation loss: 0.05743814756472906
Validation loss (ends of cycles): [0.17607578]
------------------------------
Epoch: 8
Training loss: 0.0541409340997537 | Validation loss: 0.05083008110523224
Validation loss (ends of cycles): [0.17607578]
------------------------------
Epoch: 9
Training loss: 0.0495873944212993 | Validation loss: 0.05104871218403181
Validation loss (ends of cycles): [0.17607578]
------------------------------
Epoch: 10
Training loss: 0.04791343791617288 | Validation loss: 0.049677314857641854
Validation loss (ends of cycles): [0.17607578 0.04967731]
------------------------------
Epoch: 11
Training loss: 0.048474044952955514 | Validation loss: 0.04772906253735224
Validation loss (ends of cycles): [0.17607578 0.04967731]
------------------------------
Epoch: 12
Training loss: 0.04561085191865762 | Validation loss: 0.04465216274062792
Validation loss (ends of cycles): [0.17607578 0.04967731]
------------------------------
Epoch: 13
Training loss: 0.04276795002321402 | Validation loss: 0.044079518566528954
Validation loss (ends of cycles): [0.17607578 0.04967731]
------------------------------
Epoch: 14
Training loss: 0.046003543875283666 | Validation loss: 0.05234615504741669
Validation loss (ends of cycles): [0.17607578 0.04967731]
------------------------------
Epoch: 15
Training loss: 0.04754653500599994 | Validation loss: 0.057119290033976235
Validation loss (ends of cycles): [0.17607578 0.04967731]
------------------------------
Epoch: 16
Training loss: 0.04541455095426904 | Validation loss: 0.051875809828440346
Validation loss (ends of cycles): [0.17607578 0.04967731]
------------------------------
Epoch: 17
Training loss: 0.04212984825587935 | Validation loss: 0.061386716862519584
Validation loss (ends of cycles): [0.17607578 0.04967731]
------------------------------
Epoch: 18
Training loss: 0.04025129984236426 | Validation loss: 0.0377594760308663
Validation loss (ends of cycles): [0.17607578 0.04967731]
------------------------------
Epoch: 19
Training loss: 0.037910942195190325 | Validation loss: 0.03676092314223448
Validation loss (ends of cycles): [0.17607578 0.04967731]
------------------------------
Epoch: 20
Training loss: 0.035172241946889296 | Validation loss: 0.03536786511540413
Validation loss (ends of cycles): [0.17607578 0.04967731 0.03536787]
------------------------------
Epoch: 21
Training loss: 0.03531419661723905 | Validation loss: 0.03570879126588503
Validation loss (ends of cycles): [0.17607578 0.04967731 0.03536787]
------------------------------
Epoch: 22
Training loss: 0.0355098739059435 | Validation loss: 0.033749821285406746
Validation loss (ends of cycles): [0.17607578 0.04967731 0.03536787]
------------------------------
Epoch: 23
Training loss: 0.03708219093581041 | Validation loss: 0.034131928657492004
Validation loss (ends of cycles): [0.17607578 0.04967731 0.03536787]
------------------------------
Epoch: 24
Training loss: 0.036728250690632396 | Validation loss: 0.08373745282491048
Validation loss (ends of cycles): [0.17607578 0.04967731 0.03536787]
------------------------------
Epoch: 25
Training loss: 0.037703154815567866 | Validation loss: 0.16881321867307028
Validation loss (ends of cycles): [0.17607578 0.04967731 0.03536787]
------------------------------
Epoch: 26
Training loss: 0.04052116773608658 | Validation loss: 0.035450027945140995
Validation loss (ends of cycles): [0.17607578 0.04967731 0.03536787]
------------------------------
Epoch: 27
Training loss: 0.03702345759504371 | Validation loss: 0.04713146264354388
Validation loss (ends of cycles): [0.17607578 0.04967731 0.03536787]
------------------------------
Epoch: 28
Training loss: 0.033382929033703275 | Validation loss: 0.033809199929237366
Validation loss (ends of cycles): [0.17607578 0.04967731 0.03536787]
------------------------------
Epoch: 29
Training loss: 0.030483958828780387 | Validation loss: 0.03201883099973202
Validation loss (ends of cycles): [0.17607578 0.04967731 0.03536787]
------------------------------
Epoch: 30
Training loss: 0.028836593238843813 | Validation loss: 0.029221948857108753
Validation loss (ends of cycles): [0.17607578 0.04967731 0.03536787 0.02922195]
--------------------------------------------------------------------------------
Seed: 18
--------------------------------------------------------------------------------
------------------------------
Epoch: 0
Training loss: 0.08226940329921872 | Validation loss: 0.08771190047264099
Validation loss (ends of cycles): [0.0877119]
------------------------------
Epoch: 1
Training loss: 0.08080815602290004 | Validation loss: 0.08779796585440636
Validation loss (ends of cycles): [0.0877119]
------------------------------
Epoch: 2
Training loss: 0.07893975392768257 | Validation loss: 0.08810674771666527
Validation loss (ends of cycles): [0.0877119]
------------------------------
Epoch: 3
Training loss: 0.07525817559737909 | Validation loss: 0.08791105076670647
Validation loss (ends of cycles): [0.0877119]
------------------------------
Epoch: 4
Training loss: 0.06839138034142946 | Validation loss: 0.0787418819963932
Validation loss (ends of cycles): [0.0877119]
------------------------------
Epoch: 5
Training loss: 0.06047361873482403 | Validation loss: 0.057681020349264145
Validation loss (ends of cycles): [0.0877119]
------------------------------
Epoch: 6
Training loss: 0.05439754487260392 | Validation loss: 0.10234533622860909
Validation loss (ends of cycles): [0.0877119]
------------------------------
Epoch: 7
Training loss: 0.048915984697247804 | Validation loss: 0.09288699552416801
Validation loss (ends of cycles): [0.0877119]
------------------------------
Epoch: 8
Training loss: 0.05011697858572006 | Validation loss: 0.07140225917100906
Validation loss (ends of cycles): [0.0877119]
------------------------------
Epoch: 9
Training loss: 0.04586619089700674 | Validation loss: 0.054284341633319855
Validation loss (ends of cycles): [0.0877119]
------------------------------
Epoch: 10
Training loss: 0.04105820320546627 | Validation loss: 0.05086086876690388
Validation loss (ends of cycles): [0.0877119 0.05086087]
------------------------------
Epoch: 11
Training loss: 0.040411756717060744 | Validation loss: 0.05188886821269989
Validation loss (ends of cycles): [0.0877119 0.05086087]
------------------------------
Epoch: 12
Training loss: 0.04021171529434229 | Validation loss: 0.052854619920253754
Validation loss (ends of cycles): [0.0877119 0.05086087]
------------------------------
Epoch: 13
Training loss: 0.039001153291840306 | Validation loss: 0.053517796099185944
Validation loss (ends of cycles): [0.0877119 0.05086087]
------------------------------
Epoch: 14
Training loss: 0.03992528409550065 | Validation loss: 0.11048462241888046
Validation loss (ends of cycles): [0.0877119 0.05086087]
------------------------------
Epoch: 15
Training loss: 0.0428880665843424 | Validation loss: 0.048895107582211494
Validation loss (ends of cycles): [0.0877119 0.05086087]
------------------------------
Epoch: 16
Training loss: 0.03952205651684811 | Validation loss: 0.09723616763949394
Validation loss (ends of cycles): [0.0877119 0.05086087]
------------------------------
Epoch: 17
Training loss: 0.03598170609850632 | Validation loss: 0.054928792640566826
Validation loss (ends of cycles): [0.0877119 0.05086087]
------------------------------
Epoch: 18
Training loss: 0.034703530959392846 | Validation loss: 0.04207434877753258
Validation loss (ends of cycles): [0.0877119 0.05086087]
------------------------------
Epoch: 19
Training loss: 0.03246637443570714 | Validation loss: 0.04125319607555866
Validation loss (ends of cycles): [0.0877119 0.05086087]
------------------------------
Epoch: 20
Training loss: 0.03022692215285803 | Validation loss: 0.03823002055287361
Validation loss (ends of cycles): [0.0877119 0.05086087 0.03823002]
------------------------------
Epoch: 21
Training loss: 0.031208522029613193 | Validation loss: 0.04041556641459465
Validation loss (ends of cycles): [0.0877119 0.05086087 0.03823002]
------------------------------
Epoch: 22
Training loss: 0.029294442140350218 | Validation loss: 0.0428590402007103
Validation loss (ends of cycles): [0.0877119 0.05086087 0.03823002]
------------------------------
Epoch: 23
Training loss: 0.0297195372220717 | Validation loss: 0.04256322421133518
Validation loss (ends of cycles): [0.0877119 0.05086087 0.03823002]
------------------------------
Epoch: 24
Training loss: 0.03125792741775513 | Validation loss: 0.10660433769226074
Validation loss (ends of cycles): [0.0877119 0.05086087 0.03823002]
------------------------------
Epoch: 25
Training loss: 0.03019392794292224 | Validation loss: 0.06545785069465637
Validation loss (ends of cycles): [0.0877119 0.05086087 0.03823002]
------------------------------
Epoch: 26
Training loss: 0.033481411537841746 | Validation loss: 0.03471088223159313
Validation loss (ends of cycles): [0.0877119 0.05086087 0.03823002]
------------------------------
Epoch: 27
Training loss: 0.027845038640263834 | Validation loss: 0.07449803873896599
Validation loss (ends of cycles): [0.0877119 0.05086087 0.03823002]
------------------------------
Epoch: 28
Training loss: 0.027143595397080247 | Validation loss: 0.03891387768089771
Validation loss (ends of cycles): [0.0877119 0.05086087 0.03823002]
------------------------------
Epoch: 29
Training loss: 0.025757619964056892 | Validation loss: 0.041501617059111595
Validation loss (ends of cycles): [0.0877119 0.05086087 0.03823002]
------------------------------
Epoch: 30
Training loss: 0.02374906617363817 | Validation loss: 0.04013838246464729
Validation loss (ends of cycles): [0.0877119 0.05086087 0.03823002 0.04013838]
--------------------------------------------------------------------------------
Seed: 19
--------------------------------------------------------------------------------
------------------------------
Epoch: 0
Training loss: 0.16255763173103333 | Validation loss: 0.14109364648660025
Validation loss (ends of cycles): [0.14109365]
------------------------------
Epoch: 1
Training loss: 0.15516356999675432 | Validation loss: 0.13523547103007635
Validation loss (ends of cycles): [0.14109365]
------------------------------
Epoch: 2
Training loss: 0.14172234551774132 | Validation loss: 0.12201743572950363
Validation loss (ends of cycles): [0.14109365]
------------------------------
Epoch: 3
Training loss: 0.12399974134233263 | Validation loss: 0.09632302448153496
Validation loss (ends of cycles): [0.14109365]
------------------------------
Epoch: 4
Training loss: 0.10233049053284857 | Validation loss: 0.06885181864102681
Validation loss (ends of cycles): [0.14109365]
------------------------------
Epoch: 5
Training loss: 0.0784348054892487 | Validation loss: 0.05762290582060814
Validation loss (ends of cycles): [0.14109365]
------------------------------
Epoch: 6
Training loss: 0.06391300840510263 | Validation loss: 0.0725318193435669
Validation loss (ends of cycles): [0.14109365]
------------------------------
Epoch: 7
Training loss: 0.05626908710433377 | Validation loss: 0.04346885159611702
Validation loss (ends of cycles): [0.14109365]
------------------------------
Epoch: 8
Training loss: 0.051691514543361135 | Validation loss: 0.04434716080625852
Validation loss (ends of cycles): [0.14109365]
------------------------------
Epoch: 9
Training loss: 0.04746196946750084 | Validation loss: 0.04489594325423241
Validation loss (ends of cycles): [0.14109365]
------------------------------
Epoch: 10
Training loss: 0.043907886577977076 | Validation loss: 0.04307339588801066
Validation loss (ends of cycles): [0.14109365 0.0430734 ]
------------------------------
Epoch: 11
Training loss: 0.04457749778197871 | Validation loss: 0.04406307637691498
Validation loss (ends of cycles): [0.14109365 0.0430734 ]
------------------------------
Epoch: 12
Training loss: 0.044939831313159734 | Validation loss: 0.08915746957063675
Validation loss (ends of cycles): [0.14109365 0.0430734 ]
------------------------------
Epoch: 13
Training loss: 0.044208405539393425 | Validation loss: 0.036763026068607964
Validation loss (ends of cycles): [0.14109365 0.0430734 ]
------------------------------
Epoch: 14
Training loss: 0.04433420538488361 | Validation loss: 0.04416805567840735
Validation loss (ends of cycles): [0.14109365 0.0430734 ]
------------------------------
Epoch: 15
Training loss: 0.04537590737971994 | Validation loss: 0.4176284372806549
Validation loss (ends of cycles): [0.14109365 0.0430734 ]
------------------------------
Epoch: 16
Training loss: 0.04425143709199296 | Validation loss: 0.18371537327766418
Validation loss (ends of cycles): [0.14109365 0.0430734 ]
------------------------------
Epoch: 17
Training loss: 0.03873702914764484 | Validation loss: 0.0807472715775172
Validation loss (ends of cycles): [0.14109365 0.0430734 ]
------------------------------
Epoch: 18
Training loss: 0.03545353727208243 | Validation loss: 0.042597355941931404
Validation loss (ends of cycles): [0.14109365 0.0430734 ]
------------------------------
Epoch: 19
Training loss: 0.032974150549206466 | Validation loss: 0.033211088428894676
Validation loss (ends of cycles): [0.14109365 0.0430734 ]
------------------------------
Epoch: 20
Training loss: 0.030094111027816933 | Validation loss: 0.033144605035583176
Validation loss (ends of cycles): [0.14109365 0.0430734 0.03314461]
------------------------------
Epoch: 21
Training loss: 0.03217791558967696 | Validation loss: 0.03846348077058792
Validation loss (ends of cycles): [0.14109365 0.0430734 0.03314461]
------------------------------
Epoch: 22
Training loss: 0.031263780780136585 | Validation loss: 0.030576524635155995
Validation loss (ends of cycles): [0.14109365 0.0430734 0.03314461]
------------------------------
Epoch: 23
Training loss: 0.030434809832109347 | Validation loss: 0.04343028490742048
Validation loss (ends of cycles): [0.14109365 0.0430734 0.03314461]
------------------------------
Epoch: 24
Training loss: 0.032326566986739635 | Validation loss: 0.13629954804976782
Validation loss (ends of cycles): [0.14109365 0.0430734 0.03314461]
------------------------------
Epoch: 25
Training loss: 0.03342853072616789 | Validation loss: 0.06467030942440033
Validation loss (ends of cycles): [0.14109365 0.0430734 0.03314461]
------------------------------
Epoch: 26
Training loss: 0.03306691503773133 | Validation loss: 0.041692071904738746
Validation loss (ends of cycles): [0.14109365 0.0430734 0.03314461]
------------------------------
Epoch: 27
Training loss: 0.030296926179693803 | Validation loss: 0.049537912011146545
Validation loss (ends of cycles): [0.14109365 0.0430734 0.03314461]
------------------------------
Epoch: 28
Training loss: 0.026243075573196013 | Validation loss: 0.031099140644073486
Validation loss (ends of cycles): [0.14109365 0.0430734 0.03314461]
------------------------------
Epoch: 29
Training loss: 0.024759062979784276 | Validation loss: 0.026985854531327885
Validation loss (ends of cycles): [0.14109365 0.0430734 0.03314461]
------------------------------
Epoch: 30
Training loss: 0.02487910890744792 | Validation loss: 0.029790397733449936
Validation loss (ends of cycles): [0.14109365 0.0430734 0.03314461 0.0297904 ]
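The "ends of cycles" entries in the logs above are appended every 10 epochs, which matches the triangular cyclical learning-rate schedule defined in the Setup section (step_size_up=5, i.e. 5 epochs up plus 5 epochs down per cycle). A minimal sketch of that wiring, assuming one scheduler step per epoch; the actual loop lives inside Learners.train, so model and optimizer below are stand-ins:
import torch
from torch import nn

model = nn.Linear(1, 1)  # stand-in for the spectra CNN
optimizer = torch.optim.Adam(model.parameters(), lr=3e-5)
scheduler = torch.optim.lr_scheduler.CyclicLR(
    optimizer, base_lr=3e-5, max_lr=1e-3, step_size_up=5,
    mode='triangular', cycle_momentum=False)  # params_scheduler from Setup

for epoch in range(31):
    # ... one epoch of training and validation would run here ...
    scheduler.step()  # one step per epoch => a cycle boundary every 10 epochs
cycle_momentum=False is required here because Adam has no momentum buffer for the scheduler to cycle.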
# Replace following Paths with yours
src_dir_model = Path('/content/drive/MyDrive/research/predict-k-mirs-dl/dumps/cnn/train_eval/vertisols/models')
order = 10  # taxonomic order index (here presumably Vertisols, per the dump path)
seeds = range(20)
learners = Learners(Model, tax_lookup, seeds=seeds, device=device)
perfs_local_vertisols, _, _, _ = learners.evaluate((X, y, depth_order[:, -1]), order=order, src_dir_model=src_dir_model)
perfs_local_vertisols.describe()
             rpd       rpiq         r2       lccc       rmse        mse        mae       mape       bias        stb
count  20.000000  20.000000  20.000000  20.000000  20.000000  20.000000  20.000000  20.000000  20.000000  20.000000
mean    1.777816   2.448334   0.669128   0.800569   0.328906   0.110845   0.213575  30.958227  -0.001265  -0.003632
std     0.184440   0.352951   0.069418   0.041342   0.052977   0.037720   0.023262   4.237755   0.024421   0.065963
min     1.473109   1.825536   0.532598   0.716435   0.252563   0.063788   0.179316  24.304497  -0.041362  -0.115813
25%     1.615667   2.245532   0.611233   0.773600   0.292217   0.085391   0.197175  28.024661  -0.016770  -0.041078
50%     1.788574   2.443511   0.683310   0.807425   0.318792   0.101629   0.209331  29.388569   0.000355   0.000905
75%     1.925041   2.621849   0.726185   0.828116   0.349539   0.122219   0.231883  34.220246   0.018196   0.041607
max     2.113319   3.132748   0.772750   0.861943   0.458266   0.210007   0.255553  40.589359   0.043430   0.129690
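For reference, rpd and rpiq in the summary above are the usual chemometrics ratios of the spread of the observations to the prediction error. A hedged sketch of their common definitions (mirzai's own metric code may differ in detail):
import numpy as np

def rmse(y_true, y_pred):
    return np.sqrt(np.mean((y_true - y_pred) ** 2))

def rpd(y_true, y_pred):
    # Ratio of Performance to Deviation: SD of observations over RMSE
    return np.std(y_true, ddof=1) / rmse(y_true, y_pred)

def rpiq(y_true, y_pred):
    # Ratio of Performance to InterQuartile distance: IQR over RMSE
    q1, q3 = np.percentile(y_true, [25, 75])
    return (q3 - q1) / rmse(y_true, y_pred)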
Checking losses
from mirzai.training.core import load_dumps

# Replace following Paths with yours
dest_dir_loss = Path('/content/drive/MyDrive/research/predict-k-mirs-dl/dumps/cnn/train_eval/vertisols/losses')
losses = load_dumps(dest_dir_loss)
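The loaded dumps can then be plotted to inspect convergence across seeds. A minimal sketch, assuming each entry of losses pairs per-epoch training and validation arrays (check load_dumps' actual return format before relying on this):
import matplotlib.pyplot as plt

fig, ax = plt.subplots(figsize=(6, 4))
for train_loss, valid_loss in losses:  # assumed structure, one entry per seed
    ax.plot(train_loss, color='C0', alpha=0.3)  # training curves
    ax.plot(valid_loss, color='C1', alpha=0.3)  # validation curves
ax.set_xlabel('Epoch')
ax.set_ylabel('Loss')
plt.show()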