{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.0,
  "eval_steps": 500,
  "global_step": 664,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.01, "learning_rate": 1.3082402064781276e-06, "loss": 0.7935, "step": 2 },
    { "epoch": 0.01, "learning_rate": 2.6164804129562553e-06, "loss": 0.7, "step": 4 },
    { "epoch": 0.02, "learning_rate": 3.381751875681663e-06, "loss": 0.6316, "step": 6 },
    { "epoch": 0.02, "learning_rate": 3.924720619434383e-06, "loss": 0.5405, "step": 8 },
    { "epoch": 0.03, "learning_rate": 4.345879896760937e-06, "loss": 0.522, "step": 10 },
    { "epoch": 0.04, "learning_rate": 4.689992082159791e-06, "loss": 0.4827, "step": 12 },
    { "epoch": 0.04, "learning_rate": 4.980934789368156e-06, "loss": 0.4555, "step": 14 },
    { "epoch": 0.05, "learning_rate": 5.2329608259125105e-06, "loss": 0.45, "step": 16 },
    { "epoch": 0.05, "learning_rate": 5.4552635448851985e-06, "loss": 0.4315, "step": 18 },
    { "epoch": 0.06, "learning_rate": 5.6541201032390644e-06, "loss": 0.4153, "step": 20 },
    { "epoch": 0.07, "learning_rate": 5.83400774154115e-06, "loss": 0.4374, "step": 22 },
    { "epoch": 0.07, "learning_rate": 5.99823228863792e-06, "loss": 0.4171, "step": 24 },
    { "epoch": 0.08, "learning_rate": 6.149304227398896e-06, "loss": 0.41, "step": 26 },
    { "epoch": 0.08, "learning_rate": 6.289174995846284e-06, "loss": 0.4101, "step": 28 },
    { "epoch": 0.09, "learning_rate": 6.419391565964472e-06, "loss": 0.4075, "step": 30 },
    { "epoch": 0.1, "learning_rate": 6.541201032390639e-06, "loss": 0.395, "step": 32 },
    { "epoch": 0.1, "learning_rate": 6.655623437887147e-06, "loss": 0.3873, "step": 34 },
    { "epoch": 0.11, "learning_rate": 6.763503751363326e-06, "loss": 0.4005, "step": 36 },
    { "epoch": 0.11, "learning_rate": 6.865549773769684e-06, "loss": 0.3754, "step": 38 },
    { "epoch": 0.12, "learning_rate": 6.9623603097171925e-06, "loss": 0.3733, "step": 40 },
    { "epoch": 0.13, "learning_rate": 7.054446458571692e-06, "loss": 0.3622, "step": 42 },
    { "epoch": 0.13, "learning_rate": 7.1422479480192775e-06, "loss": 0.369, "step": 44 },
    { "epoch": 0.14, "learning_rate": 7.226145833886759e-06, "loss": 0.3745, "step": 46 },
    { "epoch": 0.14, "learning_rate": 7.306472495116047e-06, "loss": 0.3775, "step": 48 },
    { "epoch": 0.15, "learning_rate": 7.3835195870437456e-06, "loss": 0.3652, "step": 50 },
    { "epoch": 0.16, "learning_rate": 7.457544433877025e-06, "loss": 0.3645, "step": 52 },
    { "epoch": 0.16, "learning_rate": 7.528775214088733e-06, "loss": 0.3554, "step": 54 },
    { "epoch": 0.17, "learning_rate": 7.597415202324413e-06, "loss": 0.3591, "step": 56 },
    { "epoch": 0.17, "learning_rate": 7.663646266610644e-06, "loss": 0.3797, "step": 58 },
    { "epoch": 0.18, "learning_rate": 7.7276317724426e-06, "loss": 0.363, "step": 60 },
    { "epoch": 0.19, "learning_rate": 7.789519010511834e-06, "loss": 0.367, "step": 62 },
    { "epoch": 0.19, "learning_rate": 7.849441238868767e-06, "loss": 0.367, "step": 64 },
    { "epoch": 0.2, "learning_rate": 7.907519410744684e-06, "loss": 0.3696, "step": 66 },
    { "epoch": 0.2, "learning_rate": 7.963863644365277e-06, "loss": 0.3627, "step": 68 },
    { "epoch": 0.21, "learning_rate": 8.018574479650967e-06, "loss": 0.3689, "step": 70 },
    { "epoch": 0.22, "learning_rate": 8.071743957841455e-06, "loss": 0.344, "step": 72 },
    { "epoch": 0.22, "learning_rate": 8.123456553166724e-06, "loss": 0.3412, "step": 74 },
    { "epoch": 0.23, "learning_rate": 8.173789980247812e-06, "loss": 0.3574, "step": 76 },
    { "epoch": 0.23, "learning_rate": 8.222815896602431e-06, "loss": 0.339, "step": 78 },
    { "epoch": 0.24, "learning_rate": 8.270600516195319e-06, "loss": 0.3547, "step": 80 },
    { "epoch": 0.25, "learning_rate": 8.317205147216999e-06, "loss": 0.358, "step": 82 },
    { "epoch": 0.25, "learning_rate": 8.36268666504982e-06, "loss": 0.3475, "step": 84 },
    { "epoch": 0.26, "learning_rate": 8.407097929574588e-06, "loss": 0.3554, "step": 86 },
    { "epoch": 0.27, "learning_rate": 8.450488154497406e-06, "loss": 0.3406, "step": 88 },
    { "epoch": 0.27, "learning_rate": 8.492903235168008e-06, "loss": 0.3267, "step": 90 },
    { "epoch": 0.28, "learning_rate": 8.534386040364887e-06, "loss": 0.341, "step": 92 },
    { "epoch": 0.28, "learning_rate": 8.574976672697987e-06, "loss": 0.3524, "step": 94 },
    { "epoch": 0.29, "learning_rate": 8.614712701594175e-06, "loss": 0.3584, "step": 96 },
    { "epoch": 0.3, "learning_rate": 8.653629372258186e-06, "loss": 0.3351, "step": 98 },
    { "epoch": 0.3, "learning_rate": 8.691759793521874e-06, "loss": 0.3522, "step": 100 },
    { "epoch": 0.31, "learning_rate": 8.729135107090682e-06, "loss": 0.3544, "step": 102 },
    { "epoch": 0.31, "learning_rate": 8.765784640355151e-06, "loss": 0.3408, "step": 104 },
    { "epoch": 0.32, "learning_rate": 8.80173604464618e-06, "loss": 0.3519, "step": 106 },
    { "epoch": 0.33, "learning_rate": 8.837015420566862e-06, "loss": 0.3417, "step": 108 },
    { "epoch": 0.33, "learning_rate": 8.87164743182396e-06, "loss": 0.3555, "step": 110 },
    { "epoch": 0.34, "learning_rate": 8.90565540880254e-06, "loss": 0.3477, "step": 112 },
    { "epoch": 0.34, "learning_rate": 8.93906144297322e-06, "loss": 0.3298, "step": 114 },
    { "epoch": 0.35, "learning_rate": 8.971886473088772e-06, "loss": 0.3431, "step": 116 },
    { "epoch": 0.36, "learning_rate": 9.004150364012388e-06, "loss": 0.3504, "step": 118 },
    { "epoch": 0.36, "learning_rate": 9.035871978920727e-06, "loss": 0.3327, "step": 120 },
    { "epoch": 0.37, "learning_rate": 9.067069245538941e-06, "loss": 0.3284, "step": 122 },
    { "epoch": 0.37, "learning_rate": 9.09775921698996e-06, "loss": 0.3345, "step": 124 },
    { "epoch": 0.38, "learning_rate": 9.127958127775227e-06, "loss": 0.3464, "step": 126 },
    { "epoch": 0.39, "learning_rate": 9.157681445346895e-06, "loss": 0.336, "step": 128 },
    { "epoch": 0.39, "learning_rate": 9.186943917681705e-06, "loss": 0.3546, "step": 130 },
    { "epoch": 0.4, "learning_rate": 9.215759617222812e-06, "loss": 0.3194, "step": 132 },
    { "epoch": 0.4, "learning_rate": 9.244141981517345e-06, "loss": 0.3296, "step": 134 },
    { "epoch": 0.41, "learning_rate": 9.272103850843403e-06, "loss": 0.3331, "step": 136 },
    { "epoch": 0.42, "learning_rate": 9.299657503090295e-06, "loss": 0.3362, "step": 138 },
    { "epoch": 0.42, "learning_rate": 9.326814686129093e-06, "loss": 0.3414, "step": 140 },
    { "epoch": 0.43, "learning_rate": 9.353586647887207e-06, "loss": 0.3249, "step": 142 },
    { "epoch": 0.43, "learning_rate": 9.379984164319582e-06, "loss": 0.3334, "step": 144 },
    { "epoch": 0.44, "learning_rate": 9.406017565450707e-06, "loss": 0.3294, "step": 146 },
    { "epoch": 0.45, "learning_rate": 9.43169675964485e-06, "loss": 0.3436, "step": 148 },
    { "epoch": 0.45, "learning_rate": 9.457031256247281e-06, "loss": 0.3216, "step": 150 },
    { "epoch": 0.46, "learning_rate": 9.48203018672594e-06, "loss": 0.3509, "step": 152 },
    { "epoch": 0.46, "learning_rate": 9.50670232443118e-06, "loss": 0.326, "step": 154 },
    { "epoch": 0.47, "learning_rate": 9.53105610308056e-06, "loss": 0.3391, "step": 156 },
    { "epoch": 0.48, "learning_rate": 9.555099634066188e-06, "loss": 0.3264, "step": 158 },
    { "epoch": 0.48, "learning_rate": 9.578840722673449e-06, "loss": 0.3245, "step": 160 },
    { "epoch": 0.49, "learning_rate": 9.602286883292267e-06, "loss": 0.333, "step": 162 },
    { "epoch": 0.49, "learning_rate": 9.625445353695127e-06, "loss": 0.3491, "step": 164 },
    { "epoch": 0.5, "learning_rate": 9.648323108449636e-06, "loss": 0.34, "step": 166 },
    { "epoch": 0.51, "learning_rate": 9.670926871527948e-06, "loss": 0.3343, "step": 168 },
    { "epoch": 0.51, "learning_rate": 9.693263128169957e-06, "loss": 0.3311, "step": 170 },
    { "epoch": 0.52, "learning_rate": 9.715338136052716e-06, "loss": 0.333, "step": 172 },
    { "epoch": 0.52, "learning_rate": 9.73715793581418e-06, "loss": 0.3002, "step": 174 },
    { "epoch": 0.53, "learning_rate": 9.758728360975532e-06, "loss": 0.3271, "step": 176 },
    { "epoch": 0.54, "learning_rate": 9.780055047302923e-06, "loss": 0.323, "step": 178 },
    { "epoch": 0.54, "learning_rate": 9.801143441646136e-06, "loss": 0.3562, "step": 180 },
    { "epoch": 0.55, "learning_rate": 9.821998810288924e-06, "loss": 0.3228, "step": 182 },
    { "epoch": 0.55, "learning_rate": 9.842626246843015e-06, "loss": 0.3422, "step": 184 },
    { "epoch": 0.56, "learning_rate": 9.863030679715369e-06, "loss": 0.3296, "step": 186 },
    { "epoch": 0.57, "learning_rate": 9.883216879176116e-06, "loss": 0.3176, "step": 188 },
    { "epoch": 0.57, "learning_rate": 9.903189464052494e-06, "loss": 0.3297, "step": 190 },
    { "epoch": 0.58, "learning_rate": 9.922952908072303e-06, "loss": 0.3246, "step": 192 },
    { "epoch": 0.58, "learning_rate": 9.942511545878664e-06, "loss": 0.3261, "step": 194 },
    { "epoch": 0.59, "learning_rate": 9.961869578736312e-06, "loss": 0.3258, "step": 196 },
    { "epoch": 0.6, "learning_rate": 9.98103107994822e-06, "loss": 0.3269, "step": 198 },
    { "epoch": 0.6, "learning_rate": 1e-05, "loss": 0.324, "step": 200 },
    { "epoch": 0.61, "learning_rate": 1e-05, "loss": 0.3106, "step": 202 },
    { "epoch": 0.61, "learning_rate": 1e-05, "loss": 0.3086, "step": 204 },
    { "epoch": 0.62, "learning_rate": 1e-05, "loss": 0.3269, "step": 206 },
    { "epoch": 0.63, "learning_rate": 1e-05, "loss": 0.3396, "step": 208 },
    { "epoch": 0.63, "learning_rate": 1e-05, "loss": 0.3249, "step": 210 },
    { "epoch": 0.64, "learning_rate": 1e-05, "loss": 0.3294, "step": 212 },
    { "epoch": 0.64, "learning_rate": 1e-05, "loss": 0.3502, "step": 214 },
    { "epoch": 0.65, "learning_rate": 1e-05, "loss": 0.3329, "step": 216 },
    { "epoch": 0.66, "learning_rate": 1e-05, "loss": 0.3501, "step": 218 },
    { "epoch": 0.66, "learning_rate": 1e-05, "loss": 0.3375, "step": 220 },
    { "epoch": 0.67, "learning_rate": 1e-05, "loss": 0.3326, "step": 222 },
    { "epoch": 0.67, "learning_rate": 1e-05, "loss": 0.3312, "step": 224 },
    { "epoch": 0.68, "learning_rate": 1e-05, "loss": 0.3246, "step": 226 },
    { "epoch": 0.69, "learning_rate": 1e-05, "loss": 0.3142, "step": 228 },
    { "epoch": 0.69, "learning_rate": 1e-05, "loss": 0.3246, "step": 230 },
    { "epoch": 0.7, "learning_rate": 1e-05, "loss": 0.329, "step": 232 },
    { "epoch": 0.7, "learning_rate": 1e-05, "loss": 0.3446, "step": 234 },
    { "epoch": 0.71, "learning_rate": 1e-05, "loss": 0.3205, "step": 236 },
    { "epoch": 0.72, "learning_rate": 1e-05, "loss": 0.3066, "step": 238 },
    { "epoch": 0.72, "learning_rate": 1e-05, "loss": 0.3267, "step": 240 },
    { "epoch": 0.73, "learning_rate": 1e-05, "loss": 0.3169, "step": 242 },
    { "epoch": 0.73, "learning_rate": 1e-05, "loss": 0.333, "step": 244 },
    { "epoch": 0.74, "learning_rate": 1e-05, "loss": 0.3155, "step": 246 },
    { "epoch": 0.75, "learning_rate": 1e-05, "loss": 0.3263, "step": 248 },
    { "epoch": 0.75, "learning_rate": 1e-05, "loss": 0.3154, "step": 250 },
    { "epoch": 0.76, "learning_rate": 1e-05, "loss": 0.3303, "step": 252 },
    { "epoch": 0.77, "learning_rate": 1e-05, "loss": 0.3298, "step": 254 },
    { "epoch": 0.77, "learning_rate": 1e-05, "loss": 0.3319, "step": 256 },
    { "epoch": 0.78, "learning_rate": 1e-05, "loss": 0.3178, "step": 258 },
    { "epoch": 0.78, "learning_rate": 1e-05, "loss": 0.3178, "step": 260 },
    { "epoch": 0.79, "learning_rate": 1e-05, "loss": 0.3163, "step": 262 },
    { "epoch": 0.8, "learning_rate": 1e-05, "loss": 0.2977, "step": 264 },
    { "epoch": 0.8, "learning_rate": 1e-05, "loss": 0.3184, "step": 266 },
    { "epoch": 0.81, "learning_rate": 1e-05, "loss": 0.3359, "step": 268 },
    { "epoch": 0.81, "learning_rate": 1e-05, "loss": 0.321, "step": 270 },
    { "epoch": 0.82, "learning_rate": 1e-05, "loss": 0.3275, "step": 272 },
    { "epoch": 0.83, "learning_rate": 1e-05, "loss": 0.3198, "step": 274 },
    { "epoch": 0.83, "learning_rate": 1e-05, "loss": 0.3352, "step": 276 },
    { "epoch": 0.84, "learning_rate": 1e-05, "loss": 0.3387, "step": 278 },
    { "epoch": 0.84, "learning_rate": 1e-05, "loss": 0.3176, "step": 280 },
    { "epoch": 0.85, "learning_rate": 1e-05, "loss": 0.3278, "step": 282 },
    { "epoch": 0.86, "learning_rate": 1e-05, "loss": 0.2959, "step": 284 },
    { "epoch": 0.86, "learning_rate": 1e-05, "loss": 0.3213, "step": 286 },
    { "epoch": 0.87, "learning_rate": 1e-05, "loss": 0.3154, "step": 288 },
    { "epoch": 0.87, "learning_rate": 1e-05, "loss": 0.3158, "step": 290 },
    { "epoch": 0.88, "learning_rate": 1e-05, "loss": 0.3068, "step": 292 },
    { "epoch": 0.89, "learning_rate": 1e-05, "loss": 0.3178, "step": 294 },
    { "epoch": 0.89, "learning_rate": 1e-05, "loss": 0.3241, "step": 296 },
    { "epoch": 0.9, "learning_rate": 1e-05, "loss": 0.3192, "step": 298 },
    { "epoch": 0.9, "learning_rate": 1e-05, "loss": 0.3238, "step": 300 },
    { "epoch": 0.91, "learning_rate": 1e-05, "loss": 0.3259, "step": 302 },
    { "epoch": 0.92, "learning_rate": 1e-05, "loss": 0.2884, "step": 304 },
    { "epoch": 0.92, "learning_rate": 1e-05, "loss": 0.3148, "step": 306 },
    { "epoch": 0.93, "learning_rate": 1e-05, "loss": 0.3133, "step": 308 },
    { "epoch": 0.93, "learning_rate": 1e-05, "loss": 0.3141, "step": 310 },
    { "epoch": 0.94, "learning_rate": 1e-05, "loss": 0.3187, "step": 312 },
    { "epoch": 0.95, "learning_rate": 1e-05, "loss": 0.3211, "step": 314 },
    { "epoch": 0.95, "learning_rate": 1e-05, "loss": 0.3198, "step": 316 },
    { "epoch": 0.96, "learning_rate": 1e-05, "loss": 0.316, "step": 318 },
    { "epoch": 0.96, "learning_rate": 1e-05, "loss": 0.329, "step": 320 },
    { "epoch": 0.97, "learning_rate": 1e-05, "loss": 0.3008, "step": 322 },
    { "epoch": 0.98, "learning_rate": 1e-05, "loss": 0.3184, "step": 324 },
    { "epoch": 0.98, "learning_rate": 1e-05, "loss": 0.3329, "step": 326 },
    { "epoch": 0.99, "learning_rate": 1e-05, "loss": 0.3106, "step": 328 },
    { "epoch": 0.99, "learning_rate": 1e-05, "loss": 0.3133, "step": 330 },
    { "epoch": 1.0, "learning_rate": 1e-05, "loss": 0.3238, "step": 332 },
    { "epoch": 1.01, "learning_rate": 1e-05, "loss": 0.3145, "step": 334 },
    { "epoch": 1.01, "learning_rate": 1e-05, "loss": 0.3053, "step": 336 },
    { "epoch": 1.02, "learning_rate": 1e-05, "loss": 0.3025, "step": 338 },
    { "epoch": 1.02, "learning_rate": 1e-05, "loss": 0.2953, "step": 340 },
    { "epoch": 1.03, "learning_rate": 1e-05, "loss": 0.302, "step": 342 },
    { "epoch": 1.04, "learning_rate": 1e-05, "loss": 0.2956, "step": 344 },
    { "epoch": 1.04, "learning_rate": 1e-05, "loss": 0.2763, "step": 346 },
    { "epoch": 1.05, "learning_rate": 1e-05, "loss": 0.2848, "step": 348 },
    { "epoch": 1.05, "learning_rate": 1e-05, "loss": 0.2664, "step": 350 },
    { "epoch": 1.06, "learning_rate": 1e-05, "loss": 0.2641, "step": 352 },
    { "epoch": 1.07, "learning_rate": 1e-05, "loss": 0.2783, "step": 354 },
    { "epoch": 1.07, "learning_rate": 1e-05, "loss": 0.2757, "step": 356 },
    { "epoch": 1.08, "learning_rate": 1e-05, "loss": 0.2647, "step": 358 },
    { "epoch": 1.08, "learning_rate": 1e-05, "loss": 0.2725, "step": 360 },
    { "epoch": 1.09, "learning_rate": 1e-05, "loss": 0.277, "step": 362 },
    { "epoch": 1.1, "learning_rate": 1e-05, "loss": 0.2648, "step": 364 },
    { "epoch": 1.1, "learning_rate": 1e-05, "loss": 0.2593, "step": 366 },
    { "epoch": 1.11, "learning_rate": 1e-05, "loss": 0.2654, "step": 368 },
    { "epoch": 1.11, "learning_rate": 1e-05, "loss": 0.2574, "step": 370 },
    { "epoch": 1.12, "learning_rate": 1e-05, "loss": 0.2528, "step": 372 },
    { "epoch": 1.13, "learning_rate": 1e-05, "loss": 0.2415, "step": 374 },
    { "epoch": 1.13, "learning_rate": 1e-05, "loss": 0.2493, "step": 376 },
    { "epoch": 1.14, "learning_rate": 1e-05, "loss": 0.2543, "step": 378 },
    { "epoch": 1.14, "learning_rate": 1e-05, "loss": 0.259, "step": 380 },
    { "epoch": 1.15, "learning_rate": 1e-05, "loss": 0.2445, "step": 382 },
    { "epoch": 1.16, "learning_rate": 1e-05, "loss": 0.2476, "step": 384 },
    { "epoch": 1.16, "learning_rate": 1e-05, "loss": 0.2388, "step": 386 },
    { "epoch": 1.17, "learning_rate": 1e-05, "loss": 0.2398, "step": 388 },
    { "epoch": 1.17, "learning_rate": 1e-05, "loss": 0.2559, "step": 390 },
    { "epoch": 1.18, "learning_rate": 1e-05, "loss": 0.2398, "step": 392 },
    { "epoch": 1.19, "learning_rate": 1e-05, "loss": 0.245, "step": 394 },
    { "epoch": 1.19, "learning_rate": 1e-05, "loss": 0.2502, "step": 396 },
    { "epoch": 1.2, "learning_rate": 1e-05, "loss": 0.2541, "step": 398 },
    { "epoch": 1.2, "learning_rate": 1e-05, "loss": 0.2433, "step": 400 },
    { "epoch": 1.21, "learning_rate": 1e-05, "loss": 0.2486, "step": 402 },
    { "epoch": 1.22, "learning_rate": 1e-05, "loss": 0.2297, "step": 404 },
    { "epoch": 1.22, "learning_rate": 1e-05, "loss": 0.2286, "step": 406 },
    { "epoch": 1.23, "learning_rate": 1e-05, "loss": 0.2425, "step": 408 },
    { "epoch": 1.23, "learning_rate": 1e-05, "loss": 0.2271, "step": 410 },
    { "epoch": 1.24, "learning_rate": 1e-05, "loss": 0.2394, "step": 412 },
    { "epoch": 1.25, "learning_rate": 1e-05, "loss": 0.2446, "step": 414 },
    { "epoch": 1.25, "learning_rate": 1e-05, "loss": 0.2342, "step": 416 },
    { "epoch": 1.26, "learning_rate": 1e-05, "loss": 0.2423, "step": 418 },
    { "epoch": 1.27, "learning_rate": 1e-05, "loss": 0.2334, "step": 420 },
    { "epoch": 1.27, "learning_rate": 1e-05, "loss": 0.2213, "step": 422 },
    { "epoch": 1.28, "learning_rate": 1e-05, "loss": 0.2254, "step": 424 },
    { "epoch": 1.28, "learning_rate": 1e-05, "loss": 0.2385, "step": 426 },
    { "epoch": 1.29, "learning_rate": 1e-05, "loss": 0.2439, "step": 428 },
    { "epoch": 1.3, "learning_rate": 1e-05, "loss": 0.2314, "step": 430 },
    { "epoch": 1.3, "learning_rate": 1e-05, "loss": 0.2423, "step": 432 },
    { "epoch": 1.31, "learning_rate": 1e-05, "loss": 0.2387, "step": 434 },
    { "epoch": 1.31, "learning_rate": 1e-05, "loss": 0.2283, "step": 436 },
    { "epoch": 1.32, "learning_rate": 1e-05, "loss": 0.2358, "step": 438 },
    { "epoch": 1.33, "learning_rate": 1e-05, "loss": 0.2271, "step": 440 },
    { "epoch": 1.33, "learning_rate": 1e-05, "loss": 0.2403, "step": 442 },
    { "epoch": 1.34, "learning_rate": 1e-05, "loss": 0.2365, "step": 444 },
    { "epoch": 1.34, "learning_rate": 1e-05, "loss": 0.2206, "step": 446 },
    { "epoch": 1.35, "learning_rate": 1e-05, "loss": 0.2329, "step": 448 },
    { "epoch": 1.36, "learning_rate": 1e-05, "loss": 0.23, "step": 450 },
    { "epoch": 1.36, "learning_rate": 1e-05, "loss": 0.2177, "step": 452 },
    { "epoch": 1.37, "learning_rate": 1e-05, "loss": 0.219, "step": 454 },
    { "epoch": 1.37, "learning_rate": 1e-05, "loss": 0.226, "step": 456 },
    { "epoch": 1.38, "learning_rate": 1e-05, "loss": 0.2267, "step": 458 },
    { "epoch": 1.39, "learning_rate": 1e-05, "loss": 0.2249, "step": 460 },
    { "epoch": 1.39, "learning_rate": 1e-05, "loss": 0.2368, "step": 462 },
    { "epoch": 1.4, "learning_rate": 1e-05, "loss": 0.2146, "step": 464 },
    { "epoch": 1.4, "learning_rate": 1e-05, "loss": 0.218, "step": 466 },
    { "epoch": 1.41, "learning_rate": 1e-05, "loss": 0.2304, "step": 468 },
    { "epoch": 1.42, "learning_rate": 1e-05, "loss": 0.2314, "step": 470 },
    { "epoch": 1.42, "learning_rate": 1e-05, "loss": 0.2295, "step": 472 },
    { "epoch": 1.43, "learning_rate": 1e-05, "loss": 0.22, "step": 474 },
    { "epoch": 1.43, "learning_rate": 1e-05, "loss": 0.2172, "step": 476 },
    { "epoch": 1.44, "learning_rate": 1e-05, "loss": 0.2244, "step": 478 },
    { "epoch": 1.45, "learning_rate": 1e-05, "loss": 0.2301, "step": 480 },
    { "epoch": 1.45, "learning_rate": 1e-05, "loss": 0.2069, "step": 482 },
    { "epoch": 1.46, "learning_rate": 1e-05, "loss": 0.2342, "step": 484 },
    { "epoch": 1.46, "learning_rate": 1e-05, "loss": 0.2162, "step": 486 },
    { "epoch": 1.47, "learning_rate": 1e-05, "loss": 0.2184, "step": 488 },
    { "epoch": 1.48, "learning_rate": 1e-05, "loss": 0.2369, "step": 490 },
    { "epoch": 1.48, "learning_rate": 1e-05, "loss": 0.2053, "step": 492 },
    { "epoch": 1.49, "learning_rate": 1e-05, "loss": 0.2148, "step": 494 },
    { "epoch": 1.49, "learning_rate": 1e-05, "loss": 0.227, "step": 496 },
    { "epoch": 1.5, "learning_rate": 1e-05, "loss": 0.2252, "step": 498 },
    { "epoch": 1.51, "learning_rate": 1e-05, "loss": 0.2183, "step": 500 },
    { "epoch": 1.51, "learning_rate": 1e-05, "loss": 0.2222, "step": 502 },
    { "epoch": 1.52, "learning_rate": 1e-05, "loss": 0.2158, "step": 504 },
    { "epoch": 1.52, "learning_rate": 1e-05, "loss": 0.1991, "step": 506 },
    { "epoch": 1.53, "learning_rate": 1e-05, "loss": 0.2093, "step": 508 },
    { "epoch": 1.54, "learning_rate": 1e-05, "loss": 0.2021, "step": 510 },
    { "epoch": 1.54, "learning_rate": 1e-05, "loss": 0.2392, "step": 512 },
    { "epoch": 1.55, "learning_rate": 1e-05, "loss": 0.2119, "step": 514 },
    { "epoch": 1.55, "learning_rate": 1e-05, "loss": 0.2261, "step": 516 },
    { "epoch": 1.56, "learning_rate": 1e-05, "loss": 0.2138, "step": 518 },
    { "epoch": 1.57, "learning_rate": 1e-05, "loss": 0.2029, "step": 520 },
    { "epoch": 1.57, "learning_rate": 1e-05, "loss": 0.2107, "step": 522 },
    { "epoch": 1.58, "learning_rate": 1e-05, "loss": 0.2078, "step": 524 },
    { "epoch": 1.58, "learning_rate": 1e-05, "loss": 0.2187, "step": 526 },
    { "epoch": 1.59, "learning_rate": 1e-05, "loss": 0.2174, "step": 528 },
    { "epoch": 1.6, "learning_rate": 1e-05, "loss": 0.2165, "step": 530 },
    { "epoch": 1.6, "learning_rate": 1e-05, "loss": 0.2098, "step": 532 },
    { "epoch": 1.61, "learning_rate": 1e-05, "loss": 0.199, "step": 534 },
    { "epoch": 1.61, "learning_rate": 1e-05, "loss": 0.2002, "step": 536 },
    { "epoch": 1.62, "learning_rate": 1e-05, "loss": 0.2137, "step": 538 },
    { "epoch": 1.63, "learning_rate": 1e-05, "loss": 0.2176, "step": 540 },
    { "epoch": 1.63, "learning_rate": 1e-05, "loss": 0.2153, "step": 542 },
    { "epoch": 1.64, "learning_rate": 1e-05, "loss": 0.2164, "step": 544 },
    { "epoch": 1.64, "learning_rate": 1e-05, "loss": 0.2305, "step": 546 },
    { "epoch": 1.65, "learning_rate": 1e-05, "loss": 0.2182, "step": 548 },
    { "epoch": 1.66, "learning_rate": 1e-05, "loss": 0.2228, "step": 550 },
    { "epoch": 1.66, "learning_rate": 1e-05, "loss": 0.2207, "step": 552 },
    { "epoch": 1.67, "learning_rate": 1e-05, "loss": 0.224, "step": 554 },
    { "epoch": 1.67, "learning_rate": 1e-05, "loss": 0.2201, "step": 556 },
    { "epoch": 1.68, "learning_rate": 1e-05, "loss": 0.2137, "step": 558 },
    { "epoch": 1.69, "learning_rate": 1e-05, "loss": 0.2097, "step": 560 },
    { "epoch": 1.69, "learning_rate": 1e-05, "loss": 0.2155, "step": 562 },
    { "epoch": 1.7, "learning_rate": 1e-05, "loss": 0.2166, "step": 564 },
    { "epoch": 1.7, "learning_rate": 1e-05, "loss": 0.2302, "step": 566 },
    { "epoch": 1.71, "learning_rate": 1e-05, "loss": 0.2078, "step": 568 },
    { "epoch": 1.72, "learning_rate": 1e-05, "loss": 0.1972, "step": 570 },
    { "epoch": 1.72, "learning_rate": 1e-05, "loss": 0.2198, "step": 572 },
    { "epoch": 1.73, "learning_rate": 1e-05, "loss": 0.213, "step": 574 },
    { "epoch": 1.73, "learning_rate": 1e-05, "loss": 0.2183, "step": 576 },
    { "epoch": 1.74, "learning_rate": 1e-05, "loss": 0.2044, "step": 578 },
    { "epoch": 1.75, "learning_rate": 1e-05, "loss": 0.2163, "step": 580 },
    { "epoch": 1.75, "learning_rate": 1e-05, "loss": 0.2031, "step": 582 },
    { "epoch": 1.76, "learning_rate": 1e-05, "loss": 0.2228, "step": 584 },
    { "epoch": 1.77, "learning_rate": 1e-05, "loss": 0.2271, "step": 586 },
    { "epoch": 1.77, "learning_rate": 1e-05, "loss": 0.2196, "step": 588 },
    { "epoch": 1.78, "learning_rate": 1e-05, "loss": 0.2134, "step": 590 },
    { "epoch": 1.78, "learning_rate": 1e-05, "loss": 0.2019, "step": 592 },
    { "epoch": 1.79, "learning_rate": 1e-05, "loss": 0.1986, "step": 594 },
    { "epoch": 1.8, "learning_rate": 1e-05, "loss": 0.1943, "step": 596 },
    { "epoch": 1.8, "learning_rate": 1e-05, "loss": 0.206, "step": 598 },
    { "epoch": 1.81, "learning_rate": 1e-05, "loss": 0.2154, "step": 600 },
    { "epoch": 1.81, "learning_rate": 1e-05, "loss": 0.2103, "step": 602 },
    { "epoch": 1.82, "learning_rate": 1e-05, "loss": 0.2167, "step": 604 },
    { "epoch": 1.83, "learning_rate": 1e-05, "loss": 0.209, "step": 606 },
    { "epoch": 1.83, "learning_rate": 1e-05, "loss": 0.2208, "step": 608 },
    { "epoch": 1.84, "learning_rate": 1e-05, "loss": 0.2252, "step": 610 },
    { "epoch": 1.84, "learning_rate": 1e-05, "loss": 0.2092, "step": 612 },
    { "epoch": 1.85, "learning_rate": 1e-05, "loss": 0.2099, "step": 614 },
    { "epoch": 1.86, "learning_rate": 1e-05, "loss": 0.1955, "step": 616 },
    { "epoch": 1.86, "learning_rate": 1e-05, "loss": 0.2176, "step": 618 },
    { "epoch": 1.87, "learning_rate": 1e-05, "loss": 0.2053, "step": 620 },
    { "epoch": 1.87, "learning_rate": 1e-05, "loss": 0.2119, "step": 622 },
    { "epoch": 1.88, "learning_rate": 1e-05, "loss": 0.1986, "step": 624 },
    { "epoch": 1.89, "learning_rate": 1e-05, "loss": 0.2136, "step": 626 },
    { "epoch": 1.89, "learning_rate": 1e-05, "loss": 0.2154, "step": 628 },
    { "epoch": 1.9, "learning_rate": 1e-05, "loss": 0.2091, "step": 630 },
    { "epoch": 1.9, "learning_rate": 1e-05, "loss": 0.2161, "step": 632 },
    { "epoch": 1.91, "learning_rate": 1e-05, "loss": 0.2107, "step": 634 },
    { "epoch": 1.92, "learning_rate": 1e-05, "loss": 0.1814, "step": 636 },
    { "epoch": 1.92, "learning_rate": 1e-05, "loss": 0.2097, "step": 638 },
    { "epoch": 1.93, "learning_rate": 1e-05, "loss": 0.2078, "step": 640 },
    { "epoch": 1.93, "learning_rate": 1e-05, "loss": 0.1985, "step": 642 },
    { "epoch": 1.94, "learning_rate": 1e-05, "loss": 0.2115, "step": 644 },
    { "epoch": 1.95, "learning_rate": 1e-05, "loss": 0.2128, "step": 646 },
    { "epoch": 1.95, "learning_rate": 1e-05, "loss": 0.2133, "step": 648 },
    { "epoch": 1.96, "learning_rate": 1e-05, "loss": 0.2109, "step": 650 },
    { "epoch": 1.96, "learning_rate": 1e-05, "loss": 0.2133, "step": 652 },
    { "epoch": 1.97, "learning_rate": 1e-05, "loss": 0.198, "step": 654 },
    { "epoch": 1.98, "learning_rate": 1e-05, "loss": 0.2068, "step": 656 },
    { "epoch": 1.98, "learning_rate": 1e-05, "loss": 0.2222, "step": 658 },
    { "epoch": 1.99, "learning_rate": 1e-05, "loss": 0.2035, "step": 660 },
    { "epoch": 1.99, "learning_rate": 1e-05, "loss": 0.2067, "step": 662 },
    { "epoch": 2.0, "learning_rate": 1e-05, "loss": 0.207, "step": 664 }
  ],
  "logging_steps": 2,
  "max_steps": 1992,
  "num_train_epochs": 6,
  "save_steps": 64.0,
  "total_flos": 148901009686528.0,
  "trial_name": null,
  "trial_params": null
}