{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.0,
  "eval_steps": 500,
  "global_step": 1750,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.011428571428571429,
      "grad_norm": 8.8125,
      "learning_rate": 1.9931428571428572e-05,
      "loss": 2.0244,
      "step": 10
    },
    {
      "epoch": 0.022857142857142857,
      "grad_norm": 12.4375,
      "learning_rate": 1.9855238095238097e-05,
      "loss": 2.0268,
      "step": 20
    },
    {
      "epoch": 0.03428571428571429,
      "grad_norm": 5.6875,
      "learning_rate": 1.977904761904762e-05,
      "loss": 2.0396,
      "step": 30
    },
    {
      "epoch": 0.045714285714285714,
      "grad_norm": 6.03125,
      "learning_rate": 1.9702857142857144e-05,
      "loss": 1.8937,
      "step": 40
    },
    {
      "epoch": 0.05714285714285714,
      "grad_norm": 10.0,
      "learning_rate": 1.9626666666666666e-05,
      "loss": 1.8841,
      "step": 50
    },
    {
      "epoch": 0.06857142857142857,
      "grad_norm": 8.625,
      "learning_rate": 1.955047619047619e-05,
      "loss": 1.8955,
      "step": 60
    },
    {
      "epoch": 0.08,
      "grad_norm": 8.1875,
      "learning_rate": 1.9474285714285717e-05,
      "loss": 1.8829,
      "step": 70
    },
    {
      "epoch": 0.09142857142857143,
      "grad_norm": 5.5625,
      "learning_rate": 1.9398095238095242e-05,
      "loss": 1.9389,
      "step": 80
    },
    {
      "epoch": 0.10285714285714286,
      "grad_norm": 5.625,
      "learning_rate": 1.9321904761904764e-05,
      "loss": 1.9335,
      "step": 90
    },
    {
      "epoch": 0.11428571428571428,
      "grad_norm": 11.4375,
      "learning_rate": 1.924571428571429e-05,
      "loss": 1.8842,
      "step": 100
    },
    {
      "epoch": 0.12571428571428572,
      "grad_norm": 6.03125,
      "learning_rate": 1.916952380952381e-05,
      "loss": 1.9089,
      "step": 110
    },
    {
      "epoch": 0.13714285714285715,
      "grad_norm": 9.625,
      "learning_rate": 1.9093333333333336e-05,
      "loss": 1.7602,
      "step": 120
    },
    {
      "epoch": 0.14857142857142858,
      "grad_norm": 9.4375,
      "learning_rate": 1.9017142857142858e-05,
      "loss": 1.721,
      "step": 130
    },
    {
      "epoch": 0.16,
      "grad_norm": 10.375,
      "learning_rate": 1.8940952380952383e-05,
      "loss": 1.8059,
      "step": 140
    },
    {
      "epoch": 0.17142857142857143,
      "grad_norm": 9.0625,
      "learning_rate": 1.8864761904761905e-05,
      "loss": 1.7363,
      "step": 150
    },
    {
      "epoch": 0.18285714285714286,
      "grad_norm": 11.0625,
      "learning_rate": 1.878857142857143e-05,
      "loss": 1.7634,
      "step": 160
    },
    {
      "epoch": 0.19428571428571428,
      "grad_norm": 5.46875,
      "learning_rate": 1.871238095238095e-05,
      "loss": 1.7298,
      "step": 170
    },
    {
      "epoch": 0.2057142857142857,
      "grad_norm": 5.375,
      "learning_rate": 1.8636190476190477e-05,
      "loss": 1.7439,
      "step": 180
    },
    {
      "epoch": 0.21714285714285714,
      "grad_norm": 8.125,
      "learning_rate": 1.8560000000000002e-05,
      "loss": 1.8381,
      "step": 190
    },
    {
      "epoch": 0.22857142857142856,
      "grad_norm": 7.65625,
      "learning_rate": 1.8483809523809527e-05,
      "loss": 1.6778,
      "step": 200
    },
    {
      "epoch": 0.24,
      "grad_norm": 9.25,
      "learning_rate": 1.840761904761905e-05,
      "loss": 1.6378,
      "step": 210
    },
    {
      "epoch": 0.25142857142857145,
      "grad_norm": 8.25,
      "learning_rate": 1.8331428571428574e-05,
      "loss": 1.6447,
      "step": 220
    },
    {
      "epoch": 0.26285714285714284,
      "grad_norm": 10.9375,
      "learning_rate": 1.8255238095238096e-05,
      "loss": 1.7685,
      "step": 230
    },
    {
      "epoch": 0.2742857142857143,
      "grad_norm": 11.0,
      "learning_rate": 1.817904761904762e-05,
      "loss": 1.6609,
      "step": 240
    },
    {
      "epoch": 0.2857142857142857,
      "grad_norm": 4.75,
      "learning_rate": 1.8102857142857143e-05,
      "loss": 1.6822,
      "step": 250
    },
    {
      "epoch": 0.29714285714285715,
      "grad_norm": 7.28125,
      "learning_rate": 1.8026666666666668e-05,
      "loss": 1.6153,
      "step": 260
    },
    {
      "epoch": 0.30857142857142855,
      "grad_norm": 7.9375,
      "learning_rate": 1.7950476190476193e-05,
      "loss": 1.7153,
      "step": 270
    },
    {
      "epoch": 0.32,
      "grad_norm": 9.0625,
      "learning_rate": 1.7874285714285715e-05,
      "loss": 1.7104,
      "step": 280
    },
    {
      "epoch": 0.3314285714285714,
      "grad_norm": 7.90625,
      "learning_rate": 1.779809523809524e-05,
      "loss": 1.6752,
      "step": 290
    },
    {
      "epoch": 0.34285714285714286,
      "grad_norm": 7.03125,
      "learning_rate": 1.7721904761904762e-05,
      "loss": 1.6701,
      "step": 300
    },
    {
      "epoch": 0.35428571428571426,
      "grad_norm": 7.5625,
      "learning_rate": 1.7645714285714287e-05,
      "loss": 1.6393,
      "step": 310
    },
    {
      "epoch": 0.3657142857142857,
      "grad_norm": 7.65625,
      "learning_rate": 1.7569523809523812e-05,
      "loss": 1.5169,
      "step": 320
    },
    {
      "epoch": 0.37714285714285717,
      "grad_norm": 4.6875,
      "learning_rate": 1.7493333333333334e-05,
      "loss": 1.5707,
      "step": 330
    },
    {
      "epoch": 0.38857142857142857,
      "grad_norm": 4.78125,
      "learning_rate": 1.741714285714286e-05,
      "loss": 1.5962,
      "step": 340
    },
    {
      "epoch": 0.4,
      "grad_norm": 7.59375,
      "learning_rate": 1.734095238095238e-05,
      "loss": 1.7404,
      "step": 350
    },
    {
      "epoch": 0.4114285714285714,
      "grad_norm": 5.21875,
      "learning_rate": 1.7264761904761906e-05,
      "loss": 1.6232,
      "step": 360
    },
    {
      "epoch": 0.4228571428571429,
      "grad_norm": 4.46875,
      "learning_rate": 1.718857142857143e-05,
      "loss": 1.7321,
      "step": 370
    },
    {
      "epoch": 0.4342857142857143,
      "grad_norm": 4.40625,
      "learning_rate": 1.7112380952380953e-05,
      "loss": 1.5638,
      "step": 380
    },
    {
      "epoch": 0.44571428571428573,
      "grad_norm": 4.625,
      "learning_rate": 1.703619047619048e-05,
      "loss": 1.6591,
      "step": 390
    },
    {
      "epoch": 0.45714285714285713,
      "grad_norm": 9.0625,
      "learning_rate": 1.696e-05,
      "loss": 1.5868,
      "step": 400
    },
    {
      "epoch": 0.4685714285714286,
      "grad_norm": 9.125,
      "learning_rate": 1.6883809523809525e-05,
      "loss": 1.6323,
      "step": 410
    },
    {
      "epoch": 0.48,
      "grad_norm": 7.5,
      "learning_rate": 1.6807619047619047e-05,
      "loss": 1.6079,
      "step": 420
    },
    {
      "epoch": 0.49142857142857144,
      "grad_norm": 9.9375,
      "learning_rate": 1.6731428571428572e-05,
      "loss": 1.6279,
      "step": 430
    },
    {
      "epoch": 0.5028571428571429,
      "grad_norm": 4.90625,
      "learning_rate": 1.6655238095238098e-05,
      "loss": 1.6033,
      "step": 440
    },
    {
      "epoch": 0.5142857142857142,
      "grad_norm": 9.375,
      "learning_rate": 1.6579047619047623e-05,
      "loss": 1.5813,
      "step": 450
    },
    {
      "epoch": 0.5257142857142857,
      "grad_norm": 12.625,
      "learning_rate": 1.6502857142857145e-05,
      "loss": 1.7047,
      "step": 460
    },
    {
      "epoch": 0.5371428571428571,
      "grad_norm": 7.75,
      "learning_rate": 1.642666666666667e-05,
      "loss": 1.6662,
      "step": 470
    },
    {
      "epoch": 0.5485714285714286,
      "grad_norm": 7.03125,
      "learning_rate": 1.635047619047619e-05,
      "loss": 1.618,
      "step": 480
    },
    {
      "epoch": 0.56,
      "grad_norm": 5.1875,
      "learning_rate": 1.6274285714285717e-05,
      "loss": 1.5469,
      "step": 490
    },
    {
      "epoch": 0.5714285714285714,
      "grad_norm": 11.0625,
      "learning_rate": 1.619809523809524e-05,
      "loss": 1.5454,
      "step": 500
    },
    {
      "epoch": 0.5828571428571429,
      "grad_norm": 7.84375,
      "learning_rate": 1.6121904761904764e-05,
      "loss": 1.4719,
      "step": 510
    },
    {
      "epoch": 0.5942857142857143,
      "grad_norm": 4.53125,
      "learning_rate": 1.6045714285714286e-05,
      "loss": 1.4144,
      "step": 520
    },
    {
      "epoch": 0.6057142857142858,
      "grad_norm": 8.125,
      "learning_rate": 1.596952380952381e-05,
      "loss": 1.5868,
      "step": 530
    },
    {
      "epoch": 0.6171428571428571,
      "grad_norm": 6.9375,
      "learning_rate": 1.5893333333333333e-05,
      "loss": 1.5727,
      "step": 540
    },
    {
      "epoch": 0.6285714285714286,
      "grad_norm": 9.3125,
      "learning_rate": 1.5817142857142858e-05,
      "loss": 1.6083,
      "step": 550
    },
    {
      "epoch": 0.64,
      "grad_norm": 11.6875,
      "learning_rate": 1.5740952380952383e-05,
      "loss": 1.6169,
      "step": 560
    },
    {
      "epoch": 0.6514285714285715,
      "grad_norm": 7.84375,
      "learning_rate": 1.5664761904761908e-05,
      "loss": 1.6161,
      "step": 570
    },
    {
      "epoch": 0.6628571428571428,
      "grad_norm": 7.5,
      "learning_rate": 1.558857142857143e-05,
      "loss": 1.5572,
      "step": 580
    },
    {
      "epoch": 0.6742857142857143,
      "grad_norm": 11.375,
      "learning_rate": 1.5512380952380955e-05,
      "loss": 1.4882,
      "step": 590
    },
    {
      "epoch": 0.6857142857142857,
      "grad_norm": 7.59375,
      "learning_rate": 1.5436190476190477e-05,
      "loss": 1.6175,
      "step": 600
    },
    {
      "epoch": 0.6971428571428572,
      "grad_norm": 4.59375,
      "learning_rate": 1.5360000000000002e-05,
      "loss": 1.5326,
      "step": 610
    },
    {
      "epoch": 0.7085714285714285,
      "grad_norm": 4.3125,
      "learning_rate": 1.5283809523809524e-05,
      "loss": 1.5323,
      "step": 620
    },
    {
      "epoch": 0.72,
      "grad_norm": 4.53125,
      "learning_rate": 1.5207619047619049e-05,
      "loss": 1.5665,
      "step": 630
    },
    {
      "epoch": 0.7314285714285714,
      "grad_norm": 11.25,
      "learning_rate": 1.5131428571428572e-05,
      "loss": 1.4393,
      "step": 640
    },
    {
      "epoch": 0.7428571428571429,
      "grad_norm": 7.03125,
      "learning_rate": 1.5055238095238096e-05,
      "loss": 1.528,
      "step": 650
    },
    {
      "epoch": 0.7542857142857143,
      "grad_norm": 7.40625,
      "learning_rate": 1.497904761904762e-05,
      "loss": 1.5457,
      "step": 660
    },
    {
      "epoch": 0.7657142857142857,
      "grad_norm": 9.4375,
      "learning_rate": 1.4902857142857143e-05,
      "loss": 1.5291,
      "step": 670
    },
    {
      "epoch": 0.7771428571428571,
      "grad_norm": 9.375,
      "learning_rate": 1.4826666666666666e-05,
      "loss": 1.5229,
      "step": 680
    },
    {
      "epoch": 0.7885714285714286,
      "grad_norm": 4.46875,
      "learning_rate": 1.475047619047619e-05,
      "loss": 1.4647,
      "step": 690
    },
    {
      "epoch": 0.8,
      "grad_norm": 4.03125,
      "learning_rate": 1.4674285714285717e-05,
      "loss": 1.5565,
      "step": 700
    },
    {
      "epoch": 0.8114285714285714,
      "grad_norm": 7.375,
      "learning_rate": 1.459809523809524e-05,
      "loss": 1.5298,
      "step": 710
    },
    {
      "epoch": 0.8228571428571428,
      "grad_norm": 9.625,
      "learning_rate": 1.4521904761904764e-05,
      "loss": 1.5642,
      "step": 720
    },
    {
      "epoch": 0.8342857142857143,
      "grad_norm": 7.4375,
      "learning_rate": 1.4445714285714287e-05,
      "loss": 1.5435,
      "step": 730
    },
    {
      "epoch": 0.8457142857142858,
      "grad_norm": 12.375,
      "learning_rate": 1.436952380952381e-05,
      "loss": 1.4951,
      "step": 740
    },
    {
      "epoch": 0.8571428571428571,
      "grad_norm": 7.9375,
      "learning_rate": 1.4293333333333334e-05,
      "loss": 1.5446,
      "step": 750
    },
    {
      "epoch": 0.8685714285714285,
      "grad_norm": 7.21875,
      "learning_rate": 1.4217142857142858e-05,
      "loss": 1.5318,
      "step": 760
    },
    {
      "epoch": 0.88,
      "grad_norm": 7.1875,
      "learning_rate": 1.4140952380952381e-05,
      "loss": 1.5279,
      "step": 770
    },
    {
      "epoch": 0.8914285714285715,
      "grad_norm": 7.09375,
      "learning_rate": 1.4064761904761905e-05,
      "loss": 1.4712,
      "step": 780
    },
    {
      "epoch": 0.9028571428571428,
      "grad_norm": 7.46875,
      "learning_rate": 1.398857142857143e-05,
      "loss": 1.566,
      "step": 790
    },
    {
      "epoch": 0.9142857142857143,
      "grad_norm": 6.40625,
      "learning_rate": 1.3912380952380953e-05,
      "loss": 1.4607,
      "step": 800
    },
    {
      "epoch": 0.9257142857142857,
      "grad_norm": 7.6875,
      "learning_rate": 1.3836190476190477e-05,
      "loss": 1.4935,
      "step": 810
    },
    {
      "epoch": 0.9371428571428572,
      "grad_norm": 4.84375,
      "learning_rate": 1.376e-05,
      "loss": 1.4306,
      "step": 820
    },
    {
      "epoch": 0.9485714285714286,
      "grad_norm": 9.75,
      "learning_rate": 1.3683809523809526e-05,
      "loss": 1.4454,
      "step": 830
    },
    {
      "epoch": 0.96,
      "grad_norm": 7.5,
      "learning_rate": 1.3607619047619049e-05,
      "loss": 1.4285,
      "step": 840
    },
    {
      "epoch": 0.9714285714285714,
      "grad_norm": 4.25,
      "learning_rate": 1.3531428571428573e-05,
      "loss": 1.5716,
      "step": 850
    },
    {
      "epoch": 0.9828571428571429,
      "grad_norm": 10.8125,
      "learning_rate": 1.3455238095238096e-05,
      "loss": 1.5742,
      "step": 860
    },
    {
      "epoch": 0.9942857142857143,
      "grad_norm": 4.375,
      "learning_rate": 1.3379047619047621e-05,
      "loss": 1.5958,
      "step": 870
    },
    {
      "epoch": 1.0057142857142858,
      "grad_norm": 10.3125,
      "learning_rate": 1.3302857142857145e-05,
      "loss": 1.4893,
      "step": 880
    },
    {
      "epoch": 1.0171428571428571,
      "grad_norm": 6.6875,
      "learning_rate": 1.3226666666666668e-05,
      "loss": 1.4789,
      "step": 890
    },
    {
      "epoch": 1.0285714285714285,
      "grad_norm": 9.0625,
      "learning_rate": 1.3150476190476192e-05,
      "loss": 1.4813,
      "step": 900
    },
    {
      "epoch": 1.04,
      "grad_norm": 7.21875,
      "learning_rate": 1.3074285714285715e-05,
      "loss": 1.4797,
      "step": 910
    },
    {
      "epoch": 1.0514285714285714,
      "grad_norm": 7.21875,
      "learning_rate": 1.2998095238095239e-05,
      "loss": 1.5455,
      "step": 920
    },
    {
      "epoch": 1.062857142857143,
      "grad_norm": 10.8125,
      "learning_rate": 1.2921904761904762e-05,
      "loss": 1.44,
      "step": 930
    },
    {
      "epoch": 1.0742857142857143,
      "grad_norm": 4.21875,
      "learning_rate": 1.2845714285714286e-05,
      "loss": 1.53,
      "step": 940
    },
    {
      "epoch": 1.0857142857142856,
      "grad_norm": 6.625,
      "learning_rate": 1.2769523809523811e-05,
      "loss": 1.4885,
      "step": 950
    },
    {
      "epoch": 1.0971428571428572,
      "grad_norm": 9.8125,
      "learning_rate": 1.2693333333333336e-05,
      "loss": 1.4834,
      "step": 960
    },
    {
      "epoch": 1.1085714285714285,
      "grad_norm": 7.59375,
      "learning_rate": 1.261714285714286e-05,
      "loss": 1.4658,
      "step": 970
    },
    {
      "epoch": 1.12,
      "grad_norm": 9.9375,
      "learning_rate": 1.2540952380952383e-05,
      "loss": 1.5289,
      "step": 980
    },
    {
      "epoch": 1.1314285714285715,
      "grad_norm": 4.5625,
      "learning_rate": 1.2464761904761907e-05,
      "loss": 1.5231,
      "step": 990
    },
    {
      "epoch": 1.1428571428571428,
      "grad_norm": 4.34375,
      "learning_rate": 1.238857142857143e-05,
      "loss": 1.5437,
      "step": 1000
    },
    {
      "epoch": 1.1542857142857144,
      "grad_norm": 8.125,
      "learning_rate": 1.2312380952380953e-05,
      "loss": 1.4484,
      "step": 1010
    },
    {
      "epoch": 1.1657142857142857,
      "grad_norm": 7.5,
      "learning_rate": 1.2236190476190477e-05,
      "loss": 1.5001,
      "step": 1020
    },
    {
      "epoch": 1.177142857142857,
      "grad_norm": 7.84375,
      "learning_rate": 1.216e-05,
      "loss": 1.5136,
      "step": 1030
    },
    {
      "epoch": 1.1885714285714286,
      "grad_norm": 10.1875,
      "learning_rate": 1.2083809523809524e-05,
      "loss": 1.5088,
      "step": 1040
    },
    {
      "epoch": 1.2,
      "grad_norm": 6.6875,
      "learning_rate": 1.2007619047619047e-05,
      "loss": 1.4638,
      "step": 1050
    },
    {
      "epoch": 1.2114285714285715,
      "grad_norm": 4.15625,
      "learning_rate": 1.1931428571428571e-05,
      "loss": 1.535,
      "step": 1060
    },
    {
      "epoch": 1.2228571428571429,
      "grad_norm": 4.625,
      "learning_rate": 1.1855238095238094e-05,
      "loss": 1.5093,
      "step": 1070
    },
    {
      "epoch": 1.2342857142857142,
      "grad_norm": 9.5,
      "learning_rate": 1.1779047619047621e-05,
      "loss": 1.48,
      "step": 1080
    },
    {
      "epoch": 1.2457142857142858,
      "grad_norm": 6.84375,
      "learning_rate": 1.1702857142857145e-05,
      "loss": 1.4696,
      "step": 1090
    },
    {
      "epoch": 1.2571428571428571,
      "grad_norm": 7.5,
      "learning_rate": 1.1626666666666668e-05,
      "loss": 1.4727,
      "step": 1100
    },
    {
      "epoch": 1.2685714285714287,
      "grad_norm": 6.59375,
      "learning_rate": 1.1550476190476192e-05,
      "loss": 1.5753,
      "step": 1110
    },
    {
      "epoch": 1.28,
      "grad_norm": 8.125,
      "learning_rate": 1.1474285714285715e-05,
      "loss": 1.4507,
      "step": 1120
    },
    {
      "epoch": 1.2914285714285714,
      "grad_norm": 3.859375,
      "learning_rate": 1.1398095238095239e-05,
      "loss": 1.3666,
      "step": 1130
    },
    {
      "epoch": 1.302857142857143,
      "grad_norm": 6.25,
      "learning_rate": 1.1321904761904762e-05,
      "loss": 1.4768,
      "step": 1140
    },
    {
      "epoch": 1.3142857142857143,
      "grad_norm": 4.5625,
      "learning_rate": 1.1245714285714286e-05,
      "loss": 1.5304,
      "step": 1150
    },
    {
      "epoch": 1.3257142857142856,
      "grad_norm": 8.1875,
      "learning_rate": 1.1169523809523811e-05,
      "loss": 1.5348,
      "step": 1160
    },
    {
      "epoch": 1.3371428571428572,
      "grad_norm": 13.375,
      "learning_rate": 1.1093333333333334e-05,
      "loss": 1.4743,
      "step": 1170
    },
    {
      "epoch": 1.3485714285714285,
      "grad_norm": 6.6875,
      "learning_rate": 1.1017142857142858e-05,
      "loss": 1.3598,
      "step": 1180
    },
    {
      "epoch": 1.3599999999999999,
      "grad_norm": 7.875,
      "learning_rate": 1.0940952380952381e-05,
      "loss": 1.4255,
      "step": 1190
    },
    {
      "epoch": 1.3714285714285714,
      "grad_norm": 9.6875,
      "learning_rate": 1.0864761904761905e-05,
      "loss": 1.6216,
      "step": 1200
    },
    {
      "epoch": 1.3828571428571428,
      "grad_norm": 11.1875,
      "learning_rate": 1.078857142857143e-05,
      "loss": 1.4601,
      "step": 1210
    },
    {
      "epoch": 1.3942857142857144,
      "grad_norm": 9.25,
      "learning_rate": 1.0712380952380954e-05,
      "loss": 1.4096,
      "step": 1220
    },
    {
      "epoch": 1.4057142857142857,
      "grad_norm": 7.8125,
      "learning_rate": 1.0636190476190477e-05,
      "loss": 1.52,
      "step": 1230
    },
    {
      "epoch": 1.4171428571428573,
      "grad_norm": 5.0,
      "learning_rate": 1.056e-05,
      "loss": 1.3965,
      "step": 1240
    },
    {
      "epoch": 1.4285714285714286,
      "grad_norm": 8.875,
      "learning_rate": 1.0483809523809526e-05,
      "loss": 1.4751,
      "step": 1250
    },
    {
      "epoch": 1.44,
      "grad_norm": 10.0625,
      "learning_rate": 1.040761904761905e-05,
      "loss": 1.451,
      "step": 1260
    },
    {
      "epoch": 1.4514285714285715,
      "grad_norm": 7.5,
      "learning_rate": 1.0331428571428573e-05,
      "loss": 1.4057,
      "step": 1270
    },
    {
      "epoch": 1.4628571428571429,
      "grad_norm": 9.0,
      "learning_rate": 1.0255238095238096e-05,
      "loss": 1.4995,
      "step": 1280
    },
    {
      "epoch": 1.4742857142857142,
      "grad_norm": 6.3125,
      "learning_rate": 1.017904761904762e-05,
      "loss": 1.3841,
      "step": 1290
    },
    {
      "epoch": 1.4857142857142858,
      "grad_norm": 8.3125,
      "learning_rate": 1.0102857142857143e-05,
      "loss": 1.5022,
      "step": 1300
    },
    {
      "epoch": 1.497142857142857,
      "grad_norm": 7.1875,
      "learning_rate": 1.0026666666666667e-05,
      "loss": 1.416,
      "step": 1310
    },
    {
      "epoch": 1.5085714285714285,
      "grad_norm": 4.1875,
      "learning_rate": 9.950476190476192e-06,
      "loss": 1.5316,
      "step": 1320
    },
    {
      "epoch": 1.52,
      "grad_norm": 10.375,
      "learning_rate": 9.874285714285715e-06,
      "loss": 1.5099,
      "step": 1330
    },
    {
      "epoch": 1.5314285714285716,
      "grad_norm": 9.9375,
      "learning_rate": 9.798095238095239e-06,
      "loss": 1.463,
      "step": 1340
    },
    {
      "epoch": 1.5428571428571427,
      "grad_norm": 4.3125,
      "learning_rate": 9.721904761904762e-06,
      "loss": 1.4075,
      "step": 1350
    },
    {
      "epoch": 1.5542857142857143,
      "grad_norm": 6.78125,
      "learning_rate": 9.645714285714286e-06,
      "loss": 1.437,
      "step": 1360
    },
    {
      "epoch": 1.5657142857142858,
      "grad_norm": 9.6875,
      "learning_rate": 9.569523809523811e-06,
      "loss": 1.4965,
      "step": 1370
    },
    {
      "epoch": 1.5771428571428572,
      "grad_norm": 9.875,
      "learning_rate": 9.493333333333334e-06,
      "loss": 1.3657,
      "step": 1380
    },
    {
      "epoch": 1.5885714285714285,
      "grad_norm": 6.875,
      "learning_rate": 9.417142857142858e-06,
      "loss": 1.5096,
      "step": 1390
    },
    {
      "epoch": 1.6,
      "grad_norm": 7.03125,
      "learning_rate": 9.340952380952381e-06,
      "loss": 1.4665,
      "step": 1400
    },
    {
      "epoch": 1.6114285714285714,
      "grad_norm": 4.21875,
      "learning_rate": 9.264761904761905e-06,
      "loss": 1.4288,
      "step": 1410
    },
    {
      "epoch": 1.6228571428571428,
      "grad_norm": 6.0625,
      "learning_rate": 9.188571428571428e-06,
      "loss": 1.3966,
      "step": 1420
    },
    {
      "epoch": 1.6342857142857143,
      "grad_norm": 7.03125,
      "learning_rate": 9.112380952380954e-06,
      "loss": 1.376,
      "step": 1430
    },
    {
      "epoch": 1.6457142857142857,
      "grad_norm": 8.625,
      "learning_rate": 9.036190476190477e-06,
      "loss": 1.4886,
      "step": 1440
    },
    {
      "epoch": 1.657142857142857,
      "grad_norm": 7.03125,
      "learning_rate": 8.96e-06,
      "loss": 1.449,
      "step": 1450
    },
    {
      "epoch": 1.6685714285714286,
      "grad_norm": 4.46875,
      "learning_rate": 8.883809523809524e-06,
      "loss": 1.5675,
      "step": 1460
    },
    {
      "epoch": 1.6800000000000002,
      "grad_norm": 6.65625,
      "learning_rate": 8.807619047619048e-06,
      "loss": 1.4398,
      "step": 1470
    },
    {
      "epoch": 1.6914285714285713,
      "grad_norm": 7.875,
      "learning_rate": 8.731428571428571e-06,
      "loss": 1.4698,
      "step": 1480
    },
    {
      "epoch": 1.7028571428571428,
      "grad_norm": 6.5625,
      "learning_rate": 8.655238095238096e-06,
      "loss": 1.4593,
      "step": 1490
    },
    {
      "epoch": 1.7142857142857144,
      "grad_norm": 4.3125,
      "learning_rate": 8.57904761904762e-06,
      "loss": 1.4622,
      "step": 1500
    },
    {
      "epoch": 1.7257142857142858,
      "grad_norm": 4.34375,
      "learning_rate": 8.502857142857143e-06,
      "loss": 1.5299,
      "step": 1510
    },
    {
      "epoch": 1.737142857142857,
      "grad_norm": 4.34375,
      "learning_rate": 8.426666666666667e-06,
      "loss": 1.4833,
      "step": 1520
    },
    {
      "epoch": 1.7485714285714287,
      "grad_norm": 4.03125,
      "learning_rate": 8.35047619047619e-06,
      "loss": 1.3867,
      "step": 1530
    },
    {
      "epoch": 1.76,
      "grad_norm": 4.1875,
      "learning_rate": 8.274285714285715e-06,
      "loss": 1.434,
      "step": 1540
    },
    {
      "epoch": 1.7714285714285714,
      "grad_norm": 10.1875,
      "learning_rate": 8.198095238095239e-06,
      "loss": 1.4841,
      "step": 1550
    },
    {
      "epoch": 1.782857142857143,
      "grad_norm": 11.125,
      "learning_rate": 8.121904761904762e-06,
      "loss": 1.4636,
      "step": 1560
    },
    {
      "epoch": 1.7942857142857143,
      "grad_norm": 10.4375,
      "learning_rate": 8.045714285714286e-06,
      "loss": 1.4761,
      "step": 1570
    },
    {
      "epoch": 1.8057142857142856,
      "grad_norm": 10.1875,
      "learning_rate": 7.969523809523811e-06,
      "loss": 1.5066,
      "step": 1580
    },
    {
      "epoch": 1.8171428571428572,
      "grad_norm": 7.125,
      "learning_rate": 7.893333333333335e-06,
      "loss": 1.4319,
      "step": 1590
    },
    {
      "epoch": 1.8285714285714287,
      "grad_norm": 8.75,
      "learning_rate": 7.817142857142858e-06,
      "loss": 1.4641,
      "step": 1600
    },
    {
      "epoch": 1.8399999999999999,
      "grad_norm": 4.34375,
      "learning_rate": 7.740952380952382e-06,
      "loss": 1.4205,
      "step": 1610
    },
    {
      "epoch": 1.8514285714285714,
      "grad_norm": 9.9375,
      "learning_rate": 7.664761904761905e-06,
      "loss": 1.5164,
      "step": 1620
    },
    {
      "epoch": 1.862857142857143,
      "grad_norm": 6.875,
      "learning_rate": 7.588571428571429e-06,
      "loss": 1.5392,
      "step": 1630
    },
    {
      "epoch": 1.8742857142857143,
      "grad_norm": 4.34375,
      "learning_rate": 7.512380952380953e-06,
      "loss": 1.4799,
      "step": 1640
    },
    {
      "epoch": 1.8857142857142857,
      "grad_norm": 7.0,
      "learning_rate": 7.436190476190477e-06,
      "loss": 1.5299,
      "step": 1650
    },
    {
      "epoch": 1.8971428571428572,
      "grad_norm": 7.96875,
      "learning_rate": 7.360000000000001e-06,
      "loss": 1.4444,
      "step": 1660
    },
    {
      "epoch": 1.9085714285714286,
      "grad_norm": 4.34375,
      "learning_rate": 7.283809523809524e-06,
      "loss": 1.5101,
      "step": 1670
    },
    {
      "epoch": 1.92,
      "grad_norm": 7.15625,
      "learning_rate": 7.207619047619048e-06,
      "loss": 1.4646,
      "step": 1680
    },
    {
      "epoch": 1.9314285714285715,
      "grad_norm": 4.625,
      "learning_rate": 7.131428571428573e-06,
      "loss": 1.5244,
      "step": 1690
    },
    {
      "epoch": 1.9428571428571428,
      "grad_norm": 10.0625,
      "learning_rate": 7.055238095238096e-06,
      "loss": 1.4385,
      "step": 1700
    },
    {
      "epoch": 1.9542857142857142,
      "grad_norm": 7.78125,
      "learning_rate": 6.97904761904762e-06,
      "loss": 1.4275,
      "step": 1710
    },
    {
      "epoch": 1.9657142857142857,
      "grad_norm": 8.6875,
      "learning_rate": 6.902857142857143e-06,
      "loss": 1.449,
      "step": 1720
    },
    {
      "epoch": 1.977142857142857,
      "grad_norm": 4.4375,
      "learning_rate": 6.826666666666667e-06,
      "loss": 1.5125,
      "step": 1730
    },
    {
      "epoch": 1.9885714285714284,
      "grad_norm": 6.28125,
      "learning_rate": 6.75047619047619e-06,
      "loss": 1.3555,
      "step": 1740
    },
    {
      "epoch": 2.0,
      "grad_norm": 4.0625,
      "learning_rate": 6.6742857142857155e-06,
      "loss": 1.404,
      "step": 1750
    }
  ],
  "logging_steps": 10,
  "max_steps": 2625,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 1.080116057777111e+17,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}