{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 3.0,
  "eval_steps": 500,
  "global_step": 819,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.03669724770642202,
      "grad_norm": 45.576416015625,
      "learning_rate": 1.0975609756097562e-06,
      "loss": 1.7545,
      "step": 10
    },
    {
      "epoch": 0.07339449541284404,
      "grad_norm": 10.582867622375488,
      "learning_rate": 2.317073170731708e-06,
      "loss": 1.6752,
      "step": 20
    },
    {
      "epoch": 0.11009174311926606,
      "grad_norm": 9.395275115966797,
      "learning_rate": 3.5365853658536588e-06,
      "loss": 1.1488,
      "step": 30
    },
    {
      "epoch": 0.14678899082568808,
      "grad_norm": 10.377723693847656,
      "learning_rate": 4.75609756097561e-06,
      "loss": 1.0,
      "step": 40
    },
    {
      "epoch": 0.1834862385321101,
      "grad_norm": 8.154882431030273,
      "learning_rate": 5.9756097560975615e-06,
      "loss": 1.0514,
      "step": 50
    },
    {
      "epoch": 0.22018348623853212,
      "grad_norm": 8.073173522949219,
      "learning_rate": 7.1951219512195125e-06,
      "loss": 1.0813,
      "step": 60
    },
    {
      "epoch": 0.25688073394495414,
      "grad_norm": 5.49383020401001,
      "learning_rate": 8.414634146341464e-06,
      "loss": 1.147,
      "step": 70
    },
    {
      "epoch": 0.29357798165137616,
      "grad_norm": 6.546836853027344,
      "learning_rate": 9.634146341463415e-06,
      "loss": 0.9877,
      "step": 80
    },
    {
      "epoch": 0.3302752293577982,
      "grad_norm": 4.730407238006592,
      "learning_rate": 9.997774289701648e-06,
      "loss": 1.0414,
      "step": 90
    },
    {
      "epoch": 0.3669724770642202,
      "grad_norm": 6.055862903594971,
      "learning_rate": 9.986877621396878e-06,
      "loss": 1.0481,
      "step": 100
    },
    {
      "epoch": 0.4036697247706422,
      "grad_norm": 12.908507347106934,
      "learning_rate": 9.966920963089052e-06,
      "loss": 1.0144,
      "step": 110
    },
    {
      "epoch": 0.44036697247706424,
      "grad_norm": 10.134546279907227,
      "learning_rate": 9.937940571357636e-06,
      "loss": 0.9278,
      "step": 120
    },
    {
      "epoch": 0.47706422018348627,
      "grad_norm": 4.411650657653809,
      "learning_rate": 9.899989096794704e-06,
      "loss": 1.0083,
      "step": 130
    },
    {
      "epoch": 0.5137614678899083,
      "grad_norm": 5.51439094543457,
      "learning_rate": 9.853135488351135e-06,
      "loss": 0.9663,
      "step": 140
    },
    {
      "epoch": 0.5504587155963303,
      "grad_norm": 5.0115227699279785,
      "learning_rate": 9.797464868072489e-06,
      "loss": 0.7779,
      "step": 150
    },
    {
      "epoch": 0.5871559633027523,
      "grad_norm": 10.19095516204834,
      "learning_rate": 9.733078376452172e-06,
      "loss": 0.9394,
      "step": 160
    },
    {
      "epoch": 0.6238532110091743,
      "grad_norm": 5.4709553718566895,
      "learning_rate": 9.660092988682822e-06,
      "loss": 1.0285,
      "step": 170
    },
    {
      "epoch": 0.6605504587155964,
      "grad_norm": 6.946798801422119,
      "learning_rate": 9.578641302139741e-06,
      "loss": 1.0908,
      "step": 180
    },
    {
      "epoch": 0.6972477064220184,
      "grad_norm": 13.150769233703613,
      "learning_rate": 9.488871295482492e-06,
      "loss": 1.0413,
      "step": 190
    },
    {
      "epoch": 0.7339449541284404,
      "grad_norm": 7.462345123291016,
      "learning_rate": 9.390946059812289e-06,
      "loss": 0.8251,
      "step": 200
    },
    {
      "epoch": 0.7706422018348624,
      "grad_norm": 4.613824367523193,
      "learning_rate": 9.285043502373616e-06,
      "loss": 0.9724,
      "step": 210
    },
    {
      "epoch": 0.8073394495412844,
      "grad_norm": 6.828705787658691,
      "learning_rate": 9.171356023338395e-06,
      "loss": 1.0862,
      "step": 220
    },
    {
      "epoch": 0.8440366972477065,
      "grad_norm": 5.334705352783203,
      "learning_rate": 9.050090166259873e-06,
      "loss": 0.9361,
      "step": 230
    },
    {
      "epoch": 0.8807339449541285,
      "grad_norm": 6.761337757110596,
      "learning_rate": 8.921466242831305e-06,
      "loss": 1.0586,
      "step": 240
    },
    {
      "epoch": 0.9174311926605505,
      "grad_norm": 7.450784206390381,
      "learning_rate": 8.785717932631157e-06,
      "loss": 1.098,
      "step": 250
    },
    {
      "epoch": 0.9541284403669725,
      "grad_norm": 6.033744812011719,
      "learning_rate": 8.643091858581971e-06,
      "loss": 0.9883,
      "step": 260
    },
    {
      "epoch": 0.9908256880733946,
      "grad_norm": 3.838776111602783,
      "learning_rate": 8.49384713889421e-06,
      "loss": 0.9031,
      "step": 270
    },
    {
      "epoch": 1.0256880733944955,
      "grad_norm": 7.545208930969238,
      "learning_rate": 8.33825491630909e-06,
      "loss": 0.5494,
      "step": 280
    },
    {
      "epoch": 1.0623853211009173,
      "grad_norm": 8.683798789978027,
      "learning_rate": 8.176597865495654e-06,
      "loss": 0.3599,
      "step": 290
    },
    {
      "epoch": 1.0990825688073396,
      "grad_norm": 4.903615474700928,
      "learning_rate": 8.00916967949702e-06,
      "loss": 0.4398,
      "step": 300
    },
    {
      "epoch": 1.1357798165137614,
      "grad_norm": 6.409444332122803,
      "learning_rate": 7.836274536158834e-06,
      "loss": 0.4224,
      "step": 310
    },
    {
      "epoch": 1.1724770642201836,
      "grad_norm": 4.087878704071045,
      "learning_rate": 7.658226545509286e-06,
      "loss": 0.4323,
      "step": 320
    },
    {
      "epoch": 1.2091743119266054,
      "grad_norm": 5.2519707679748535,
      "learning_rate": 7.475349179094661e-06,
      "loss": 0.3882,
      "step": 330
    },
    {
      "epoch": 1.2458715596330276,
      "grad_norm": 6.870487689971924,
      "learning_rate": 7.287974682307226e-06,
      "loss": 0.3982,
      "step": 340
    },
    {
      "epoch": 1.2825688073394494,
      "grad_norm": 3.5328145027160645,
      "learning_rate": 7.096443470773071e-06,
      "loss": 0.4527,
      "step": 350
    },
    {
      "epoch": 1.3192660550458717,
      "grad_norm": 6.628715991973877,
      "learning_rate": 6.901103511896556e-06,
      "loss": 0.403,
      "step": 360
    },
    {
      "epoch": 1.3559633027522935,
      "grad_norm": 4.482091903686523,
      "learning_rate": 6.702309692684938e-06,
      "loss": 0.3596,
      "step": 370
    },
    {
      "epoch": 1.3926605504587157,
      "grad_norm": 3.9413254261016846,
      "learning_rate": 6.500423175001705e-06,
      "loss": 0.4125,
      "step": 380
    },
    {
      "epoch": 1.4293577981651375,
      "grad_norm": 4.185613632202148,
      "learning_rate": 6.2958107394199556e-06,
      "loss": 0.3864,
      "step": 390
    },
    {
      "epoch": 1.4660550458715598,
      "grad_norm": 5.547845363616943,
      "learning_rate": 6.088844118867912e-06,
      "loss": 0.4277,
      "step": 400
    },
    {
      "epoch": 1.5027522935779816,
      "grad_norm": 2.854694366455078,
      "learning_rate": 5.879899323277142e-06,
      "loss": 0.3528,
      "step": 410
    },
    {
      "epoch": 1.5394495412844038,
      "grad_norm": 3.989434003829956,
      "learning_rate": 5.669355956460483e-06,
      "loss": 0.4281,
      "step": 420
    },
    {
      "epoch": 1.5761467889908256,
      "grad_norm": 3.780809164047241,
      "learning_rate": 5.457596526460722e-06,
      "loss": 0.3896,
      "step": 430
    },
    {
      "epoch": 1.6128440366972479,
      "grad_norm": 5.487133979797363,
      "learning_rate": 5.245005750622961e-06,
      "loss": 0.3418,
      "step": 440
    },
    {
      "epoch": 1.6495412844036696,
      "grad_norm": 4.815535545349121,
      "learning_rate": 5.031969856653204e-06,
      "loss": 0.3813,
      "step": 450
    },
    {
      "epoch": 1.686238532110092,
      "grad_norm": 4.234831809997559,
      "learning_rate": 4.818875880932967e-06,
      "loss": 0.4144,
      "step": 460
    },
    {
      "epoch": 1.7229357798165137,
      "grad_norm": 5.422831058502197,
      "learning_rate": 4.606110965364721e-06,
      "loss": 0.3708,
      "step": 470
    },
    {
      "epoch": 1.759633027522936,
      "grad_norm": 4.2414445877075195,
      "learning_rate": 4.394061654025622e-06,
      "loss": 0.474,
      "step": 480
    },
    {
      "epoch": 1.7963302752293577,
      "grad_norm": 4.653387069702148,
      "learning_rate": 4.183113190907349e-06,
      "loss": 0.3914,
      "step": 490
    },
    {
      "epoch": 1.83302752293578,
      "grad_norm": 3.6565375328063965,
      "learning_rate": 3.9736488200179e-06,
      "loss": 0.3545,
      "step": 500
    },
    {
      "epoch": 1.8697247706422018,
      "grad_norm": 4.018605709075928,
      "learning_rate": 3.7660490891168805e-06,
      "loss": 0.2496,
      "step": 510
    },
    {
      "epoch": 1.906422018348624,
      "grad_norm": 4.249181747436523,
      "learning_rate": 3.5606911583492554e-06,
      "loss": 0.3805,
      "step": 520
    },
    {
      "epoch": 1.9431192660550458,
      "grad_norm": 4.361818790435791,
      "learning_rate": 3.3579481150335914e-06,
      "loss": 0.3653,
      "step": 530
    },
    {
      "epoch": 1.979816513761468,
      "grad_norm": 6.0414347648620605,
      "learning_rate": 3.158188295849689e-06,
      "loss": 0.4381,
      "step": 540
    },
    {
      "epoch": 2.014678899082569,
      "grad_norm": 4.7412800788879395,
      "learning_rate": 2.9617746176569985e-06,
      "loss": 0.3913,
      "step": 550
    },
    {
      "epoch": 2.051376146788991,
      "grad_norm": 3.8450918197631836,
      "learning_rate": 2.769063918159588e-06,
      "loss": 0.1341,
      "step": 560
    },
    {
      "epoch": 2.088073394495413,
      "grad_norm": 3.0494792461395264,
      "learning_rate": 2.5804063076155143e-06,
      "loss": 0.1474,
      "step": 570
    },
    {
      "epoch": 2.1247706422018346,
      "grad_norm": 3.925137996673584,
      "learning_rate": 2.3961445327683914e-06,
      "loss": 0.0981,
      "step": 580
    },
    {
      "epoch": 2.161467889908257,
      "grad_norm": 2.9667370319366455,
      "learning_rate": 2.2166133541567376e-06,
      "loss": 0.0771,
      "step": 590
    },
    {
      "epoch": 2.198165137614679,
      "grad_norm": 8.277497291564941,
      "learning_rate": 2.042138937932388e-06,
      "loss": 0.1397,
      "step": 600
    },
    {
      "epoch": 2.234862385321101,
      "grad_norm": 3.4491753578186035,
      "learning_rate": 1.8730382632929107e-06,
      "loss": 0.1724,
      "step": 610
    },
    {
      "epoch": 2.2715596330275227,
      "grad_norm": 3.367347240447998,
      "learning_rate": 1.7096185466045446e-06,
      "loss": 0.1204,
      "step": 620
    },
    {
      "epoch": 2.308256880733945,
      "grad_norm": 4.588388919830322,
      "learning_rate": 1.5521766832619472e-06,
      "loss": 0.1063,
      "step": 630
    },
    {
      "epoch": 2.344954128440367,
      "grad_norm": 12.237441062927246,
      "learning_rate": 1.4009987082987064e-06,
      "loss": 0.1189,
      "step": 640
    },
    {
      "epoch": 2.381651376146789,
      "grad_norm": 3.8440685272216797,
      "learning_rate": 1.2563592767286087e-06,
      "loss": 0.0995,
      "step": 650
    },
    {
      "epoch": 2.418348623853211,
      "grad_norm": 3.6825640201568604,
      "learning_rate": 1.1185211645617271e-06,
      "loss": 0.1157,
      "step": 660
    },
    {
      "epoch": 2.455045871559633,
      "grad_norm": 2.4990882873535156,
      "learning_rate": 9.877347914018853e-07,
      "loss": 0.1093,
      "step": 670
    },
    {
      "epoch": 2.4917431192660553,
      "grad_norm": 8.954373359680176,
      "learning_rate": 8.642377654928253e-07,
      "loss": 0.0994,
      "step": 680
    },
    {
      "epoch": 2.528440366972477,
      "grad_norm": 4.092501640319824,
      "learning_rate": 7.482544520396057e-07,
      "loss": 0.1265,
      "step": 690
    },
    {
      "epoch": 2.565137614678899,
      "grad_norm": 2.5047497749328613,
      "learning_rate": 6.39995565589524e-07,
      "loss": 0.1134,
      "step": 700
    },
    {
      "epoch": 2.601834862385321,
      "grad_norm": 3.976468801498413,
      "learning_rate": 5.396577872130676e-07,
      "loss": 0.0987,
      "step": 710
    },
    {
      "epoch": 2.6385321100917434,
      "grad_norm": 2.5786423683166504,
      "learning_rate": 4.474234071804312e-07,
      "loss": 0.1145,
      "step": 720
    },
    {
      "epoch": 2.675229357798165,
      "grad_norm": 0.8380916118621826,
      "learning_rate": 3.63459993782731e-07,
      "loss": 0.1119,
      "step": 730
    },
    {
      "epoch": 2.711926605504587,
      "grad_norm": 7.706490516662598,
      "learning_rate": 2.879200888996286e-07,
      "loss": 0.1588,
      "step": 740
    },
    {
      "epoch": 2.748623853211009,
      "grad_norm": 4.898327827453613,
      "learning_rate": 2.2094093086641844e-07,
      "loss": 0.1061,
      "step": 750
    },
    {
      "epoch": 2.7853211009174315,
      "grad_norm": 1.936378002166748,
      "learning_rate": 1.6264420514407574e-07,
      "loss": 0.1122,
      "step": 760
    },
    {
      "epoch": 2.8220183486238533,
      "grad_norm": 9.376118659973145,
      "learning_rate": 1.1313582324524564e-07,
      "loss": 0.143,
      "step": 770
    },
    {
      "epoch": 2.858715596330275,
      "grad_norm": 2.7180027961730957,
      "learning_rate": 7.250573031779895e-08,
      "loss": 0.0635,
      "step": 780
    },
    {
      "epoch": 2.8954128440366973,
      "grad_norm": 8.308465957641602,
      "learning_rate": 4.082774173554127e-08,
      "loss": 0.1221,
      "step": 790
    },
    {
      "epoch": 2.9321100917431195,
      "grad_norm": 6.079627513885498,
      "learning_rate": 1.8159408992947103e-08,
      "loss": 0.08,
      "step": 800
    },
    {
      "epoch": 2.9688073394495413,
      "grad_norm": 0.4067806601524353,
      "learning_rate": 4.541915147557307e-09,
      "loss": 0.1056,
      "step": 810
    },
    {
      "epoch": 3.0,
      "step": 819,
      "total_flos": 2.4661134707523584e+16,
      "train_loss": 0.5229038521192595,
      "train_runtime": 549.5617,
      "train_samples_per_second": 5.95,
      "train_steps_per_second": 1.49
    }
  ],
  "logging_steps": 10,
  "max_steps": 819,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 2.4661134707523584e+16,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}