{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.0,
  "eval_steps": 500,
  "global_step": 400,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.025,
      "grad_norm": 6.375,
      "learning_rate": 3.385271710662569e-06,
      "loss": 0.2522,
      "step": 5
    },
    {
      "epoch": 0.05,
      "grad_norm": 5.4375,
      "learning_rate": 7.61686134899078e-06,
      "loss": 0.2154,
      "step": 10
    },
    {
      "epoch": 0.075,
      "grad_norm": 2.140625,
      "learning_rate": 1.1848450987318993e-05,
      "loss": 0.1339,
      "step": 15
    },
    {
      "epoch": 0.1,
      "grad_norm": 1.96875,
      "learning_rate": 1.6080040625647203e-05,
      "loss": 0.1012,
      "step": 20
    },
    {
      "epoch": 0.125,
      "grad_norm": 1.2109375,
      "learning_rate": 2.031163026397541e-05,
      "loss": 0.091,
      "step": 25
    },
    {
      "epoch": 0.15,
      "grad_norm": 1.2734375,
      "learning_rate": 2.4543219902303624e-05,
      "loss": 0.0869,
      "step": 30
    },
    {
      "epoch": 0.175,
      "grad_norm": 2.65625,
      "learning_rate": 2.877480954063184e-05,
      "loss": 0.0839,
      "step": 35
    },
    {
      "epoch": 0.2,
      "grad_norm": 2.1875,
      "learning_rate": 3.300639917896005e-05,
      "loss": 0.0814,
      "step": 40
    },
    {
      "epoch": 0.225,
      "grad_norm": 1.0234375,
      "learning_rate": 3.723798881728826e-05,
      "loss": 0.0777,
      "step": 45
    },
    {
      "epoch": 0.25,
      "grad_norm": 0.85546875,
      "learning_rate": 4.146957845561647e-05,
      "loss": 0.0789,
      "step": 50
    },
    {
      "epoch": 0.275,
      "grad_norm": 1.1171875,
      "learning_rate": 4.2313223542407126e-05,
      "loss": 0.0766,
      "step": 55
    },
    {
      "epoch": 0.3,
      "grad_norm": 0.8671875,
      "learning_rate": 4.230236641232887e-05,
      "loss": 0.0762,
      "step": 60
    },
    {
      "epoch": 0.325,
      "grad_norm": 0.7890625,
      "learning_rate": 4.2283162699124824e-05,
      "loss": 0.0761,
      "step": 65
    },
    {
      "epoch": 0.35,
      "grad_norm": 1.2265625,
      "learning_rate": 4.225562082617828e-05,
      "loss": 0.0755,
      "step": 70
    },
    {
      "epoch": 0.375,
      "grad_norm": 1.140625,
      "learning_rate": 4.2219752874265e-05,
      "loss": 0.0748,
      "step": 75
    },
    {
      "epoch": 0.4,
      "grad_norm": 0.734375,
      "learning_rate": 4.217557457625412e-05,
      "loss": 0.0742,
      "step": 80
    },
    {
      "epoch": 0.425,
      "grad_norm": 0.75390625,
      "learning_rate": 4.2123105310207254e-05,
      "loss": 0.0743,
      "step": 85
    },
    {
      "epoch": 0.45,
      "grad_norm": 0.83984375,
      "learning_rate": 4.206236809087862e-05,
      "loss": 0.0738,
      "step": 90
    },
    {
      "epoch": 0.475,
      "grad_norm": 0.69140625,
      "learning_rate": 4.1993389559620004e-05,
      "loss": 0.0725,
      "step": 95
    },
    {
      "epoch": 0.5,
      "grad_norm": 0.75,
      "learning_rate": 4.1916199972694975e-05,
      "loss": 0.0721,
      "step": 100
    },
    {
      "epoch": 0.525,
      "grad_norm": 0.61328125,
      "learning_rate": 4.18308331880075e-05,
      "loss": 0.0723,
      "step": 105
    },
    {
      "epoch": 0.55,
      "grad_norm": 0.6640625,
      "learning_rate": 4.173732665025073e-05,
      "loss": 0.0721,
      "step": 110
    },
    {
      "epoch": 0.575,
      "grad_norm": 0.58984375,
      "learning_rate": 4.1635721374482566e-05,
      "loss": 0.0721,
      "step": 115
    },
    {
      "epoch": 0.6,
      "grad_norm": 0.71484375,
      "learning_rate": 4.1526061928135026e-05,
      "loss": 0.0709,
      "step": 120
    },
    {
      "epoch": 0.625,
      "grad_norm": 0.73828125,
      "learning_rate": 4.1408396411465564e-05,
      "loss": 0.0691,
      "step": 125
    },
    {
      "epoch": 0.65,
      "grad_norm": 0.71875,
      "learning_rate": 4.1282776436458674e-05,
      "loss": 0.0691,
      "step": 130
    },
    {
      "epoch": 0.675,
      "grad_norm": 0.6015625,
      "learning_rate": 4.114925710418718e-05,
      "loss": 0.0693,
      "step": 135
    },
    {
      "epoch": 0.7,
      "grad_norm": 0.6171875,
      "learning_rate": 4.100789698064306e-05,
      "loss": 0.0676,
      "step": 140
    },
    {
      "epoch": 0.725,
      "grad_norm": 0.6796875,
      "learning_rate": 4.0858758071048494e-05,
      "loss": 0.0703,
      "step": 145
    },
    {
      "epoch": 0.75,
      "grad_norm": 0.65625,
      "learning_rate": 4.070190579265833e-05,
      "loss": 0.0706,
      "step": 150
    },
    {
      "epoch": 0.775,
      "grad_norm": 0.62890625,
      "learning_rate": 4.0537408946065876e-05,
      "loss": 0.0711,
      "step": 155
    },
    {
      "epoch": 0.8,
      "grad_norm": 0.60546875,
      "learning_rate": 4.0365339685024714e-05,
      "loss": 0.0711,
      "step": 160
    },
    {
      "epoch": 0.825,
      "grad_norm": 0.5234375,
      "learning_rate": 4.0185773484799704e-05,
      "loss": 0.0694,
      "step": 165
    },
    {
      "epoch": 0.85,
      "grad_norm": 0.5390625,
      "learning_rate": 3.999878910906096e-05,
      "loss": 0.0682,
      "step": 170
    },
    {
      "epoch": 0.875,
      "grad_norm": 0.7265625,
      "learning_rate": 3.9804468575335577e-05,
      "loss": 0.0675,
      "step": 175
    },
    {
      "epoch": 0.9,
      "grad_norm": 0.63671875,
      "learning_rate": 3.9602897119032e-05,
      "loss": 0.0678,
      "step": 180
    },
    {
      "epoch": 0.925,
      "grad_norm": 0.58203125,
      "learning_rate": 3.939416315605297e-05,
      "loss": 0.0661,
      "step": 185
    },
    {
      "epoch": 0.95,
      "grad_norm": 0.5234375,
      "learning_rate": 3.917835824401336e-05,
      "loss": 0.068,
      "step": 190
    },
    {
      "epoch": 0.975,
      "grad_norm": 0.447265625,
      "learning_rate": 3.895557704208002e-05,
      "loss": 0.0661,
      "step": 195
    },
    {
      "epoch": 1.0,
      "grad_norm": 0.48828125,
      "learning_rate": 3.872591726945111e-05,
      "loss": 0.0672,
      "step": 200
    },
    {
      "epoch": 1.0,
      "eval_loss": 0.06500394642353058,
      "eval_runtime": 1.0599,
      "eval_samples_per_second": 22.645,
      "eval_steps_per_second": 22.645,
      "step": 200
    },
    {
      "epoch": 1.025,
      "grad_norm": 0.65234375,
      "learning_rate": 3.8489479662493214e-05,
      "loss": 0.0608,
      "step": 205
    },
    {
      "epoch": 1.05,
      "grad_norm": 0.73828125,
      "learning_rate": 3.8246367930555095e-05,
      "loss": 0.0596,
      "step": 210
    },
    {
      "epoch": 1.075,
      "grad_norm": 0.58203125,
      "learning_rate": 3.799668871047729e-05,
      "loss": 0.0614,
      "step": 215
    },
    {
      "epoch": 1.1,
      "grad_norm": 0.5625,
      "learning_rate": 3.7740551519817704e-05,
      "loss": 0.0584,
      "step": 220
    },
    {
      "epoch": 1.125,
      "grad_norm": 0.62890625,
      "learning_rate": 3.747806870881357e-05,
      "loss": 0.0606,
      "step": 225
    },
    {
      "epoch": 1.15,
      "grad_norm": 0.6640625,
      "learning_rate": 3.7209355411100936e-05,
      "loss": 0.0618,
      "step": 230
    },
    {
      "epoch": 1.175,
      "grad_norm": 0.76171875,
      "learning_rate": 3.693452949321325e-05,
      "loss": 0.0608,
      "step": 235
    },
    {
      "epoch": 1.2,
      "grad_norm": 0.4296875,
      "learning_rate": 3.665371150288115e-05,
      "loss": 0.062,
      "step": 240
    },
    {
      "epoch": 1.225,
      "grad_norm": 0.55859375,
      "learning_rate": 3.6367024616156335e-05,
      "loss": 0.061,
      "step": 245
    },
    {
      "epoch": 1.25,
      "grad_norm": 0.55859375,
      "learning_rate": 3.607459458338241e-05,
      "loss": 0.0613,
      "step": 250
    },
    {
      "epoch": 1.275,
      "grad_norm": 0.57421875,
      "learning_rate": 3.577654967403665e-05,
      "loss": 0.0599,
      "step": 255
    },
    {
      "epoch": 1.3,
      "grad_norm": 0.5859375,
      "learning_rate": 3.547302062046677e-05,
      "loss": 0.0598,
      "step": 260
    },
    {
      "epoch": 1.325,
      "grad_norm": 0.49609375,
      "learning_rate": 3.5164140560547404e-05,
      "loss": 0.0606,
      "step": 265
    },
    {
      "epoch": 1.35,
      "grad_norm": 0.54296875,
      "learning_rate": 3.4850044979281415e-05,
      "loss": 0.0603,
      "step": 270
    },
    {
      "epoch": 1.375,
      "grad_norm": 0.52734375,
      "learning_rate": 3.453087164937168e-05,
      "loss": 0.0606,
      "step": 275
    },
    {
      "epoch": 1.4,
      "grad_norm": 0.427734375,
      "learning_rate": 3.42067605707895e-05,
      "loss": 0.0583,
      "step": 280
    },
    {
      "epoch": 1.425,
      "grad_norm": 0.5078125,
      "learning_rate": 3.387785390936589e-05,
      "loss": 0.0601,
      "step": 285
    },
    {
      "epoch": 1.45,
      "grad_norm": 0.5859375,
      "learning_rate": 3.3544295934433035e-05,
      "loss": 0.0622,
      "step": 290
    },
    {
      "epoch": 1.475,
      "grad_norm": 0.4609375,
      "learning_rate": 3.320623295554294e-05,
      "loss": 0.0611,
      "step": 295
    },
    {
      "epoch": 1.5,
      "grad_norm": 0.6640625,
      "learning_rate": 3.286381325829121e-05,
      "loss": 0.0592,
      "step": 300
    },
    {
      "epoch": 1.525,
      "grad_norm": 0.48046875,
      "learning_rate": 3.251718703927414e-05,
      "loss": 0.0602,
      "step": 305
    },
    {
      "epoch": 1.55,
      "grad_norm": 0.48828125,
      "learning_rate": 3.216650634020752e-05,
      "loss": 0.0598,
      "step": 310
    },
    {
      "epoch": 1.575,
      "grad_norm": 0.5625,
      "learning_rate": 3.181192498123603e-05,
      "loss": 0.0594,
      "step": 315
    },
    {
      "epoch": 1.6,
      "grad_norm": 0.546875,
      "learning_rate": 3.145359849346276e-05,
      "loss": 0.0587,
      "step": 320
    },
    {
      "epoch": 1.625,
      "grad_norm": 0.4609375,
      "learning_rate": 3.109168405072805e-05,
      "loss": 0.0606,
      "step": 325
    },
    {
      "epoch": 1.65,
      "grad_norm": 0.5546875,
      "learning_rate": 3.072634040066787e-05,
      "loss": 0.0618,
      "step": 330
    },
    {
      "epoch": 1.675,
      "grad_norm": 0.75390625,
      "learning_rate": 3.0357727795081838e-05,
      "loss": 0.0588,
      "step": 335
    },
    {
      "epoch": 1.7,
      "grad_norm": 0.451171875,
      "learning_rate": 2.998600791964154e-05,
      "loss": 0.0599,
      "step": 340
    },
    {
      "epoch": 1.725,
      "grad_norm": 0.63671875,
      "learning_rate": 2.961134382296979e-05,
      "loss": 0.0581,
      "step": 345
    },
    {
      "epoch": 1.75,
      "grad_norm": 0.5546875,
      "learning_rate": 2.923389984512217e-05,
      "loss": 0.0615,
      "step": 350
    },
    {
      "epoch": 1.775,
      "grad_norm": 0.5703125,
      "learning_rate": 2.8853841545501988e-05,
      "loss": 0.0578,
      "step": 355
    },
    {
      "epoch": 1.8,
      "grad_norm": 0.443359375,
      "learning_rate": 2.8471335630240508e-05,
      "loss": 0.0584,
      "step": 360
    },
    {
      "epoch": 1.825,
      "grad_norm": 0.4765625,
      "learning_rate": 2.80865498790741e-05,
      "loss": 0.0581,
      "step": 365
    },
    {
      "epoch": 1.85,
      "grad_norm": 0.60546875,
      "learning_rate": 2.7699653071750514e-05,
      "loss": 0.0584,
      "step": 370
    },
    {
      "epoch": 1.875,
      "grad_norm": 0.423828125,
      "learning_rate": 2.7310814913996507e-05,
      "loss": 0.0588,
      "step": 375
    },
    {
      "epoch": 1.9,
      "grad_norm": 0.71484375,
      "learning_rate": 2.6920205963079284e-05,
      "loss": 0.0601,
      "step": 380
    },
    {
      "epoch": 1.925,
      "grad_norm": 0.51953125,
      "learning_rate": 2.6527997552994465e-05,
      "loss": 0.0591,
      "step": 385
    },
    {
      "epoch": 1.95,
      "grad_norm": 0.625,
      "learning_rate": 2.6134361719313323e-05,
      "loss": 0.059,
      "step": 390
    },
    {
      "epoch": 1.975,
      "grad_norm": 0.57421875,
      "learning_rate": 2.573947112372229e-05,
      "loss": 0.0601,
      "step": 395
    },
    {
      "epoch": 2.0,
      "grad_norm": 0.49609375,
      "learning_rate": 2.5343498978287872e-05,
      "loss": 0.0582,
      "step": 400
    },
    {
      "epoch": 2.0,
      "eval_loss": 0.06334987282752991,
      "eval_runtime": 0.9601,
      "eval_samples_per_second": 24.998,
      "eval_steps_per_second": 24.998,
      "step": 400
    }
  ],
  "logging_steps": 5,
  "max_steps": 800,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 4,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 2.46279712014336e+17,
  "train_batch_size": 140,
  "trial_name": null,
  "trial_params": null
}