{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 180,
  "global_step": 546,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
|
|
{
|
|
"epoch": 0.0,
|
|
"grad_norm": 45.74085835879772,
|
|
"learning_rate": 5.294117647058824e-07,
|
|
"loss": 0.9302,
|
|
"step": 1
|
|
},
|
|
{
|
|
"epoch": 0.0,
|
|
"grad_norm": 41.81197623600965,
|
|
"learning_rate": 1.0588235294117648e-06,
|
|
"loss": 0.9907,
|
|
"step": 2
|
|
},
|
|
{
|
|
"epoch": 0.01,
|
|
"grad_norm": 34.66046226058279,
|
|
"learning_rate": 1.5882352941176472e-06,
|
|
"loss": 0.8589,
|
|
"step": 3
|
|
},
|
|
{
|
|
"epoch": 0.01,
|
|
"grad_norm": 36.60912906609308,
|
|
"learning_rate": 2.1176470588235296e-06,
|
|
"loss": 0.9596,
|
|
"step": 4
|
|
},
|
|
{
|
|
"epoch": 0.01,
|
|
"grad_norm": 13.529080913419644,
|
|
"learning_rate": 2.647058823529412e-06,
|
|
"loss": 0.9156,
|
|
"step": 5
|
|
},
|
|
{
|
|
"epoch": 0.01,
|
|
"grad_norm": 11.247745477590652,
|
|
"learning_rate": 3.1764705882352943e-06,
|
|
"loss": 0.8662,
|
|
"step": 6
|
|
},
|
|
{
|
|
"epoch": 0.01,
|
|
"grad_norm": 9.935709220975076,
|
|
"learning_rate": 3.7058823529411763e-06,
|
|
"loss": 0.8188,
|
|
"step": 7
|
|
},
|
|
{
|
|
"epoch": 0.01,
|
|
"grad_norm": 10.683752353231812,
|
|
"learning_rate": 4.235294117647059e-06,
|
|
"loss": 0.8649,
|
|
"step": 8
|
|
},
|
|
{
|
|
"epoch": 0.02,
|
|
"grad_norm": 7.2223786829244485,
|
|
"learning_rate": 4.764705882352941e-06,
|
|
"loss": 0.8847,
|
|
"step": 9
|
|
},
|
|
{
|
|
"epoch": 0.02,
|
|
"grad_norm": 5.532911513871539,
|
|
"learning_rate": 5.294117647058824e-06,
|
|
"loss": 0.7912,
|
|
"step": 10
|
|
},
|
|
{
|
|
"epoch": 0.02,
|
|
"grad_norm": 6.0566462004370445,
|
|
"learning_rate": 5.823529411764707e-06,
|
|
"loss": 0.8499,
|
|
"step": 11
|
|
},
|
|
{
|
|
"epoch": 0.02,
|
|
"grad_norm": 6.029058939804303,
|
|
"learning_rate": 6.352941176470589e-06,
|
|
"loss": 0.8355,
|
|
"step": 12
|
|
},
|
|
{
|
|
"epoch": 0.02,
|
|
"grad_norm": 5.579068183376219,
|
|
"learning_rate": 6.882352941176471e-06,
|
|
"loss": 0.8369,
|
|
"step": 13
|
|
},
|
|
{
|
|
"epoch": 0.03,
|
|
"grad_norm": 5.8162333403745325,
|
|
"learning_rate": 7.411764705882353e-06,
|
|
"loss": 0.7877,
|
|
"step": 14
|
|
},
|
|
{
|
|
"epoch": 0.03,
|
|
"grad_norm": 5.483888941332197,
|
|
"learning_rate": 7.941176470588235e-06,
|
|
"loss": 0.8179,
|
|
"step": 15
|
|
},
|
|
{
|
|
"epoch": 0.03,
|
|
"grad_norm": 5.421185100788852,
|
|
"learning_rate": 8.470588235294118e-06,
|
|
"loss": 0.7781,
|
|
"step": 16
|
|
},
|
|
{
|
|
"epoch": 0.03,
|
|
"grad_norm": 5.110258230829603,
|
|
"learning_rate": 9e-06,
|
|
"loss": 0.7407,
|
|
"step": 17
|
|
},
|
|
{
|
|
"epoch": 0.03,
|
|
"grad_norm": 5.049789210880498,
|
|
"learning_rate": 8.999920645850193e-06,
|
|
"loss": 0.6753,
|
|
"step": 18
|
|
},
|
|
{
|
|
"epoch": 0.03,
|
|
"grad_norm": 6.085851377931879,
|
|
"learning_rate": 8.999682586199475e-06,
|
|
"loss": 0.8487,
|
|
"step": 19
|
|
},
|
|
{
|
|
"epoch": 0.04,
|
|
"grad_norm": 4.917910635517668,
|
|
"learning_rate": 8.999285829443853e-06,
|
|
"loss": 0.7275,
|
|
"step": 20
|
|
},
|
|
{
|
|
"epoch": 0.04,
|
|
"grad_norm": 4.491695334310831,
|
|
"learning_rate": 8.99873038957635e-06,
|
|
"loss": 0.7148,
|
|
"step": 21
|
|
},
|
|
{
|
|
"epoch": 0.04,
|
|
"grad_norm": 4.991071038913368,
|
|
"learning_rate": 8.998016286186505e-06,
|
|
"loss": 0.7096,
|
|
"step": 22
|
|
},
|
|
{
|
|
"epoch": 0.04,
|
|
"grad_norm": 4.863344316120844,
|
|
"learning_rate": 8.997143544459675e-06,
|
|
"loss": 0.7453,
|
|
"step": 23
|
|
},
|
|
{
|
|
"epoch": 0.04,
|
|
"grad_norm": 4.96243576916089,
|
|
"learning_rate": 8.996112195176166e-06,
|
|
"loss": 0.7888,
|
|
"step": 24
|
|
},
|
|
{
|
|
"epoch": 0.05,
|
|
"grad_norm": 4.797988274496772,
|
|
"learning_rate": 8.99492227471013e-06,
|
|
"loss": 0.6558,
|
|
"step": 25
|
|
},
|
|
{
|
|
"epoch": 0.05,
|
|
"grad_norm": 4.888619789901552,
|
|
"learning_rate": 8.993573825028287e-06,
|
|
"loss": 0.721,
|
|
"step": 26
|
|
},
|
|
{
|
|
"epoch": 0.05,
|
|
"grad_norm": 4.896915556171218,
|
|
"learning_rate": 8.992066893688456e-06,
|
|
"loss": 0.7063,
|
|
"step": 27
|
|
},
|
|
{
|
|
"epoch": 0.05,
|
|
"grad_norm": 5.205090260689351,
|
|
"learning_rate": 8.990401533837859e-06,
|
|
"loss": 0.7297,
|
|
"step": 28
|
|
},
|
|
{
|
|
"epoch": 0.05,
|
|
"grad_norm": 4.987306750911853,
|
|
"learning_rate": 8.988577804211255e-06,
|
|
"loss": 0.7211,
|
|
"step": 29
|
|
},
|
|
{
|
|
"epoch": 0.05,
|
|
"grad_norm": 4.494430893403371,
|
|
"learning_rate": 8.986595769128877e-06,
|
|
"loss": 0.7106,
|
|
"step": 30
|
|
},
|
|
{
|
|
"epoch": 0.06,
|
|
"grad_norm": 4.829948203570389,
|
|
"learning_rate": 8.984455498494147e-06,
|
|
"loss": 0.7259,
|
|
"step": 31
|
|
},
|
|
{
|
|
"epoch": 0.06,
|
|
"grad_norm": 4.670530445939821,
|
|
"learning_rate": 8.982157067791227e-06,
|
|
"loss": 0.7139,
|
|
"step": 32
|
|
},
|
|
{
|
|
"epoch": 0.06,
|
|
"grad_norm": 4.894172033075715,
|
|
"learning_rate": 8.979700558082343e-06,
|
|
"loss": 0.6759,
|
|
"step": 33
|
|
},
|
|
{
|
|
"epoch": 0.06,
|
|
"grad_norm": 5.017252333799934,
|
|
"learning_rate": 8.977086056004937e-06,
|
|
"loss": 0.6432,
|
|
"step": 34
|
|
},
|
|
{
|
|
"epoch": 0.06,
|
|
"grad_norm": 5.255889967262944,
|
|
"learning_rate": 8.974313653768603e-06,
|
|
"loss": 0.688,
|
|
"step": 35
|
|
},
|
|
{
|
|
"epoch": 0.07,
|
|
"grad_norm": 4.756664287623692,
|
|
"learning_rate": 8.971383449151841e-06,
|
|
"loss": 0.7145,
|
|
"step": 36
|
|
},
|
|
{
|
|
"epoch": 0.07,
|
|
"grad_norm": 5.1194367823148115,
|
|
"learning_rate": 8.968295545498603e-06,
|
|
"loss": 0.6484,
|
|
"step": 37
|
|
},
|
|
{
|
|
"epoch": 0.07,
|
|
"grad_norm": 5.36142060301958,
|
|
"learning_rate": 8.965050051714653e-06,
|
|
"loss": 0.6917,
|
|
"step": 38
|
|
},
|
|
{
|
|
"epoch": 0.07,
|
|
"grad_norm": 4.6465793442045635,
|
|
"learning_rate": 8.961647082263727e-06,
|
|
"loss": 0.6473,
|
|
"step": 39
|
|
},
|
|
{
|
|
"epoch": 0.07,
|
|
"grad_norm": 4.887493828654642,
|
|
"learning_rate": 8.958086757163488e-06,
|
|
"loss": 0.6405,
|
|
"step": 40
|
|
},
|
|
{
|
|
"epoch": 0.08,
|
|
"grad_norm": 5.167282128396302,
|
|
"learning_rate": 8.954369201981304e-06,
|
|
"loss": 0.6479,
|
|
"step": 41
|
|
},
|
|
{
|
|
"epoch": 0.08,
|
|
"grad_norm": 4.213989898293111,
|
|
"learning_rate": 8.950494547829806e-06,
|
|
"loss": 0.6168,
|
|
"step": 42
|
|
},
|
|
{
|
|
"epoch": 0.08,
|
|
"grad_norm": 4.730533203078349,
|
|
"learning_rate": 8.94646293136228e-06,
|
|
"loss": 0.6012,
|
|
"step": 43
|
|
},
|
|
{
|
|
"epoch": 0.08,
|
|
"grad_norm": 4.6302465122438825,
|
|
"learning_rate": 8.942274494767836e-06,
|
|
"loss": 0.6655,
|
|
"step": 44
|
|
},
|
|
{
|
|
"epoch": 0.08,
|
|
"grad_norm": 4.773562718553584,
|
|
"learning_rate": 8.937929385766396e-06,
|
|
"loss": 0.5803,
|
|
"step": 45
|
|
},
|
|
{
|
|
"epoch": 0.08,
|
|
"grad_norm": 4.245197809574637,
|
|
"learning_rate": 8.933427757603484e-06,
|
|
"loss": 0.5916,
|
|
"step": 46
|
|
},
|
|
{
|
|
"epoch": 0.09,
|
|
"grad_norm": 4.655340318767494,
|
|
"learning_rate": 8.928769769044821e-06,
|
|
"loss": 0.5867,
|
|
"step": 47
|
|
},
|
|
{
|
|
"epoch": 0.09,
|
|
"grad_norm": 4.26010716709734,
|
|
"learning_rate": 8.923955584370732e-06,
|
|
"loss": 0.566,
|
|
"step": 48
|
|
},
|
|
{
|
|
"epoch": 0.09,
|
|
"grad_norm": 4.35555909102148,
|
|
"learning_rate": 8.918985373370339e-06,
|
|
"loss": 0.6182,
|
|
"step": 49
|
|
},
|
|
{
|
|
"epoch": 0.09,
|
|
"grad_norm": 4.235399481286071,
|
|
"learning_rate": 8.913859311335583e-06,
|
|
"loss": 0.5318,
|
|
"step": 50
|
|
},
|
|
{
|
|
"epoch": 0.09,
|
|
"grad_norm": 3.9796856690373787,
|
|
"learning_rate": 8.908577579055042e-06,
|
|
"loss": 0.5697,
|
|
"step": 51
|
|
},
|
|
{
|
|
"epoch": 0.1,
|
|
"grad_norm": 3.7896052798771316,
|
|
"learning_rate": 8.90314036280755e-06,
|
|
"loss": 0.5079,
|
|
"step": 52
|
|
},
|
|
{
|
|
"epoch": 0.1,
|
|
"grad_norm": 4.6197464198048035,
|
|
"learning_rate": 8.897547854355623e-06,
|
|
"loss": 0.5764,
|
|
"step": 53
|
|
},
|
|
{
|
|
"epoch": 0.1,
|
|
"grad_norm": 3.847548925631187,
|
|
"learning_rate": 8.89180025093871e-06,
|
|
"loss": 0.6729,
|
|
"step": 54
|
|
},
|
|
{
|
|
"epoch": 0.1,
|
|
"grad_norm": 3.948267250729118,
|
|
"learning_rate": 8.885897755266227e-06,
|
|
"loss": 0.484,
|
|
"step": 55
|
|
},
|
|
{
|
|
"epoch": 0.1,
|
|
"grad_norm": 3.8023830471999904,
|
|
"learning_rate": 8.879840575510407e-06,
|
|
"loss": 0.6078,
|
|
"step": 56
|
|
},
|
|
{
|
|
"epoch": 0.1,
|
|
"grad_norm": 3.6103951856144345,
|
|
"learning_rate": 8.873628925298958e-06,
|
|
"loss": 0.6604,
|
|
"step": 57
|
|
},
|
|
{
|
|
"epoch": 0.11,
|
|
"grad_norm": 3.394899070771869,
|
|
"learning_rate": 8.867263023707538e-06,
|
|
"loss": 0.5844,
|
|
"step": 58
|
|
},
|
|
{
|
|
"epoch": 0.11,
|
|
"grad_norm": 3.636681521110234,
|
|
"learning_rate": 8.860743095252015e-06,
|
|
"loss": 0.5642,
|
|
"step": 59
|
|
},
|
|
{
|
|
"epoch": 0.11,
|
|
"grad_norm": 3.4543420833481604,
|
|
"learning_rate": 8.85406936988056e-06,
|
|
"loss": 0.5654,
|
|
"step": 60
|
|
},
|
|
{
|
|
"epoch": 0.11,
|
|
"grad_norm": 3.073681377873381,
|
|
"learning_rate": 8.847242082965524e-06,
|
|
"loss": 0.5365,
|
|
"step": 61
|
|
},
|
|
{
|
|
"epoch": 0.11,
|
|
"grad_norm": 3.4947884919103767,
|
|
"learning_rate": 8.840261475295158e-06,
|
|
"loss": 0.6296,
|
|
"step": 62
|
|
},
|
|
{
|
|
"epoch": 0.12,
|
|
"grad_norm": 3.6489945285679863,
|
|
"learning_rate": 8.833127793065098e-06,
|
|
"loss": 0.5209,
|
|
"step": 63
|
|
},
|
|
{
|
|
"epoch": 0.12,
|
|
"grad_norm": 3.249672803259227,
|
|
"learning_rate": 8.825841287869692e-06,
|
|
"loss": 0.5381,
|
|
"step": 64
|
|
},
|
|
{
|
|
"epoch": 0.12,
|
|
"grad_norm": 3.4000459196494064,
|
|
"learning_rate": 8.818402216693131e-06,
|
|
"loss": 0.5103,
|
|
"step": 65
|
|
},
|
|
{
|
|
"epoch": 0.12,
|
|
"grad_norm": 2.97477421640801,
|
|
"learning_rate": 8.810810841900381e-06,
|
|
"loss": 0.598,
|
|
"step": 66
|
|
},
|
|
{
|
|
"epoch": 0.12,
|
|
"grad_norm": 2.9587823322711513,
|
|
"learning_rate": 8.803067431227923e-06,
|
|
"loss": 0.583,
|
|
"step": 67
|
|
},
|
|
{
|
|
"epoch": 0.12,
|
|
"grad_norm": 3.5309481237284275,
|
|
"learning_rate": 8.795172257774323e-06,
|
|
"loss": 0.5081,
|
|
"step": 68
|
|
},
|
|
{
|
|
"epoch": 0.13,
|
|
"grad_norm": 3.3165420055002257,
|
|
"learning_rate": 8.787125599990595e-06,
|
|
"loss": 0.5826,
|
|
"step": 69
|
|
},
|
|
{
|
|
"epoch": 0.13,
|
|
"grad_norm": 3.2092085766652163,
|
|
"learning_rate": 8.778927741670377e-06,
|
|
"loss": 0.4887,
|
|
"step": 70
|
|
},
|
|
{
|
|
"epoch": 0.13,
|
|
"grad_norm": 3.607603819870852,
|
|
"learning_rate": 8.770578971939926e-06,
|
|
"loss": 0.5903,
|
|
"step": 71
|
|
},
|
|
{
|
|
"epoch": 0.13,
|
|
"grad_norm": 3.7280424147198876,
|
|
"learning_rate": 8.762079585247916e-06,
|
|
"loss": 0.5648,
|
|
"step": 72
|
|
},
|
|
{
|
|
"epoch": 0.13,
|
|
"grad_norm": 3.267053888317302,
|
|
"learning_rate": 8.753429881355064e-06,
|
|
"loss": 0.5333,
|
|
"step": 73
|
|
},
|
|
{
|
|
"epoch": 0.14,
|
|
"grad_norm": 3.313513549447078,
|
|
"learning_rate": 8.744630165323546e-06,
|
|
"loss": 0.5137,
|
|
"step": 74
|
|
},
|
|
{
|
|
"epoch": 0.14,
|
|
"grad_norm": 3.125452143353669,
|
|
"learning_rate": 8.735680747506243e-06,
|
|
"loss": 0.5628,
|
|
"step": 75
|
|
},
|
|
{
|
|
"epoch": 0.14,
|
|
"grad_norm": 3.26949376937129,
|
|
"learning_rate": 8.726581943535797e-06,
|
|
"loss": 0.4944,
|
|
"step": 76
|
|
},
|
|
{
|
|
"epoch": 0.14,
|
|
"grad_norm": 3.5892533566558305,
|
|
"learning_rate": 8.717334074313475e-06,
|
|
"loss": 0.6071,
|
|
"step": 77
|
|
},
|
|
{
|
|
"epoch": 0.14,
|
|
"grad_norm": 3.0643536815845134,
|
|
"learning_rate": 8.707937465997857e-06,
|
|
"loss": 0.438,
|
|
"step": 78
|
|
},
|
|
{
|
|
"epoch": 0.14,
|
|
"grad_norm": 3.4384578761927793,
|
|
"learning_rate": 8.698392449993325e-06,
|
|
"loss": 0.4857,
|
|
"step": 79
|
|
},
|
|
{
|
|
"epoch": 0.15,
|
|
"grad_norm": 3.250720117382431,
|
|
"learning_rate": 8.688699362938382e-06,
|
|
"loss": 0.5432,
|
|
"step": 80
|
|
},
|
|
{
|
|
"epoch": 0.15,
|
|
"grad_norm": 3.140512687733444,
|
|
"learning_rate": 8.678858546693773e-06,
|
|
"loss": 0.4357,
|
|
"step": 81
|
|
},
|
|
{
|
|
"epoch": 0.15,
|
|
"grad_norm": 3.064996364757649,
|
|
"learning_rate": 8.668870348330438e-06,
|
|
"loss": 0.5305,
|
|
"step": 82
|
|
},
|
|
{
|
|
"epoch": 0.15,
|
|
"grad_norm": 3.3167708556063658,
|
|
"learning_rate": 8.658735120117259e-06,
|
|
"loss": 0.5941,
|
|
"step": 83
|
|
},
|
|
{
|
|
"epoch": 0.15,
|
|
"grad_norm": 3.8243197875834793,
|
|
"learning_rate": 8.648453219508644e-06,
|
|
"loss": 0.6513,
|
|
"step": 84
|
|
},
|
|
{
|
|
"epoch": 0.16,
|
|
"grad_norm": 3.3607547819011163,
|
|
"learning_rate": 8.63802500913192e-06,
|
|
"loss": 0.512,
|
|
"step": 85
|
|
},
|
|
{
|
|
"epoch": 0.16,
|
|
"grad_norm": 2.9838331067395614,
|
|
"learning_rate": 8.62745085677454e-06,
|
|
"loss": 0.4745,
|
|
"step": 86
|
|
},
|
|
{
|
|
"epoch": 0.16,
|
|
"grad_norm": 3.255949722321893,
|
|
"learning_rate": 8.616731135371108e-06,
|
|
"loss": 0.4998,
|
|
"step": 87
|
|
},
|
|
{
|
|
"epoch": 0.16,
|
|
"grad_norm": 3.8889794210168085,
|
|
"learning_rate": 8.60586622299024e-06,
|
|
"loss": 0.573,
|
|
"step": 88
|
|
},
|
|
{
|
|
"epoch": 0.16,
|
|
"grad_norm": 2.8509124971694213,
|
|
"learning_rate": 8.594856502821218e-06,
|
|
"loss": 0.4208,
|
|
"step": 89
|
|
},
|
|
{
|
|
"epoch": 0.16,
|
|
"grad_norm": 3.359577194382925,
|
|
"learning_rate": 8.58370236316048e-06,
|
|
"loss": 0.4903,
|
|
"step": 90
|
|
},
|
|
{
|
|
"epoch": 0.17,
|
|
"grad_norm": 3.401482140074036,
|
|
"learning_rate": 8.572404197397922e-06,
|
|
"loss": 0.5983,
|
|
"step": 91
|
|
},
|
|
{
|
|
"epoch": 0.17,
|
|
"grad_norm": 3.5728346753701326,
|
|
"learning_rate": 8.560962404003028e-06,
|
|
"loss": 0.5853,
|
|
"step": 92
|
|
},
|
|
{
|
|
"epoch": 0.17,
|
|
"grad_norm": 3.1188833491907357,
|
|
"learning_rate": 8.549377386510814e-06,
|
|
"loss": 0.5403,
|
|
"step": 93
|
|
},
|
|
{
|
|
"epoch": 0.17,
|
|
"grad_norm": 3.268964841434034,
|
|
"learning_rate": 8.5376495535076e-06,
|
|
"loss": 0.542,
|
|
"step": 94
|
|
},
|
|
{
|
|
"epoch": 0.17,
|
|
"grad_norm": 3.4652099962359557,
|
|
"learning_rate": 8.52577931861659e-06,
|
|
"loss": 0.5607,
|
|
"step": 95
|
|
},
|
|
{
|
|
"epoch": 0.18,
|
|
"grad_norm": 3.015528132164184,
|
|
"learning_rate": 8.513767100483296e-06,
|
|
"loss": 0.5095,
|
|
"step": 96
|
|
},
|
|
{
|
|
"epoch": 0.18,
|
|
"grad_norm": 3.350304187060291,
|
|
"learning_rate": 8.501613322760765e-06,
|
|
"loss": 0.594,
|
|
"step": 97
|
|
},
|
|
{
|
|
"epoch": 0.18,
|
|
"grad_norm": 3.1102793715461763,
|
|
"learning_rate": 8.489318414094643e-06,
|
|
"loss": 0.4869,
|
|
"step": 98
|
|
},
|
|
{
|
|
"epoch": 0.18,
|
|
"grad_norm": 3.1411569656443565,
|
|
"learning_rate": 8.476882808108047e-06,
|
|
"loss": 0.6397,
|
|
"step": 99
|
|
},
|
|
{
|
|
"epoch": 0.18,
|
|
"grad_norm": 3.5202497902583807,
|
|
"learning_rate": 8.464306943386288e-06,
|
|
"loss": 0.5394,
|
|
"step": 100
|
|
},
|
|
{
|
|
"epoch": 0.18,
|
|
"grad_norm": 3.008265446344961,
|
|
"learning_rate": 8.451591263461388e-06,
|
|
"loss": 0.529,
|
|
"step": 101
|
|
},
|
|
{
|
|
"epoch": 0.19,
|
|
"grad_norm": 3.4268239980889987,
|
|
"learning_rate": 8.438736216796444e-06,
|
|
"loss": 0.5653,
|
|
"step": 102
|
|
},
|
|
{
|
|
"epoch": 0.19,
|
|
"grad_norm": 3.1260151689921054,
|
|
"learning_rate": 8.425742256769813e-06,
|
|
"loss": 0.4902,
|
|
"step": 103
|
|
},
|
|
{
|
|
"epoch": 0.19,
|
|
"grad_norm": 3.0662786153784043,
|
|
"learning_rate": 8.412609841659117e-06,
|
|
"loss": 0.5363,
|
|
"step": 104
|
|
},
|
|
{
|
|
"epoch": 0.19,
|
|
"grad_norm": 3.044610829850215,
|
|
"learning_rate": 8.399339434625081e-06,
|
|
"loss": 0.5166,
|
|
"step": 105
|
|
},
|
|
{
|
|
"epoch": 0.19,
|
|
"grad_norm": 3.090644861792101,
|
|
"learning_rate": 8.385931503695205e-06,
|
|
"loss": 0.5061,
|
|
"step": 106
|
|
},
|
|
{
|
|
"epoch": 0.2,
|
|
"grad_norm": 3.5211572711182466,
|
|
"learning_rate": 8.372386521747245e-06,
|
|
"loss": 0.4797,
|
|
"step": 107
|
|
},
|
|
{
|
|
"epoch": 0.2,
|
|
"grad_norm": 3.1259869780955643,
|
|
"learning_rate": 8.35870496649255e-06,
|
|
"loss": 0.5536,
|
|
"step": 108
|
|
},
|
|
{
|
|
"epoch": 0.2,
|
|
"grad_norm": 3.1477479625059184,
|
|
"learning_rate": 8.3448873204592e-06,
|
|
"loss": 0.5226,
|
|
"step": 109
|
|
},
|
|
{
|
|
"epoch": 0.2,
|
|
"grad_norm": 3.394536678514208,
|
|
"learning_rate": 8.330934070974996e-06,
|
|
"loss": 0.5171,
|
|
"step": 110
|
|
},
|
|
{
|
|
"epoch": 0.2,
|
|
"grad_norm": 3.0512076379788446,
|
|
"learning_rate": 8.316845710150273e-06,
|
|
"loss": 0.5174,
|
|
"step": 111
|
|
},
|
|
{
|
|
"epoch": 0.21,
|
|
"grad_norm": 3.1298372485466657,
|
|
"learning_rate": 8.30262273486054e-06,
|
|
"loss": 0.5708,
|
|
"step": 112
|
|
},
|
|
{
|
|
"epoch": 0.21,
|
|
"grad_norm": 3.2197822515681547,
|
|
"learning_rate": 8.288265646728957e-06,
|
|
"loss": 0.5672,
|
|
"step": 113
|
|
},
|
|
{
|
|
"epoch": 0.21,
|
|
"grad_norm": 2.8425054026885905,
|
|
"learning_rate": 8.273774952108646e-06,
|
|
"loss": 0.4355,
|
|
"step": 114
|
|
},
|
|
{
|
|
"epoch": 0.21,
|
|
"grad_norm": 3.0737700440604447,
|
|
"learning_rate": 8.259151162064828e-06,
|
|
"loss": 0.5051,
|
|
"step": 115
|
|
},
|
|
{
|
|
"epoch": 0.21,
|
|
"grad_norm": 3.3741672653063053,
|
|
"learning_rate": 8.244394792356805e-06,
|
|
"loss": 0.5668,
|
|
"step": 116
|
|
},
|
|
{
|
|
"epoch": 0.21,
|
|
"grad_norm": 2.9919622990845904,
|
|
"learning_rate": 8.229506363419763e-06,
|
|
"loss": 0.5098,
|
|
"step": 117
|
|
},
|
|
{
|
|
"epoch": 0.22,
|
|
"grad_norm": 3.5978666662483336,
|
|
"learning_rate": 8.214486400346427e-06,
|
|
"loss": 0.5075,
|
|
"step": 118
|
|
},
|
|
{
|
|
"epoch": 0.22,
|
|
"grad_norm": 2.830538513498415,
|
|
"learning_rate": 8.199335432868524e-06,
|
|
"loss": 0.3966,
|
|
"step": 119
|
|
},
|
|
{
|
|
"epoch": 0.22,
|
|
"grad_norm": 3.425466044907628,
|
|
"learning_rate": 8.184053995338121e-06,
|
|
"loss": 0.5272,
|
|
"step": 120
|
|
},
|
|
{
|
|
"epoch": 0.22,
|
|
"grad_norm": 3.360447298123908,
|
|
"learning_rate": 8.168642626708766e-06,
|
|
"loss": 0.4448,
|
|
"step": 121
|
|
},
|
|
{
|
|
"epoch": 0.22,
|
|
"grad_norm": 3.393894514627002,
|
|
"learning_rate": 8.153101870516483e-06,
|
|
"loss": 0.4818,
|
|
"step": 122
|
|
},
|
|
{
|
|
"epoch": 0.23,
|
|
"grad_norm": 2.9420100513328937,
|
|
"learning_rate": 8.137432274860603e-06,
|
|
"loss": 0.4266,
|
|
"step": 123
|
|
},
|
|
{
|
|
"epoch": 0.23,
|
|
"grad_norm": 3.026131177104377,
|
|
"learning_rate": 8.121634392384432e-06,
|
|
"loss": 0.5322,
|
|
"step": 124
|
|
},
|
|
{
|
|
"epoch": 0.23,
|
|
"grad_norm": 3.0022791415575356,
|
|
"learning_rate": 8.105708780255763e-06,
|
|
"loss": 0.4751,
|
|
"step": 125
|
|
},
|
|
{
|
|
"epoch": 0.23,
|
|
"grad_norm": 3.0331431150081967,
|
|
"learning_rate": 8.089656000147224e-06,
|
|
"loss": 0.5318,
|
|
"step": 126
|
|
},
|
|
{
|
|
"epoch": 0.23,
|
|
"grad_norm": 2.799949643310398,
|
|
"learning_rate": 8.073476618216466e-06,
|
|
"loss": 0.4884,
|
|
"step": 127
|
|
},
|
|
{
|
|
"epoch": 0.23,
|
|
"grad_norm": 3.10752388387383,
|
|
"learning_rate": 8.057171205086201e-06,
|
|
"loss": 0.502,
|
|
"step": 128
|
|
},
|
|
{
|
|
"epoch": 0.24,
|
|
"grad_norm": 2.679375500347743,
|
|
"learning_rate": 8.04074033582407e-06,
|
|
"loss": 0.4909,
|
|
"step": 129
|
|
},
|
|
{
|
|
"epoch": 0.24,
|
|
"grad_norm": 3.913512934254949,
|
|
"learning_rate": 8.024184589922364e-06,
|
|
"loss": 0.5783,
|
|
"step": 130
|
|
},
|
|
{
|
|
"epoch": 0.24,
|
|
"grad_norm": 3.2336957534147523,
|
|
"learning_rate": 8.007504551277596e-06,
|
|
"loss": 0.4747,
|
|
"step": 131
|
|
},
|
|
{
|
|
"epoch": 0.24,
|
|
"grad_norm": 3.0635194567472195,
|
|
"learning_rate": 7.99070080816989e-06,
|
|
"loss": 0.3958,
|
|
"step": 132
|
|
},
|
|
{
|
|
"epoch": 0.24,
|
|
"grad_norm": 3.5392670715326933,
|
|
"learning_rate": 7.973773953242243e-06,
|
|
"loss": 0.5357,
|
|
"step": 133
|
|
},
|
|
{
|
|
"epoch": 0.25,
|
|
"grad_norm": 3.0425513203937466,
|
|
"learning_rate": 7.956724583479628e-06,
|
|
"loss": 0.4484,
|
|
"step": 134
|
|
},
|
|
{
|
|
"epoch": 0.25,
|
|
"grad_norm": 3.0063547022877177,
|
|
"learning_rate": 7.939553300187927e-06,
|
|
"loss": 0.4847,
|
|
"step": 135
|
|
},
|
|
{
|
|
"epoch": 0.25,
|
|
"grad_norm": 3.090710639455557,
|
|
"learning_rate": 7.922260708972737e-06,
|
|
"loss": 0.5472,
|
|
"step": 136
|
|
},
|
|
{
|
|
"epoch": 0.25,
|
|
"grad_norm": 3.0386658649096896,
|
|
"learning_rate": 7.904847419718001e-06,
|
|
"loss": 0.5269,
|
|
"step": 137
|
|
},
|
|
{
|
|
"epoch": 0.25,
|
|
"grad_norm": 3.0325202054284404,
|
|
"learning_rate": 7.887314046564503e-06,
|
|
"loss": 0.5088,
|
|
"step": 138
|
|
},
|
|
{
|
|
"epoch": 0.25,
|
|
"grad_norm": 2.951728467730531,
|
|
"learning_rate": 7.869661207888208e-06,
|
|
"loss": 0.516,
|
|
"step": 139
|
|
},
|
|
{
|
|
"epoch": 0.26,
|
|
"grad_norm": 2.7888611128147494,
|
|
"learning_rate": 7.85188952627845e-06,
|
|
"loss": 0.5526,
|
|
"step": 140
|
|
},
|
|
{
|
|
"epoch": 0.26,
|
|
"grad_norm": 2.730206238800992,
|
|
"learning_rate": 7.833999628515978e-06,
|
|
"loss": 0.4386,
|
|
"step": 141
|
|
},
|
|
{
|
|
"epoch": 0.26,
|
|
"grad_norm": 2.68554620874365,
|
|
"learning_rate": 7.815992145550851e-06,
|
|
"loss": 0.4264,
|
|
"step": 142
|
|
},
|
|
{
|
|
"epoch": 0.26,
|
|
"grad_norm": 3.0537486049360014,
|
|
"learning_rate": 7.797867712480176e-06,
|
|
"loss": 0.4982,
|
|
"step": 143
|
|
},
|
|
{
|
|
"epoch": 0.26,
|
|
"grad_norm": 2.893416638895553,
|
|
"learning_rate": 7.779626968525726e-06,
|
|
"loss": 0.446,
|
|
"step": 144
|
|
},
|
|
{
|
|
"epoch": 0.27,
|
|
"grad_norm": 3.0980768404972903,
|
|
"learning_rate": 7.761270557011376e-06,
|
|
"loss": 0.554,
|
|
"step": 145
|
|
},
|
|
{
|
|
"epoch": 0.27,
|
|
"grad_norm": 3.051694866335669,
|
|
"learning_rate": 7.74279912534043e-06,
|
|
"loss": 0.5116,
|
|
"step": 146
|
|
},
|
|
{
|
|
"epoch": 0.27,
|
|
"grad_norm": 3.02640149340465,
|
|
"learning_rate": 7.724213324972776e-06,
|
|
"loss": 0.5214,
|
|
"step": 147
|
|
},
|
|
{
|
|
"epoch": 0.27,
|
|
"grad_norm": 3.3220717472344505,
|
|
"learning_rate": 7.705513811401925e-06,
|
|
"loss": 0.5102,
|
|
"step": 148
|
|
},
|
|
{
|
|
"epoch": 0.27,
|
|
"grad_norm": 3.0062189661718035,
|
|
"learning_rate": 7.686701244131875e-06,
|
|
"loss": 0.5608,
|
|
"step": 149
|
|
},
|
|
{
|
|
"epoch": 0.27,
|
|
"grad_norm": 3.72608459532875,
|
|
"learning_rate": 7.66777628665386e-06,
|
|
"loss": 0.4952,
|
|
"step": 150
|
|
},
|
|
{
|
|
"epoch": 0.28,
|
|
"grad_norm": 3.0339587229192166,
|
|
"learning_rate": 7.648739606422954e-06,
|
|
"loss": 0.4397,
|
|
"step": 151
|
|
},
|
|
{
|
|
"epoch": 0.28,
|
|
"grad_norm": 3.462077122092901,
|
|
"learning_rate": 7.629591874834523e-06,
|
|
"loss": 0.5758,
|
|
"step": 152
|
|
},
|
|
{
|
|
"epoch": 0.28,
|
|
"grad_norm": 2.920916146469853,
|
|
"learning_rate": 7.610333767200549e-06,
|
|
"loss": 0.4906,
|
|
"step": 153
|
|
},
|
|
{
|
|
"epoch": 0.28,
|
|
"grad_norm": 3.070077771115752,
|
|
"learning_rate": 7.590965962725814e-06,
|
|
"loss": 0.4945,
|
|
"step": 154
|
|
},
|
|
{
|
|
"epoch": 0.28,
|
|
"grad_norm": 2.807751554616351,
|
|
"learning_rate": 7.5714891444839445e-06,
|
|
"loss": 0.5194,
|
|
"step": 155
|
|
},
|
|
{
|
|
"epoch": 0.29,
|
|
"grad_norm": 3.093567798163179,
|
|
"learning_rate": 7.551903999393317e-06,
|
|
"loss": 0.6042,
|
|
"step": 156
|
|
},
|
|
{
|
|
"epoch": 0.29,
|
|
"grad_norm": 2.715762934690965,
|
|
"learning_rate": 7.532211218192839e-06,
|
|
"loss": 0.4839,
|
|
"step": 157
|
|
},
|
|
{
|
|
"epoch": 0.29,
|
|
"grad_norm": 3.2674049778852243,
|
|
"learning_rate": 7.512411495417581e-06,
|
|
"loss": 0.5165,
|
|
"step": 158
|
|
},
|
|
{
|
|
"epoch": 0.29,
|
|
"grad_norm": 3.5340829629693746,
|
|
"learning_rate": 7.492505529374285e-06,
|
|
"loss": 0.5254,
|
|
"step": 159
|
|
},
|
|
{
|
|
"epoch": 0.29,
|
|
"grad_norm": 3.4955327605305717,
|
|
"learning_rate": 7.472494022116731e-06,
|
|
"loss": 0.6269,
|
|
"step": 160
|
|
},
|
|
{
|
|
"epoch": 0.29,
|
|
"grad_norm": 3.0097092054381953,
|
|
"learning_rate": 7.452377679420986e-06,
|
|
"loss": 0.5398,
|
|
"step": 161
|
|
},
|
|
{
|
|
"epoch": 0.3,
|
|
"grad_norm": 2.939232846047572,
|
|
"learning_rate": 7.432157210760504e-06,
|
|
"loss": 0.5392,
|
|
"step": 162
|
|
},
|
|
{
|
|
"epoch": 0.3,
|
|
"grad_norm": 3.0658398508020026,
|
|
"learning_rate": 7.411833329281104e-06,
|
|
"loss": 0.5113,
|
|
"step": 163
|
|
},
|
|
{
|
|
"epoch": 0.3,
|
|
"grad_norm": 3.114454405370538,
|
|
"learning_rate": 7.391406751775828e-06,
|
|
"loss": 0.436,
|
|
"step": 164
|
|
},
|
|
{
|
|
"epoch": 0.3,
|
|
"grad_norm": 2.949930737290397,
|
|
"learning_rate": 7.370878198659648e-06,
|
|
"loss": 0.4592,
|
|
"step": 165
|
|
},
|
|
{
|
|
"epoch": 0.3,
|
|
"grad_norm": 3.0089047499171295,
|
|
"learning_rate": 7.350248393944066e-06,
|
|
"loss": 0.4749,
|
|
"step": 166
|
|
},
|
|
{
|
|
"epoch": 0.31,
|
|
"grad_norm": 3.595071693011163,
|
|
"learning_rate": 7.3295180652115775e-06,
|
|
"loss": 0.5159,
|
|
"step": 167
|
|
},
|
|
{
|
|
"epoch": 0.31,
|
|
"grad_norm": 3.0257008295544012,
|
|
"learning_rate": 7.308687943590009e-06,
|
|
"loss": 0.5108,
|
|
"step": 168
|
|
},
|
|
{
|
|
"epoch": 0.31,
|
|
"grad_norm": 3.1927846374435958,
|
|
"learning_rate": 7.287758763726734e-06,
|
|
"loss": 0.5003,
|
|
"step": 169
|
|
},
|
|
{
|
|
"epoch": 0.31,
|
|
"grad_norm": 2.5987961618586795,
|
|
"learning_rate": 7.2667312637627665e-06,
|
|
"loss": 0.4915,
|
|
"step": 170
|
|
},
|
|
{
|
|
"epoch": 0.31,
|
|
"grad_norm": 3.105420319887882,
|
|
"learning_rate": 7.245606185306719e-06,
|
|
"loss": 0.469,
|
|
"step": 171
|
|
},
|
|
{
|
|
"epoch": 0.32,
|
|
"grad_norm": 3.0475778734665133,
|
|
"learning_rate": 7.224384273408652e-06,
|
|
"loss": 0.4488,
|
|
"step": 172
|
|
},
|
|
{
|
|
"epoch": 0.32,
|
|
"grad_norm": 2.9048930760010028,
|
|
"learning_rate": 7.203066276533802e-06,
|
|
"loss": 0.4642,
|
|
"step": 173
|
|
},
|
|
{
|
|
"epoch": 0.32,
|
|
"grad_norm": 2.825677517748575,
|
|
"learning_rate": 7.181652946536176e-06,
|
|
"loss": 0.4691,
|
|
"step": 174
|
|
},
|
|
{
|
|
"epoch": 0.32,
|
|
"grad_norm": 3.581911113634182,
|
|
"learning_rate": 7.160145038632037e-06,
|
|
"loss": 0.5567,
|
|
"step": 175
|
|
},
|
|
{
|
|
"epoch": 0.32,
|
|
"grad_norm": 2.9376972711527674,
|
|
"learning_rate": 7.138543311373275e-06,
|
|
"loss": 0.5202,
|
|
"step": 176
|
|
},
|
|
{
|
|
"epoch": 0.32,
|
|
"grad_norm": 3.1097171049397065,
|
|
"learning_rate": 7.116848526620645e-06,
|
|
"loss": 0.5118,
|
|
"step": 177
|
|
},
|
|
{
|
|
"epoch": 0.33,
|
|
"grad_norm": 2.8414745422245846,
|
|
"learning_rate": 7.095061449516903e-06,
|
|
"loss": 0.5088,
|
|
"step": 178
|
|
},
|
|
{
|
|
"epoch": 0.33,
|
|
"grad_norm": 2.783296171938037,
|
|
"learning_rate": 7.073182848459816e-06,
|
|
"loss": 0.4706,
|
|
"step": 179
|
|
},
|
|
{
|
|
"epoch": 0.33,
|
|
"grad_norm": 3.051289105175948,
|
|
"learning_rate": 7.051213495075068e-06,
|
|
"loss": 0.48,
|
|
"step": 180
|
|
},
|
|
{
|
|
"epoch": 0.33,
|
|
"eval_accuracy": 0.8342102913428403,
|
|
"eval_accuracy_<|content|>": 1.0,
|
|
"eval_accuracy_<|from|>": 0.9690376569037656,
|
|
"eval_accuracy_<|recipient|>": 1.0,
|
|
"eval_accuracy_<|stop|>": 0.9098200737047475,
|
|
"eval_accuracy_total_num_<|content|>": 11653,
|
|
"eval_accuracy_total_num_<|from|>": 2390,
|
|
"eval_accuracy_total_num_<|recipient|>": 2390,
|
|
"eval_accuracy_total_num_<|stop|>": 9226,
|
|
"eval_loss": 0.5539013743400574,
|
|
"eval_perplexity": 1.2334550432606364,
|
|
"eval_runtime": 145.4535,
|
|
"eval_samples_per_second": 4.936,
|
|
"eval_steps_per_second": 0.158,
|
|
"step": 180
|
|
},
|
|
{
|
|
"epoch": 0.33,
|
|
"grad_norm": 3.086585679777639,
|
|
"learning_rate": 7.029154164189041e-06,
|
|
"loss": 0.5342,
|
|
"step": 181
|
|
},
|
|
{
|
|
"epoch": 0.33,
|
|
"grad_norm": 3.081775823416091,
|
|
"learning_rate": 7.00700563380149e-06,
|
|
"loss": 0.5082,
|
|
"step": 182
|
|
},
|
|
{
|
|
"epoch": 0.34,
|
|
"grad_norm": 3.2399857933805096,
|
|
"learning_rate": 6.9847686850581025e-06,
|
|
"loss": 0.5268,
|
|
"step": 183
|
|
},
|
|
{
|
|
"epoch": 0.34,
|
|
"grad_norm": 3.184402549388879,
|
|
"learning_rate": 6.962444102222949e-06,
|
|
"loss": 0.4844,
|
|
"step": 184
|
|
},
|
|
{
|
|
"epoch": 0.34,
|
|
"grad_norm": 3.4728603055489233,
|
|
"learning_rate": 6.940032672650828e-06,
|
|
"loss": 0.5789,
|
|
"step": 185
|
|
},
|
|
{
|
|
"epoch": 0.34,
|
|
"grad_norm": 3.1840503345615963,
|
|
"learning_rate": 6.91753518675949e-06,
|
|
"loss": 0.4837,
|
|
"step": 186
|
|
},
|
|
{
|
|
"epoch": 0.34,
|
|
"grad_norm": 2.62887793770963,
|
|
"learning_rate": 6.894952438001763e-06,
|
|
"loss": 0.4791,
|
|
"step": 187
|
|
},
|
|
{
|
|
"epoch": 0.34,
|
|
"grad_norm": 2.935737185834959,
|
|
"learning_rate": 6.872285222837573e-06,
|
|
"loss": 0.5525,
|
|
"step": 188
|
|
},
|
|
{
|
|
"epoch": 0.35,
|
|
"grad_norm": 3.094716532349392,
|
|
"learning_rate": 6.849534340705842e-06,
|
|
"loss": 0.4259,
|
|
"step": 189
|
|
},
|
|
{
|
|
"epoch": 0.35,
|
|
"grad_norm": 2.814581995657929,
|
|
"learning_rate": 6.826700593996313e-06,
|
|
"loss": 0.5115,
|
|
"step": 190
|
|
},
|
|
{
|
|
"epoch": 0.35,
|
|
"grad_norm": 2.9995006190264646,
|
|
"learning_rate": 6.803784788021233e-06,
|
|
"loss": 0.5786,
|
|
"step": 191
|
|
},
|
|
{
|
|
"epoch": 0.35,
|
|
"grad_norm": 3.113019844261514,
|
|
"learning_rate": 6.7807877309869566e-06,
|
|
"loss": 0.5133,
|
|
"step": 192
|
|
},
|
|
{
|
|
"epoch": 0.35,
|
|
"grad_norm": 3.6613949067607803,
|
|
"learning_rate": 6.757710233965442e-06,
|
|
"loss": 0.5776,
|
|
"step": 193
|
|
},
|
|
{
|
|
"epoch": 0.36,
|
|
"grad_norm": 2.9521840559028965,
|
|
"learning_rate": 6.73455311086565e-06,
|
|
"loss": 0.4917,
|
|
"step": 194
|
|
},
|
|
{
|
|
"epoch": 0.36,
|
|
"grad_norm": 3.273949831409698,
|
|
"learning_rate": 6.71131717840483e-06,
|
|
"loss": 0.5261,
|
|
"step": 195
|
|
},
|
|
{
|
|
"epoch": 0.36,
|
|
"grad_norm": 3.0486061943746354,
|
|
"learning_rate": 6.688003256079723e-06,
|
|
"loss": 0.5224,
|
|
"step": 196
|
|
},
|
|
{
|
|
"epoch": 0.36,
|
|
"grad_norm": 3.0050445631137492,
|
|
"learning_rate": 6.664612166137655e-06,
|
|
"loss": 0.4096,
|
|
"step": 197
|
|
},
|
|
{
|
|
"epoch": 0.36,
|
|
"grad_norm": 2.772725167734928,
|
|
"learning_rate": 6.641144733547539e-06,
|
|
"loss": 0.4233,
|
|
"step": 198
|
|
},
|
|
{
|
|
"epoch": 0.36,
|
|
"grad_norm": 2.711610004187709,
|
|
"learning_rate": 6.617601785970783e-06,
|
|
"loss": 0.456,
|
|
"step": 199
|
|
},
|
|
{
|
|
"epoch": 0.37,
|
|
"grad_norm": 2.9050942559350523,
|
|
"learning_rate": 6.593984153732091e-06,
|
|
"loss": 0.4164,
|
|
"step": 200
|
|
},
|
|
{
|
|
"epoch": 0.37,
|
|
"grad_norm": 2.830748277257922,
|
|
"learning_rate": 6.570292669790185e-06,
|
|
"loss": 0.4468,
|
|
"step": 201
|
|
},
|
|
{
|
|
"epoch": 0.37,
|
|
"grad_norm": 2.7581248095396416,
|
|
"learning_rate": 6.546528169708429e-06,
|
|
"loss": 0.4075,
|
|
"step": 202
|
|
},
|
|
{
|
|
"epoch": 0.37,
|
|
"grad_norm": 3.2955277706412605,
|
|
"learning_rate": 6.522691491625356e-06,
|
|
"loss": 0.5416,
|
|
"step": 203
|
|
},
|
|
{
|
|
"epoch": 0.37,
|
|
"grad_norm": 3.0068296734820263,
|
|
"learning_rate": 6.498783476225111e-06,
|
|
"loss": 0.4617,
|
|
"step": 204
|
|
},
|
|
{
|
|
"epoch": 0.38,
|
|
"grad_norm": 3.093146249797236,
|
|
"learning_rate": 6.474804966707796e-06,
|
|
"loss": 0.533,
|
|
"step": 205
|
|
},
|
|
{
|
|
"epoch": 0.38,
|
|
"grad_norm": 3.181608809197894,
|
|
"learning_rate": 6.45075680875974e-06,
|
|
"loss": 0.5352,
|
|
"step": 206
|
|
},
|
|
{
|
|
"epoch": 0.38,
|
|
"grad_norm": 2.5746014903952235,
|
|
"learning_rate": 6.426639850523665e-06,
|
|
"loss": 0.4242,
|
|
"step": 207
|
|
},
|
|
{
|
|
"epoch": 0.38,
|
|
"grad_norm": 2.8430380139586027,
|
|
"learning_rate": 6.402454942568782e-06,
|
|
"loss": 0.452,
|
|
"step": 208
|
|
},
|
|
{
|
|
"epoch": 0.38,
|
|
"grad_norm": 3.2217318472356307,
|
|
"learning_rate": 6.378202937860781e-06,
|
|
"loss": 0.4925,
|
|
"step": 209
|
|
},
|
|
{
|
|
"epoch": 0.38,
|
|
"grad_norm": 4.053436435303002,
|
|
"learning_rate": 6.3538846917317584e-06,
|
|
"loss": 0.5917,
|
|
"step": 210
|
|
},
|
|
{
|
|
"epoch": 0.39,
|
|
"grad_norm": 3.491166106143089,
|
|
"learning_rate": 6.3295010618500455e-06,
|
|
"loss": 0.4656,
|
|
"step": 211
|
|
},
|
|
{
|
|
"epoch": 0.39,
|
|
"grad_norm": 2.8864104479439923,
|
|
"learning_rate": 6.305052908189961e-06,
|
|
"loss": 0.5241,
|
|
"step": 212
|
|
},
|
|
{
|
|
"epoch": 0.39,
|
|
"grad_norm": 2.8305539784275546,
|
|
"learning_rate": 6.280541093001486e-06,
|
|
"loss": 0.4535,
|
|
"step": 213
|
|
},
|
|
{
|
|
"epoch": 0.39,
|
|
"grad_norm": 4.173577669127519,
|
|
"learning_rate": 6.255966480779838e-06,
|
|
"loss": 0.4361,
|
|
"step": 214
|
|
},
|
|
{
|
|
"epoch": 0.39,
|
|
"grad_norm": 3.458946310379365,
|
|
"learning_rate": 6.2313299382350024e-06,
|
|
"loss": 0.4808,
|
|
"step": 215
|
|
},
|
|
{
|
|
"epoch": 0.4,
|
|
"grad_norm": 3.386680894376734,
|
|
"learning_rate": 6.206632334261152e-06,
|
|
"loss": 0.5487,
|
|
"step": 216
|
|
},
|
|
{
|
|
"epoch": 0.4,
|
|
"grad_norm": 2.8401947488812262,
|
|
"learning_rate": 6.181874539906005e-06,
|
|
"loss": 0.4528,
|
|
"step": 217
|
|
},
|
|
{
|
|
"epoch": 0.4,
|
|
"grad_norm": 3.6329463858003086,
|
|
"learning_rate": 6.157057428340103e-06,
|
|
"loss": 0.5419,
|
|
"step": 218
|
|
},
|
|
{
|
|
"epoch": 0.4,
|
|
"grad_norm": 2.953220720763901,
|
|
"learning_rate": 6.132181874826023e-06,
|
|
"loss": 0.4612,
|
|
"step": 219
|
|
},
|
|
{
|
|
"epoch": 0.4,
|
|
"grad_norm": 2.888110484061968,
|
|
"learning_rate": 6.1072487566874934e-06,
|
|
"loss": 0.495,
|
|
"step": 220
|
|
},
|
|
{
|
|
"epoch": 0.4,
|
|
"grad_norm": 3.228959293881162,
|
|
"learning_rate": 6.082258953278467e-06,
|
|
"loss": 0.4641,
|
|
"step": 221
|
|
},
|
|
{
|
|
"epoch": 0.41,
|
|
"grad_norm": 2.699387826863465,
|
|
"learning_rate": 6.057213345952103e-06,
|
|
"loss": 0.4009,
|
|
"step": 222
|
|
},
|
|
{
|
|
"epoch": 0.41,
|
|
"grad_norm": 2.973517701494324,
|
|
"learning_rate": 6.032112818029677e-06,
|
|
"loss": 0.5227,
|
|
"step": 223
|
|
},
|
|
{
|
|
"epoch": 0.41,
|
|
"grad_norm": 3.1940698990570473,
|
|
"learning_rate": 6.006958254769438e-06,
|
|
"loss": 0.5372,
|
|
"step": 224
|
|
},
|
|
{
|
|
"epoch": 0.41,
|
|
"grad_norm": 3.0576078522385903,
|
|
"learning_rate": 5.981750543335376e-06,
|
|
"loss": 0.4934,
|
|
"step": 225
|
|
},
|
|
{
|
|
"epoch": 0.41,
|
|
"grad_norm": 2.6228967285190037,
|
|
"learning_rate": 5.956490572765939e-06,
|
|
"loss": 0.4279,
|
|
"step": 226
|
|
},
|
|
{
|
|
"epoch": 0.42,
|
|
"grad_norm": 2.531552625948877,
|
|
"learning_rate": 5.931179233942677e-06,
|
|
"loss": 0.329,
|
|
"step": 227
|
|
},
|
|
{
|
|
"epoch": 0.42,
|
|
"grad_norm": 3.278388966461349,
|
|
"learning_rate": 5.905817419558825e-06,
|
|
"loss": 0.529,
|
|
"step": 228
|
|
},
|
|
{
|
|
"epoch": 0.42,
|
|
"grad_norm": 3.4754758682480524,
|
|
"learning_rate": 5.8804060240878105e-06,
|
|
"loss": 0.5724,
|
|
"step": 229
|
|
},
|
|
{
|
|
"epoch": 0.42,
|
|
"grad_norm": 2.997441679239916,
|
|
"learning_rate": 5.854945943751716e-06,
|
|
"loss": 0.3957,
|
|
"step": 230
|
|
},
|
|
{
|
|
"epoch": 0.42,
|
|
"grad_norm": 3.2817590001479795,
|
|
"learning_rate": 5.829438076489666e-06,
|
|
"loss": 0.4562,
|
|
"step": 231
|
|
},
|
|
{
|
|
"epoch": 0.42,
|
|
"grad_norm": 2.974647669249094,
|
|
"learning_rate": 5.803883321926157e-06,
|
|
"loss": 0.4952,
|
|
"step": 232
|
|
},
|
|
{
|
|
"epoch": 0.43,
|
|
"grad_norm": 2.8506505377741873,
|
|
"learning_rate": 5.778282581339334e-06,
|
|
"loss": 0.4498,
|
|
"step": 233
|
|
},
|
|
{
|
|
"epoch": 0.43,
|
|
"grad_norm": 3.1642991015269972,
|
|
"learning_rate": 5.752636757629197e-06,
|
|
"loss": 0.5017,
|
|
"step": 234
|
|
},
|
|
{
|
|
"epoch": 0.43,
|
|
"grad_norm": 2.986025489448088,
|
|
"learning_rate": 5.726946755285763e-06,
|
|
"loss": 0.4021,
|
|
"step": 235
|
|
},
|
|
{
|
|
"epoch": 0.43,
|
|
"grad_norm": 3.204511943941506,
|
|
"learning_rate": 5.701213480357162e-06,
|
|
"loss": 0.4487,
|
|
"step": 236
|
|
},
|
|
{
|
|
"epoch": 0.43,
|
|
"grad_norm": 3.2451982048566186,
|
|
"learning_rate": 5.675437840417686e-06,
|
|
"loss": 0.4185,
|
|
"step": 237
|
|
},
|
|
{
|
|
"epoch": 0.44,
|
|
"grad_norm": 3.0504356319657857,
|
|
"learning_rate": 5.649620744535775e-06,
|
|
"loss": 0.4201,
|
|
"step": 238
|
|
},
|
|
{
|
|
"epoch": 0.44,
|
|
"grad_norm": 2.9215020397218927,
|
|
"learning_rate": 5.623763103241961e-06,
|
|
"loss": 0.4371,
|
|
"step": 239
|
|
},
|
|
{
|
|
"epoch": 0.44,
|
|
"grad_norm": 3.539528872026836,
|
|
"learning_rate": 5.59786582849675e-06,
|
|
"loss": 0.4408,
|
|
"step": 240
|
|
},
|
|
{
|
|
"epoch": 0.44,
|
|
"grad_norm": 2.9617533897366197,
|
|
"learning_rate": 5.5719298336584615e-06,
|
|
"loss": 0.3983,
|
|
"step": 241
|
|
},
|
|
{
|
|
"epoch": 0.44,
|
|
"grad_norm": 3.2498757733189123,
|
|
"learning_rate": 5.545956033451016e-06,
|
|
"loss": 0.4539,
|
|
"step": 242
|
|
},
|
|
{
|
|
"epoch": 0.45,
|
|
"grad_norm": 2.89991661036547,
|
|
"learning_rate": 5.5199453439316745e-06,
|
|
"loss": 0.4626,
|
|
"step": 243
|
|
},
|
|
{
|
|
"epoch": 0.45,
|
|
"grad_norm": 3.01668847823045,
|
|
"learning_rate": 5.493898682458724e-06,
|
|
"loss": 0.4517,
|
|
"step": 244
|
|
},
|
|
{
|
|
"epoch": 0.45,
|
|
"grad_norm": 3.743755658461664,
|
|
"learning_rate": 5.467816967659135e-06,
|
|
"loss": 0.4642,
|
|
"step": 245
|
|
},
|
|
{
|
|
"epoch": 0.45,
|
|
"grad_norm": 2.8443195287857015,
|
|
"learning_rate": 5.441701119396149e-06,
|
|
"loss": 0.4246,
|
|
"step": 246
|
|
},
|
|
{
|
|
"epoch": 0.45,
|
|
"grad_norm": 3.073710254558929,
|
|
"learning_rate": 5.415552058736853e-06,
|
|
"loss": 0.4802,
|
|
"step": 247
|
|
},
|
|
{
|
|
"epoch": 0.45,
|
|
"grad_norm": 3.078185155386972,
|
|
"learning_rate": 5.389370707919678e-06,
|
|
"loss": 0.4353,
|
|
"step": 248
|
|
},
|
|
{
|
|
"epoch": 0.46,
|
|
"grad_norm": 3.0525629511663155,
|
|
"learning_rate": 5.363157990321884e-06,
|
|
"loss": 0.4131,
|
|
"step": 249
|
|
},
|
|
{
|
|
"epoch": 0.46,
|
|
"grad_norm": 2.7744828765643317,
|
|
"learning_rate": 5.336914830426994e-06,
|
|
"loss": 0.3595,
|
|
"step": 250
|
|
},
|
|
{
|
|
"epoch": 0.46,
|
|
"grad_norm": 3.0194468460511343,
|
|
"learning_rate": 5.310642153792179e-06,
|
|
"loss": 0.4374,
|
|
"step": 251
|
|
},
|
|
{
|
|
"epoch": 0.46,
|
|
"grad_norm": 2.9525836135504626,
|
|
"learning_rate": 5.2843408870156245e-06,
|
|
"loss": 0.4697,
|
|
"step": 252
|
|
},
|
|
{
|
|
"epoch": 0.46,
|
|
"grad_norm": 3.1105687127607204,
|
|
"learning_rate": 5.25801195770385e-06,
|
|
"loss": 0.4272,
|
|
"step": 253
|
|
},
|
|
{
|
|
"epoch": 0.47,
|
|
"grad_norm": 2.7972605622221343,
|
|
"learning_rate": 5.231656294438987e-06,
|
|
"loss": 0.4399,
|
|
"step": 254
|
|
},
|
|
{
|
|
"epoch": 0.47,
|
|
"grad_norm": 3.181819677387704,
|
|
"learning_rate": 5.205274826746037e-06,
|
|
"loss": 0.5103,
|
|
"step": 255
|
|
},
|
|
{
|
|
"epoch": 0.47,
|
|
"grad_norm": 2.963550029692661,
|
|
"learning_rate": 5.178868485060084e-06,
|
|
"loss": 0.4946,
|
|
"step": 256
|
|
},
|
|
{
|
|
"epoch": 0.47,
|
|
"grad_norm": 3.116532683627354,
|
|
"learning_rate": 5.15243820069348e-06,
|
|
"loss": 0.4474,
|
|
"step": 257
|
|
},
|
|
{
|
|
"epoch": 0.47,
|
|
"grad_norm": 2.708462663188884,
|
|
"learning_rate": 5.125984905803002e-06,
|
|
"loss": 0.3684,
|
|
"step": 258
|
|
},
|
|
{
|
|
"epoch": 0.47,
|
|
"grad_norm": 3.2578072396033866,
|
|
"learning_rate": 5.0995095333569725e-06,
|
|
"loss": 0.4285,
|
|
"step": 259
|
|
},
|
|
{
|
|
"epoch": 0.48,
|
|
"grad_norm": 2.9466026196548,
|
|
"learning_rate": 5.073013017102356e-06,
|
|
"loss": 0.4622,
|
|
"step": 260
|
|
},
|
|
{
|
|
"epoch": 0.48,
|
|
"grad_norm": 3.4399722683823666,
|
|
"learning_rate": 5.0464962915318275e-06,
|
|
"loss": 0.4863,
|
|
"step": 261
|
|
},
|
|
{
|
|
"epoch": 0.48,
|
|
"grad_norm": 2.8400043359061136,
|
|
"learning_rate": 5.019960291850817e-06,
|
|
"loss": 0.5108,
|
|
"step": 262
|
|
},
|
|
{
|
|
"epoch": 0.48,
|
|
"grad_norm": 3.2092125619436582,
|
|
"learning_rate": 4.99340595394452e-06,
|
|
"loss": 0.4611,
|
|
"step": 263
|
|
},
|
|
{
|
|
"epoch": 0.48,
|
|
"grad_norm": 3.2081127430702603,
|
|
"learning_rate": 4.966834214344896e-06,
|
|
"loss": 0.5237,
|
|
"step": 264
|
|
},
|
|
{
|
|
"epoch": 0.49,
|
|
"grad_norm": 3.01275795446049,
|
|
"learning_rate": 4.940246010197636e-06,
|
|
"loss": 0.4355,
|
|
"step": 265
|
|
},
|
|
{
|
|
"epoch": 0.49,
|
|
"grad_norm": 2.9504283586098285,
|
|
"learning_rate": 4.913642279229112e-06,
|
|
"loss": 0.425,
|
|
"step": 266
|
|
},
|
|
{
|
|
"epoch": 0.49,
|
|
"grad_norm": 2.8095986206489836,
|
|
"learning_rate": 4.887023959713302e-06,
|
|
"loss": 0.4211,
|
|
"step": 267
|
|
},
|
|
{
|
|
"epoch": 0.49,
|
|
"grad_norm": 2.6337433234035172,
|
|
"learning_rate": 4.860391990438703e-06,
|
|
"loss": 0.3975,
|
|
"step": 268
|
|
},
|
|
{
|
|
"epoch": 0.49,
|
|
"grad_norm": 3.336414465844828,
|
|
"learning_rate": 4.833747310675215e-06,
|
|
"loss": 0.4137,
|
|
"step": 269
|
|
},
|
|
{
|
|
"epoch": 0.49,
|
|
"grad_norm": 2.9666922489822287,
|
|
"learning_rate": 4.8070908601410195e-06,
|
|
"loss": 0.4994,
|
|
"step": 270
|
|
},
|
|
{
|
|
"epoch": 0.5,
|
|
"grad_norm": 2.785948406018108,
|
|
"learning_rate": 4.780423578969439e-06,
|
|
"loss": 0.4266,
|
|
"step": 271
|
|
},
|
|
{
|
|
"epoch": 0.5,
|
|
"grad_norm": 3.2166447410549237,
|
|
"learning_rate": 4.7537464076757716e-06,
|
|
"loss": 0.5226,
|
|
"step": 272
|
|
},
|
|
{
|
|
"epoch": 0.5,
|
|
"grad_norm": 2.899196099830625,
|
|
"learning_rate": 4.727060287124127e-06,
|
|
"loss": 0.4751,
|
|
"step": 273
|
|
},
|
|
{
|
|
"epoch": 0.5,
|
|
"grad_norm": 2.8168782435500654,
|
|
"learning_rate": 4.700366158494242e-06,
|
|
"loss": 0.415,
|
|
"step": 274
|
|
},
|
|
{
|
|
"epoch": 0.5,
|
|
"grad_norm": 3.2115034342077893,
|
|
"learning_rate": 4.673664963248288e-06,
|
|
"loss": 0.4164,
|
|
"step": 275
|
|
},
|
|
{
|
|
"epoch": 0.51,
|
|
"grad_norm": 3.042273356585557,
|
|
"learning_rate": 4.646957643097662e-06,
|
|
"loss": 0.4153,
|
|
"step": 276
|
|
},
|
|
{
|
|
"epoch": 0.51,
|
|
"grad_norm": 2.943409973438589,
|
|
"learning_rate": 4.620245139969782e-06,
|
|
"loss": 0.4799,
|
|
"step": 277
|
|
},
|
|
{
|
|
"epoch": 0.51,
|
|
"grad_norm": 3.0970453691884394,
|
|
"learning_rate": 4.593528395974855e-06,
|
|
"loss": 0.414,
|
|
"step": 278
|
|
},
|
|
{
|
|
"epoch": 0.51,
|
|
"grad_norm": 3.296233508626491,
|
|
"learning_rate": 4.566808353372665e-06,
|
|
"loss": 0.4926,
|
|
"step": 279
|
|
},
|
|
{
|
|
"epoch": 0.51,
|
|
"grad_norm": 2.798291953595024,
|
|
"learning_rate": 4.5400859545393275e-06,
|
|
"loss": 0.326,
|
|
"step": 280
|
|
},
|
|
{
|
|
"epoch": 0.51,
|
|
"grad_norm": 2.9911990383180838,
|
|
"learning_rate": 4.51336214193406e-06,
|
|
"loss": 0.4602,
|
|
"step": 281
|
|
},
|
|
{
|
|
"epoch": 0.52,
|
|
"grad_norm": 3.2907591725930847,
|
|
"learning_rate": 4.486637858065941e-06,
|
|
"loss": 0.5423,
|
|
"step": 282
|
|
},
|
|
{
|
|
"epoch": 0.52,
|
|
"grad_norm": 2.9708063665333655,
|
|
"learning_rate": 4.459914045460673e-06,
|
|
"loss": 0.4721,
|
|
"step": 283
|
|
},
|
|
{
|
|
"epoch": 0.52,
|
|
"grad_norm": 2.8618022253178967,
|
|
"learning_rate": 4.433191646627334e-06,
|
|
"loss": 0.4572,
|
|
"step": 284
|
|
},
|
|
{
|
|
"epoch": 0.52,
|
|
"grad_norm": 3.019329583222099,
|
|
"learning_rate": 4.406471604025146e-06,
|
|
"loss": 0.593,
|
|
"step": 285
|
|
},
|
|
{
|
|
"epoch": 0.52,
|
|
"grad_norm": 2.997025354923621,
|
|
"learning_rate": 4.37975486003022e-06,
|
|
"loss": 0.4696,
|
|
"step": 286
|
|
},
|
|
{
|
|
"epoch": 0.53,
|
|
"grad_norm": 2.827444237394317,
|
|
"learning_rate": 4.353042356902339e-06,
|
|
"loss": 0.5334,
|
|
"step": 287
|
|
},
|
|
{
|
|
"epoch": 0.53,
|
|
"grad_norm": 2.84409793067513,
|
|
"learning_rate": 4.326335036751713e-06,
|
|
"loss": 0.415,
|
|
"step": 288
|
|
},
|
|
{
|
|
"epoch": 0.53,
|
|
"grad_norm": 2.898900252090855,
|
|
"learning_rate": 4.299633841505758e-06,
|
|
"loss": 0.4068,
|
|
"step": 289
|
|
},
|
|
{
|
|
"epoch": 0.53,
|
|
"grad_norm": 3.2482503504186298,
|
|
"learning_rate": 4.272939712875874e-06,
|
|
"loss": 0.4589,
|
|
"step": 290
|
|
},
|
|
{
|
|
"epoch": 0.53,
|
|
"grad_norm": 2.7691858529318547,
|
|
"learning_rate": 4.24625359232423e-06,
|
|
"loss": 0.3934,
|
|
"step": 291
|
|
},
|
|
{
|
|
"epoch": 0.53,
|
|
"grad_norm": 3.1324170959821886,
|
|
"learning_rate": 4.219576421030562e-06,
|
|
"loss": 0.5133,
|
|
"step": 292
|
|
},
|
|
{
|
|
"epoch": 0.54,
|
|
"grad_norm": 3.014849801788204,
|
|
"learning_rate": 4.1929091398589815e-06,
|
|
"loss": 0.4764,
|
|
"step": 293
|
|
},
|
|
{
|
|
"epoch": 0.54,
|
|
"grad_norm": 3.181217496694547,
|
|
"learning_rate": 4.166252689324786e-06,
|
|
"loss": 0.441,
|
|
"step": 294
|
|
},
|
|
{
|
|
"epoch": 0.54,
|
|
"grad_norm": 2.689885774519482,
|
|
"learning_rate": 4.139608009561298e-06,
|
|
"loss": 0.3994,
|
|
"step": 295
|
|
},
|
|
{
|
|
"epoch": 0.54,
|
|
"grad_norm": 3.0489681912036817,
|
|
"learning_rate": 4.112976040286698e-06,
|
|
"loss": 0.3961,
|
|
"step": 296
|
|
},
|
|
{
|
|
"epoch": 0.54,
|
|
"grad_norm": 3.2271133387257604,
|
|
"learning_rate": 4.086357720770889e-06,
|
|
"loss": 0.3583,
|
|
"step": 297
|
|
},
|
|
{
|
|
"epoch": 0.55,
|
|
"grad_norm": 2.9795806459854863,
|
|
"learning_rate": 4.059753989802365e-06,
|
|
"loss": 0.4533,
|
|
"step": 298
|
|
},
|
|
{
|
|
"epoch": 0.55,
|
|
"grad_norm": 3.056333521184435,
|
|
"learning_rate": 4.033165785655105e-06,
|
|
"loss": 0.4191,
|
|
"step": 299
|
|
},
|
|
{
|
|
"epoch": 0.55,
|
|
"grad_norm": 2.7797591910603425,
|
|
"learning_rate": 4.006594046055481e-06,
|
|
"loss": 0.4129,
|
|
"step": 300
|
|
},
|
|
{
|
|
"epoch": 0.55,
|
|
"grad_norm": 2.7829157553437773,
|
|
"learning_rate": 3.980039708149183e-06,
|
|
"loss": 0.4641,
|
|
"step": 301
|
|
},
|
|
{
|
|
"epoch": 0.55,
|
|
"grad_norm": 2.8584038292205,
|
|
"learning_rate": 3.953503708468172e-06,
|
|
"loss": 0.4045,
|
|
"step": 302
|
|
},
|
|
{
|
|
"epoch": 0.55,
|
|
"grad_norm": 3.1666201313420483,
|
|
"learning_rate": 3.9269869828976434e-06,
|
|
"loss": 0.4768,
|
|
"step": 303
|
|
},
|
|
{
|
|
"epoch": 0.56,
|
|
"grad_norm": 3.002871105164156,
|
|
"learning_rate": 3.9004904666430286e-06,
|
|
"loss": 0.4104,
|
|
"step": 304
|
|
},
|
|
{
|
|
"epoch": 0.56,
|
|
"grad_norm": 2.6868742287526346,
|
|
"learning_rate": 3.874015094196999e-06,
|
|
"loss": 0.3655,
|
|
"step": 305
|
|
},
|
|
{
|
|
"epoch": 0.56,
|
|
"grad_norm": 3.3290811154885644,
|
|
"learning_rate": 3.847561799306521e-06,
|
|
"loss": 0.428,
|
|
"step": 306
|
|
},
|
|
{
|
|
"epoch": 0.56,
|
|
"grad_norm": 3.3534328576968075,
|
|
"learning_rate": 3.821131514939918e-06,
|
|
"loss": 0.3964,
|
|
"step": 307
|
|
},
|
|
{
|
|
"epoch": 0.56,
|
|
"grad_norm": 3.057407991750388,
|
|
"learning_rate": 3.7947251732539634e-06,
|
|
"loss": 0.3503,
|
|
"step": 308
|
|
},
|
|
{
|
|
"epoch": 0.57,
|
|
"grad_norm": 2.9478613655320602,
|
|
"learning_rate": 3.7683437055610132e-06,
|
|
"loss": 0.4839,
|
|
"step": 309
|
|
},
|
|
{
|
|
"epoch": 0.57,
|
|
"grad_norm": 3.096944672536294,
|
|
"learning_rate": 3.741988042296152e-06,
|
|
"loss": 0.4861,
|
|
"step": 310
|
|
},
|
|
{
|
|
"epoch": 0.57,
|
|
"grad_norm": 3.031839004588168,
|
|
"learning_rate": 3.7156591129843766e-06,
|
|
"loss": 0.3311,
|
|
"step": 311
|
|
},
|
|
{
|
|
"epoch": 0.57,
|
|
"grad_norm": 3.324696958096886,
|
|
"learning_rate": 3.6893578462078225e-06,
|
|
"loss": 0.5411,
|
|
"step": 312
|
|
},
|
|
{
|
|
"epoch": 0.57,
|
|
"grad_norm": 2.8789748037411727,
|
|
"learning_rate": 3.6630851695730072e-06,
|
|
"loss": 0.4012,
|
|
"step": 313
|
|
},
|
|
{
|
|
"epoch": 0.58,
|
|
"grad_norm": 2.982423785684021,
|
|
"learning_rate": 3.636842009678116e-06,
|
|
"loss": 0.399,
|
|
"step": 314
|
|
},
|
|
{
|
|
"epoch": 0.58,
|
|
"grad_norm": 3.0340210509638403,
|
|
"learning_rate": 3.6106292920803226e-06,
|
|
"loss": 0.4918,
|
|
"step": 315
|
|
},
|
|
{
|
|
"epoch": 0.58,
|
|
"grad_norm": 3.1425769687017233,
|
|
"learning_rate": 3.5844479412631487e-06,
|
|
"loss": 0.4063,
|
|
"step": 316
|
|
},
|
|
{
|
|
"epoch": 0.58,
|
|
"grad_norm": 3.356706483225122,
|
|
"learning_rate": 3.5582988806038515e-06,
|
|
"loss": 0.4455,
|
|
"step": 317
|
|
},
|
|
{
|
|
"epoch": 0.58,
|
|
"grad_norm": 2.885186245727384,
|
|
"learning_rate": 3.5321830323408665e-06,
|
|
"loss": 0.3841,
|
|
"step": 318
|
|
},
|
|
{
|
|
"epoch": 0.58,
|
|
"grad_norm": 2.9816581052688456,
|
|
"learning_rate": 3.506101317541276e-06,
|
|
"loss": 0.3944,
|
|
"step": 319
|
|
},
|
|
{
|
|
"epoch": 0.59,
|
|
"grad_norm": 2.5759462359538823,
|
|
"learning_rate": 3.4800546560683257e-06,
|
|
"loss": 0.3458,
|
|
"step": 320
|
|
},
|
|
{
|
|
"epoch": 0.59,
|
|
"grad_norm": 3.2037678865922365,
|
|
"learning_rate": 3.454043966548983e-06,
|
|
"loss": 0.5099,
|
|
"step": 321
|
|
},
|
|
{
|
|
"epoch": 0.59,
|
|
"grad_norm": 2.981583883808985,
|
|
"learning_rate": 3.428070166341539e-06,
|
|
"loss": 0.4449,
|
|
"step": 322
|
|
},
|
|
{
|
|
"epoch": 0.59,
|
|
"grad_norm": 2.8337618608970003,
|
|
"learning_rate": 3.4021341715032515e-06,
|
|
"loss": 0.4634,
|
|
"step": 323
|
|
},
|
|
{
|
|
"epoch": 0.59,
|
|
"grad_norm": 3.014800498238915,
|
|
"learning_rate": 3.37623689675804e-06,
|
|
"loss": 0.3942,
|
|
"step": 324
|
|
},
|
|
{
|
|
"epoch": 0.6,
|
|
"grad_norm": 2.7899676188971676,
|
|
"learning_rate": 3.3503792554642257e-06,
|
|
"loss": 0.4295,
|
|
"step": 325
|
|
},
|
|
{
|
|
"epoch": 0.6,
|
|
"grad_norm": 2.758387377417387,
|
|
"learning_rate": 3.3245621595823144e-06,
|
|
"loss": 0.4117,
|
|
"step": 326
|
|
},
|
|
{
|
|
"epoch": 0.6,
|
|
"grad_norm": 2.9195743418265483,
|
|
"learning_rate": 3.298786519642838e-06,
|
|
"loss": 0.3987,
|
|
"step": 327
|
|
},
|
|
{
|
|
"epoch": 0.6,
|
|
"grad_norm": 3.272273063699768,
|
|
"learning_rate": 3.2730532447142388e-06,
|
|
"loss": 0.4661,
|
|
"step": 328
|
|
},
|
|
{
|
|
"epoch": 0.6,
|
|
"grad_norm": 2.8803414674595005,
|
|
"learning_rate": 3.2473632423708047e-06,
|
|
"loss": 0.4119,
|
|
"step": 329
|
|
},
|
|
{
|
|
"epoch": 0.6,
|
|
"grad_norm": 2.8420256976499934,
|
|
"learning_rate": 3.2217174186606674e-06,
|
|
"loss": 0.4022,
|
|
"step": 330
|
|
},
|
|
{
|
|
"epoch": 0.61,
|
|
"grad_norm": 3.0586228060925893,
|
|
"learning_rate": 3.1961166780738434e-06,
|
|
"loss": 0.4194,
|
|
"step": 331
|
|
},
|
|
{
|
|
"epoch": 0.61,
|
|
"grad_norm": 2.602928316139206,
|
|
"learning_rate": 3.1705619235103346e-06,
|
|
"loss": 0.4469,
|
|
"step": 332
|
|
},
|
|
{
|
|
"epoch": 0.61,
|
|
"grad_norm": 2.8041513320185887,
|
|
"learning_rate": 3.145054056248285e-06,
|
|
"loss": 0.4542,
|
|
"step": 333
|
|
},
|
|
{
|
|
"epoch": 0.61,
|
|
"grad_norm": 2.956029387049807,
|
|
"learning_rate": 3.119593975912191e-06,
|
|
"loss": 0.3719,
|
|
"step": 334
|
|
},
|
|
{
|
|
"epoch": 0.61,
|
|
"grad_norm": 2.8427250939264015,
|
|
"learning_rate": 3.094182580441177e-06,
|
|
"loss": 0.4497,
|
|
"step": 335
|
|
},
|
|
{
|
|
"epoch": 0.62,
|
|
"grad_norm": 2.8638249425808473,
|
|
"learning_rate": 3.068820766057324e-06,
|
|
"loss": 0.4619,
|
|
"step": 336
|
|
},
|
|
{
|
|
"epoch": 0.62,
|
|
"grad_norm": 2.9554947238405735,
|
|
"learning_rate": 3.043509427234063e-06,
|
|
"loss": 0.4732,
|
|
"step": 337
|
|
},
|
|
{
|
|
"epoch": 0.62,
|
|
"grad_norm": 2.9630005477792665,
|
|
"learning_rate": 3.0182494566646253e-06,
|
|
"loss": 0.4281,
|
|
"step": 338
|
|
},
|
|
{
|
|
"epoch": 0.62,
|
|
"grad_norm": 2.80768279260768,
|
|
"learning_rate": 2.9930417452305625e-06,
|
|
"loss": 0.4112,
|
|
"step": 339
|
|
},
|
|
{
|
|
"epoch": 0.62,
|
|
"grad_norm": 2.869613335046005,
|
|
"learning_rate": 2.9678871819703232e-06,
|
|
"loss": 0.4269,
|
|
"step": 340
|
|
},
|
|
{
|
|
"epoch": 0.62,
|
|
"grad_norm": 2.8154525217466024,
|
|
"learning_rate": 2.9427866540478985e-06,
|
|
"loss": 0.383,
|
|
"step": 341
|
|
},
|
|
{
|
|
"epoch": 0.63,
|
|
"grad_norm": 2.942980112292583,
|
|
"learning_rate": 2.917741046721534e-06,
|
|
"loss": 0.4211,
|
|
"step": 342
|
|
},
|
|
{
|
|
"epoch": 0.63,
|
|
"grad_norm": 3.485545508172962,
|
|
"learning_rate": 2.892751243312508e-06,
|
|
"loss": 0.4125,
|
|
"step": 343
|
|
},
|
|
{
|
|
"epoch": 0.63,
|
|
"grad_norm": 3.011993738146343,
|
|
"learning_rate": 2.8678181251739783e-06,
|
|
"loss": 0.4047,
|
|
"step": 344
|
|
},
|
|
{
|
|
"epoch": 0.63,
|
|
"grad_norm": 3.3150989974276377,
|
|
"learning_rate": 2.8429425716598957e-06,
|
|
"loss": 0.4448,
|
|
"step": 345
|
|
},
|
|
{
|
|
"epoch": 0.63,
|
|
"grad_norm": 3.186550114152289,
|
|
"learning_rate": 2.818125460093995e-06,
|
|
"loss": 0.3649,
|
|
"step": 346
|
|
},
|
|
{
|
|
"epoch": 0.64,
|
|
"grad_norm": 3.1017974574392073,
|
|
"learning_rate": 2.793367665738849e-06,
|
|
"loss": 0.4054,
|
|
"step": 347
|
|
},
|
|
{
|
|
"epoch": 0.64,
|
|
"grad_norm": 2.7219935694517585,
|
|
"learning_rate": 2.768670061764998e-06,
|
|
"loss": 0.3586,
|
|
"step": 348
|
|
},
|
|
{
|
|
"epoch": 0.64,
|
|
"grad_norm": 2.837486057139893,
|
|
"learning_rate": 2.7440335192201634e-06,
|
|
"loss": 0.4083,
|
|
"step": 349
|
|
},
|
|
{
|
|
"epoch": 0.64,
|
|
"grad_norm": 3.1514682975557755,
|
|
"learning_rate": 2.7194589069985157e-06,
|
|
"loss": 0.4417,
|
|
"step": 350
|
|
},
|
|
{
|
|
"epoch": 0.64,
|
|
"grad_norm": 2.657620564175483,
|
|
"learning_rate": 2.694947091810038e-06,
|
|
"loss": 0.399,
|
|
"step": 351
|
|
},
|
|
{
|
|
"epoch": 0.64,
|
|
"grad_norm": 3.249229209441283,
|
|
"learning_rate": 2.6704989381499543e-06,
|
|
"loss": 0.5156,
|
|
"step": 352
|
|
},
|
|
{
|
|
"epoch": 0.65,
|
|
"grad_norm": 4.349086454941179,
|
|
"learning_rate": 2.646115308268243e-06,
|
|
"loss": 0.4783,
|
|
"step": 353
|
|
},
|
|
{
|
|
"epoch": 0.65,
|
|
"grad_norm": 3.3140655663532814,
|
|
"learning_rate": 2.621797062139219e-06,
|
|
"loss": 0.5764,
|
|
"step": 354
|
|
},
|
|
{
|
|
"epoch": 0.65,
|
|
"grad_norm": 3.1064412522110363,
|
|
"learning_rate": 2.597545057431219e-06,
|
|
"loss": 0.3681,
|
|
"step": 355
|
|
},
|
|
{
|
|
"epoch": 0.65,
|
|
"grad_norm": 2.9477820068777802,
|
|
"learning_rate": 2.5733601494763345e-06,
|
|
"loss": 0.3664,
|
|
"step": 356
|
|
},
|
|
{
|
|
"epoch": 0.65,
|
|
"grad_norm": 3.074069575294038,
|
|
"learning_rate": 2.549243191240261e-06,
|
|
"loss": 0.3998,
|
|
"step": 357
|
|
},
|
|
{
|
|
"epoch": 0.66,
|
|
"grad_norm": 2.9320278131468167,
|
|
"learning_rate": 2.5251950332922044e-06,
|
|
"loss": 0.3532,
|
|
"step": 358
|
|
},
|
|
{
|
|
"epoch": 0.66,
|
|
"grad_norm": 2.979431917933861,
|
|
"learning_rate": 2.5012165237748918e-06,
|
|
"loss": 0.4152,
|
|
"step": 359
|
|
},
|
|
{
|
|
"epoch": 0.66,
|
|
"grad_norm": 2.870207683094522,
|
|
"learning_rate": 2.4773085083746447e-06,
|
|
"loss": 0.4281,
|
|
"step": 360
|
|
},
|
|
{
|
|
"epoch": 0.66,
|
|
"eval_accuracy": 0.8396950370778964,
|
|
"eval_accuracy_<|content|>": 1.0,
|
|
"eval_accuracy_<|from|>": 0.9682008368200837,
|
|
"eval_accuracy_<|recipient|>": 1.0,
|
|
"eval_accuracy_<|stop|>": 0.9415781487101669,
|
|
"eval_accuracy_total_num_<|content|>": 11653,
|
|
"eval_accuracy_total_num_<|from|>": 2390,
|
|
"eval_accuracy_total_num_<|recipient|>": 2390,
|
|
"eval_accuracy_total_num_<|stop|>": 9226,
|
|
"eval_loss": 0.5304175615310669,
|
|
"eval_perplexity": 1.222568088092722,
|
|
"eval_runtime": 143.7884,
|
|
"eval_samples_per_second": 4.993,
|
|
"eval_steps_per_second": 0.16,
|
|
"step": 360
|
|
},
|
|
    {
      "epoch": 0.66,
      "grad_norm": 3.1558292378921986,
      "learning_rate": 2.4534718302915707e-06,
      "loss": 0.4236,
      "step": 361
    },
    {
      "epoch": 0.66,
      "grad_norm": 2.846849967965893,
      "learning_rate": 2.4297073302098155e-06,
      "loss": 0.4222,
      "step": 362
    },
    {
      "epoch": 0.66,
      "grad_norm": 5.586938688827532,
      "learning_rate": 2.4060158462679093e-06,
      "loss": 0.4339,
      "step": 363
    },
    {
      "epoch": 0.67,
      "grad_norm": 3.0886008483753407,
      "learning_rate": 2.382398214029217e-06,
      "loss": 0.4635,
      "step": 364
    },
    {
      "epoch": 0.67,
      "grad_norm": 3.3571328876403776,
      "learning_rate": 2.358855266452461e-06,
      "loss": 0.3914,
      "step": 365
    },
    {
      "epoch": 0.67,
      "grad_norm": 3.26739769209467,
      "learning_rate": 2.335387833862347e-06,
      "loss": 0.4108,
      "step": 366
    },
    {
      "epoch": 0.67,
      "grad_norm": 3.662589948887738,
      "learning_rate": 2.311996743920278e-06,
      "loss": 0.4346,
      "step": 367
    },
    {
      "epoch": 0.67,
      "grad_norm": 2.6323468533508096,
      "learning_rate": 2.288682821595172e-06,
      "loss": 0.3816,
      "step": 368
    },
    {
      "epoch": 0.68,
      "grad_norm": 3.1285106520623405,
      "learning_rate": 2.265446889134351e-06,
      "loss": 0.4002,
      "step": 369
    },
    {
      "epoch": 0.68,
      "grad_norm": 3.2486605102290156,
      "learning_rate": 2.242289766034559e-06,
      "loss": 0.5269,
      "step": 370
    },
    {
      "epoch": 0.68,
      "grad_norm": 3.3462066748104173,
      "learning_rate": 2.219212269013046e-06,
      "loss": 0.4584,
      "step": 371
    },
    {
      "epoch": 0.68,
      "grad_norm": 3.0531594235247574,
      "learning_rate": 2.1962152119787674e-06,
      "loss": 0.3964,
      "step": 372
    },
    {
      "epoch": 0.68,
      "grad_norm": 3.196922420577308,
      "learning_rate": 2.1732994060036877e-06,
      "loss": 0.4305,
      "step": 373
    },
    {
      "epoch": 0.68,
      "grad_norm": 2.8872499706194055,
      "learning_rate": 2.150465659294158e-06,
      "loss": 0.3821,
      "step": 374
    },
    {
      "epoch": 0.69,
      "grad_norm": 2.8234516482363605,
      "learning_rate": 2.12771477716243e-06,
      "loss": 0.4064,
      "step": 375
    },
    {
      "epoch": 0.69,
      "grad_norm": 3.167139950218957,
      "learning_rate": 2.1050475619982366e-06,
      "loss": 0.4018,
      "step": 376
    },
    {
      "epoch": 0.69,
      "grad_norm": 3.183453329554161,
      "learning_rate": 2.08246481324051e-06,
      "loss": 0.3662,
      "step": 377
    },
    {
      "epoch": 0.69,
      "grad_norm": 2.8281387047214435,
      "learning_rate": 2.0599673273491723e-06,
      "loss": 0.4003,
      "step": 378
    },
    {
      "epoch": 0.69,
      "grad_norm": 2.9532294199870766,
      "learning_rate": 2.037555897777051e-06,
      "loss": 0.4884,
      "step": 379
    },
    {
      "epoch": 0.7,
      "grad_norm": 2.9934462366999823,
      "learning_rate": 2.015231314941899e-06,
      "loss": 0.4409,
      "step": 380
    },
    {
      "epoch": 0.7,
      "grad_norm": 2.917496558912529,
      "learning_rate": 1.99299436619851e-06,
      "loss": 0.3991,
      "step": 381
    },
    {
      "epoch": 0.7,
      "grad_norm": 3.006595992084315,
      "learning_rate": 1.9708458358109593e-06,
      "loss": 0.4115,
      "step": 382
    },
    {
      "epoch": 0.7,
      "grad_norm": 3.4004768944041337,
      "learning_rate": 1.948786504924933e-06,
      "loss": 0.4011,
      "step": 383
    },
    {
      "epoch": 0.7,
      "grad_norm": 2.9725302984373316,
      "learning_rate": 1.926817151540186e-06,
      "loss": 0.4174,
      "step": 384
    },
    {
      "epoch": 0.71,
      "grad_norm": 3.0284221194882566,
      "learning_rate": 1.904938550483098e-06,
      "loss": 0.4066,
      "step": 385
    },
    {
      "epoch": 0.71,
      "grad_norm": 3.1349016666002694,
      "learning_rate": 1.8831514733793563e-06,
      "loss": 0.3916,
      "step": 386
    },
    {
      "epoch": 0.71,
      "grad_norm": 3.132488173215795,
      "learning_rate": 1.8614566886267255e-06,
      "loss": 0.4547,
      "step": 387
    },
    {
      "epoch": 0.71,
      "grad_norm": 2.7119681959559214,
      "learning_rate": 1.8398549613679623e-06,
      "loss": 0.3396,
      "step": 388
    },
    {
      "epoch": 0.71,
      "grad_norm": 2.8303277598141703,
      "learning_rate": 1.8183470534638254e-06,
      "loss": 0.4438,
      "step": 389
    },
    {
      "epoch": 0.71,
      "grad_norm": 2.8328013937254566,
      "learning_rate": 1.7969337234661995e-06,
      "loss": 0.3821,
      "step": 390
    },
    {
      "epoch": 0.72,
      "grad_norm": 3.29786655494721,
      "learning_rate": 1.7756157265913494e-06,
      "loss": 0.4762,
      "step": 391
    },
    {
      "epoch": 0.72,
      "grad_norm": 2.999661857849317,
      "learning_rate": 1.7543938146932824e-06,
      "loss": 0.3592,
      "step": 392
    },
    {
      "epoch": 0.72,
      "grad_norm": 3.1307693218977164,
      "learning_rate": 1.733268736237234e-06,
      "loss": 0.3632,
      "step": 393
    },
    {
      "epoch": 0.72,
      "grad_norm": 2.4716130192821564,
      "learning_rate": 1.712241236273265e-06,
      "loss": 0.3557,
      "step": 394
    },
    {
      "epoch": 0.72,
      "grad_norm": 2.6893743350162573,
      "learning_rate": 1.6913120564099922e-06,
      "loss": 0.3903,
      "step": 395
    },
    {
      "epoch": 0.73,
      "grad_norm": 3.2908137090131415,
      "learning_rate": 1.6704819347884244e-06,
      "loss": 0.4379,
      "step": 396
    },
    {
      "epoch": 0.73,
      "grad_norm": 2.725819167216134,
      "learning_rate": 1.6497516060559343e-06,
      "loss": 0.4022,
      "step": 397
    },
    {
      "epoch": 0.73,
      "grad_norm": 2.7714438772597383,
      "learning_rate": 1.6291218013403526e-06,
      "loss": 0.3573,
      "step": 398
    },
    {
      "epoch": 0.73,
      "grad_norm": 3.0262420212778696,
      "learning_rate": 1.6085932482241717e-06,
      "loss": 0.5107,
      "step": 399
    },
    {
      "epoch": 0.73,
      "grad_norm": 2.9862803965767433,
      "learning_rate": 1.588166670718896e-06,
      "loss": 0.4796,
      "step": 400
    },
    {
      "epoch": 0.73,
      "grad_norm": 3.4630735035037166,
      "learning_rate": 1.5678427892394964e-06,
      "loss": 0.3796,
      "step": 401
    },
    {
      "epoch": 0.74,
      "grad_norm": 2.9904593886016397,
      "learning_rate": 1.5476223205790144e-06,
      "loss": 0.4201,
      "step": 402
    },
    {
      "epoch": 0.74,
      "grad_norm": 3.3388765162679452,
      "learning_rate": 1.5275059778832698e-06,
      "loss": 0.426,
      "step": 403
    },
    {
      "epoch": 0.74,
      "grad_norm": 2.937992115817354,
      "learning_rate": 1.507494470625716e-06,
      "loss": 0.3927,
      "step": 404
    },
    {
      "epoch": 0.74,
      "grad_norm": 3.059511112127061,
      "learning_rate": 1.4875885045824196e-06,
      "loss": 0.3823,
      "step": 405
    },
    {
      "epoch": 0.74,
      "grad_norm": 3.0822718123934636,
      "learning_rate": 1.467788781807161e-06,
      "loss": 0.4194,
      "step": 406
    },
    {
      "epoch": 0.75,
      "grad_norm": 2.7313604382295664,
      "learning_rate": 1.4480960006066834e-06,
      "loss": 0.3759,
      "step": 407
    },
    {
      "epoch": 0.75,
      "grad_norm": 3.095089947510629,
      "learning_rate": 1.4285108555160574e-06,
      "loss": 0.4467,
      "step": 408
    },
    {
      "epoch": 0.75,
      "grad_norm": 3.1156949297886203,
      "learning_rate": 1.4090340372741876e-06,
      "loss": 0.3484,
      "step": 409
    },
    {
      "epoch": 0.75,
      "grad_norm": 2.8122154313370986,
      "learning_rate": 1.3896662327994518e-06,
      "loss": 0.3543,
      "step": 410
    },
    {
      "epoch": 0.75,
      "grad_norm": 2.976204787430994,
      "learning_rate": 1.3704081251654789e-06,
      "loss": 0.4072,
      "step": 411
    },
    {
      "epoch": 0.75,
      "grad_norm": 2.5302263684508137,
      "learning_rate": 1.3512603935770464e-06,
      "loss": 0.3935,
      "step": 412
    },
    {
      "epoch": 0.76,
      "grad_norm": 2.9419615139301327,
      "learning_rate": 1.3322237133461411e-06,
      "loss": 0.3763,
      "step": 413
    },
    {
      "epoch": 0.76,
      "grad_norm": 2.647899170576283,
      "learning_rate": 1.313298755868127e-06,
      "loss": 0.3247,
      "step": 414
    },
    {
      "epoch": 0.76,
      "grad_norm": 3.044665135509862,
      "learning_rate": 1.2944861885980753e-06,
      "loss": 0.4252,
      "step": 415
    },
    {
      "epoch": 0.76,
      "grad_norm": 2.975619341499161,
      "learning_rate": 1.275786675027224e-06,
      "loss": 0.4269,
      "step": 416
    },
    {
      "epoch": 0.76,
      "grad_norm": 2.551840319130771,
      "learning_rate": 1.2572008746595716e-06,
      "loss": 0.3021,
      "step": 417
    },
    {
      "epoch": 0.77,
      "grad_norm": 2.962920800863855,
      "learning_rate": 1.2387294429886257e-06,
      "loss": 0.4714,
      "step": 418
    },
    {
      "epoch": 0.77,
      "grad_norm": 2.6917240524435857,
      "learning_rate": 1.220373031474275e-06,
      "loss": 0.383,
      "step": 419
    },
    {
      "epoch": 0.77,
      "grad_norm": 2.7580992111265004,
      "learning_rate": 1.2021322875198237e-06,
      "loss": 0.3424,
      "step": 420
    },
    {
      "epoch": 0.77,
      "grad_norm": 2.970888385582573,
      "learning_rate": 1.184007854449151e-06,
      "loss": 0.4731,
      "step": 421
    },
    {
      "epoch": 0.77,
      "grad_norm": 3.2819420638040726,
      "learning_rate": 1.1660003714840218e-06,
      "loss": 0.4693,
      "step": 422
    },
    {
      "epoch": 0.77,
      "grad_norm": 3.156537693087468,
      "learning_rate": 1.148110473721551e-06,
      "loss": 0.4721,
      "step": 423
    },
    {
      "epoch": 0.78,
      "grad_norm": 3.043134342290459,
      "learning_rate": 1.1303387921117926e-06,
      "loss": 0.3895,
      "step": 424
    },
    {
      "epoch": 0.78,
      "grad_norm": 3.3434194206765,
      "learning_rate": 1.1126859534354973e-06,
      "loss": 0.4606,
      "step": 425
    },
    {
      "epoch": 0.78,
      "grad_norm": 3.060501840140978,
      "learning_rate": 1.0951525802819983e-06,
      "loss": 0.3245,
      "step": 426
    },
    {
      "epoch": 0.78,
      "grad_norm": 3.4296992413591174,
      "learning_rate": 1.077739291027264e-06,
      "loss": 0.4347,
      "step": 427
    },
    {
      "epoch": 0.78,
      "grad_norm": 3.245882575892385,
      "learning_rate": 1.0604466998120733e-06,
      "loss": 0.4445,
      "step": 428
    },
    {
      "epoch": 0.79,
      "grad_norm": 3.2605077517669003,
      "learning_rate": 1.0432754165203728e-06,
      "loss": 0.4296,
      "step": 429
    },
    {
      "epoch": 0.79,
      "grad_norm": 3.117314557255133,
      "learning_rate": 1.0262260467577574e-06,
      "loss": 0.4592,
      "step": 430
    },
    {
      "epoch": 0.79,
      "grad_norm": 2.821109962537146,
      "learning_rate": 1.0092991918301104e-06,
      "loss": 0.412,
      "step": 431
    },
    {
      "epoch": 0.79,
      "grad_norm": 2.8910453989379885,
      "learning_rate": 9.92495448722404e-07,
      "loss": 0.3691,
      "step": 432
    },
    {
      "epoch": 0.79,
      "grad_norm": 3.0574940089719234,
      "learning_rate": 9.758154100776357e-07,
      "loss": 0.3599,
      "step": 433
    },
    {
      "epoch": 0.79,
      "grad_norm": 2.7630835696696026,
      "learning_rate": 9.592596641759322e-07,
      "loss": 0.335,
      "step": 434
    },
    {
      "epoch": 0.8,
      "grad_norm": 2.9688753141992783,
      "learning_rate": 9.428287949137998e-07,
      "loss": 0.3502,
      "step": 435
    },
    {
      "epoch": 0.8,
      "grad_norm": 3.197411406246126,
      "learning_rate": 9.265233817835344e-07,
      "loss": 0.4235,
      "step": 436
    },
    {
      "epoch": 0.8,
      "grad_norm": 3.183872705451458,
      "learning_rate": 9.103439998527761e-07,
      "loss": 0.3767,
      "step": 437
    },
    {
      "epoch": 0.8,
      "grad_norm": 3.242407462026631,
      "learning_rate": 8.942912197442375e-07,
      "loss": 0.4239,
      "step": 438
    },
    {
      "epoch": 0.8,
      "grad_norm": 2.8467781862273687,
      "learning_rate": 8.783656076155699e-07,
      "loss": 0.392,
      "step": 439
    },
    {
      "epoch": 0.81,
      "grad_norm": 2.7753863937470027,
      "learning_rate": 8.625677251393986e-07,
      "loss": 0.3126,
      "step": 440
    },
    {
      "epoch": 0.81,
      "grad_norm": 2.8590330067588936,
      "learning_rate": 8.468981294835185e-07,
      "loss": 0.269,
      "step": 441
    },
    {
      "epoch": 0.81,
      "grad_norm": 3.1980602206752184,
      "learning_rate": 8.313573732912338e-07,
      "loss": 0.4088,
      "step": 442
    },
    {
      "epoch": 0.81,
      "grad_norm": 3.3293284084094847,
      "learning_rate": 8.159460046618789e-07,
      "loss": 0.4432,
      "step": 443
    },
    {
      "epoch": 0.81,
      "grad_norm": 3.3126149003996104,
      "learning_rate": 8.006645671314755e-07,
      "loss": 0.3833,
      "step": 444
    },
    {
      "epoch": 0.82,
      "grad_norm": 2.8492721415814466,
      "learning_rate": 7.855135996535744e-07,
      "loss": 0.4496,
      "step": 445
    },
    {
      "epoch": 0.82,
      "grad_norm": 3.1104827586982693,
      "learning_rate": 7.704936365802366e-07,
      "loss": 0.4153,
      "step": 446
    },
    {
      "epoch": 0.82,
      "grad_norm": 2.619502946645031,
      "learning_rate": 7.556052076431957e-07,
      "loss": 0.3402,
      "step": 447
    },
    {
      "epoch": 0.82,
      "grad_norm": 2.8960736573805583,
      "learning_rate": 7.408488379351735e-07,
      "loss": 0.3809,
      "step": 448
    },
    {
      "epoch": 0.82,
      "grad_norm": 2.986307049514654,
      "learning_rate": 7.262250478913554e-07,
      "loss": 0.3985,
      "step": 449
    },
    {
      "epoch": 0.82,
      "grad_norm": 3.214856997333643,
      "learning_rate": 7.117343532710441e-07,
      "loss": 0.4535,
      "step": 450
    },
    {
      "epoch": 0.83,
      "grad_norm": 3.058663778911059,
      "learning_rate": 6.973772651394613e-07,
      "loss": 0.474,
      "step": 451
    },
    {
      "epoch": 0.83,
      "grad_norm": 3.0080864248694135,
      "learning_rate": 6.831542898497285e-07,
      "loss": 0.314,
      "step": 452
    },
    {
      "epoch": 0.83,
      "grad_norm": 3.004700420192997,
      "learning_rate": 6.690659290250051e-07,
      "loss": 0.4162,
      "step": 453
    },
    {
      "epoch": 0.83,
      "grad_norm": 2.7926241141953305,
      "learning_rate": 6.551126795408014e-07,
      "loss": 0.3009,
      "step": 454
    },
    {
      "epoch": 0.83,
      "grad_norm": 2.6002880262382875,
      "learning_rate": 6.412950335074515e-07,
      "loss": 0.3633,
      "step": 455
    },
    {
      "epoch": 0.84,
      "grad_norm": 2.7541627826598467,
      "learning_rate": 6.276134782527553e-07,
      "loss": 0.2815,
      "step": 456
    },
    {
      "epoch": 0.84,
      "grad_norm": 3.0215352798139787,
      "learning_rate": 6.140684963047972e-07,
      "loss": 0.361,
      "step": 457
    },
    {
      "epoch": 0.84,
      "grad_norm": 2.8851701623761272,
      "learning_rate": 6.006605653749196e-07,
      "loss": 0.3771,
      "step": 458
    },
    {
      "epoch": 0.84,
      "grad_norm": 2.7814485544944576,
      "learning_rate": 5.873901583408846e-07,
      "loss": 0.373,
      "step": 459
    },
    {
      "epoch": 0.84,
      "grad_norm": 2.829881754986226,
      "learning_rate": 5.742577432301871e-07,
      "loss": 0.3414,
      "step": 460
    },
    {
      "epoch": 0.84,
      "grad_norm": 2.754012734303014,
      "learning_rate": 5.612637832035564e-07,
      "loss": 0.3234,
      "step": 461
    },
    {
      "epoch": 0.85,
      "grad_norm": 3.2770514536057993,
      "learning_rate": 5.484087365386126e-07,
      "loss": 0.4243,
      "step": 462
    },
    {
      "epoch": 0.85,
      "grad_norm": 2.6950154585629624,
      "learning_rate": 5.356930566137129e-07,
      "loss": 0.3947,
      "step": 463
    },
    {
      "epoch": 0.85,
      "grad_norm": 3.0138926360925824,
      "learning_rate": 5.231171918919542e-07,
      "loss": 0.3841,
      "step": 464
    },
    {
      "epoch": 0.85,
      "grad_norm": 2.9014173051270853,
      "learning_rate": 5.106815859053588e-07,
      "loss": 0.3783,
      "step": 465
    },
    {
      "epoch": 0.85,
      "grad_norm": 2.8925547331882657,
      "learning_rate": 4.983866772392352e-07,
      "loss": 0.3879,
      "step": 466
    },
    {
      "epoch": 0.86,
      "grad_norm": 2.64765588352481,
      "learning_rate": 4.862328995167042e-07,
      "loss": 0.3364,
      "step": 467
    },
    {
      "epoch": 0.86,
      "grad_norm": 3.17963184339672,
      "learning_rate": 4.742206813834104e-07,
      "loss": 0.38,
      "step": 468
    },
    {
      "epoch": 0.86,
      "grad_norm": 2.9727782607561397,
      "learning_rate": 4.6235044649240046e-07,
      "loss": 0.3204,
      "step": 469
    },
    {
      "epoch": 0.86,
      "grad_norm": 2.860597490385895,
      "learning_rate": 4.5062261348918556e-07,
      "loss": 0.3613,
      "step": 470
    },
    {
      "epoch": 0.86,
      "grad_norm": 3.1360278861815423,
      "learning_rate": 4.390375959969729e-07,
      "loss": 0.3872,
      "step": 471
    },
    {
      "epoch": 0.86,
      "grad_norm": 2.8277184137987814,
      "learning_rate": 4.275958026020788e-07,
      "loss": 0.3867,
      "step": 472
    },
    {
      "epoch": 0.87,
      "grad_norm": 2.9604512713036115,
      "learning_rate": 4.1629763683952065e-07,
      "loss": 0.3856,
      "step": 473
    },
    {
      "epoch": 0.87,
      "grad_norm": 2.8774799329952523,
      "learning_rate": 4.0514349717878136e-07,
      "loss": 0.3387,
      "step": 474
    },
    {
      "epoch": 0.87,
      "grad_norm": 2.907118120588671,
      "learning_rate": 3.9413377700976003e-07,
      "loss": 0.3169,
      "step": 475
    },
    {
      "epoch": 0.87,
      "grad_norm": 3.159755506707381,
      "learning_rate": 3.8326886462889277e-07,
      "loss": 0.3683,
      "step": 476
    },
    {
      "epoch": 0.87,
      "grad_norm": 2.8239324438698925,
      "learning_rate": 3.725491432254623e-07,
      "loss": 0.3662,
      "step": 477
    },
    {
      "epoch": 0.88,
      "grad_norm": 3.0011876106504274,
      "learning_rate": 3.6197499086807907e-07,
      "loss": 0.3685,
      "step": 478
    },
    {
      "epoch": 0.88,
      "grad_norm": 2.747487952670731,
      "learning_rate": 3.5154678049135525e-07,
      "loss": 0.4044,
      "step": 479
    },
    {
      "epoch": 0.88,
      "grad_norm": 3.0759679312715877,
      "learning_rate": 3.4126487988274083e-07,
      "loss": 0.4869,
      "step": 480
    },
    {
      "epoch": 0.88,
      "grad_norm": 3.0657695098664917,
      "learning_rate": 3.311296516695615e-07,
      "loss": 0.4258,
      "step": 481
    },
    {
      "epoch": 0.88,
      "grad_norm": 2.9997455994102906,
      "learning_rate": 3.211414533062274e-07,
      "loss": 0.4122,
      "step": 482
    },
    {
      "epoch": 0.88,
      "grad_norm": 3.5661882671216985,
      "learning_rate": 3.113006370616195e-07,
      "loss": 0.4152,
      "step": 483
    },
    {
      "epoch": 0.89,
      "grad_norm": 2.7447170280640925,
      "learning_rate": 3.016075500066758e-07,
      "loss": 0.4015,
      "step": 484
    },
    {
      "epoch": 0.89,
      "grad_norm": 3.07045536332393,
      "learning_rate": 2.92062534002143e-07,
      "loss": 0.4326,
      "step": 485
    },
    {
      "epoch": 0.89,
      "grad_norm": 2.940038374343366,
      "learning_rate": 2.8266592568652424e-07,
      "loss": 0.318,
      "step": 486
    },
    {
      "epoch": 0.89,
      "grad_norm": 3.323235132954793,
      "learning_rate": 2.7341805646420287e-07,
      "loss": 0.4058,
      "step": 487
    },
    {
      "epoch": 0.89,
      "grad_norm": 2.9754827521512177,
      "learning_rate": 2.643192524937571e-07,
      "loss": 0.3209,
      "step": 488
    },
    {
      "epoch": 0.9,
      "grad_norm": 3.2068648085808715,
      "learning_rate": 2.5536983467645446e-07,
      "loss": 0.39,
      "step": 489
    },
    {
      "epoch": 0.9,
      "grad_norm": 2.8252447688631133,
      "learning_rate": 2.46570118644936e-07,
      "loss": 0.3363,
      "step": 490
    },
    {
      "epoch": 0.9,
      "grad_norm": 2.9012019076879185,
      "learning_rate": 2.379204147520841e-07,
      "loss": 0.5429,
      "step": 491
    },
    {
      "epoch": 0.9,
      "grad_norm": 2.8439125833594776,
      "learning_rate": 2.2942102806007475e-07,
      "loss": 0.4359,
      "step": 492
    },
    {
      "epoch": 0.9,
      "grad_norm": 3.078530295907687,
      "learning_rate": 2.2107225832962276e-07,
      "loss": 0.3237,
      "step": 493
    },
    {
      "epoch": 0.9,
      "grad_norm": 3.0952379773719674,
      "learning_rate": 2.12874400009405e-07,
      "loss": 0.3668,
      "step": 494
    },
    {
      "epoch": 0.91,
      "grad_norm": 2.9175877638152294,
      "learning_rate": 2.0482774222567647e-07,
      "loss": 0.3614,
      "step": 495
    },
    {
      "epoch": 0.91,
      "grad_norm": 3.1124844369657136,
      "learning_rate": 1.969325687720782e-07,
      "loss": 0.4416,
      "step": 496
    },
    {
      "epoch": 0.91,
      "grad_norm": 2.93419070603059,
      "learning_rate": 1.8918915809962e-07,
      "loss": 0.3761,
      "step": 497
    },
    {
      "epoch": 0.91,
      "grad_norm": 3.0850086370269834,
      "learning_rate": 1.815977833068686e-07,
      "loss": 0.3638,
      "step": 498
    },
    {
      "epoch": 0.91,
      "grad_norm": 3.3126934567624087,
      "learning_rate": 1.7415871213030798e-07,
      "loss": 0.4387,
      "step": 499
    },
    {
      "epoch": 0.92,
      "grad_norm": 2.9704128877807734,
      "learning_rate": 1.668722069349039e-07,
      "loss": 0.3203,
      "step": 500
    },
    {
      "epoch": 0.92,
      "grad_norm": 3.3148322275840285,
      "learning_rate": 1.5973852470484229e-07,
      "loss": 0.3694,
      "step": 501
    },
    {
      "epoch": 0.92,
      "grad_norm": 2.910881480766959,
      "learning_rate": 1.52757917034476e-07,
      "loss": 0.3436,
      "step": 502
    },
    {
      "epoch": 0.92,
      "grad_norm": 2.6701541737867385,
      "learning_rate": 1.459306301194419e-07,
      "loss": 0.3975,
      "step": 503
    },
    {
      "epoch": 0.92,
      "grad_norm": 2.8804003541719414,
      "learning_rate": 1.3925690474798585e-07,
      "loss": 0.4631,
      "step": 504
    },
    {
      "epoch": 0.92,
      "grad_norm": 2.83394184085523,
      "learning_rate": 1.3273697629246245e-07,
      "loss": 0.3488,
      "step": 505
    },
    {
      "epoch": 0.93,
      "grad_norm": 3.291213004132381,
      "learning_rate": 1.263710747010412e-07,
      "loss": 0.4086,
      "step": 506
    },
    {
      "epoch": 0.93,
      "grad_norm": 3.323992744920309,
      "learning_rate": 1.20159424489594e-07,
      "loss": 0.4162,
      "step": 507
    },
    {
      "epoch": 0.93,
      "grad_norm": 2.7671784809336475,
      "learning_rate": 1.1410224473377245e-07,
      "loss": 0.3632,
      "step": 508
    },
    {
      "epoch": 0.93,
      "grad_norm": 3.219143272064861,
      "learning_rate": 1.081997490612896e-07,
      "loss": 0.3256,
      "step": 509
    },
    {
      "epoch": 0.93,
      "grad_norm": 3.25109764240598,
      "learning_rate": 1.0245214564437738e-07,
      "loss": 0.3837,
      "step": 510
    },
    {
      "epoch": 0.94,
      "grad_norm": 3.1019795157757724,
      "learning_rate": 9.685963719245155e-08,
      "loss": 0.3457,
      "step": 511
    },
    {
      "epoch": 0.94,
      "grad_norm": 2.806320842849082,
      "learning_rate": 9.14224209449569e-08,
      "loss": 0.3727,
      "step": 512
    },
    {
      "epoch": 0.94,
      "grad_norm": 3.3366050941861323,
      "learning_rate": 8.614068866441627e-08,
      "loss": 0.4336,
      "step": 513
    },
    {
      "epoch": 0.94,
      "grad_norm": 3.0222670147132176,
      "learning_rate": 8.101462662966152e-08,
      "loss": 0.3439,
      "step": 514
    },
    {
      "epoch": 0.94,
      "grad_norm": 3.1926310699535523,
      "learning_rate": 7.604441562926828e-08,
      "loss": 0.4314,
      "step": 515
    },
    {
      "epoch": 0.95,
      "grad_norm": 3.017804262807194,
      "learning_rate": 7.12302309551785e-08,
      "loss": 0.3838,
      "step": 516
    },
    {
      "epoch": 0.95,
      "grad_norm": 2.878278497678078,
      "learning_rate": 6.657224239651694e-08,
      "loss": 0.3179,
      "step": 517
    },
    {
      "epoch": 0.95,
      "grad_norm": 2.941702290572878,
      "learning_rate": 6.207061423360494e-08,
      "loss": 0.3798,
      "step": 518
    },
    {
      "epoch": 0.95,
      "grad_norm": 3.0988677959358335,
      "learning_rate": 5.772550523216458e-08,
      "loss": 0.3075,
      "step": 519
    },
    {
      "epoch": 0.95,
      "grad_norm": 3.3560092980721836,
      "learning_rate": 5.353706863772012e-08,
      "loss": 0.4263,
      "step": 520
    },
    {
      "epoch": 0.95,
      "grad_norm": 3.1956366083471903,
      "learning_rate": 4.950545217019437e-08,
      "loss": 0.4319,
      "step": 521
    },
    {
      "epoch": 0.96,
      "grad_norm": 3.0159486475297634,
      "learning_rate": 4.5630798018697295e-08,
      "loss": 0.4031,
      "step": 522
    },
    {
      "epoch": 0.96,
      "grad_norm": 3.297944272171461,
      "learning_rate": 4.19132428365116e-08,
      "loss": 0.3586,
      "step": 523
    },
    {
      "epoch": 0.96,
      "grad_norm": 3.1274412386954915,
      "learning_rate": 3.835291773627253e-08,
      "loss": 0.4114,
      "step": 524
    },
    {
      "epoch": 0.96,
      "grad_norm": 3.0121919305698297,
      "learning_rate": 3.494994828534659e-08,
      "loss": 0.3502,
      "step": 525
    },
    {
      "epoch": 0.96,
      "grad_norm": 3.445856414643335,
      "learning_rate": 3.1704454501397086e-08,
      "loss": 0.3895,
      "step": 526
    },
    {
      "epoch": 0.97,
      "grad_norm": 3.0074455364542936,
      "learning_rate": 2.8616550848159507e-08,
      "loss": 0.3388,
      "step": 527
    },
    {
      "epoch": 0.97,
      "grad_norm": 3.054543501588241,
      "learning_rate": 2.5686346231396263e-08,
      "loss": 0.4015,
      "step": 528
    },
    {
      "epoch": 0.97,
      "grad_norm": 2.600340393809179,
      "learning_rate": 2.291394399506275e-08,
      "loss": 0.2537,
      "step": 529
    },
    {
      "epoch": 0.97,
      "grad_norm": 3.027308647749434,
      "learning_rate": 2.029944191765626e-08,
      "loss": 0.3464,
      "step": 530
    },
    {
      "epoch": 0.97,
      "grad_norm": 3.230365902006891,
      "learning_rate": 1.7842932208773254e-08,
      "loss": 0.4738,
      "step": 531
    },
    {
      "epoch": 0.97,
      "grad_norm": 3.3324796604669946,
      "learning_rate": 1.554450150585296e-08,
      "loss": 0.3936,
      "step": 532
    },
    {
      "epoch": 0.98,
      "grad_norm": 2.9961740543863775,
      "learning_rate": 1.34042308711243e-08,
      "loss": 0.4011,
      "step": 533
    },
    {
      "epoch": 0.98,
      "grad_norm": 2.9383255928842975,
      "learning_rate": 1.1422195788745204e-08,
      "loss": 0.392,
      "step": 534
    },
    {
      "epoch": 0.98,
      "grad_norm": 3.133800572378904,
      "learning_rate": 9.598466162142217e-09,
      "loss": 0.4284,
      "step": 535
    },
    {
      "epoch": 0.98,
      "grad_norm": 2.8351204684200018,
      "learning_rate": 7.93310631154398e-09,
      "loss": 0.303,
      "step": 536
    },
    {
      "epoch": 0.98,
      "grad_norm": 3.2635637853252866,
      "learning_rate": 6.426174971712051e-09,
      "loss": 0.4375,
      "step": 537
    },
    {
      "epoch": 0.99,
      "grad_norm": 3.269014882416967,
      "learning_rate": 5.077725289871549e-09,
      "loss": 0.454,
      "step": 538
    },
    {
      "epoch": 0.99,
      "grad_norm": 2.5907194457239044,
      "learning_rate": 3.887804823835162e-09,
      "loss": 0.2889,
      "step": 539
    },
    {
      "epoch": 0.99,
      "grad_norm": 2.8481580051950464,
      "learning_rate": 2.8564555403254867e-09,
      "loss": 0.3428,
      "step": 540
    },
    {
      "epoch": 0.99,
      "eval_accuracy": 0.8420877296434167,
      "eval_accuracy_<|content|>": 1.0,
      "eval_accuracy_<|from|>": 0.9753138075313807,
      "eval_accuracy_<|recipient|>": 1.0,
      "eval_accuracy_<|stop|>": 0.9352915673097767,
      "eval_accuracy_total_num_<|content|>": 11653,
      "eval_accuracy_total_num_<|from|>": 2390,
      "eval_accuracy_total_num_<|recipient|>": 2390,
      "eval_accuracy_total_num_<|stop|>": 9226,
      "eval_loss": 0.5226789712905884,
      "eval_perplexity": 1.218962280428345,
      "eval_runtime": 143.8622,
      "eval_samples_per_second": 4.991,
      "eval_steps_per_second": 0.16,
      "step": 540
    },
    {
      "epoch": 0.99,
      "grad_norm": 2.6212472705225,
      "learning_rate": 1.983713813496213e-09,
      "loss": 0.3164,
      "step": 541
    },
    {
      "epoch": 0.99,
      "grad_norm": 3.275268183412255,
      "learning_rate": 1.2696104236486483e-09,
      "loss": 0.3929,
      "step": 542
    },
    {
      "epoch": 0.99,
      "grad_norm": 2.90413116939948,
      "learning_rate": 7.141705561460876e-10,
      "loss": 0.4164,
      "step": 543
    },
    {
      "epoch": 1.0,
      "grad_norm": 3.0667169569157484,
      "learning_rate": 3.1741380052552337e-10,
      "loss": 0.3806,
      "step": 544
    },
    {
      "epoch": 1.0,
      "grad_norm": 2.962612357034888,
      "learning_rate": 7.935414980719813e-11,
      "loss": 0.3653,
      "step": 545
    },
    {
      "epoch": 1.0,
      "grad_norm": 2.921197573056188,
      "learning_rate": 0.0,
      "loss": 0.35,
      "step": 546
    },
    {
      "epoch": 1.0,
      "step": 546,
      "total_flos": 114321292001280.0,
      "train_loss": 0.4735211233297984,
      "train_runtime": 7869.2834,
      "train_samples_per_second": 1.11,
      "train_steps_per_second": 0.069
    }
  ],
  "logging_steps": 1.0,
  "max_steps": 546,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 200.0,
  "total_flos": 114321292001280.0,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}