{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.9668508287292816,
  "eval_steps": 45,
  "global_step": 180,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.01,
      "grad_norm": 10.25,
      "learning_rate": 2.0000000000000002e-07,
      "loss": 1.382,
      "step": 1
    },
    {
      "epoch": 0.01,
      "eval_loss": 1.4056271314620972,
      "eval_runtime": 26.9519,
      "eval_samples_per_second": 96.023,
      "eval_steps_per_second": 96.023,
      "step": 1
    },
    {
      "epoch": 0.02,
      "grad_norm": 10.625,
      "learning_rate": 4.0000000000000003e-07,
      "loss": 1.3855,
      "step": 2
    },
    {
      "epoch": 0.03,
      "grad_norm": 10.625,
      "learning_rate": 6.000000000000001e-07,
      "loss": 1.4042,
      "step": 3
    },
    {
      "epoch": 0.04,
      "grad_norm": 10.25,
      "learning_rate": 8.000000000000001e-07,
      "loss": 1.4277,
      "step": 4
    },
    {
      "epoch": 0.06,
      "grad_norm": 10.375,
      "learning_rate": 1.0000000000000002e-06,
      "loss": 1.3786,
      "step": 5
    },
    {
      "epoch": 0.07,
      "grad_norm": 9.625,
      "learning_rate": 1.2000000000000002e-06,
      "loss": 1.3671,
      "step": 6
    },
    {
      "epoch": 0.08,
      "grad_norm": 8.8125,
      "learning_rate": 1.4000000000000001e-06,
      "loss": 1.3566,
      "step": 7
    },
    {
      "epoch": 0.09,
      "grad_norm": 8.5625,
      "learning_rate": 1.6000000000000001e-06,
      "loss": 1.3815,
      "step": 8
    },
    {
      "epoch": 0.1,
      "grad_norm": 7.46875,
      "learning_rate": 1.8000000000000001e-06,
      "loss": 1.3631,
      "step": 9
    },
    {
      "epoch": 0.11,
      "grad_norm": 7.28125,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 1.358,
      "step": 10
    },
    {
      "epoch": 0.12,
      "grad_norm": 6.53125,
      "learning_rate": 2.2e-06,
      "loss": 1.3416,
      "step": 11
    },
    {
      "epoch": 0.13,
      "grad_norm": 5.46875,
      "learning_rate": 2.4000000000000003e-06,
      "loss": 1.3489,
      "step": 12
    },
    {
      "epoch": 0.14,
      "grad_norm": 4.90625,
      "learning_rate": 2.6e-06,
      "loss": 1.298,
      "step": 13
    },
    {
      "epoch": 0.15,
      "grad_norm": 4.75,
      "learning_rate": 2.8000000000000003e-06,
      "loss": 1.2632,
      "step": 14
    },
    {
      "epoch": 0.17,
      "grad_norm": 4.1875,
      "learning_rate": 3e-06,
      "loss": 1.2835,
      "step": 15
    },
    {
      "epoch": 0.18,
      "grad_norm": 3.671875,
      "learning_rate": 3.2000000000000003e-06,
      "loss": 1.2846,
      "step": 16
    },
    {
      "epoch": 0.19,
      "grad_norm": 3.078125,
      "learning_rate": 3.4000000000000005e-06,
      "loss": 1.2776,
      "step": 17
    },
    {
      "epoch": 0.2,
      "grad_norm": 3.546875,
      "learning_rate": 3.6000000000000003e-06,
      "loss": 1.259,
      "step": 18
    },
    {
      "epoch": 0.21,
      "grad_norm": 3.71875,
      "learning_rate": 3.8000000000000005e-06,
      "loss": 1.235,
      "step": 19
    },
    {
      "epoch": 0.22,
      "grad_norm": 4.21875,
      "learning_rate": 4.000000000000001e-06,
      "loss": 1.2551,
      "step": 20
    },
    {
      "epoch": 0.23,
      "grad_norm": 2.90625,
      "learning_rate": 4.2000000000000004e-06,
      "loss": 1.2587,
      "step": 21
    },
    {
      "epoch": 0.24,
      "grad_norm": 2.6875,
      "learning_rate": 4.4e-06,
      "loss": 1.2888,
      "step": 22
    },
    {
      "epoch": 0.25,
      "grad_norm": 3.28125,
      "learning_rate": 4.600000000000001e-06,
      "loss": 1.1968,
      "step": 23
    },
    {
      "epoch": 0.27,
      "grad_norm": 2.875,
      "learning_rate": 4.800000000000001e-06,
      "loss": 1.2625,
      "step": 24
    },
    {
      "epoch": 0.28,
      "grad_norm": 2.40625,
      "learning_rate": 5e-06,
      "loss": 1.1868,
      "step": 25
    },
    {
      "epoch": 0.29,
      "grad_norm": 2.328125,
      "learning_rate": 5.2e-06,
      "loss": 1.2694,
      "step": 26
    },
    {
      "epoch": 0.3,
      "grad_norm": 2.59375,
      "learning_rate": 5.400000000000001e-06,
      "loss": 1.2459,
      "step": 27
    },
    {
      "epoch": 0.31,
      "grad_norm": 2.328125,
      "learning_rate": 5.600000000000001e-06,
      "loss": 1.2338,
      "step": 28
    },
    {
      "epoch": 0.32,
      "grad_norm": 2.125,
      "learning_rate": 5.8e-06,
      "loss": 1.2317,
      "step": 29
    },
    {
      "epoch": 0.33,
      "grad_norm": 2.171875,
      "learning_rate": 6e-06,
      "loss": 1.2246,
      "step": 30
    },
    {
      "epoch": 0.34,
      "grad_norm": 2.09375,
      "learning_rate": 6.200000000000001e-06,
      "loss": 1.2437,
      "step": 31
    },
    {
      "epoch": 0.35,
      "grad_norm": 2.0625,
      "learning_rate": 6.4000000000000006e-06,
      "loss": 1.2344,
      "step": 32
    },
    {
      "epoch": 0.36,
      "grad_norm": 2.078125,
      "learning_rate": 6.600000000000001e-06,
      "loss": 1.2157,
      "step": 33
    },
    {
      "epoch": 0.38,
      "grad_norm": 2.171875,
      "learning_rate": 6.800000000000001e-06,
      "loss": 1.1672,
      "step": 34
    },
    {
      "epoch": 0.39,
      "grad_norm": 2.03125,
      "learning_rate": 7e-06,
      "loss": 1.1638,
      "step": 35
    },
    {
      "epoch": 0.4,
      "grad_norm": 2.015625,
      "learning_rate": 7.2000000000000005e-06,
      "loss": 1.1659,
      "step": 36
    },
    {
      "epoch": 0.41,
      "grad_norm": 2.265625,
      "learning_rate": 7.4e-06,
      "loss": 1.2027,
      "step": 37
    },
    {
      "epoch": 0.42,
      "grad_norm": 2.125,
      "learning_rate": 7.600000000000001e-06,
      "loss": 1.2126,
      "step": 38
    },
    {
      "epoch": 0.43,
      "grad_norm": 1.921875,
      "learning_rate": 7.800000000000002e-06,
      "loss": 1.1615,
      "step": 39
    },
    {
      "epoch": 0.44,
      "grad_norm": 2.015625,
      "learning_rate": 8.000000000000001e-06,
      "loss": 1.1843,
      "step": 40
    },
    {
      "epoch": 0.45,
      "grad_norm": 2.0,
      "learning_rate": 8.2e-06,
      "loss": 1.2071,
      "step": 41
    },
    {
      "epoch": 0.46,
      "grad_norm": 2.078125,
      "learning_rate": 8.400000000000001e-06,
      "loss": 1.1488,
      "step": 42
    },
    {
      "epoch": 0.48,
      "grad_norm": 2.015625,
      "learning_rate": 8.6e-06,
      "loss": 1.1812,
      "step": 43
    },
    {
      "epoch": 0.49,
      "grad_norm": 1.96875,
      "learning_rate": 8.8e-06,
      "loss": 1.1694,
      "step": 44
    },
    {
      "epoch": 0.5,
      "grad_norm": 2.28125,
      "learning_rate": 9e-06,
      "loss": 1.1762,
      "step": 45
    },
    {
      "epoch": 0.5,
      "eval_loss": 1.1987459659576416,
      "eval_runtime": 27.0775,
      "eval_samples_per_second": 95.578,
      "eval_steps_per_second": 95.578,
      "step": 45
    },
    {
      "epoch": 0.51,
      "grad_norm": 2.015625,
      "learning_rate": 9.200000000000002e-06,
      "loss": 1.1775,
      "step": 46
    },
    {
      "epoch": 0.52,
      "grad_norm": 2.015625,
      "learning_rate": 9.4e-06,
      "loss": 1.1948,
      "step": 47
    },
    {
      "epoch": 0.53,
      "grad_norm": 2.03125,
      "learning_rate": 9.600000000000001e-06,
      "loss": 1.2011,
      "step": 48
    },
    {
      "epoch": 0.54,
      "grad_norm": 1.9375,
      "learning_rate": 9.800000000000001e-06,
      "loss": 1.1742,
      "step": 49
    },
    {
      "epoch": 0.55,
      "grad_norm": 2.1875,
      "learning_rate": 1e-05,
      "loss": 1.1784,
      "step": 50
    },
    {
      "epoch": 0.56,
      "grad_norm": 2.109375,
      "learning_rate": 1.02e-05,
      "loss": 1.1725,
      "step": 51
    },
    {
      "epoch": 0.57,
      "grad_norm": 2.203125,
      "learning_rate": 1.04e-05,
      "loss": 1.1532,
      "step": 52
    },
    {
      "epoch": 0.59,
      "grad_norm": 2.125,
      "learning_rate": 1.0600000000000002e-05,
      "loss": 1.1359,
      "step": 53
    },
    {
      "epoch": 0.6,
      "grad_norm": 1.9765625,
      "learning_rate": 1.0800000000000002e-05,
      "loss": 1.1602,
      "step": 54
    },
    {
      "epoch": 0.61,
      "grad_norm": 2.109375,
      "learning_rate": 1.1000000000000001e-05,
      "loss": 1.1536,
      "step": 55
    },
    {
      "epoch": 0.62,
      "grad_norm": 1.96875,
      "learning_rate": 1.1200000000000001e-05,
      "loss": 1.1599,
      "step": 56
    },
    {
      "epoch": 0.63,
      "grad_norm": 2.21875,
      "learning_rate": 1.14e-05,
      "loss": 1.1479,
      "step": 57
    },
    {
      "epoch": 0.64,
      "grad_norm": 2.25,
      "learning_rate": 1.16e-05,
      "loss": 1.1662,
      "step": 58
    },
    {
      "epoch": 0.65,
      "grad_norm": 1.984375,
      "learning_rate": 1.18e-05,
      "loss": 1.1606,
      "step": 59
    },
    {
      "epoch": 0.66,
      "grad_norm": 2.0625,
      "learning_rate": 1.2e-05,
      "loss": 1.1694,
      "step": 60
    },
    {
      "epoch": 0.67,
      "grad_norm": 2.0625,
      "learning_rate": 1.22e-05,
      "loss": 1.1647,
      "step": 61
    },
    {
      "epoch": 0.69,
      "grad_norm": 2.1875,
      "learning_rate": 1.2400000000000002e-05,
      "loss": 1.1458,
      "step": 62
    },
    {
      "epoch": 0.7,
      "grad_norm": 2.015625,
      "learning_rate": 1.2600000000000001e-05,
      "loss": 1.1522,
      "step": 63
    },
    {
      "epoch": 0.71,
      "grad_norm": 1.859375,
      "learning_rate": 1.2800000000000001e-05,
      "loss": 1.1335,
      "step": 64
    },
    {
      "epoch": 0.72,
      "grad_norm": 2.09375,
      "learning_rate": 1.3000000000000001e-05,
      "loss": 1.1563,
      "step": 65
    },
    {
      "epoch": 0.73,
      "grad_norm": 1.8828125,
      "learning_rate": 1.3200000000000002e-05,
      "loss": 1.1619,
      "step": 66
    },
    {
      "epoch": 0.74,
      "grad_norm": 1.96875,
      "learning_rate": 1.3400000000000002e-05,
      "loss": 1.1745,
      "step": 67
    },
    {
      "epoch": 0.75,
      "grad_norm": 2.03125,
      "learning_rate": 1.3600000000000002e-05,
      "loss": 1.1807,
      "step": 68
    },
    {
      "epoch": 0.76,
      "grad_norm": 1.9921875,
      "learning_rate": 1.38e-05,
      "loss": 1.168,
      "step": 69
    },
    {
      "epoch": 0.77,
      "grad_norm": 2.046875,
      "learning_rate": 1.4e-05,
      "loss": 1.1797,
      "step": 70
    },
    {
      "epoch": 0.78,
      "grad_norm": 2.0,
      "learning_rate": 1.4200000000000001e-05,
      "loss": 1.1521,
      "step": 71
    },
    {
      "epoch": 0.8,
      "grad_norm": 1.9453125,
      "learning_rate": 1.4400000000000001e-05,
      "loss": 1.1588,
      "step": 72
    },
    {
      "epoch": 0.81,
      "grad_norm": 1.9609375,
      "learning_rate": 1.46e-05,
      "loss": 1.1476,
      "step": 73
    },
    {
      "epoch": 0.82,
      "grad_norm": 2.03125,
      "learning_rate": 1.48e-05,
      "loss": 1.1619,
      "step": 74
    },
    {
      "epoch": 0.83,
      "grad_norm": 1.96875,
      "learning_rate": 1.5000000000000002e-05,
      "loss": 1.134,
      "step": 75
    },
    {
      "epoch": 0.84,
      "grad_norm": 2.015625,
      "learning_rate": 1.5200000000000002e-05,
      "loss": 1.1719,
      "step": 76
    },
    {
      "epoch": 0.85,
      "grad_norm": 2.0,
      "learning_rate": 1.54e-05,
      "loss": 1.1329,
      "step": 77
    },
    {
      "epoch": 0.86,
      "grad_norm": 1.9765625,
      "learning_rate": 1.5600000000000003e-05,
      "loss": 1.138,
      "step": 78
    },
    {
      "epoch": 0.87,
      "grad_norm": 1.96875,
      "learning_rate": 1.58e-05,
      "loss": 1.1076,
      "step": 79
    },
    {
      "epoch": 0.88,
      "grad_norm": 2.078125,
      "learning_rate": 1.6000000000000003e-05,
      "loss": 1.1485,
      "step": 80
    },
    {
      "epoch": 0.9,
      "grad_norm": 2.0,
      "learning_rate": 1.62e-05,
      "loss": 1.1164,
      "step": 81
    },
    {
      "epoch": 0.91,
      "grad_norm": 2.0625,
      "learning_rate": 1.64e-05,
      "loss": 1.128,
      "step": 82
    },
    {
      "epoch": 0.92,
      "grad_norm": 2.015625,
      "learning_rate": 1.66e-05,
      "loss": 1.116,
      "step": 83
    },
    {
      "epoch": 0.93,
      "grad_norm": 1.890625,
      "learning_rate": 1.6800000000000002e-05,
      "loss": 1.1424,
      "step": 84
    },
    {
      "epoch": 0.94,
      "grad_norm": 1.9375,
      "learning_rate": 1.7e-05,
      "loss": 1.0941,
      "step": 85
    },
    {
      "epoch": 0.95,
      "grad_norm": 2.15625,
      "learning_rate": 1.72e-05,
      "loss": 1.1392,
      "step": 86
    },
    {
      "epoch": 0.96,
      "grad_norm": 2.046875,
      "learning_rate": 1.7400000000000003e-05,
      "loss": 1.1303,
      "step": 87
    },
    {
      "epoch": 0.97,
      "grad_norm": 2.03125,
      "learning_rate": 1.76e-05,
      "loss": 1.1606,
      "step": 88
    },
    {
      "epoch": 0.98,
      "grad_norm": 1.9609375,
      "learning_rate": 1.7800000000000002e-05,
      "loss": 1.1234,
      "step": 89
    },
    {
      "epoch": 0.99,
      "grad_norm": 2.078125,
      "learning_rate": 1.8e-05,
      "loss": 1.1294,
      "step": 90
    },
    {
      "epoch": 0.99,
      "eval_loss": 1.1493040323257446,
      "eval_runtime": 27.0758,
      "eval_samples_per_second": 95.583,
      "eval_steps_per_second": 95.583,
      "step": 90
    },
    {
      "epoch": 1.01,
      "grad_norm": 2.140625,
      "learning_rate": 1.8200000000000002e-05,
      "loss": 1.1352,
      "step": 91
    },
    {
      "epoch": 1.02,
      "grad_norm": 2.078125,
      "learning_rate": 1.8400000000000003e-05,
      "loss": 1.1296,
      "step": 92
    },
    {
      "epoch": 1.01,
      "grad_norm": 1.984375,
      "learning_rate": 1.86e-05,
      "loss": 1.0708,
      "step": 93
    },
    {
      "epoch": 1.02,
      "grad_norm": 2.046875,
      "learning_rate": 1.88e-05,
      "loss": 1.0164,
      "step": 94
    },
    {
      "epoch": 1.03,
      "grad_norm": 1.96875,
      "learning_rate": 1.9e-05,
      "loss": 1.0213,
      "step": 95
    },
    {
      "epoch": 1.04,
      "grad_norm": 2.078125,
      "learning_rate": 1.9200000000000003e-05,
      "loss": 1.0203,
      "step": 96
    },
    {
      "epoch": 1.05,
      "grad_norm": 2.25,
      "learning_rate": 1.94e-05,
      "loss": 1.0297,
      "step": 97
    },
    {
      "epoch": 1.06,
      "grad_norm": 2.171875,
      "learning_rate": 1.9600000000000002e-05,
      "loss": 1.0501,
      "step": 98
    },
    {
      "epoch": 1.07,
      "grad_norm": 2.15625,
      "learning_rate": 1.98e-05,
      "loss": 1.0419,
      "step": 99
    },
    {
      "epoch": 1.08,
      "grad_norm": 1.96875,
      "learning_rate": 2e-05,
      "loss": 1.0666,
      "step": 100
    },
    {
      "epoch": 1.09,
      "grad_norm": 2.25,
      "learning_rate": 1.9992290362407232e-05,
      "loss": 1.0147,
      "step": 101
    },
    {
      "epoch": 1.1,
      "grad_norm": 2.078125,
      "learning_rate": 1.9969173337331283e-05,
      "loss": 1.0248,
      "step": 102
    },
    {
      "epoch": 1.12,
      "grad_norm": 1.96875,
      "learning_rate": 1.9930684569549265e-05,
      "loss": 1.0137,
      "step": 103
    },
    {
      "epoch": 1.13,
      "grad_norm": 2.109375,
      "learning_rate": 1.9876883405951378e-05,
      "loss": 1.0331,
      "step": 104
    },
    {
      "epoch": 1.14,
      "grad_norm": 2.203125,
      "learning_rate": 1.9807852804032306e-05,
      "loss": 1.0315,
      "step": 105
    },
    {
      "epoch": 1.15,
      "grad_norm": 2.109375,
      "learning_rate": 1.9723699203976768e-05,
      "loss": 1.0185,
      "step": 106
    },
    {
      "epoch": 1.16,
      "grad_norm": 2.0,
      "learning_rate": 1.9624552364536472e-05,
      "loss": 0.9612,
      "step": 107
    },
    {
      "epoch": 1.17,
      "grad_norm": 2.140625,
      "learning_rate": 1.9510565162951538e-05,
      "loss": 1.0257,
      "step": 108
    },
    {
      "epoch": 1.18,
      "grad_norm": 2.015625,
      "learning_rate": 1.9381913359224844e-05,
      "loss": 1.0289,
      "step": 109
    },
    {
      "epoch": 1.19,
      "grad_norm": 2.03125,
      "learning_rate": 1.9238795325112867e-05,
      "loss": 1.009,
      "step": 110
    },
    {
      "epoch": 1.2,
      "grad_norm": 2.109375,
      "learning_rate": 1.9081431738250815e-05,
      "loss": 1.0269,
      "step": 111
    },
    {
      "epoch": 1.22,
      "grad_norm": 2.046875,
      "learning_rate": 1.891006524188368e-05,
      "loss": 1.0398,
      "step": 112
    },
    {
      "epoch": 1.23,
      "grad_norm": 2.078125,
      "learning_rate": 1.8724960070727974e-05,
      "loss": 1.0113,
      "step": 113
    },
    {
      "epoch": 1.24,
      "grad_norm": 2.078125,
      "learning_rate": 1.8526401643540924e-05,
      "loss": 1.0057,
      "step": 114
    },
    {
      "epoch": 1.25,
      "grad_norm": 2.140625,
      "learning_rate": 1.8314696123025456e-05,
      "loss": 0.9797,
      "step": 115
    },
    {
      "epoch": 1.26,
      "grad_norm": 1.9921875,
      "learning_rate": 1.8090169943749477e-05,
      "loss": 1.0187,
      "step": 116
    },
    {
      "epoch": 1.27,
      "grad_norm": 2.03125,
      "learning_rate": 1.785316930880745e-05,
      "loss": 1.023,
      "step": 117
    },
    {
      "epoch": 1.28,
      "grad_norm": 2.0,
      "learning_rate": 1.7604059656000313e-05,
      "loss": 1.0601,
      "step": 118
    },
    {
      "epoch": 1.29,
      "grad_norm": 2.109375,
      "learning_rate": 1.7343225094356857e-05,
      "loss": 1.0275,
      "step": 119
    },
    {
      "epoch": 1.3,
      "grad_norm": 2.03125,
      "learning_rate": 1.7071067811865477e-05,
      "loss": 1.0169,
      "step": 120
    },
    {
      "epoch": 1.31,
      "grad_norm": 2.015625,
      "learning_rate": 1.678800745532942e-05,
      "loss": 1.0175,
      "step": 121
    },
    {
      "epoch": 1.33,
      "grad_norm": 1.984375,
      "learning_rate": 1.6494480483301836e-05,
      "loss": 0.9896,
      "step": 122
    },
    {
      "epoch": 1.34,
      "grad_norm": 1.9609375,
      "learning_rate": 1.6190939493098344e-05,
      "loss": 1.0251,
      "step": 123
    },
    {
      "epoch": 1.35,
      "grad_norm": 1.984375,
      "learning_rate": 1.5877852522924733e-05,
      "loss": 1.0083,
      "step": 124
    },
    {
      "epoch": 1.36,
      "grad_norm": 1.953125,
      "learning_rate": 1.5555702330196024e-05,
      "loss": 0.9796,
      "step": 125
    },
    {
      "epoch": 1.37,
      "grad_norm": 2.078125,
      "learning_rate": 1.5224985647159489e-05,
      "loss": 1.0203,
      "step": 126
    },
    {
      "epoch": 1.38,
      "grad_norm": 2.0625,
      "learning_rate": 1.4886212414969551e-05,
      "loss": 1.0284,
      "step": 127
    },
    {
      "epoch": 1.39,
      "grad_norm": 1.890625,
      "learning_rate": 1.4539904997395468e-05,
      "loss": 1.0034,
      "step": 128
    },
    {
      "epoch": 1.4,
      "grad_norm": 1.90625,
      "learning_rate": 1.4186597375374283e-05,
      "loss": 1.0101,
      "step": 129
    },
    {
      "epoch": 1.41,
      "grad_norm": 1.9375,
      "learning_rate": 1.3826834323650899e-05,
      "loss": 0.9997,
      "step": 130
    },
    {
      "epoch": 1.43,
      "grad_norm": 1.921875,
      "learning_rate": 1.346117057077493e-05,
      "loss": 1.0133,
      "step": 131
    },
    {
      "epoch": 1.44,
      "grad_norm": 2.0625,
      "learning_rate": 1.3090169943749475e-05,
      "loss": 1.0233,
      "step": 132
    },
    {
      "epoch": 1.45,
      "grad_norm": 1.90625,
      "learning_rate": 1.2714404498650743e-05,
      "loss": 0.9997,
      "step": 133
    },
    {
      "epoch": 1.46,
      "grad_norm": 1.9375,
      "learning_rate": 1.2334453638559057e-05,
      "loss": 0.9804,
      "step": 134
    },
    {
      "epoch": 1.47,
      "grad_norm": 2.0,
      "learning_rate": 1.1950903220161286e-05,
      "loss": 1.0028,
      "step": 135
    },
    {
      "epoch": 1.47,
      "eval_loss": 1.1330536603927612,
      "eval_runtime": 27.0763,
      "eval_samples_per_second": 95.582,
      "eval_steps_per_second": 95.582,
      "step": 135
    },
    {
      "epoch": 1.48,
      "grad_norm": 1.96875,
      "learning_rate": 1.156434465040231e-05,
      "loss": 0.998,
      "step": 136
    },
    {
      "epoch": 1.49,
      "grad_norm": 2.046875,
      "learning_rate": 1.1175373974578378e-05,
      "loss": 1.0039,
      "step": 137
    },
    {
      "epoch": 1.5,
      "grad_norm": 1.953125,
      "learning_rate": 1.0784590957278452e-05,
      "loss": 1.0141,
      "step": 138
    },
    {
      "epoch": 1.51,
      "grad_norm": 1.96875,
      "learning_rate": 1.0392598157590687e-05,
      "loss": 1.0049,
      "step": 139
    },
    {
      "epoch": 1.52,
      "grad_norm": 1.953125,
      "learning_rate": 1e-05,
      "loss": 1.0312,
      "step": 140
    },
    {
      "epoch": 1.54,
      "grad_norm": 2.046875,
      "learning_rate": 9.607401842409318e-06,
      "loss": 0.9889,
      "step": 141
    },
    {
      "epoch": 1.55,
      "grad_norm": 1.9296875,
      "learning_rate": 9.215409042721553e-06,
      "loss": 1.0125,
      "step": 142
    },
    {
      "epoch": 1.56,
      "grad_norm": 1.90625,
      "learning_rate": 8.824626025421625e-06,
      "loss": 0.9825,
      "step": 143
    },
    {
      "epoch": 1.57,
      "grad_norm": 1.9453125,
      "learning_rate": 8.43565534959769e-06,
      "loss": 0.9934,
      "step": 144
    },
    {
      "epoch": 1.58,
      "grad_norm": 1.8984375,
      "learning_rate": 8.04909677983872e-06,
      "loss": 0.9618,
      "step": 145
    },
    {
      "epoch": 1.59,
      "grad_norm": 1.90625,
      "learning_rate": 7.66554636144095e-06,
      "loss": 0.9879,
      "step": 146
    },
    {
      "epoch": 1.6,
      "grad_norm": 2.0,
      "learning_rate": 7.285595501349259e-06,
      "loss": 1.0055,
      "step": 147
    },
    {
      "epoch": 1.61,
      "grad_norm": 1.921875,
      "learning_rate": 6.909830056250527e-06,
      "loss": 0.9934,
      "step": 148
    },
    {
      "epoch": 1.62,
      "grad_norm": 1.875,
      "learning_rate": 6.538829429225068e-06,
      "loss": 1.0107,
      "step": 149
    },
    {
      "epoch": 1.64,
      "grad_norm": 1.8828125,
      "learning_rate": 6.173165676349103e-06,
      "loss": 1.0198,
      "step": 150
    },
    {
      "epoch": 1.65,
      "grad_norm": 2.21875,
      "learning_rate": 5.813402624625722e-06,
      "loss": 1.0236,
      "step": 151
    },
    {
      "epoch": 1.66,
      "grad_norm": 1.84375,
      "learning_rate": 5.460095002604533e-06,
      "loss": 0.9888,
      "step": 152
    },
    {
      "epoch": 1.67,
      "grad_norm": 1.828125,
      "learning_rate": 5.1137875850304545e-06,
      "loss": 0.9656,
      "step": 153
    },
    {
      "epoch": 1.68,
      "grad_norm": 1.84375,
      "learning_rate": 4.775014352840512e-06,
      "loss": 0.9728,
      "step": 154
    },
    {
      "epoch": 1.69,
      "grad_norm": 2.125,
      "learning_rate": 4.444297669803981e-06,
      "loss": 1.0094,
      "step": 155
    },
    {
      "epoch": 1.7,
      "grad_norm": 1.859375,
      "learning_rate": 4.12214747707527e-06,
      "loss": 1.0192,
      "step": 156
    },
    {
      "epoch": 1.71,
      "grad_norm": 1.8515625,
      "learning_rate": 3.8090605069016596e-06,
      "loss": 1.0004,
      "step": 157
    },
    {
      "epoch": 1.72,
      "grad_norm": 1.953125,
      "learning_rate": 3.505519516698165e-06,
      "loss": 1.0017,
      "step": 158
    },
    {
      "epoch": 1.73,
      "grad_norm": 1.8671875,
      "learning_rate": 3.2119925446705824e-06,
      "loss": 0.9942,
      "step": 159
    },
    {
      "epoch": 1.75,
      "grad_norm": 1.8203125,
      "learning_rate": 2.9289321881345257e-06,
      "loss": 0.9939,
      "step": 160
    },
    {
      "epoch": 1.76,
      "grad_norm": 1.9375,
      "learning_rate": 2.656774905643147e-06,
      "loss": 0.976,
      "step": 161
    },
    {
      "epoch": 1.77,
      "grad_norm": 1.875,
      "learning_rate": 2.395940343999691e-06,
      "loss": 1.0301,
      "step": 162
    },
    {
      "epoch": 1.78,
      "grad_norm": 1.8515625,
      "learning_rate": 2.146830691192553e-06,
      "loss": 0.9927,
      "step": 163
    },
    {
      "epoch": 1.79,
      "grad_norm": 1.796875,
      "learning_rate": 1.9098300562505266e-06,
      "loss": 1.0101,
      "step": 164
    },
    {
      "epoch": 1.8,
      "grad_norm": 1.84375,
      "learning_rate": 1.6853038769745466e-06,
      "loss": 1.0085,
      "step": 165
    },
    {
      "epoch": 1.81,
      "grad_norm": 1.78125,
      "learning_rate": 1.4735983564590784e-06,
      "loss": 0.986,
      "step": 166
    },
    {
      "epoch": 1.82,
      "grad_norm": 1.84375,
      "learning_rate": 1.2750399292720284e-06,
      "loss": 1.0162,
      "step": 167
    },
    {
      "epoch": 1.83,
      "grad_norm": 1.8359375,
      "learning_rate": 1.0899347581163222e-06,
      "loss": 0.9965,
      "step": 168
    },
    {
      "epoch": 1.85,
      "grad_norm": 1.890625,
      "learning_rate": 9.185682617491865e-07,
      "loss": 1.0018,
      "step": 169
    },
    {
      "epoch": 1.86,
      "grad_norm": 1.90625,
      "learning_rate": 7.612046748871327e-07,
      "loss": 1.0407,
      "step": 170
    },
    {
      "epoch": 1.87,
      "grad_norm": 1.8046875,
      "learning_rate": 6.180866407751595e-07,
      "loss": 0.9921,
      "step": 171
    },
    {
      "epoch": 1.88,
      "grad_norm": 1.828125,
      "learning_rate": 4.894348370484648e-07,
      "loss": 1.0116,
      "step": 172
    },
    {
      "epoch": 1.89,
      "grad_norm": 1.875,
      "learning_rate": 3.7544763546352834e-07,
      "loss": 0.9826,
      "step": 173
    },
    {
      "epoch": 1.9,
      "grad_norm": 1.8046875,
      "learning_rate": 2.7630079602323447e-07,
      "loss": 1.0124,
      "step": 174
    },
    {
      "epoch": 1.91,
      "grad_norm": 1.828125,
      "learning_rate": 1.921471959676957e-07,
      "loss": 0.9558,
      "step": 175
    },
    {
      "epoch": 1.92,
      "grad_norm": 1.875,
      "learning_rate": 1.231165940486234e-07,
      "loss": 1.0259,
      "step": 176
    },
    {
      "epoch": 1.93,
      "grad_norm": 1.8515625,
      "learning_rate": 6.931543045073708e-08,
      "loss": 0.9974,
      "step": 177
    },
    {
      "epoch": 1.94,
      "grad_norm": 1.8359375,
      "learning_rate": 3.082666266872036e-08,
      "loss": 0.9809,
      "step": 178
    },
    {
      "epoch": 1.96,
      "grad_norm": 1.8515625,
      "learning_rate": 7.70963759277099e-09,
      "loss": 0.9815,
      "step": 179
    },
    {
      "epoch": 1.97,
      "grad_norm": 1.859375,
      "learning_rate": 0.0,
      "loss": 0.9899,
      "step": 180
    },
    {
      "epoch": 1.97,
      "eval_loss": 1.1227047443389893,
      "eval_runtime": 27.0775,
      "eval_samples_per_second": 95.578,
      "eval_steps_per_second": 95.578,
      "step": 180
    }
  ],
  "logging_steps": 1,
  "max_steps": 180,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 2,
  "save_steps": 90,
  "total_flos": 5.3119016276852736e+17,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}