{
"best_global_step": null,
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 1.0,
"eval_steps": 500,
"global_step": 3125,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.0016,
"grad_norm": 17.753393173217773,
"learning_rate": 3.191489361702127e-08,
"loss": 1.0943,
"step": 5
},
{
"epoch": 0.0032,
"grad_norm": 5.271658897399902,
"learning_rate": 6.382978723404254e-08,
"loss": 1.1761,
"step": 10
},
{
"epoch": 0.0048,
"grad_norm": 18.389263153076172,
"learning_rate": 1.0638297872340425e-07,
"loss": 1.232,
"step": 15
},
{
"epoch": 0.0064,
"grad_norm": 5.9695916175842285,
"learning_rate": 1.2765957446808508e-07,
"loss": 0.9909,
"step": 20
},
{
"epoch": 0.008,
"grad_norm": 11.398734092712402,
"learning_rate": 1.702127659574468e-07,
"loss": 1.0854,
"step": 25
},
{
"epoch": 0.0096,
"grad_norm": 8.841111183166504,
"learning_rate": 2.127659574468085e-07,
"loss": 1.1299,
"step": 30
},
{
"epoch": 0.0112,
"grad_norm": 12.362792015075684,
"learning_rate": 2.4468085106382976e-07,
"loss": 1.1409,
"step": 35
},
{
"epoch": 0.0128,
"grad_norm": 11.710867881774902,
"learning_rate": 2.7659574468085106e-07,
"loss": 1.0812,
"step": 40
},
{
"epoch": 0.0144,
"grad_norm": 69.05541229248047,
"learning_rate": 3.085106382978723e-07,
"loss": 1.2053,
"step": 45
},
{
"epoch": 0.016,
"grad_norm": 22.71782875061035,
"learning_rate": 3.404255319148936e-07,
"loss": 1.1944,
"step": 50
},
{
"epoch": 0.0176,
"grad_norm": 9.513526916503906,
"learning_rate": 3.7234042553191484e-07,
"loss": 0.9585,
"step": 55
},
{
"epoch": 0.0192,
"grad_norm": 25.43731117248535,
"learning_rate": 4.0425531914893614e-07,
"loss": 1.0221,
"step": 60
},
{
"epoch": 0.0208,
"grad_norm": 9.436578750610352,
"learning_rate": 4.3617021276595744e-07,
"loss": 1.0496,
"step": 65
},
{
"epoch": 0.0224,
"grad_norm": 19.79042625427246,
"learning_rate": 4.787234042553192e-07,
"loss": 1.1427,
"step": 70
},
{
"epoch": 0.024,
"grad_norm": 12.988973617553711,
"learning_rate": 5.212765957446809e-07,
"loss": 1.0918,
"step": 75
},
{
"epoch": 0.0256,
"grad_norm": 57.59418487548828,
"learning_rate": 5.425531914893617e-07,
"loss": 1.2063,
"step": 80
},
{
"epoch": 0.0272,
"grad_norm": 8.209541320800781,
"learning_rate": 5.74468085106383e-07,
"loss": 1.2093,
"step": 85
},
{
"epoch": 0.0288,
"grad_norm": 236.162353515625,
"learning_rate": 5.957446808510638e-07,
"loss": 1.0908,
"step": 90
},
{
"epoch": 0.0304,
"grad_norm": 5.889673233032227,
"learning_rate": 6.170212765957446e-07,
"loss": 1.0849,
"step": 95
},
{
"epoch": 0.032,
"grad_norm": 17.223268508911133,
"learning_rate": 6.382978723404255e-07,
"loss": 0.9797,
"step": 100
},
{
"epoch": 0.0336,
"grad_norm": 12.999480247497559,
"learning_rate": 6.808510638297872e-07,
"loss": 1.109,
"step": 105
},
{
"epoch": 0.0352,
"grad_norm": 13.105081558227539,
"learning_rate": 7.127659574468084e-07,
"loss": 1.1344,
"step": 110
},
{
"epoch": 0.0368,
"grad_norm": 10.676628112792969,
"learning_rate": 7.659574468085106e-07,
"loss": 1.0022,
"step": 115
},
{
"epoch": 0.0384,
"grad_norm": 12.837193489074707,
"learning_rate": 8.191489361702127e-07,
"loss": 1.1584,
"step": 120
},
{
"epoch": 0.04,
"grad_norm": 30.59712791442871,
"learning_rate": 8.51063829787234e-07,
"loss": 1.0765,
"step": 125
},
{
"epoch": 0.0416,
"grad_norm": 6.356354713439941,
"learning_rate": 8.936170212765957e-07,
"loss": 1.0548,
"step": 130
},
{
"epoch": 0.0432,
"grad_norm": 28.62993621826172,
"learning_rate": 9.042553191489361e-07,
"loss": 1.1557,
"step": 135
},
{
"epoch": 0.0448,
"grad_norm": 7.197190284729004,
"learning_rate": 9.361702127659575e-07,
"loss": 0.8783,
"step": 140
},
{
"epoch": 0.0464,
"grad_norm": 15.51217269897461,
"learning_rate": 9.574468085106384e-07,
"loss": 1.1564,
"step": 145
},
{
"epoch": 0.048,
"grad_norm": 8.407793998718262,
"learning_rate": 9.996700758825468e-07,
"loss": 1.0376,
"step": 150
},
{
"epoch": 0.0496,
"grad_norm": 9.72488021850586,
"learning_rate": 9.98350379412735e-07,
"loss": 1.0693,
"step": 155
},
{
"epoch": 0.0512,
"grad_norm": 33.889404296875,
"learning_rate": 9.976905311778291e-07,
"loss": 1.0633,
"step": 160
},
{
"epoch": 0.0528,
"grad_norm": 12.949823379516602,
"learning_rate": 9.960409105905641e-07,
"loss": 1.1246,
"step": 165
},
{
"epoch": 0.0544,
"grad_norm": 25.435590744018555,
"learning_rate": 9.950511382382052e-07,
"loss": 1.1312,
"step": 170
},
{
"epoch": 0.056,
"grad_norm": 4.347928047180176,
"learning_rate": 9.940613658858462e-07,
"loss": 0.8828,
"step": 175
},
{
"epoch": 0.0576,
"grad_norm": 21.24222183227539,
"learning_rate": 9.927416694160343e-07,
"loss": 1.0106,
"step": 180
},
{
"epoch": 0.0592,
"grad_norm": 134.2750701904297,
"learning_rate": 9.914219729462222e-07,
"loss": 1.0893,
"step": 185
},
{
"epoch": 0.0608,
"grad_norm": 7.6175642013549805,
"learning_rate": 9.910920488287693e-07,
"loss": 0.9956,
"step": 190
},
{
"epoch": 0.0624,
"grad_norm": 10.543917655944824,
"learning_rate": 9.904322005938633e-07,
"loss": 1.1866,
"step": 195
},
{
"epoch": 0.064,
"grad_norm": 15.187593460083008,
"learning_rate": 9.891125041240514e-07,
"loss": 0.9146,
"step": 200
},
{
"epoch": 0.0656,
"grad_norm": 15.958827018737793,
"learning_rate": 9.877928076542395e-07,
"loss": 0.9731,
"step": 205
},
{
"epoch": 0.0672,
"grad_norm": 9.200213432312012,
"learning_rate": 9.868030353018806e-07,
"loss": 0.8353,
"step": 210
},
{
"epoch": 0.0688,
"grad_norm": 95.77112579345703,
"learning_rate": 9.858132629495216e-07,
"loss": 0.9552,
"step": 215
},
{
"epoch": 0.0704,
"grad_norm": 108.77849578857422,
"learning_rate": 9.848234905971626e-07,
"loss": 1.0746,
"step": 220
},
{
"epoch": 0.072,
"grad_norm": 27.069581985473633,
"learning_rate": 9.835037941273505e-07,
"loss": 1.1292,
"step": 225
},
{
"epoch": 0.0736,
"grad_norm": 9.253786087036133,
"learning_rate": 9.831738700098976e-07,
"loss": 1.0132,
"step": 230
},
{
"epoch": 0.0752,
"grad_norm": 7.91702938079834,
"learning_rate": 9.821840976575387e-07,
"loss": 0.9699,
"step": 235
},
{
"epoch": 0.0768,
"grad_norm": 6.46748161315918,
"learning_rate": 9.805344770702739e-07,
"loss": 1.0323,
"step": 240
},
{
"epoch": 0.0784,
"grad_norm": 4.719814300537109,
"learning_rate": 9.792147806004618e-07,
"loss": 0.9378,
"step": 245
},
{
"epoch": 0.08,
"grad_norm": 11.16118335723877,
"learning_rate": 9.78554932365556e-07,
"loss": 0.9825,
"step": 250
},
{
"epoch": 0.0816,
"grad_norm": 10.521653175354004,
"learning_rate": 9.76905311778291e-07,
"loss": 1.1565,
"step": 255
},
{
"epoch": 0.0832,
"grad_norm": 19.716693878173828,
"learning_rate": 9.75915539425932e-07,
"loss": 0.9906,
"step": 260
},
{
"epoch": 0.0848,
"grad_norm": 15.141022682189941,
"learning_rate": 9.75255691191026e-07,
"loss": 1.0978,
"step": 265
},
{
"epoch": 0.0864,
"grad_norm": 5.733545780181885,
"learning_rate": 9.73935994721214e-07,
"loss": 0.9707,
"step": 270
},
{
"epoch": 0.088,
"grad_norm": 76.02755737304688,
"learning_rate": 9.72946222368855e-07,
"loss": 1.056,
"step": 275
},
{
"epoch": 0.0896,
"grad_norm": 23.83684539794922,
"learning_rate": 9.712966017815903e-07,
"loss": 0.8035,
"step": 280
},
{
"epoch": 0.0912,
"grad_norm": 6.880498886108398,
"learning_rate": 9.703068294292313e-07,
"loss": 0.8934,
"step": 285
},
{
"epoch": 0.0928,
"grad_norm": 22.199569702148438,
"learning_rate": 9.693170570768722e-07,
"loss": 0.966,
"step": 290
},
{
"epoch": 0.0944,
"grad_norm": 15.069271087646484,
"learning_rate": 9.683272847245132e-07,
"loss": 0.998,
"step": 295
},
{
"epoch": 0.096,
"grad_norm": 19.67232322692871,
"learning_rate": 9.670075882547013e-07,
"loss": 0.9979,
"step": 300
},
{
"epoch": 0.0976,
"grad_norm": 15.360819816589355,
"learning_rate": 9.660178159023424e-07,
"loss": 0.9696,
"step": 305
},
{
"epoch": 0.0992,
"grad_norm": 11.680339813232422,
"learning_rate": 9.646981194325305e-07,
"loss": 0.9597,
"step": 310
},
{
"epoch": 0.1008,
"grad_norm": 18.256032943725586,
"learning_rate": 9.643681953150776e-07,
"loss": 1.0115,
"step": 315
},
{
"epoch": 0.1024,
"grad_norm": 12.97192668914795,
"learning_rate": 9.633784229627186e-07,
"loss": 1.0294,
"step": 320
},
{
"epoch": 0.104,
"grad_norm": 4.558760643005371,
"learning_rate": 9.617288023754536e-07,
"loss": 0.9127,
"step": 325
},
{
"epoch": 0.1056,
"grad_norm": 35.9009895324707,
"learning_rate": 9.604091059056415e-07,
"loss": 1.1651,
"step": 330
},
{
"epoch": 0.1072,
"grad_norm": 5.918971061706543,
"learning_rate": 9.600791817881886e-07,
"loss": 0.979,
"step": 335
},
{
"epoch": 0.1088,
"grad_norm": 9.000371932983398,
"learning_rate": 9.587594853183767e-07,
"loss": 0.9887,
"step": 340
},
{
"epoch": 0.1104,
"grad_norm": 3.865067958831787,
"learning_rate": 9.580996370834707e-07,
"loss": 0.9858,
"step": 345
},
{
"epoch": 0.112,
"grad_norm": 10.578916549682617,
"learning_rate": 9.567799406136588e-07,
"loss": 0.942,
"step": 350
},
{
"epoch": 0.1136,
"grad_norm": 10.010342597961426,
"learning_rate": 9.55460244143847e-07,
"loss": 1.1082,
"step": 355
},
{
"epoch": 0.1152,
"grad_norm": 3.942474126815796,
"learning_rate": 9.54470471791488e-07,
"loss": 1.0817,
"step": 360
},
{
"epoch": 0.1168,
"grad_norm": 25.993215560913086,
"learning_rate": 9.53810623556582e-07,
"loss": 0.9509,
"step": 365
},
{
"epoch": 0.1184,
"grad_norm": 18.104969024658203,
"learning_rate": 9.524909270867701e-07,
"loss": 0.952,
"step": 370
},
{
"epoch": 0.12,
"grad_norm": 16.51449966430664,
"learning_rate": 9.515011547344111e-07,
"loss": 1.1549,
"step": 375
},
{
"epoch": 0.1216,
"grad_norm": 7.9347710609436035,
"learning_rate": 9.511712306169581e-07,
"loss": 1.0856,
"step": 380
},
{
"epoch": 0.1232,
"grad_norm": 12.884934425354004,
"learning_rate": 9.508413064995052e-07,
"loss": 0.8983,
"step": 385
},
{
"epoch": 0.1248,
"grad_norm": 3.2588469982147217,
"learning_rate": 9.491916859122402e-07,
"loss": 0.9289,
"step": 390
},
{
"epoch": 0.1264,
"grad_norm": 13.216696739196777,
"learning_rate": 9.482019135598812e-07,
"loss": 1.0303,
"step": 395
},
{
"epoch": 0.128,
"grad_norm": 5.220747947692871,
"learning_rate": 9.468822170900692e-07,
"loss": 0.8735,
"step": 400
},
{
"epoch": 0.1296,
"grad_norm": 4.664916515350342,
"learning_rate": 9.452325965028043e-07,
"loss": 1.0623,
"step": 405
},
{
"epoch": 0.1312,
"grad_norm": 9.029937744140625,
"learning_rate": 9.435829759155394e-07,
"loss": 1.0159,
"step": 410
},
{
"epoch": 0.1328,
"grad_norm": 31.10121726989746,
"learning_rate": 9.419333553282744e-07,
"loss": 1.0106,
"step": 415
},
{
"epoch": 0.1344,
"grad_norm": 407.8924560546875,
"learning_rate": 9.412735070933685e-07,
"loss": 0.9323,
"step": 420
},
{
"epoch": 0.136,
"grad_norm": 11.239225387573242,
"learning_rate": 9.402837347410095e-07,
"loss": 1.0606,
"step": 425
},
{
"epoch": 0.1376,
"grad_norm": 25.785709381103516,
"learning_rate": 9.389640382711976e-07,
"loss": 1.0077,
"step": 430
},
{
"epoch": 0.1392,
"grad_norm": 4.758748531341553,
"learning_rate": 9.379742659188387e-07,
"loss": 1.0224,
"step": 435
},
{
"epoch": 0.1408,
"grad_norm": 8.108410835266113,
"learning_rate": 9.366545694490267e-07,
"loss": 1.096,
"step": 440
},
{
"epoch": 0.1424,
"grad_norm": 7.627793788909912,
"learning_rate": 9.356647970966677e-07,
"loss": 1.0142,
"step": 445
},
{
"epoch": 0.144,
"grad_norm": 16.316343307495117,
"learning_rate": 9.346750247443088e-07,
"loss": 1.0279,
"step": 450
},
{
"epoch": 0.1456,
"grad_norm": 7.309606552124023,
"learning_rate": 9.336852523919498e-07,
"loss": 1.0464,
"step": 455
},
{
"epoch": 0.1472,
"grad_norm": 10.05410099029541,
"learning_rate": 9.320356318046849e-07,
"loss": 0.8944,
"step": 460
},
{
"epoch": 0.1488,
"grad_norm": 4.556393623352051,
"learning_rate": 9.310458594523259e-07,
"loss": 1.028,
"step": 465
},
{
"epoch": 0.1504,
"grad_norm": 7.501353740692139,
"learning_rate": 9.29396238865061e-07,
"loss": 1.0284,
"step": 470
},
{
"epoch": 0.152,
"grad_norm": 5.888306140899658,
"learning_rate": 9.287363906301551e-07,
"loss": 1.0443,
"step": 475
},
{
"epoch": 0.1536,
"grad_norm": 17.299659729003906,
"learning_rate": 9.27416694160343e-07,
"loss": 0.9476,
"step": 480
},
{
"epoch": 0.1552,
"grad_norm": 11.71612548828125,
"learning_rate": 9.260969976905311e-07,
"loss": 0.9573,
"step": 485
},
{
"epoch": 0.1568,
"grad_norm": 15.907907485961914,
"learning_rate": 9.251072253381722e-07,
"loss": 0.9067,
"step": 490
},
{
"epoch": 0.1584,
"grad_norm": 7.4433417320251465,
"learning_rate": 9.241174529858132e-07,
"loss": 0.9206,
"step": 495
},
{
"epoch": 0.16,
"grad_norm": 20.83209228515625,
"learning_rate": 9.227977565160012e-07,
"loss": 1.0452,
"step": 500
},
{
"epoch": 0.1616,
"grad_norm": 7.117931365966797,
"learning_rate": 9.214780600461894e-07,
"loss": 0.9224,
"step": 505
},
{
"epoch": 0.1632,
"grad_norm": 34.387203216552734,
"learning_rate": 9.201583635763775e-07,
"loss": 1.0476,
"step": 510
},
{
"epoch": 0.1648,
"grad_norm": 5.099363327026367,
"learning_rate": 9.188386671065654e-07,
"loss": 0.9696,
"step": 515
},
{
"epoch": 0.1664,
"grad_norm": 10.837265968322754,
"learning_rate": 9.181788188716595e-07,
"loss": 0.9765,
"step": 520
},
{
"epoch": 0.168,
"grad_norm": 7.461008548736572,
"learning_rate": 9.168591224018475e-07,
"loss": 0.844,
"step": 525
},
{
"epoch": 0.1696,
"grad_norm": 18.195476531982422,
"learning_rate": 9.161992741669415e-07,
"loss": 1.0127,
"step": 530
},
{
"epoch": 0.1712,
"grad_norm": 19.382884979248047,
"learning_rate": 9.155394259320356e-07,
"loss": 1.0422,
"step": 535
},
{
"epoch": 0.1728,
"grad_norm": 3.098797082901001,
"learning_rate": 9.142197294622237e-07,
"loss": 0.955,
"step": 540
},
{
"epoch": 0.1744,
"grad_norm": 36.317996978759766,
"learning_rate": 9.129000329924117e-07,
"loss": 1.0281,
"step": 545
},
{
"epoch": 0.176,
"grad_norm": 68.5218276977539,
"learning_rate": 9.122401847575058e-07,
"loss": 0.9965,
"step": 550
},
{
"epoch": 0.1776,
"grad_norm": 10.113546371459961,
"learning_rate": 9.115803365225998e-07,
"loss": 0.8978,
"step": 555
},
{
"epoch": 0.1792,
"grad_norm": 30.086557388305664,
"learning_rate": 9.105905641702408e-07,
"loss": 1.0359,
"step": 560
},
{
"epoch": 0.1808,
"grad_norm": 11.476800918579102,
"learning_rate": 9.092708677004288e-07,
"loss": 0.8809,
"step": 565
},
{
"epoch": 0.1824,
"grad_norm": 12.45345687866211,
"learning_rate": 9.082810953480699e-07,
"loss": 0.9595,
"step": 570
},
{
"epoch": 0.184,
"grad_norm": 5.285238265991211,
"learning_rate": 9.076212471131639e-07,
"loss": 0.9676,
"step": 575
},
{
"epoch": 0.1856,
"grad_norm": 165.41041564941406,
"learning_rate": 9.06631474760805e-07,
"loss": 1.0644,
"step": 580
},
{
"epoch": 0.1872,
"grad_norm": 11.455655097961426,
"learning_rate": 9.05971626525899e-07,
"loss": 1.0373,
"step": 585
},
{
"epoch": 0.1888,
"grad_norm": 13.930870056152344,
"learning_rate": 9.049818541735401e-07,
"loss": 0.9109,
"step": 590
},
{
"epoch": 0.1904,
"grad_norm": 3.5618736743927,
"learning_rate": 9.04322005938634e-07,
"loss": 0.8976,
"step": 595
},
{
"epoch": 0.192,
"grad_norm": 21.46792984008789,
"learning_rate": 9.036621577037281e-07,
"loss": 0.912,
"step": 600
},
{
"epoch": 0.1936,
"grad_norm": 16.73967933654785,
"learning_rate": 9.023424612339161e-07,
"loss": 0.9407,
"step": 605
},
{
"epoch": 0.1952,
"grad_norm": 28.35312843322754,
"learning_rate": 9.016826129990101e-07,
"loss": 0.9553,
"step": 610
},
{
"epoch": 0.1968,
"grad_norm": 5.807398796081543,
"learning_rate": 9.006928406466512e-07,
"loss": 0.9616,
"step": 615
},
{
"epoch": 0.1984,
"grad_norm": 6.136154492292301e+16,
"learning_rate": 8.997030682942923e-07,
"loss": 1.0047,
"step": 620
},
{
"epoch": 0.2,
"grad_norm": 20.447399139404297,
"learning_rate": 8.987132959419334e-07,
"loss": 1.05,
"step": 625
},
{
"epoch": 0.2016,
"grad_norm": 6.224416255950928,
"learning_rate": 8.973935994721214e-07,
"loss": 1.0902,
"step": 630
},
{
"epoch": 0.2032,
"grad_norm": 4.525908946990967,
"learning_rate": 8.960739030023094e-07,
"loss": 0.9018,
"step": 635
},
{
"epoch": 0.2048,
"grad_norm": 18.014423370361328,
"learning_rate": 8.950841306499504e-07,
"loss": 0.9259,
"step": 640
},
{
"epoch": 0.2064,
"grad_norm": 5.060450553894043,
"learning_rate": 8.934345100626855e-07,
"loss": 0.9576,
"step": 645
},
{
"epoch": 0.208,
"grad_norm": 6.123034477233887,
"learning_rate": 8.924447377103266e-07,
"loss": 0.8193,
"step": 650
},
{
"epoch": 0.2096,
"grad_norm": 5.297720909118652,
"learning_rate": 8.911250412405147e-07,
"loss": 0.9992,
"step": 655
},
{
"epoch": 0.2112,
"grad_norm": 47.66415786743164,
"learning_rate": 8.904651930056087e-07,
"loss": 0.9482,
"step": 660
},
{
"epoch": 0.2128,
"grad_norm": 16.03940200805664,
"learning_rate": 8.898053447707027e-07,
"loss": 0.9219,
"step": 665
},
{
"epoch": 0.2144,
"grad_norm": 49.70168685913086,
"learning_rate": 8.891454965357967e-07,
"loss": 0.8535,
"step": 670
},
{
"epoch": 0.216,
"grad_norm": 5.318902015686035,
"learning_rate": 8.884856483008907e-07,
"loss": 0.9169,
"step": 675
},
{
"epoch": 0.2176,
"grad_norm": 29.10072898864746,
"learning_rate": 8.874958759485318e-07,
"loss": 0.9721,
"step": 680
},
{
"epoch": 0.2192,
"grad_norm": 3.657233238220215,
"learning_rate": 8.861761794787198e-07,
"loss": 0.9178,
"step": 685
},
{
"epoch": 0.2208,
"grad_norm": 21.64499855041504,
"learning_rate": 8.85186407126361e-07,
"loss": 1.0563,
"step": 690
},
{
"epoch": 0.2224,
"grad_norm": 39.829437255859375,
"learning_rate": 8.845265588914549e-07,
"loss": 0.9631,
"step": 695
},
{
"epoch": 0.224,
"grad_norm": 4.240927696228027,
"learning_rate": 8.84196634774002e-07,
"loss": 0.8741,
"step": 700
},
{
"epoch": 0.2256,
"grad_norm": 21.78352165222168,
"learning_rate": 8.835367865390959e-07,
"loss": 0.9326,
"step": 705
},
{
"epoch": 0.2272,
"grad_norm": 8.84949779510498,
"learning_rate": 8.825470141867371e-07,
"loss": 0.993,
"step": 710
},
{
"epoch": 0.2288,
"grad_norm": 3.609835624694824,
"learning_rate": 8.81227317716925e-07,
"loss": 0.9103,
"step": 715
},
{
"epoch": 0.2304,
"grad_norm": 3.2008163928985596,
"learning_rate": 8.805674694820191e-07,
"loss": 0.8525,
"step": 720
},
{
"epoch": 0.232,
"grad_norm": 4.056381702423096,
"learning_rate": 8.795776971296601e-07,
"loss": 0.9224,
"step": 725
},
{
"epoch": 0.2336,
"grad_norm": 12.993239402770996,
"learning_rate": 8.782580006598482e-07,
"loss": 1.0513,
"step": 730
},
{
"epoch": 0.2352,
"grad_norm": 4.219738006591797,
"learning_rate": 8.769383041900362e-07,
"loss": 0.828,
"step": 735
},
{
"epoch": 0.2368,
"grad_norm": 3.9953763484954834,
"learning_rate": 8.756186077202244e-07,
"loss": 1.0853,
"step": 740
},
{
"epoch": 0.2384,
"grad_norm": 149.8667755126953,
"learning_rate": 8.742989112504124e-07,
"loss": 0.831,
"step": 745
},
{
"epoch": 0.24,
"grad_norm": 9.772624969482422,
"learning_rate": 8.729792147806004e-07,
"loss": 0.8648,
"step": 750
},
{
"epoch": 0.2416,
"grad_norm": 6.546755313873291,
"learning_rate": 8.723193665456945e-07,
"loss": 0.9525,
"step": 755
},
{
"epoch": 0.2432,
"grad_norm": 33.5244026184082,
"learning_rate": 8.709996700758825e-07,
"loss": 0.9539,
"step": 760
},
{
"epoch": 0.2448,
"grad_norm": 6.489267826080322,
"learning_rate": 8.693500494886176e-07,
"loss": 0.8907,
"step": 765
},
{
"epoch": 0.2464,
"grad_norm": 22.97650146484375,
"learning_rate": 8.680303530188057e-07,
"loss": 0.9431,
"step": 770
},
{
"epoch": 0.248,
"grad_norm": 4.188962936401367,
"learning_rate": 8.670405806664467e-07,
"loss": 0.8657,
"step": 775
},
{
"epoch": 0.2496,
"grad_norm": 12.398491859436035,
"learning_rate": 8.663807324315408e-07,
"loss": 0.9384,
"step": 780
},
{
"epoch": 0.2512,
"grad_norm": 5.374507904052734,
"learning_rate": 8.647311118442758e-07,
"loss": 0.8974,
"step": 785
},
{
"epoch": 0.2528,
"grad_norm": 7.633402347564697,
"learning_rate": 8.644011877268228e-07,
"loss": 0.8876,
"step": 790
},
{
"epoch": 0.2544,
"grad_norm": 12.387234687805176,
"learning_rate": 8.630814912570108e-07,
"loss": 1.0416,
"step": 795
},
{
"epoch": 0.256,
"grad_norm": 3.2906081676483154,
"learning_rate": 8.620917189046519e-07,
"loss": 0.924,
"step": 800
},
{
"epoch": 0.2576,
"grad_norm": 7.542803764343262,
"learning_rate": 8.6077202243484e-07,
"loss": 1.1339,
"step": 805
},
{
"epoch": 0.2592,
"grad_norm": 15.092568397521973,
"learning_rate": 8.59782250082481e-07,
"loss": 0.9043,
"step": 810
},
{
"epoch": 0.2608,
"grad_norm": 6.346741676330566,
"learning_rate": 8.591224018475751e-07,
"loss": 0.9703,
"step": 815
},
{
"epoch": 0.2624,
"grad_norm": 4.8828911781311035,
"learning_rate": 8.578027053777631e-07,
"loss": 0.9665,
"step": 820
},
{
"epoch": 0.264,
"grad_norm": 70.4166259765625,
"learning_rate": 8.568129330254041e-07,
"loss": 0.7673,
"step": 825
},
{
"epoch": 0.2656,
"grad_norm": 17.03971290588379,
"learning_rate": 8.564830089079511e-07,
"loss": 0.8872,
"step": 830
},
{
"epoch": 0.2672,
"grad_norm": 14.032660484313965,
"learning_rate": 8.551633124381392e-07,
"loss": 0.9907,
"step": 835
},
{
"epoch": 0.2688,
"grad_norm": 7.869609355926514,
"learning_rate": 8.545034642032333e-07,
"loss": 0.9527,
"step": 840
},
{
"epoch": 0.2704,
"grad_norm": 14.373311042785645,
"learning_rate": 8.535136918508743e-07,
"loss": 0.9445,
"step": 845
},
{
"epoch": 0.272,
"grad_norm": 8.587657928466797,
"learning_rate": 8.525239194985154e-07,
"loss": 0.943,
"step": 850
},
{
"epoch": 0.2736,
"grad_norm": 38.496395111083984,
"learning_rate": 8.518640712636094e-07,
"loss": 0.9087,
"step": 855
},
{
"epoch": 0.2752,
"grad_norm": 23.268457412719727,
"learning_rate": 8.508742989112503e-07,
"loss": 0.94,
"step": 860
},
{
"epoch": 0.2768,
"grad_norm": 14.687287330627441,
"learning_rate": 8.495546024414384e-07,
"loss": 0.9875,
"step": 865
},
{
"epoch": 0.2784,
"grad_norm": 6.241093158721924,
"learning_rate": 8.485648300890794e-07,
"loss": 0.9066,
"step": 870
},
{
"epoch": 0.28,
"grad_norm": 3.009932041168213,
"learning_rate": 8.475750577367206e-07,
"loss": 0.8807,
"step": 875
},
{
"epoch": 0.2816,
"grad_norm": 7.232269287109375,
"learning_rate": 8.462553612669086e-07,
"loss": 0.9392,
"step": 880
},
{
"epoch": 0.2832,
"grad_norm": 18.572834014892578,
"learning_rate": 8.452655889145496e-07,
"loss": 0.9329,
"step": 885
},
{
"epoch": 0.2848,
"grad_norm": 5.431556701660156,
"learning_rate": 8.446057406796437e-07,
"loss": 0.8609,
"step": 890
},
{
"epoch": 0.2864,
"grad_norm": 5.945143222808838,
"learning_rate": 8.439458924447377e-07,
"loss": 0.8401,
"step": 895
},
{
"epoch": 0.288,
"grad_norm": 4.523690223693848,
"learning_rate": 8.429561200923787e-07,
"loss": 0.8114,
"step": 900
},
{
"epoch": 0.2896,
"grad_norm": 9.477612495422363,
"learning_rate": 8.422962718574727e-07,
"loss": 0.8875,
"step": 905
},
{
"epoch": 0.2912,
"grad_norm": 11.76913070678711,
"learning_rate": 8.416364236225668e-07,
"loss": 0.9311,
"step": 910
},
{
"epoch": 0.2928,
"grad_norm": 21.548538208007812,
"learning_rate": 8.409765753876607e-07,
"loss": 0.8303,
"step": 915
},
{
"epoch": 0.2944,
"grad_norm": 4.586549758911133,
"learning_rate": 8.403167271527548e-07,
"loss": 1.0134,
"step": 920
},
{
"epoch": 0.296,
"grad_norm": 12.563512802124023,
"learning_rate": 8.389970306829429e-07,
"loss": 0.9424,
"step": 925
},
{
"epoch": 0.2976,
"grad_norm": 20.280359268188477,
"learning_rate": 8.376773342131309e-07,
"loss": 0.9972,
"step": 930
},
{
"epoch": 0.2992,
"grad_norm": 3.714491844177246,
"learning_rate": 8.363576377433191e-07,
"loss": 0.9688,
"step": 935
},
{
"epoch": 0.3008,
"grad_norm": 110.5365982055664,
"learning_rate": 8.35037941273507e-07,
"loss": 0.8571,
"step": 940
},
{
"epoch": 0.3024,
"grad_norm": 4.156267166137695,
"learning_rate": 8.337182448036951e-07,
"loss": 0.8008,
"step": 945
},
{
"epoch": 0.304,
"grad_norm": 8.01066780090332,
"learning_rate": 8.330583965687892e-07,
"loss": 0.9203,
"step": 950
},
{
"epoch": 0.3056,
"grad_norm": 4.470520496368408,
"learning_rate": 8.323985483338831e-07,
"loss": 0.8878,
"step": 955
},
{
"epoch": 0.3072,
"grad_norm": 5.175978660583496,
"learning_rate": 8.310788518640712e-07,
"loss": 0.9269,
"step": 960
},
{
"epoch": 0.3088,
"grad_norm": 7.409951686859131,
"learning_rate": 8.300890795117123e-07,
"loss": 0.9188,
"step": 965
},
{
"epoch": 0.3104,
"grad_norm": 3.353938579559326,
"learning_rate": 8.290993071593533e-07,
"loss": 0.9578,
"step": 970
},
{
"epoch": 0.312,
"grad_norm": 6.477808475494385,
"learning_rate": 8.277796106895413e-07,
"loss": 0.9303,
"step": 975
},
{
"epoch": 0.3136,
"grad_norm": 19.51748275756836,
"learning_rate": 8.271197624546354e-07,
"loss": 1.0033,
"step": 980
},
{
"epoch": 0.3152,
"grad_norm": 6.306683540344238,
"learning_rate": 8.261299901022764e-07,
"loss": 0.8887,
"step": 985
},
{
"epoch": 0.3168,
"grad_norm": 13.722183227539062,
"learning_rate": 8.248102936324645e-07,
"loss": 0.888,
"step": 990
},
{
"epoch": 0.3184,
"grad_norm": 11.276162147521973,
"learning_rate": 8.238205212801055e-07,
"loss": 0.7709,
"step": 995
},
{
"epoch": 0.32,
"grad_norm": 8.74631118774414,
"learning_rate": 8.228307489277466e-07,
"loss": 0.9271,
"step": 1000
},
{
"epoch": 0.3216,
"grad_norm": 14.264816284179688,
"learning_rate": 8.215110524579347e-07,
"loss": 0.9112,
"step": 1005
},
{
"epoch": 0.3232,
"grad_norm": 13.194239616394043,
"learning_rate": 8.205212801055756e-07,
"loss": 0.7938,
"step": 1010
},
{
"epoch": 0.3248,
"grad_norm": 8.465250015258789,
"learning_rate": 8.192015836357637e-07,
"loss": 0.8321,
"step": 1015
},
{
"epoch": 0.3264,
"grad_norm": 9.953231811523438,
"learning_rate": 8.175519630484988e-07,
"loss": 0.9216,
"step": 1020
},
{
"epoch": 0.328,
"grad_norm": 18.280670166015625,
"learning_rate": 8.165621906961399e-07,
"loss": 0.8846,
"step": 1025
},
{
"epoch": 0.3296,
"grad_norm": 14.445294380187988,
"learning_rate": 8.15242494226328e-07,
"loss": 0.9195,
"step": 1030
},
{
"epoch": 0.3312,
"grad_norm": 17.734664916992188,
"learning_rate": 8.14252721873969e-07,
"loss": 1.0705,
"step": 1035
},
{
"epoch": 0.3328,
"grad_norm": 4.701229572296143,
"learning_rate": 8.132629495216101e-07,
"loss": 0.9359,
"step": 1040
},
{
"epoch": 0.3344,
"grad_norm": 15.422259330749512,
"learning_rate": 8.12603101286704e-07,
"loss": 0.7105,
"step": 1045
},
{
"epoch": 0.336,
"grad_norm": 6.021975517272949,
"learning_rate": 8.11283404816892e-07,
"loss": 0.9007,
"step": 1050
},
{
"epoch": 0.3376,
"grad_norm": 5.254234313964844,
"learning_rate": 8.102936324645331e-07,
"loss": 0.9011,
"step": 1055
},
{
"epoch": 0.3392,
"grad_norm": 8.567048072814941,
"learning_rate": 8.093038601121741e-07,
"loss": 0.9535,
"step": 1060
},
{
"epoch": 0.3408,
"grad_norm": 6.953893661499023,
"learning_rate": 8.083140877598153e-07,
"loss": 0.8239,
"step": 1065
},
{
"epoch": 0.3424,
"grad_norm": 3.6682486534118652,
"learning_rate": 8.076542395249092e-07,
"loss": 0.7625,
"step": 1070
},
{
"epoch": 0.344,
"grad_norm": 27.097938537597656,
"learning_rate": 8.063345430550973e-07,
"loss": 1.0441,
"step": 1075
},
{
"epoch": 0.3456,
"grad_norm": 95.70716094970703,
"learning_rate": 8.050148465852854e-07,
"loss": 0.9024,
"step": 1080
},
{
"epoch": 0.3472,
"grad_norm": 3.4635119438171387,
"learning_rate": 8.050148465852854e-07,
"loss": 0.9188,
"step": 1085
},
{
"epoch": 0.3488,
"grad_norm": 15.564712524414062,
"learning_rate": 8.043549983503793e-07,
"loss": 0.9593,
"step": 1090
},
{
"epoch": 0.3504,
"grad_norm": 4.094420909881592,
"learning_rate": 8.033652259980203e-07,
"loss": 1.0881,
"step": 1095
},
{
"epoch": 0.352,
"grad_norm": 9.64607048034668,
"learning_rate": 8.027053777631144e-07,
"loss": 1.0337,
"step": 1100
},
{
"epoch": 0.3536,
"grad_norm": 8.195448875427246,
"learning_rate": 8.020455295282085e-07,
"loss": 0.9579,
"step": 1105
},
{
"epoch": 0.3552,
"grad_norm": 18.41771697998047,
"learning_rate": 8.010557571758495e-07,
"loss": 0.9274,
"step": 1110
},
{
"epoch": 0.3568,
"grad_norm": 17.324462890625,
"learning_rate": 8.003959089409436e-07,
"loss": 0.7573,
"step": 1115
},
{
"epoch": 0.3584,
"grad_norm": 4.447535991668701,
"learning_rate": 7.990762124711316e-07,
"loss": 0.9158,
"step": 1120
},
{
"epoch": 0.36,
"grad_norm": 3.5949699878692627,
"learning_rate": 7.980864401187727e-07,
"loss": 0.7518,
"step": 1125
},
{
"epoch": 0.3616,
"grad_norm": 5.333132743835449,
"learning_rate": 7.970966677664137e-07,
"loss": 0.916,
"step": 1130
},
{
"epoch": 0.3632,
"grad_norm": 7.188840389251709,
"learning_rate": 7.961068954140547e-07,
"loss": 0.8816,
"step": 1135
},
{
"epoch": 0.3648,
"grad_norm": 20.7573299407959,
"learning_rate": 7.944572748267898e-07,
"loss": 0.8662,
"step": 1140
},
{
"epoch": 0.3664,
"grad_norm": 8.999933242797852,
"learning_rate": 7.931375783569778e-07,
"loss": 0.947,
"step": 1145
},
{
"epoch": 0.368,
"grad_norm": 31.44452476501465,
"learning_rate": 7.918178818871659e-07,
"loss": 0.9484,
"step": 1150
},
{
"epoch": 0.3696,
"grad_norm": 4.299691677093506,
"learning_rate": 7.904981854173541e-07,
"loss": 0.8061,
"step": 1155
},
{
"epoch": 0.3712,
"grad_norm": 15.915518760681152,
"learning_rate": 7.89508413064995e-07,
"loss": 0.9734,
"step": 1160
},
{
"epoch": 0.3728,
"grad_norm": 5.6831512451171875,
"learning_rate": 7.88518640712636e-07,
"loss": 0.9088,
"step": 1165
},
{
"epoch": 0.3744,
"grad_norm": 6.080997467041016,
"learning_rate": 7.878587924777301e-07,
"loss": 0.8873,
"step": 1170
},
{
"epoch": 0.376,
"grad_norm": 4.1102423667907715,
"learning_rate": 7.871989442428241e-07,
"loss": 0.9481,
"step": 1175
},
{
"epoch": 0.3776,
"grad_norm": 4.511707782745361,
"learning_rate": 7.862091718904652e-07,
"loss": 0.9334,
"step": 1180
},
{
"epoch": 0.3792,
"grad_norm": 8.41877269744873,
"learning_rate": 7.845595513032002e-07,
"loss": 0.9352,
"step": 1185
},
{
"epoch": 0.3808,
"grad_norm": 17.796035766601562,
"learning_rate": 7.835697789508413e-07,
"loss": 0.7917,
"step": 1190
},
{
"epoch": 0.3824,
"grad_norm": 13.614259719848633,
"learning_rate": 7.825800065984824e-07,
"loss": 0.9999,
"step": 1195
},
{
"epoch": 0.384,
"grad_norm": 9.236659049987793,
"learning_rate": 7.812603101286703e-07,
"loss": 0.8904,
"step": 1200
},
{
"epoch": 0.3856,
"grad_norm": 9.400324821472168,
"learning_rate": 7.806004618937644e-07,
"loss": 0.9334,
"step": 1205
},
{
"epoch": 0.3872,
"grad_norm": 8.435835838317871,
"learning_rate": 7.799406136588584e-07,
"loss": 0.9307,
"step": 1210
},
{
"epoch": 0.3888,
"grad_norm": 9.127885818481445,
"learning_rate": 7.789508413064995e-07,
"loss": 0.9026,
"step": 1215
},
{
"epoch": 0.3904,
"grad_norm": 3.5822970867156982,
"learning_rate": 7.779610689541405e-07,
"loss": 0.9078,
"step": 1220
},
{
"epoch": 0.392,
"grad_norm": 2.795750856399536,
"learning_rate": 7.766413724843286e-07,
"loss": 0.8793,
"step": 1225
},
{
"epoch": 0.3936,
"grad_norm": 10.045112609863281,
"learning_rate": 7.759815242494227e-07,
"loss": 0.9174,
"step": 1230
},
{
"epoch": 0.3952,
"grad_norm": 19.499820709228516,
"learning_rate": 7.753216760145166e-07,
"loss": 1.0243,
"step": 1235
},
{
"epoch": 0.3968,
"grad_norm": 14.11526107788086,
"learning_rate": 7.743319036621576e-07,
"loss": 0.8489,
"step": 1240
},
{
"epoch": 0.3984,
"grad_norm": 5.373587608337402,
"learning_rate": 7.736720554272516e-07,
"loss": 0.9082,
"step": 1245
},
{
"epoch": 0.4,
"grad_norm": 12.755280494689941,
"learning_rate": 7.733421313097987e-07,
"loss": 0.9121,
"step": 1250
},
{
"epoch": 0.4016,
"grad_norm": 4.152519226074219,
"learning_rate": 7.726822830748927e-07,
"loss": 0.9171,
"step": 1255
},
{
"epoch": 0.4032,
"grad_norm": 18.48578453063965,
"learning_rate": 7.720224348399867e-07,
"loss": 0.9603,
"step": 1260
},
{
"epoch": 0.4048,
"grad_norm": 10.362027168273926,
"learning_rate": 7.716925107225338e-07,
"loss": 0.8809,
"step": 1265
},
{
"epoch": 0.4064,
"grad_norm": 140.5989227294922,
"learning_rate": 7.710326624876278e-07,
"loss": 0.9277,
"step": 1270
},
{
"epoch": 0.408,
"grad_norm": 11.779434204101562,
"learning_rate": 7.700428901352689e-07,
"loss": 0.8591,
"step": 1275
},
{
"epoch": 0.4096,
"grad_norm": 9.106331825256348,
"learning_rate": 7.693830419003629e-07,
"loss": 0.9062,
"step": 1280
},
{
"epoch": 0.4112,
"grad_norm": 13.993337631225586,
"learning_rate": 7.687231936654569e-07,
"loss": 0.8522,
"step": 1285
},
{
"epoch": 0.4128,
"grad_norm": 3.8709845542907715,
"learning_rate": 7.670735730781919e-07,
"loss": 0.9725,
"step": 1290
},
{
"epoch": 0.4144,
"grad_norm": 4.224457263946533,
"learning_rate": 7.657538766083801e-07,
"loss": 0.7766,
"step": 1295
},
{
"epoch": 0.416,
"grad_norm": 7.536815643310547,
"learning_rate": 7.644341801385681e-07,
"loss": 0.9177,
"step": 1300
},
{
"epoch": 0.4176,
"grad_norm": 3.8237829208374023,
"learning_rate": 7.637743319036621e-07,
"loss": 0.7641,
"step": 1305
},
{
"epoch": 0.4192,
"grad_norm": 9.17503833770752,
"learning_rate": 7.627845595513032e-07,
"loss": 0.8688,
"step": 1310
},
{
"epoch": 0.4208,
"grad_norm": 4.019710540771484,
"learning_rate": 7.614648630814913e-07,
"loss": 0.9075,
"step": 1315
},
{
"epoch": 0.4224,
"grad_norm": 223.65283203125,
"learning_rate": 7.604750907291323e-07,
"loss": 0.788,
"step": 1320
},
{
"epoch": 0.424,
"grad_norm": 7.680087089538574,
"learning_rate": 7.598152424942263e-07,
"loss": 0.874,
"step": 1325
},
{
"epoch": 0.4256,
"grad_norm": 15.5332612991333,
"learning_rate": 7.594853183767733e-07,
"loss": 0.8772,
"step": 1330
},
{
"epoch": 0.4272,
"grad_norm": 4.713462829589844,
"learning_rate": 7.584955460244143e-07,
"loss": 0.777,
"step": 1335
},
{
"epoch": 0.4288,
"grad_norm": 14.855712890625,
"learning_rate": 7.568459254371494e-07,
"loss": 0.9535,
"step": 1340
},
{
"epoch": 0.4304,
"grad_norm": 4.828318119049072,
"learning_rate": 7.561860772022435e-07,
"loss": 0.914,
"step": 1345
},
{
"epoch": 0.432,
"grad_norm": 3.5818397998809814,
"learning_rate": 7.551963048498845e-07,
"loss": 0.8805,
"step": 1350
},
{
"epoch": 0.4336,
"grad_norm": 8.07185173034668,
"learning_rate": 7.538766083800725e-07,
"loss": 0.9222,
"step": 1355
},
{
"epoch": 0.4352,
"grad_norm": 12.217920303344727,
"learning_rate": 7.525569119102606e-07,
"loss": 0.8622,
"step": 1360
},
{
"epoch": 0.4368,
"grad_norm": 3.412161111831665,
"learning_rate": 7.512372154404487e-07,
"loss": 0.927,
"step": 1365
},
{
"epoch": 0.4384,
"grad_norm": 15.6014986038208,
"learning_rate": 7.499175189706367e-07,
"loss": 0.8832,
"step": 1370
},
{
"epoch": 0.44,
"grad_norm": 3.9033894538879395,
"learning_rate": 7.485978225008248e-07,
"loss": 0.9902,
"step": 1375
},
{
"epoch": 0.4416,
"grad_norm": 14.089677810668945,
"learning_rate": 7.476080501484658e-07,
"loss": 0.8143,
"step": 1380
},
{
"epoch": 0.4432,
"grad_norm": 16.55449867248535,
"learning_rate": 7.462883536786539e-07,
"loss": 0.7648,
"step": 1385
},
{
"epoch": 0.4448,
"grad_norm": 4.785461902618408,
"learning_rate": 7.456285054437479e-07,
"loss": 0.8757,
"step": 1390
},
{
"epoch": 0.4464,
"grad_norm": 41.452789306640625,
"learning_rate": 7.44638733091389e-07,
"loss": 0.889,
"step": 1395
},
{
"epoch": 0.448,
"grad_norm": 5.599381446838379,
"learning_rate": 7.44308808973936e-07,
"loss": 0.8478,
"step": 1400
},
{
"epoch": 0.4496,
"grad_norm": 9.091236114501953,
"learning_rate": 7.436489607390299e-07,
"loss": 0.7538,
"step": 1405
},
{
"epoch": 0.4512,
"grad_norm": 28.06082534790039,
"learning_rate": 7.42329264269218e-07,
"loss": 0.8985,
"step": 1410
},
{
"epoch": 0.4528,
"grad_norm": 7.766772747039795,
"learning_rate": 7.410095677994061e-07,
"loss": 1.0558,
"step": 1415
},
{
"epoch": 0.4544,
"grad_norm": 6.254266262054443,
"learning_rate": 7.400197954470472e-07,
"loss": 1.027,
"step": 1420
},
{
"epoch": 0.456,
"grad_norm": 115.81681060791016,
"learning_rate": 7.387000989772352e-07,
"loss": 0.8981,
"step": 1425
},
{
"epoch": 0.4576,
"grad_norm": 5.77361536026001,
"learning_rate": 7.380402507423293e-07,
"loss": 0.8527,
"step": 1430
},
{
"epoch": 0.4592,
"grad_norm": 7.237057685852051,
"learning_rate": 7.370504783899703e-07,
"loss": 0.9068,
"step": 1435
},
{
"epoch": 0.4608,
"grad_norm": 4.4429030418396,
"learning_rate": 7.363906301550643e-07,
"loss": 0.8943,
"step": 1440
},
{
"epoch": 0.4624,
"grad_norm": 8.322125434875488,
"learning_rate": 7.360607060376112e-07,
"loss": 0.8889,
"step": 1445
},
{
"epoch": 0.464,
"grad_norm": 3.899369239807129,
"learning_rate": 7.354008578027053e-07,
"loss": 0.9042,
"step": 1450
},
{
"epoch": 0.4656,
"grad_norm": 4.072367191314697,
"learning_rate": 7.344110854503463e-07,
"loss": 0.8598,
"step": 1455
},
{
"epoch": 0.4672,
"grad_norm": 12.773030281066895,
"learning_rate": 7.340811613328934e-07,
"loss": 1.0138,
"step": 1460
},
{
"epoch": 0.4688,
"grad_norm": 2251799813685248.0,
"learning_rate": 7.330913889805345e-07,
"loss": 0.7626,
"step": 1465
},
{
"epoch": 0.4704,
"grad_norm": 10.782869338989258,
"learning_rate": 7.321016166281755e-07,
"loss": 0.7974,
"step": 1470
},
{
"epoch": 0.472,
"grad_norm": 3.7431559562683105,
"learning_rate": 7.317716925107225e-07,
"loss": 1.0053,
"step": 1475
},
{
"epoch": 0.4736,
"grad_norm": 11.785840034484863,
"learning_rate": 7.311118442758165e-07,
"loss": 0.978,
"step": 1480
},
{
"epoch": 0.4752,
"grad_norm": 5.290721893310547,
"learning_rate": 7.307819201583636e-07,
"loss": 0.7901,
"step": 1485
},
{
"epoch": 0.4768,
"grad_norm": 20.288829803466797,
"learning_rate": 7.294622236885516e-07,
"loss": 0.9493,
"step": 1490
},
{
"epoch": 0.4784,
"grad_norm": 4.2927470207214355,
"learning_rate": 7.288023754536456e-07,
"loss": 0.8284,
"step": 1495
},
{
"epoch": 0.48,
"grad_norm": 52.746498107910156,
"learning_rate": 7.281425272187397e-07,
"loss": 0.9032,
"step": 1500
},
{
"epoch": 0.4816,
"grad_norm": 4.049383640289307,
"learning_rate": 7.278126031012866e-07,
"loss": 0.8374,
"step": 1505
},
{
"epoch": 0.4832,
"grad_norm": 18.03007698059082,
"learning_rate": 7.264929066314748e-07,
"loss": 0.9071,
"step": 1510
},
{
"epoch": 0.4848,
"grad_norm": 4.921152114868164,
"learning_rate": 7.258330583965687e-07,
"loss": 0.8553,
"step": 1515
},
{
"epoch": 0.4864,
"grad_norm": 19.39454460144043,
"learning_rate": 7.248432860442098e-07,
"loss": 0.8372,
"step": 1520
},
{
"epoch": 0.488,
"grad_norm": 73.33831787109375,
"learning_rate": 7.238535136918509e-07,
"loss": 0.9495,
"step": 1525
},
{
"epoch": 0.4896,
"grad_norm": 8.538538932800293,
"learning_rate": 7.225338172220389e-07,
"loss": 0.7998,
"step": 1530
},
{
"epoch": 0.4912,
"grad_norm": 55.98800277709961,
"learning_rate": 7.22203893104586e-07,
"loss": 0.919,
"step": 1535
},
{
"epoch": 0.4928,
"grad_norm": 4.527732849121094,
"learning_rate": 7.2154404486968e-07,
"loss": 0.8856,
"step": 1540
},
{
"epoch": 0.4944,
"grad_norm": 16.924474716186523,
"learning_rate": 7.208841966347739e-07,
"loss": 0.9184,
"step": 1545
},
{
"epoch": 0.496,
"grad_norm": 3.2957756519317627,
"learning_rate": 7.19894424282415e-07,
"loss": 0.8427,
"step": 1550
},
{
"epoch": 0.4976,
"grad_norm": 5.820931911468506,
"learning_rate": 7.18904651930056e-07,
"loss": 0.9121,
"step": 1555
},
{
"epoch": 0.4992,
"grad_norm": 10.651786804199219,
"learning_rate": 7.172550313427911e-07,
"loss": 0.9004,
"step": 1560
},
{
"epoch": 0.5008,
"grad_norm": 8.25007438659668,
"learning_rate": 7.159353348729792e-07,
"loss": 0.9038,
"step": 1565
},
{
"epoch": 0.5024,
"grad_norm": 7.941346645355225,
"learning_rate": 7.152754866380733e-07,
"loss": 0.9053,
"step": 1570
},
{
"epoch": 0.504,
"grad_norm": 5.125521183013916,
"learning_rate": 7.146156384031672e-07,
"loss": 0.819,
"step": 1575
},
{
"epoch": 0.5056,
"grad_norm": 18.527692794799805,
"learning_rate": 7.136258660508084e-07,
"loss": 0.8307,
"step": 1580
},
{
"epoch": 0.5072,
"grad_norm": 5.466524600982666,
"learning_rate": 7.123061695809963e-07,
"loss": 0.9916,
"step": 1585
},
{
"epoch": 0.5088,
"grad_norm": 8.261640548706055,
"learning_rate": 7.109864731111844e-07,
"loss": 0.9495,
"step": 1590
},
{
"epoch": 0.5104,
"grad_norm": 8.580477714538574,
"learning_rate": 7.099967007588254e-07,
"loss": 0.9245,
"step": 1595
},
{
"epoch": 0.512,
"grad_norm": 94.27921295166016,
"learning_rate": 7.086770042890135e-07,
"loss": 0.9472,
"step": 1600
},
{
"epoch": 0.5136,
"grad_norm": 3.9793620109558105,
"learning_rate": 7.086770042890135e-07,
"loss": 0.8802,
"step": 1605
},
{
"epoch": 0.5152,
"grad_norm": 7.255646705627441,
"learning_rate": 7.073573078192016e-07,
"loss": 0.857,
"step": 1610
},
{
"epoch": 0.5168,
"grad_norm": 7.2847113609313965,
"learning_rate": 7.060376113493897e-07,
"loss": 0.8688,
"step": 1615
},
{
"epoch": 0.5184,
"grad_norm": 4.937621116638184,
"learning_rate": 7.050478389970306e-07,
"loss": 0.9292,
"step": 1620
},
{
"epoch": 0.52,
"grad_norm": 3.12896990776062,
"learning_rate": 7.037281425272187e-07,
"loss": 1.0297,
"step": 1625
},
{
"epoch": 0.5216,
"grad_norm": 3.750182867050171,
"learning_rate": 7.024084460574068e-07,
"loss": 0.9461,
"step": 1630
},
{
"epoch": 0.5232,
"grad_norm": 6.590811729431152,
"learning_rate": 7.010887495875948e-07,
"loss": 0.8446,
"step": 1635
},
{
"epoch": 0.5248,
"grad_norm": 8.326841354370117,
"learning_rate": 7.000989772352358e-07,
"loss": 0.9113,
"step": 1640
},
{
"epoch": 0.5264,
"grad_norm": 11.098550796508789,
"learning_rate": 6.99109204882877e-07,
"loss": 0.7781,
"step": 1645
},
{
"epoch": 0.528,
"grad_norm": 49.50584030151367,
"learning_rate": 6.981194325305179e-07,
"loss": 0.9308,
"step": 1650
},
{
"epoch": 0.5296,
"grad_norm": 5.5463409423828125,
"learning_rate": 6.977895084130649e-07,
"loss": 0.8769,
"step": 1655
},
{
"epoch": 0.5312,
"grad_norm": 4.988060474395752,
"learning_rate": 6.96469811943253e-07,
"loss": 0.9039,
"step": 1660
},
{
"epoch": 0.5328,
"grad_norm": 13.49431324005127,
"learning_rate": 6.958099637083471e-07,
"loss": 0.9527,
"step": 1665
},
{
"epoch": 0.5344,
"grad_norm": 4.911532402038574,
"learning_rate": 6.954800395908941e-07,
"loss": 0.8786,
"step": 1670
},
{
"epoch": 0.536,
"grad_norm": 51.589569091796875,
"learning_rate": 6.941603431210821e-07,
"loss": 0.9265,
"step": 1675
},
{
"epoch": 0.5376,
"grad_norm": 3.3866963386535645,
"learning_rate": 6.928406466512702e-07,
"loss": 0.9118,
"step": 1680
},
{
"epoch": 0.5392,
"grad_norm": 30.3978214263916,
"learning_rate": 6.915209501814583e-07,
"loss": 0.8485,
"step": 1685
},
{
"epoch": 0.5408,
"grad_norm": 23.133153915405273,
"learning_rate": 6.898713295941932e-07,
"loss": 0.9654,
"step": 1690
},
{
"epoch": 0.5424,
"grad_norm": 3.781903028488159,
"learning_rate": 6.882217090069283e-07,
"loss": 0.9987,
"step": 1695
},
{
"epoch": 0.544,
"grad_norm": 7.015597343444824,
"learning_rate": 6.869020125371164e-07,
"loss": 0.917,
"step": 1700
},
{
"epoch": 0.5456,
"grad_norm": 7.328158378601074,
"learning_rate": 6.862421643022105e-07,
"loss": 0.9336,
"step": 1705
},
{
"epoch": 0.5472,
"grad_norm": 39.73679733276367,
"learning_rate": 6.852523919498515e-07,
"loss": 0.854,
"step": 1710
},
{
"epoch": 0.5488,
"grad_norm": 11.341618537902832,
"learning_rate": 6.839326954800396e-07,
"loss": 0.8195,
"step": 1715
},
{
"epoch": 0.5504,
"grad_norm": 8.741443634033203,
"learning_rate": 6.829429231276806e-07,
"loss": 0.88,
"step": 1720
},
{
"epoch": 0.552,
"grad_norm": 30.249027252197266,
"learning_rate": 6.816232266578686e-07,
"loss": 0.8276,
"step": 1725
},
{
"epoch": 0.5536,
"grad_norm": 15.74841594696045,
"learning_rate": 6.803035301880567e-07,
"loss": 0.9014,
"step": 1730
},
{
"epoch": 0.5552,
"grad_norm": 24.467269897460938,
"learning_rate": 6.793137578356978e-07,
"loss": 0.9775,
"step": 1735
},
{
"epoch": 0.5568,
"grad_norm": 7.02502965927124,
"learning_rate": 6.779940613658858e-07,
"loss": 0.8199,
"step": 1740
},
{
"epoch": 0.5584,
"grad_norm": 8.438619613647461,
"learning_rate": 6.773342131309799e-07,
"loss": 0.7999,
"step": 1745
},
{
"epoch": 0.56,
"grad_norm": 15.808964729309082,
"learning_rate": 6.76014516661168e-07,
"loss": 0.89,
"step": 1750
},
{
"epoch": 0.5616,
"grad_norm": 7.4749298095703125,
"learning_rate": 6.750247443088089e-07,
"loss": 0.8741,
"step": 1755
},
{
"epoch": 0.5632,
"grad_norm": 3.5460972785949707,
"learning_rate": 6.7403497195645e-07,
"loss": 0.9212,
"step": 1760
},
{
"epoch": 0.5648,
"grad_norm": 6.653393745422363,
"learning_rate": 6.73045199604091e-07,
"loss": 0.9295,
"step": 1765
},
{
"epoch": 0.5664,
"grad_norm": 21.64336395263672,
"learning_rate": 6.727152754866381e-07,
"loss": 0.9069,
"step": 1770
},
{
"epoch": 0.568,
"grad_norm": 4.078982830047607,
"learning_rate": 6.717255031342791e-07,
"loss": 0.7503,
"step": 1775
},
{
"epoch": 0.5696,
"grad_norm": 8.04253101348877,
"learning_rate": 6.704058066644671e-07,
"loss": 0.8824,
"step": 1780
},
{
"epoch": 0.5712,
"grad_norm": 73.6122055053711,
"learning_rate": 6.690861101946553e-07,
"loss": 0.917,
"step": 1785
},
{
"epoch": 0.5728,
"grad_norm": 3.9499595165252686,
"learning_rate": 6.680963378422963e-07,
"loss": 0.8846,
"step": 1790
},
{
"epoch": 0.5744,
"grad_norm": 3.260681390762329,
"learning_rate": 6.674364896073902e-07,
"loss": 0.7533,
"step": 1795
},
{
"epoch": 0.576,
"grad_norm": 4.29716157913208,
"learning_rate": 6.667766413724843e-07,
"loss": 0.8681,
"step": 1800
},
{
"epoch": 0.5776,
"grad_norm": 15.316906929016113,
"learning_rate": 6.661167931375783e-07,
"loss": 1.0024,
"step": 1805
},
{
"epoch": 0.5792,
"grad_norm": 3.502202033996582,
"learning_rate": 6.651270207852193e-07,
"loss": 0.8169,
"step": 1810
},
{
"epoch": 0.5808,
"grad_norm": 36.50135040283203,
"learning_rate": 6.641372484328605e-07,
"loss": 1.0518,
"step": 1815
},
{
"epoch": 0.5824,
"grad_norm": 19.60590934753418,
"learning_rate": 6.624876278455956e-07,
"loss": 0.8677,
"step": 1820
},
{
"epoch": 0.584,
"grad_norm": 8.549176216125488,
"learning_rate": 6.621577037281425e-07,
"loss": 0.8394,
"step": 1825
},
{
"epoch": 0.5856,
"grad_norm": 8.879136085510254,
"learning_rate": 6.614978554932366e-07,
"loss": 0.8785,
"step": 1830
},
{
"epoch": 0.5872,
"grad_norm": 4.66062068939209,
"learning_rate": 6.608380072583305e-07,
"loss": 0.9395,
"step": 1835
},
{
"epoch": 0.5888,
"grad_norm": 3.782684564590454,
"learning_rate": 6.595183107885186e-07,
"loss": 0.9043,
"step": 1840
},
{
"epoch": 0.5904,
"grad_norm": 14.069576263427734,
"learning_rate": 6.581986143187067e-07,
"loss": 0.9974,
"step": 1845
},
{
"epoch": 0.592,
"grad_norm": 7.454391956329346,
"learning_rate": 6.572088419663477e-07,
"loss": 0.8469,
"step": 1850
},
{
"epoch": 0.5936,
"grad_norm": 22.734886169433594,
"learning_rate": 6.558891454965357e-07,
"loss": 0.9162,
"step": 1855
},
{
"epoch": 0.5952,
"grad_norm": 5.628453731536865,
"learning_rate": 6.548993731441768e-07,
"loss": 0.9353,
"step": 1860
},
{
"epoch": 0.5968,
"grad_norm": 22.399959564208984,
"learning_rate": 6.542395249092708e-07,
"loss": 0.7912,
"step": 1865
},
{
"epoch": 0.5984,
"grad_norm": 10.400199890136719,
"learning_rate": 6.532497525569119e-07,
"loss": 0.9107,
"step": 1870
},
{
"epoch": 0.6,
"grad_norm": 9.53079605102539,
"learning_rate": 6.525899043220058e-07,
"loss": 0.7506,
"step": 1875
},
{
"epoch": 0.6016,
"grad_norm": 9.956583976745605,
"learning_rate": 6.516001319696469e-07,
"loss": 0.9878,
"step": 1880
},
{
"epoch": 0.6032,
"grad_norm": 5.657982349395752,
"learning_rate": 6.506103596172879e-07,
"loss": 1.0318,
"step": 1885
},
{
"epoch": 0.6048,
"grad_norm": 10.266519546508789,
"learning_rate": 6.496205872649291e-07,
"loss": 0.8658,
"step": 1890
},
{
"epoch": 0.6064,
"grad_norm": 27.88587188720703,
"learning_rate": 6.486308149125701e-07,
"loss": 0.9357,
"step": 1895
},
{
"epoch": 0.608,
"grad_norm": 5.478061199188232,
"learning_rate": 6.476410425602111e-07,
"loss": 0.926,
"step": 1900
},
{
"epoch": 0.6096,
"grad_norm": 7.111701965332031,
"learning_rate": 6.466512702078522e-07,
"loss": 0.9629,
"step": 1905
},
{
"epoch": 0.6112,
"grad_norm": 63.717201232910156,
"learning_rate": 6.450016496205872e-07,
"loss": 0.8185,
"step": 1910
},
{
"epoch": 0.6128,
"grad_norm": 4.04094934463501,
"learning_rate": 6.446717255031342e-07,
"loss": 1.0045,
"step": 1915
},
{
"epoch": 0.6144,
"grad_norm": 13.979657173156738,
"learning_rate": 6.433520290333223e-07,
"loss": 0.919,
"step": 1920
},
{
"epoch": 0.616,
"grad_norm": 3.902803659439087,
"learning_rate": 6.430221049158693e-07,
"loss": 0.8583,
"step": 1925
},
{
"epoch": 0.6176,
"grad_norm": 4.302829265594482,
"learning_rate": 6.420323325635104e-07,
"loss": 0.7618,
"step": 1930
},
{
"epoch": 0.6192,
"grad_norm": 9.890539169311523,
"learning_rate": 6.410425602111514e-07,
"loss": 0.8488,
"step": 1935
},
{
"epoch": 0.6208,
"grad_norm": 4.619266033172607,
"learning_rate": 6.397228637413395e-07,
"loss": 0.9371,
"step": 1940
},
{
"epoch": 0.6224,
"grad_norm": 4.887425422668457,
"learning_rate": 6.384031672715276e-07,
"loss": 0.7896,
"step": 1945
},
{
"epoch": 0.624,
"grad_norm": 18.042476654052734,
"learning_rate": 6.377433190366216e-07,
"loss": 0.8923,
"step": 1950
},
{
"epoch": 0.6256,
"grad_norm": 18.343812942504883,
"learning_rate": 6.370834708017155e-07,
"loss": 0.967,
"step": 1955
},
{
"epoch": 0.6272,
"grad_norm": 28.72311019897461,
"learning_rate": 6.364236225668096e-07,
"loss": 0.9382,
"step": 1960
},
{
"epoch": 0.6288,
"grad_norm": 3.876980781555176,
"learning_rate": 6.354338502144506e-07,
"loss": 0.883,
"step": 1965
},
{
"epoch": 0.6304,
"grad_norm": 5.639240264892578,
"learning_rate": 6.344440778620916e-07,
"loss": 0.8945,
"step": 1970
},
{
"epoch": 0.632,
"grad_norm": 3.444913387298584,
"learning_rate": 6.337842296271857e-07,
"loss": 0.8506,
"step": 1975
},
{
"epoch": 0.6336,
"grad_norm": 4.057199478149414,
"learning_rate": 6.334543055097328e-07,
"loss": 0.8548,
"step": 1980
},
{
"epoch": 0.6352,
"grad_norm": 12.461103439331055,
"learning_rate": 6.321346090399208e-07,
"loss": 0.9244,
"step": 1985
},
{
"epoch": 0.6368,
"grad_norm": 6.07451057434082,
"learning_rate": 6.311448366875618e-07,
"loss": 0.8571,
"step": 1990
},
{
"epoch": 0.6384,
"grad_norm": 3.4175641536712646,
"learning_rate": 6.2982514021775e-07,
"loss": 0.7847,
"step": 1995
},
{
"epoch": 0.64,
"grad_norm": 3.6678307056427,
"learning_rate": 6.291652919828439e-07,
"loss": 0.9687,
"step": 2000
},
{
"epoch": 0.6416,
"grad_norm": 5.469705581665039,
"learning_rate": 6.285054437479379e-07,
"loss": 0.8874,
"step": 2005
},
{
"epoch": 0.6432,
"grad_norm": 84.01882934570312,
"learning_rate": 6.26855823160673e-07,
"loss": 1.0131,
"step": 2010
},
{
"epoch": 0.6448,
"grad_norm": 4.5674591064453125,
"learning_rate": 6.26195974925767e-07,
"loss": 0.8653,
"step": 2015
},
{
"epoch": 0.6464,
"grad_norm": 11.533543586730957,
"learning_rate": 6.25866050808314e-07,
"loss": 0.7553,
"step": 2020
},
{
"epoch": 0.648,
"grad_norm": 3.472107410430908,
"learning_rate": 6.248762784559552e-07,
"loss": 0.877,
"step": 2025
},
{
"epoch": 0.6496,
"grad_norm": 4.356385231018066,
"learning_rate": 6.238865061035962e-07,
"loss": 0.9498,
"step": 2030
},
{
"epoch": 0.6512,
"grad_norm": 7.418819427490234,
"learning_rate": 6.225668096337842e-07,
"loss": 0.841,
"step": 2035
},
{
"epoch": 0.6528,
"grad_norm": 6.318149089813232,
"learning_rate": 6.215770372814253e-07,
"loss": 0.8548,
"step": 2040
},
{
"epoch": 0.6544,
"grad_norm": 6.089216232299805,
"learning_rate": 6.209171890465192e-07,
"loss": 0.9041,
"step": 2045
},
{
"epoch": 0.656,
"grad_norm": 10.722808837890625,
"learning_rate": 6.205872649290663e-07,
"loss": 0.8449,
"step": 2050
},
{
"epoch": 0.6576,
"grad_norm": 5.446446418762207,
"learning_rate": 6.192675684592543e-07,
"loss": 0.9344,
"step": 2055
},
{
"epoch": 0.6592,
"grad_norm": 5.69526481628418,
"learning_rate": 6.186077202243484e-07,
"loss": 0.7847,
"step": 2060
},
{
"epoch": 0.6608,
"grad_norm": 5.291490077972412,
"learning_rate": 6.179478719894424e-07,
"loss": 0.694,
"step": 2065
},
{
"epoch": 0.6624,
"grad_norm": 8.272130966186523,
"learning_rate": 6.162982514021775e-07,
"loss": 0.8802,
"step": 2070
},
{
"epoch": 0.664,
"grad_norm": 20.310510635375977,
"learning_rate": 6.153084790498186e-07,
"loss": 0.8411,
"step": 2075
},
{
"epoch": 0.6656,
"grad_norm": 11.677947044372559,
"learning_rate": 6.136588584625536e-07,
"loss": 0.8547,
"step": 2080
},
{
"epoch": 0.6672,
"grad_norm": 17.360151290893555,
"learning_rate": 6.120092378752887e-07,
"loss": 0.9439,
"step": 2085
},
{
"epoch": 0.6688,
"grad_norm": 10.135323524475098,
"learning_rate": 6.106895414054767e-07,
"loss": 0.8968,
"step": 2090
},
{
"epoch": 0.6704,
"grad_norm": 13.119566917419434,
"learning_rate": 6.096997690531177e-07,
"loss": 0.8698,
"step": 2095
},
{
"epoch": 0.672,
"grad_norm": 7.024408340454102,
"learning_rate": 6.090399208182118e-07,
"loss": 0.7821,
"step": 2100
},
{
"epoch": 0.6736,
"grad_norm": 64.54296875,
"learning_rate": 6.080501484658528e-07,
"loss": 0.8663,
"step": 2105
},
{
"epoch": 0.6752,
"grad_norm": 3.5862746238708496,
"learning_rate": 6.077202243483999e-07,
"loss": 0.845,
"step": 2110
},
{
"epoch": 0.6768,
"grad_norm": 4.018439769744873,
"learning_rate": 6.064005278785878e-07,
"loss": 0.7758,
"step": 2115
},
{
"epoch": 0.6784,
"grad_norm": 2.6237103939056396,
"learning_rate": 6.057406796436819e-07,
"loss": 0.7842,
"step": 2120
},
{
"epoch": 0.68,
"grad_norm": 10.803105354309082,
"learning_rate": 6.050808314087759e-07,
"loss": 0.867,
"step": 2125
},
{
"epoch": 0.6816,
"grad_norm": 21.798032760620117,
"learning_rate": 6.047509072913229e-07,
"loss": 0.8159,
"step": 2130
},
{
"epoch": 0.6832,
"grad_norm": 3.7663934230804443,
"learning_rate": 6.03761134938964e-07,
"loss": 0.7533,
"step": 2135
},
{
"epoch": 0.6848,
"grad_norm": 103.10760498046875,
"learning_rate": 6.021115143516991e-07,
"loss": 0.8743,
"step": 2140
},
{
"epoch": 0.6864,
"grad_norm": 36.2320671081543,
"learning_rate": 6.011217419993401e-07,
"loss": 0.9975,
"step": 2145
},
{
"epoch": 0.688,
"grad_norm": 5.781691551208496,
"learning_rate": 6.001319696469812e-07,
"loss": 0.8432,
"step": 2150
},
{
"epoch": 0.6896,
"grad_norm": 17.302276611328125,
"learning_rate": 5.991421972946222e-07,
"loss": 0.9363,
"step": 2155
},
{
"epoch": 0.6912,
"grad_norm": 45.33199691772461,
"learning_rate": 5.974925767073573e-07,
"loss": 0.8494,
"step": 2160
},
{
"epoch": 0.6928,
"grad_norm": 20.723081588745117,
"learning_rate": 5.961728802375453e-07,
"loss": 0.7886,
"step": 2165
},
{
"epoch": 0.6944,
"grad_norm": 5.540359973907471,
"learning_rate": 5.958429561200924e-07,
"loss": 0.9844,
"step": 2170
},
{
"epoch": 0.696,
"grad_norm": 31.042057037353516,
"learning_rate": 5.951831078851863e-07,
"loss": 0.8805,
"step": 2175
},
{
"epoch": 0.6976,
"grad_norm": 15.409696578979492,
"learning_rate": 5.941933355328275e-07,
"loss": 0.7926,
"step": 2180
},
{
"epoch": 0.6992,
"grad_norm": 4.268796920776367,
"learning_rate": 5.932035631804685e-07,
"loss": 0.8144,
"step": 2185
},
{
"epoch": 0.7008,
"grad_norm": 6.5286455154418945,
"learning_rate": 5.922137908281096e-07,
"loss": 0.9781,
"step": 2190
},
{
"epoch": 0.7024,
"grad_norm": 4.132389068603516,
"learning_rate": 5.908940943582975e-07,
"loss": 0.8758,
"step": 2195
},
{
"epoch": 0.704,
"grad_norm": 11.472258567810059,
"learning_rate": 5.899043220059386e-07,
"loss": 0.9364,
"step": 2200
},
{
"epoch": 0.7056,
"grad_norm": 3.994596242904663,
"learning_rate": 5.885846255361266e-07,
"loss": 0.7784,
"step": 2205
},
{
"epoch": 0.7072,
"grad_norm": 14.173189163208008,
"learning_rate": 5.879247773012207e-07,
"loss": 0.6973,
"step": 2210
},
{
"epoch": 0.7088,
"grad_norm": 20.464567184448242,
"learning_rate": 5.869350049488617e-07,
"loss": 0.8821,
"step": 2215
},
{
"epoch": 0.7104,
"grad_norm": 5.456864833831787,
"learning_rate": 5.862751567139558e-07,
"loss": 0.9727,
"step": 2220
},
{
"epoch": 0.712,
"grad_norm": 3.735257387161255,
"learning_rate": 5.849554602441438e-07,
"loss": 0.8155,
"step": 2225
},
{
"epoch": 0.7136,
"grad_norm": 32.859928131103516,
"learning_rate": 5.83965687891785e-07,
"loss": 0.8264,
"step": 2230
},
{
"epoch": 0.7152,
"grad_norm": 5.156280040740967,
"learning_rate": 5.829759155394259e-07,
"loss": 0.9076,
"step": 2235
},
{
"epoch": 0.7168,
"grad_norm": 14.474888801574707,
"learning_rate": 5.8231606730452e-07,
"loss": 0.7926,
"step": 2240
},
{
"epoch": 0.7184,
"grad_norm": 12.21646785736084,
"learning_rate": 5.819861431870669e-07,
"loss": 1.0024,
"step": 2245
},
{
"epoch": 0.72,
"grad_norm": 8.003199577331543,
"learning_rate": 5.80666446717255e-07,
"loss": 0.7449,
"step": 2250
},
{
"epoch": 0.7216,
"grad_norm": 5.7520751953125,
"learning_rate": 5.796766743648961e-07,
"loss": 0.8277,
"step": 2255
},
{
"epoch": 0.7232,
"grad_norm": 9.520220756530762,
"learning_rate": 5.7901682612999e-07,
"loss": 0.7679,
"step": 2260
},
{
"epoch": 0.7248,
"grad_norm": 34.2318000793457,
"learning_rate": 5.783569778950841e-07,
"loss": 0.9009,
"step": 2265
},
{
"epoch": 0.7264,
"grad_norm": 44.02891540527344,
"learning_rate": 5.773672055427252e-07,
"loss": 0.8139,
"step": 2270
},
{
"epoch": 0.728,
"grad_norm": 14.515207290649414,
"learning_rate": 5.760475090729132e-07,
"loss": 0.8484,
"step": 2275
},
{
"epoch": 0.7296,
"grad_norm": 42.64527893066406,
"learning_rate": 5.753876608380072e-07,
"loss": 0.8668,
"step": 2280
},
{
"epoch": 0.7312,
"grad_norm": 13.80805492401123,
"learning_rate": 5.743978884856483e-07,
"loss": 0.7362,
"step": 2285
},
{
"epoch": 0.7328,
"grad_norm": 6.953774929046631,
"learning_rate": 5.737380402507423e-07,
"loss": 0.9614,
"step": 2290
},
{
"epoch": 0.7344,
"grad_norm": 3.6324996948242188,
"learning_rate": 5.724183437809303e-07,
"loss": 0.8924,
"step": 2295
},
{
"epoch": 0.736,
"grad_norm": 20.317419052124023,
"learning_rate": 5.717584955460244e-07,
"loss": 0.8832,
"step": 2300
},
{
"epoch": 0.7376,
"grad_norm": 5.0192694664001465,
"learning_rate": 5.704387990762124e-07,
"loss": 0.889,
"step": 2305
},
{
"epoch": 0.7392,
"grad_norm": 4.2270283699035645,
"learning_rate": 5.691191026064006e-07,
"loss": 0.9042,
"step": 2310
},
{
"epoch": 0.7408,
"grad_norm": 21.78707504272461,
"learning_rate": 5.684592543714945e-07,
"loss": 0.9169,
"step": 2315
},
{
"epoch": 0.7424,
"grad_norm": 183.85739135742188,
"learning_rate": 5.684592543714945e-07,
"loss": 0.8324,
"step": 2320
},
{
"epoch": 0.744,
"grad_norm": 50.58498764038086,
"learning_rate": 5.674694820191355e-07,
"loss": 0.7982,
"step": 2325
},
{
"epoch": 0.7456,
"grad_norm": 11.561593055725098,
"learning_rate": 5.658198614318706e-07,
"loss": 0.8657,
"step": 2330
},
{
"epoch": 0.7472,
"grad_norm": 4.108826160430908,
"learning_rate": 5.651600131969647e-07,
"loss": 0.8591,
"step": 2335
},
{
"epoch": 0.7488,
"grad_norm": 3.5814082622528076,
"learning_rate": 5.638403167271527e-07,
"loss": 0.9114,
"step": 2340
},
{
"epoch": 0.7504,
"grad_norm": 20.11736297607422,
"learning_rate": 5.625206202573408e-07,
"loss": 0.9328,
"step": 2345
},
{
"epoch": 0.752,
"grad_norm": 7.44630765914917,
"learning_rate": 5.612009237875289e-07,
"loss": 0.9219,
"step": 2350
},
{
"epoch": 0.7536,
"grad_norm": 4.1158599853515625,
"learning_rate": 5.602111514351698e-07,
"loss": 0.8528,
"step": 2355
},
{
"epoch": 0.7552,
"grad_norm": 5.421479225158691,
"learning_rate": 5.595513032002639e-07,
"loss": 0.8185,
"step": 2360
},
{
"epoch": 0.7568,
"grad_norm": 7.1874470710754395,
"learning_rate": 5.58231606730452e-07,
"loss": 1.0773,
"step": 2365
},
{
"epoch": 0.7584,
"grad_norm": 3.4063172340393066,
"learning_rate": 5.5691191026064e-07,
"loss": 0.9219,
"step": 2370
},
{
"epoch": 0.76,
"grad_norm": 4.115258693695068,
"learning_rate": 5.562520620257341e-07,
"loss": 0.8191,
"step": 2375
},
{
"epoch": 0.7616,
"grad_norm": 8.878199577331543,
"learning_rate": 5.552622896733751e-07,
"loss": 0.8885,
"step": 2380
},
{
"epoch": 0.7632,
"grad_norm": 30.143220901489258,
"learning_rate": 5.549323655559222e-07,
"loss": 0.7794,
"step": 2385
},
{
"epoch": 0.7648,
"grad_norm": 7.8024678230285645,
"learning_rate": 5.536126690861102e-07,
"loss": 0.747,
"step": 2390
},
{
"epoch": 0.7664,
"grad_norm": 5.0675435066223145,
"learning_rate": 5.529528208512042e-07,
"loss": 0.8375,
"step": 2395
},
{
"epoch": 0.768,
"grad_norm": 4.141220569610596,
"learning_rate": 5.526228967337511e-07,
"loss": 0.7367,
"step": 2400
},
{
"epoch": 0.7696,
"grad_norm": 4.141220569610596,
"learning_rate": 5.522929726162982e-07,
"loss": 0.838,
"step": 2405
},
{
"epoch": 0.7712,
"grad_norm": 6.691019058227539,
"learning_rate": 5.513032002639393e-07,
"loss": 0.9046,
"step": 2410
},
{
"epoch": 0.7728,
"grad_norm": 2.8477139472961426,
"learning_rate": 5.503134279115803e-07,
"loss": 0.8433,
"step": 2415
},
{
"epoch": 0.7744,
"grad_norm": 9.880770683288574,
"learning_rate": 5.489937314417684e-07,
"loss": 1.031,
"step": 2420
},
{
"epoch": 0.776,
"grad_norm": 6.648416519165039,
"learning_rate": 5.480039590894095e-07,
"loss": 0.914,
"step": 2425
},
{
"epoch": 0.7776,
"grad_norm": 5.600031852722168,
"learning_rate": 5.466842626195975e-07,
"loss": 0.9449,
"step": 2430
},
{
"epoch": 0.7792,
"grad_norm": 4.502170562744141,
"learning_rate": 5.456944902672384e-07,
"loss": 0.8564,
"step": 2435
},
{
"epoch": 0.7808,
"grad_norm": 11.646117210388184,
"learning_rate": 5.450346420323325e-07,
"loss": 0.7744,
"step": 2440
},
{
"epoch": 0.7824,
"grad_norm": 3.1389410495758057,
"learning_rate": 5.433850214450676e-07,
"loss": 0.8473,
"step": 2445
},
{
"epoch": 0.784,
"grad_norm": 6.659416675567627,
"learning_rate": 5.420653249752557e-07,
"loss": 0.8879,
"step": 2450
},
{
"epoch": 0.7856,
"grad_norm": 3.9825358390808105,
"learning_rate": 5.410755526228967e-07,
"loss": 0.7112,
"step": 2455
},
{
"epoch": 0.7872,
"grad_norm": 6.629247665405273,
"learning_rate": 5.397558561530848e-07,
"loss": 0.8409,
"step": 2460
},
{
"epoch": 0.7888,
"grad_norm": 19.020769119262695,
"learning_rate": 5.390960079181788e-07,
"loss": 0.8697,
"step": 2465
},
{
"epoch": 0.7904,
"grad_norm": 8.768047332763672,
"learning_rate": 5.387660838007259e-07,
"loss": 1.0032,
"step": 2470
},
{
"epoch": 0.792,
"grad_norm": 84.66197967529297,
"learning_rate": 5.374463873309138e-07,
"loss": 0.8547,
"step": 2475
},
{
"epoch": 0.7936,
"grad_norm": 8.829277038574219,
"learning_rate": 5.364566149785548e-07,
"loss": 0.8642,
"step": 2480
},
{
"epoch": 0.7952,
"grad_norm": 17.52671241760254,
"learning_rate": 5.35136918508743e-07,
"loss": 0.8079,
"step": 2485
},
{
"epoch": 0.7968,
"grad_norm": 13.604071617126465,
"learning_rate": 5.34147146156384e-07,
"loss": 0.9697,
"step": 2490
},
{
"epoch": 0.7984,
"grad_norm": 5.450057506561279,
"learning_rate": 5.328274496865721e-07,
"loss": 0.8329,
"step": 2495
},
{
"epoch": 0.8,
"grad_norm": 6.458799362182617,
"learning_rate": 5.318376773342132e-07,
"loss": 0.7486,
"step": 2500
},
{
"epoch": 0.8016,
"grad_norm": 4.318997859954834,
"learning_rate": 5.308479049818542e-07,
"loss": 0.8853,
"step": 2505
},
{
"epoch": 0.8032,
"grad_norm": 8.195480346679688,
"learning_rate": 5.301880567469482e-07,
"loss": 0.8575,
"step": 2510
},
{
"epoch": 0.8048,
"grad_norm": 8.394243240356445,
"learning_rate": 5.288683602771362e-07,
"loss": 0.9858,
"step": 2515
},
{
"epoch": 0.8064,
"grad_norm": 4.469840049743652,
"learning_rate": 5.278785879247772e-07,
"loss": 0.8681,
"step": 2520
},
{
"epoch": 0.808,
"grad_norm": 3.3048176765441895,
"learning_rate": 5.265588914549653e-07,
"loss": 0.9128,
"step": 2525
},
{
"epoch": 0.8096,
"grad_norm": 7.161386966705322,
"learning_rate": 5.252391949851534e-07,
"loss": 0.7938,
"step": 2530
},
{
"epoch": 0.8112,
"grad_norm": 22.420114517211914,
"learning_rate": 5.239194985153415e-07,
"loss": 0.9781,
"step": 2535
},
{
"epoch": 0.8128,
"grad_norm": 11.514907836914062,
"learning_rate": 5.229297261629824e-07,
"loss": 0.8228,
"step": 2540
},
{
"epoch": 0.8144,
"grad_norm": 17.670255661010742,
"learning_rate": 5.229297261629824e-07,
"loss": 0.7817,
"step": 2545
},
{
"epoch": 0.816,
"grad_norm": 7.12204647064209,
"learning_rate": 5.216100296931705e-07,
"loss": 0.8697,
"step": 2550
},
{
"epoch": 0.8176,
"grad_norm": 3.9533708095550537,
"learning_rate": 5.206202573408116e-07,
"loss": 0.8241,
"step": 2555
},
{
"epoch": 0.8192,
"grad_norm": 14.269022941589355,
"learning_rate": 5.199604091059056e-07,
"loss": 0.8777,
"step": 2560
},
{
"epoch": 0.8208,
"grad_norm": 15.042490005493164,
"learning_rate": 5.186407126360937e-07,
"loss": 0.8515,
"step": 2565
},
{
"epoch": 0.8224,
"grad_norm": 20.52051544189453,
"learning_rate": 5.179808644011877e-07,
"loss": 0.9301,
"step": 2570
},
{
"epoch": 0.824,
"grad_norm": 6.648776054382324,
"learning_rate": 5.169910920488288e-07,
"loss": 0.9264,
"step": 2575
},
{
"epoch": 0.8256,
"grad_norm": 3.6181488037109375,
"learning_rate": 5.153414714615639e-07,
"loss": 0.8038,
"step": 2580
},
{
"epoch": 0.8272,
"grad_norm": 15.112207412719727,
"learning_rate": 5.143516991092048e-07,
"loss": 0.7966,
"step": 2585
},
{
"epoch": 0.8288,
"grad_norm": 5.964051246643066,
"learning_rate": 5.130320026393929e-07,
"loss": 0.8931,
"step": 2590
},
{
"epoch": 0.8304,
"grad_norm": 6.896272659301758,
"learning_rate": 5.127020785219399e-07,
"loss": 0.8028,
"step": 2595
},
{
"epoch": 0.832,
"grad_norm": 8.498908042907715,
"learning_rate": 5.12042230287034e-07,
"loss": 0.8463,
"step": 2600
},
{
"epoch": 0.8336,
"grad_norm": 4.350419521331787,
"learning_rate": 5.10722533817222e-07,
"loss": 0.9385,
"step": 2605
},
{
"epoch": 0.8352,
"grad_norm": 35.57833480834961,
"learning_rate": 5.097327614648631e-07,
"loss": 0.8671,
"step": 2610
},
{
"epoch": 0.8368,
"grad_norm": 3.948251247406006,
"learning_rate": 5.090729132299571e-07,
"loss": 0.9688,
"step": 2615
},
{
"epoch": 0.8384,
"grad_norm": 8.386855125427246,
"learning_rate": 5.077532167601452e-07,
"loss": 0.8332,
"step": 2620
},
{
"epoch": 0.84,
"grad_norm": 12.402297973632812,
"learning_rate": 5.067634444077861e-07,
"loss": 0.8422,
"step": 2625
},
{
"epoch": 0.8416,
"grad_norm": 3.467456817626953,
"learning_rate": 5.057736720554272e-07,
"loss": 0.8776,
"step": 2630
},
{
"epoch": 0.8432,
"grad_norm": 10.838342666625977,
"learning_rate": 5.047838997030682e-07,
"loss": 0.7023,
"step": 2635
},
{
"epoch": 0.8448,
"grad_norm": 21.154626846313477,
"learning_rate": 5.034642032332563e-07,
"loss": 0.8953,
"step": 2640
},
{
"epoch": 0.8464,
"grad_norm": 4.979982376098633,
"learning_rate": 5.024744308808974e-07,
"loss": 0.8995,
"step": 2645
},
{
"epoch": 0.848,
"grad_norm": 4.825480937957764,
"learning_rate": 5.014846585285384e-07,
"loss": 0.8329,
"step": 2650
},
{
"epoch": 0.8496,
"grad_norm": 10.598306655883789,
"learning_rate": 5.004948861761795e-07,
"loss": 0.9205,
"step": 2655
},
{
"epoch": 0.8512,
"grad_norm": 3.039351224899292,
"learning_rate": 4.998350379412734e-07,
"loss": 0.9477,
"step": 2660
},
{
"epoch": 0.8528,
"grad_norm": 3.537116289138794,
"learning_rate": 4.991751897063675e-07,
"loss": 0.8284,
"step": 2665
},
{
"epoch": 0.8544,
"grad_norm": 9.574027061462402,
"learning_rate": 4.978554932365556e-07,
"loss": 0.8996,
"step": 2670
},
{
"epoch": 0.856,
"grad_norm": 7.693865776062012,
"learning_rate": 4.971956450016497e-07,
"loss": 0.8738,
"step": 2675
},
{
"epoch": 0.8576,
"grad_norm": 5.004476070404053,
"learning_rate": 4.965357967667436e-07,
"loss": 0.8304,
"step": 2680
},
{
"epoch": 0.8592,
"grad_norm": 34.6345329284668,
"learning_rate": 4.962058726492907e-07,
"loss": 0.7694,
"step": 2685
},
{
"epoch": 0.8608,
"grad_norm": 19.968050003051758,
"learning_rate": 4.952161002969316e-07,
"loss": 0.7917,
"step": 2690
},
{
"epoch": 0.8624,
"grad_norm": 22.815366744995117,
"learning_rate": 4.948861761794787e-07,
"loss": 0.7727,
"step": 2695
},
{
"epoch": 0.864,
"grad_norm": 41.11388397216797,
"learning_rate": 4.938964038271198e-07,
"loss": 0.9227,
"step": 2700
},
{
"epoch": 0.8656,
"grad_norm": 4.934469699859619,
"learning_rate": 4.929066314747608e-07,
"loss": 0.852,
"step": 2705
},
{
"epoch": 0.8672,
"grad_norm": 4.349693775177002,
"learning_rate": 4.925767073573078e-07,
"loss": 0.8236,
"step": 2710
},
{
"epoch": 0.8688,
"grad_norm": 19.219539642333984,
"learning_rate": 4.909270867700429e-07,
"loss": 0.875,
"step": 2715
},
{
"epoch": 0.8704,
"grad_norm": 11.317012786865234,
"learning_rate": 4.896073903002309e-07,
"loss": 0.8299,
"step": 2720
},
{
"epoch": 0.872,
"grad_norm": 12.925087928771973,
"learning_rate": 4.88617617947872e-07,
"loss": 0.9168,
"step": 2725
},
{
"epoch": 0.8736,
"grad_norm": 27.21533966064453,
"learning_rate": 4.872979214780601e-07,
"loss": 0.872,
"step": 2730
},
{
"epoch": 0.8752,
"grad_norm": 19.570598602294922,
"learning_rate": 4.859782250082481e-07,
"loss": 0.9105,
"step": 2735
},
{
"epoch": 0.8768,
"grad_norm": 12.196694374084473,
"learning_rate": 4.843286044209832e-07,
"loss": 0.825,
"step": 2740
},
{
"epoch": 0.8784,
"grad_norm": 13.363386154174805,
"learning_rate": 4.833388320686242e-07,
"loss": 0.9008,
"step": 2745
},
{
"epoch": 0.88,
"grad_norm": 11.008676528930664,
"learning_rate": 4.823490597162653e-07,
"loss": 0.8115,
"step": 2750
},
{
"epoch": 0.8816,
"grad_norm": 12.56668758392334,
"learning_rate": 4.813592873639063e-07,
"loss": 0.9143,
"step": 2755
},
{
"epoch": 0.8832,
"grad_norm": 3.861693859100342,
"learning_rate": 4.800395908940943e-07,
"loss": 1.0582,
"step": 2760
},
{
"epoch": 0.8848,
"grad_norm": 7.831745147705078,
"learning_rate": 4.790498185417353e-07,
"loss": 0.7671,
"step": 2765
},
{
"epoch": 0.8864,
"grad_norm": 6.566120624542236,
"learning_rate": 4.780600461893764e-07,
"loss": 0.8483,
"step": 2770
},
{
"epoch": 0.888,
"grad_norm": 3.596759796142578,
"learning_rate": 4.770702738370175e-07,
"loss": 0.7942,
"step": 2775
},
{
"epoch": 0.8896,
"grad_norm": 5.004523754119873,
"learning_rate": 4.764104256021115e-07,
"loss": 0.9312,
"step": 2780
},
{
"epoch": 0.8912,
"grad_norm": 26.779829025268555,
"learning_rate": 4.7575057736720555e-07,
"loss": 0.8053,
"step": 2785
},
{
"epoch": 0.8928,
"grad_norm": 4.303036689758301,
"learning_rate": 4.750907291322995e-07,
"loss": 0.9962,
"step": 2790
},
{
"epoch": 0.8944,
"grad_norm": 5.383343696594238,
"learning_rate": 4.737710326624876e-07,
"loss": 0.8455,
"step": 2795
},
{
"epoch": 0.896,
"grad_norm": 3.7558069229125977,
"learning_rate": 4.7278126031012867e-07,
"loss": 0.8844,
"step": 2800
},
{
"epoch": 0.8976,
"grad_norm": 11.162704467773438,
"learning_rate": 4.724513361926757e-07,
"loss": 0.8744,
"step": 2805
},
{
"epoch": 0.8992,
"grad_norm": 159.1503143310547,
"learning_rate": 4.711316397228637e-07,
"loss": 0.9221,
"step": 2810
},
{
"epoch": 0.9008,
"grad_norm": 3.2267873287200928,
"learning_rate": 4.7014186737050475e-07,
"loss": 0.8945,
"step": 2815
},
{
"epoch": 0.9024,
"grad_norm": 5.164098739624023,
"learning_rate": 4.691520950181458e-07,
"loss": 0.8151,
"step": 2820
},
{
"epoch": 0.904,
"grad_norm": 5.631259918212891,
"learning_rate": 4.6750247443088084e-07,
"loss": 0.9057,
"step": 2825
},
{
"epoch": 0.9056,
"grad_norm": 5.682581424713135,
"learning_rate": 4.6585285384361594e-07,
"loss": 0.743,
"step": 2830
},
{
"epoch": 0.9072,
"grad_norm": 8.424724578857422,
"learning_rate": 4.6519300560871e-07,
"loss": 0.8453,
"step": 2835
},
{
"epoch": 0.9088,
"grad_norm": 2.9814651012420654,
"learning_rate": 4.64533157373804e-07,
"loss": 0.8384,
"step": 2840
},
{
"epoch": 0.9104,
"grad_norm": 23.35456085205078,
"learning_rate": 4.638733091388981e-07,
"loss": 0.8512,
"step": 2845
},
{
"epoch": 0.912,
"grad_norm": 4.35836124420166,
"learning_rate": 4.6288353678653906e-07,
"loss": 0.8074,
"step": 2850
},
{
"epoch": 0.9136,
"grad_norm": 3.6067960262298584,
"learning_rate": 4.6156384031672713e-07,
"loss": 0.8849,
"step": 2855
},
{
"epoch": 0.9152,
"grad_norm": 5.33393669128418,
"learning_rate": 4.6123391619927416e-07,
"loss": 0.789,
"step": 2860
},
{
"epoch": 0.9168,
"grad_norm": 4.473043441772461,
"learning_rate": 4.599142197294622e-07,
"loss": 0.8883,
"step": 2865
},
{
"epoch": 0.9184,
"grad_norm": 5.744289875030518,
"learning_rate": 4.5859452325965025e-07,
"loss": 0.8656,
"step": 2870
},
{
"epoch": 0.92,
"grad_norm": 3.2158567905426025,
"learning_rate": 4.582645991421973e-07,
"loss": 0.8913,
"step": 2875
},
{
"epoch": 0.9216,
"grad_norm": 6.081568241119385,
"learning_rate": 4.5694490267238535e-07,
"loss": 0.823,
"step": 2880
},
{
"epoch": 0.9232,
"grad_norm": 6.85284948348999,
"learning_rate": 4.5562520620257337e-07,
"loss": 0.9446,
"step": 2885
},
{
"epoch": 0.9248,
"grad_norm": 7.821119785308838,
"learning_rate": 4.5496535796766743e-07,
"loss": 0.7764,
"step": 2890
},
{
"epoch": 0.9264,
"grad_norm": 7.800992012023926,
"learning_rate": 4.536456614978555e-07,
"loss": 0.7969,
"step": 2895
},
{
"epoch": 0.928,
"grad_norm": 5.7979607582092285,
"learning_rate": 4.5265588914549654e-07,
"loss": 0.8884,
"step": 2900
},
{
"epoch": 0.9296,
"grad_norm": 5.360205173492432,
"learning_rate": 4.516661167931375e-07,
"loss": 0.9017,
"step": 2905
},
{
"epoch": 0.9312,
"grad_norm": 5.240516185760498,
"learning_rate": 4.510062685582316e-07,
"loss": 0.9129,
"step": 2910
},
{
"epoch": 0.9328,
"grad_norm": 5.348971843719482,
"learning_rate": 4.493566479709667e-07,
"loss": 0.9122,
"step": 2915
},
{
"epoch": 0.9344,
"grad_norm": 3.8240389823913574,
"learning_rate": 4.4836687561860767e-07,
"loss": 0.7231,
"step": 2920
},
{
"epoch": 0.936,
"grad_norm": 4.806263446807861,
"learning_rate": 4.480369515011547e-07,
"loss": 0.7984,
"step": 2925
},
{
"epoch": 0.9376,
"grad_norm": 17.400897979736328,
"learning_rate": 4.4704717914879574e-07,
"loss": 0.7724,
"step": 2930
},
{
"epoch": 0.9392,
"grad_norm": 3.6956000328063965,
"learning_rate": 4.457274826789838e-07,
"loss": 0.7589,
"step": 2935
},
{
"epoch": 0.9408,
"grad_norm": 4.311309814453125,
"learning_rate": 4.450676344440779e-07,
"loss": 0.781,
"step": 2940
},
{
"epoch": 0.9424,
"grad_norm": 7.509525775909424,
"learning_rate": 4.4407786209171886e-07,
"loss": 0.9257,
"step": 2945
},
{
"epoch": 0.944,
"grad_norm": 7.102635860443115,
"learning_rate": 4.430880897393599e-07,
"loss": 0.7501,
"step": 2950
},
{
"epoch": 0.9456,
"grad_norm": 3.7048957347869873,
"learning_rate": 4.4275816562190693e-07,
"loss": 0.7436,
"step": 2955
},
{
"epoch": 0.9472,
"grad_norm": 3.4321208000183105,
"learning_rate": 4.4176839326954797e-07,
"loss": 0.8061,
"step": 2960
},
{
"epoch": 0.9488,
"grad_norm": 15.857254981994629,
"learning_rate": 4.40778620917189e-07,
"loss": 1.019,
"step": 2965
},
{
"epoch": 0.9504,
"grad_norm": 12.765538215637207,
"learning_rate": 4.3978884856483005e-07,
"loss": 0.7291,
"step": 2970
},
{
"epoch": 0.952,
"grad_norm": 4.807253360748291,
"learning_rate": 4.384691520950181e-07,
"loss": 0.8207,
"step": 2975
},
{
"epoch": 0.9536,
"grad_norm": 17.326221466064453,
"learning_rate": 4.374793797426592e-07,
"loss": 0.9717,
"step": 2980
},
{
"epoch": 0.9552,
"grad_norm": 4.301726818084717,
"learning_rate": 4.3681953150775317e-07,
"loss": 0.7003,
"step": 2985
},
{
"epoch": 0.9568,
"grad_norm": 21.185712814331055,
"learning_rate": 4.3615968327284723e-07,
"loss": 0.8548,
"step": 2990
},
{
"epoch": 0.9584,
"grad_norm": 8.12071418762207,
"learning_rate": 4.348399868030353e-07,
"loss": 0.9718,
"step": 2995
},
{
"epoch": 0.96,
"grad_norm": 4.572994709014893,
"learning_rate": 4.341801385681293e-07,
"loss": 0.8725,
"step": 3000
},
{
"epoch": 0.9616,
"grad_norm": 6.540128707885742,
"learning_rate": 4.3352029033322337e-07,
"loss": 0.8364,
"step": 3005
},
{
"epoch": 0.9632,
"grad_norm": 40.6855354309082,
"learning_rate": 4.331903662157704e-07,
"loss": 0.9501,
"step": 3010
},
{
"epoch": 0.9648,
"grad_norm": 10.740764617919922,
"learning_rate": 4.318706697459584e-07,
"loss": 0.9368,
"step": 3015
},
{
"epoch": 0.9664,
"grad_norm": 45.260108947753906,
"learning_rate": 4.3088089739359946e-07,
"loss": 0.9159,
"step": 3020
},
{
"epoch": 0.968,
"grad_norm": 3.4676690101623535,
"learning_rate": 4.305509732761465e-07,
"loss": 0.811,
"step": 3025
},
{
"epoch": 0.9696,
"grad_norm": 3.733179807662964,
"learning_rate": 4.292312768063345e-07,
"loss": 0.783,
"step": 3030
},
{
"epoch": 0.9712,
"grad_norm": 7.328685283660889,
"learning_rate": 4.2824150445397554e-07,
"loss": 0.8827,
"step": 3035
},
{
"epoch": 0.9728,
"grad_norm": 3.989272356033325,
"learning_rate": 4.279115803365226e-07,
"loss": 0.9425,
"step": 3040
},
{
"epoch": 0.9744,
"grad_norm": 13.896319389343262,
"learning_rate": 4.269218079841636e-07,
"loss": 0.8542,
"step": 3045
},
{
"epoch": 0.976,
"grad_norm": 5.98639440536499,
"learning_rate": 4.2659188386671065e-07,
"loss": 0.8771,
"step": 3050
},
{
"epoch": 0.9776,
"grad_norm": 15.31779956817627,
"learning_rate": 4.2527218739689866e-07,
"loss": 0.8599,
"step": 3055
},
{
"epoch": 0.9792,
"grad_norm": 4.293508052825928,
"learning_rate": 4.242824150445397e-07,
"loss": 0.8145,
"step": 3060
},
{
"epoch": 0.9808,
"grad_norm": 7.322820663452148,
"learning_rate": 4.2395249092708673e-07,
"loss": 0.7455,
"step": 3065
},
{
"epoch": 0.9824,
"grad_norm": 21.308486938476562,
"learning_rate": 4.229627185747278e-07,
"loss": 0.9661,
"step": 3070
},
{
"epoch": 0.984,
"grad_norm": 10.602350234985352,
"learning_rate": 4.213130979874628e-07,
"loss": 0.9197,
"step": 3075
},
{
"epoch": 0.9856,
"grad_norm": 5.298446178436279,
"learning_rate": 4.206532497525569e-07,
"loss": 0.8352,
"step": 3080
},
{
"epoch": 0.9872,
"grad_norm": 3.6425256729125977,
"learning_rate": 4.196634774001979e-07,
"loss": 0.8267,
"step": 3085
},
{
"epoch": 0.9888,
"grad_norm": 3.4868898391723633,
"learning_rate": 4.18673705047839e-07,
"loss": 0.8081,
"step": 3090
},
{
"epoch": 0.9904,
"grad_norm": 4.268869876861572,
"learning_rate": 4.1768393269548e-07,
"loss": 0.8818,
"step": 3095
},
{
"epoch": 0.992,
"grad_norm": 45.732139587402344,
"learning_rate": 4.1669416034312104e-07,
"loss": 0.8173,
"step": 3100
},
{
"epoch": 0.9936,
"grad_norm": 3.521414041519165,
"learning_rate": 4.1504453975585614e-07,
"loss": 0.9666,
"step": 3105
},
{
"epoch": 0.9952,
"grad_norm": 231.39346313476562,
"learning_rate": 4.1372484328604416e-07,
"loss": 0.7825,
"step": 3110
},
{
"epoch": 0.9968,
"grad_norm": 14.747803688049316,
"learning_rate": 4.130649950511382e-07,
"loss": 0.9401,
"step": 3115
},
{
"epoch": 0.9984,
"grad_norm": 6.619961738586426,
"learning_rate": 4.117452985813263e-07,
"loss": 0.8359,
"step": 3120
},
{
"epoch": 1.0,
"grad_norm": 9.53933048248291,
"learning_rate": 4.1009567799406134e-07,
"loss": 0.7629,
"step": 3125
},
{
"epoch": 1.0,
"step": 3125,
"total_flos": 1.130976210294145e+18,
"train_loss": 0.9110154730224609,
"train_runtime": 10176.2047,
"train_samples_per_second": 4.913,
"train_steps_per_second": 0.307
}
],
"logging_steps": 5,
"max_steps": 3125,
"num_input_tokens_seen": 0,
"num_train_epochs": 1,
"save_steps": 1000,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 1.130976210294145e+18,
"train_batch_size": 1,
"trial_name": null,
"trial_params": null
}