{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 4.081841432225064,
  "eval_steps": 500,
  "global_step": 400,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {"epoch": 0.010230179028132993, "grad_norm": 3.7339282035827637, "learning_rate": 4.000000000000001e-06, "loss": 1.6402, "step": 1},
    {"epoch": 0.020460358056265986, "grad_norm": 3.856332540512085, "learning_rate": 8.000000000000001e-06, "loss": 1.685, "step": 2},
    {"epoch": 0.030690537084398978, "grad_norm": 3.6486527919769287, "learning_rate": 1.2e-05, "loss": 1.6308, "step": 3},
    {"epoch": 0.04092071611253197, "grad_norm": 3.6288094520568848, "learning_rate": 1.6000000000000003e-05, "loss": 1.6336, "step": 4},
    {"epoch": 0.05115089514066496, "grad_norm": 3.33920955657959, "learning_rate": 2e-05, "loss": 1.5891, "step": 5},
    {"epoch": 0.061381074168797956, "grad_norm": 2.808194160461426, "learning_rate": 1.9999996908023674e-05, "loss": 1.5542, "step": 6},
    {"epoch": 0.07161125319693094, "grad_norm": 2.197322368621826, "learning_rate": 1.9999987632096612e-05, "loss": 1.4569, "step": 7},
    {"epoch": 0.08184143222506395, "grad_norm": 1.646809458732605, "learning_rate": 1.999997217222454e-05, "loss": 1.4049, "step": 8},
    {"epoch": 0.09207161125319693, "grad_norm": 1.207854151725769, "learning_rate": 1.9999950528417027e-05, "loss": 1.3218, "step": 9},
    {"epoch": 0.10230179028132992, "grad_norm": 0.9744110107421875, "learning_rate": 1.9999922700687455e-05, "loss": 1.3043, "step": 10},
    {"epoch": 0.11253196930946291, "grad_norm": 0.8862619996070862, "learning_rate": 1.999988868905303e-05, "loss": 1.2692, "step": 11},
    {"epoch": 0.12276214833759591, "grad_norm": 0.808510422706604, "learning_rate": 1.999984849353479e-05, "loss": 1.241, "step": 12},
    {"epoch": 0.1329923273657289, "grad_norm": 0.7722910046577454, "learning_rate": 1.999980211415759e-05, "loss": 1.2167, "step": 13},
    {"epoch": 0.1432225063938619, "grad_norm": 0.735596239566803, "learning_rate": 1.9999749550950106e-05, "loss": 1.1848, "step": 14},
    {"epoch": 0.1534526854219949, "grad_norm": 0.7540133595466614, "learning_rate": 1.9999690803944852e-05, "loss": 1.1839, "step": 15},
    {"epoch": 0.1636828644501279, "grad_norm": 0.7589508295059204, "learning_rate": 1.9999625873178146e-05, "loss": 1.1676, "step": 16},
    {"epoch": 0.17391304347826086, "grad_norm": 0.7847907543182373, "learning_rate": 1.999955475869015e-05, "loss": 1.1459, "step": 17},
    {"epoch": 0.18414322250639387, "grad_norm": 0.7236718535423279, "learning_rate": 1.9999477460524835e-05, "loss": 1.1253, "step": 18},
    {"epoch": 0.19437340153452684, "grad_norm": 0.7120142579078674, "learning_rate": 1.9999393978730004e-05, "loss": 1.1217, "step": 19},
    {"epoch": 0.20460358056265984, "grad_norm": 0.640995442867279, "learning_rate": 1.9999304313357282e-05, "loss": 1.112, "step": 20},
    {"epoch": 0.21483375959079284, "grad_norm": 0.5680305361747742, "learning_rate": 1.9999208464462116e-05, "loss": 1.0985, "step": 21},
    {"epoch": 0.22506393861892582, "grad_norm": 0.5688360929489136, "learning_rate": 1.9999106432103785e-05, "loss": 1.0823, "step": 22},
    {"epoch": 0.23529411764705882, "grad_norm": 0.5916054844856262, "learning_rate": 1.9998998216345375e-05, "loss": 1.0523, "step": 23},
    {"epoch": 0.24552429667519182, "grad_norm": 0.6135437488555908, "learning_rate": 1.9998883817253813e-05, "loss": 1.0757, "step": 24},
    {"epoch": 0.2557544757033248, "grad_norm": 0.587724506855011, "learning_rate": 1.999876323489984e-05, "loss": 1.0411, "step": 25},
    {"epoch": 0.2659846547314578, "grad_norm": 0.561601459980011, "learning_rate": 1.9998636469358023e-05, "loss": 1.0481, "step": 26},
    {"epoch": 0.27621483375959077, "grad_norm": 0.48791056871414185, "learning_rate": 1.999850352070676e-05, "loss": 1.0061, "step": 27},
    {"epoch": 0.2864450127877238, "grad_norm": 0.4305904507637024, "learning_rate": 1.9998364389028256e-05, "loss": 1.0076, "step": 28},
    {"epoch": 0.2966751918158568, "grad_norm": 0.36149221658706665, "learning_rate": 1.9998219074408554e-05, "loss": 1.0093, "step": 29},
    {"epoch": 0.3069053708439898, "grad_norm": 0.34878969192504883, "learning_rate": 1.999806757693752e-05, "loss": 0.9997, "step": 30},
    {"epoch": 0.3171355498721228, "grad_norm": 0.35569989681243896, "learning_rate": 1.9997909896708833e-05, "loss": 0.9904, "step": 31},
    {"epoch": 0.3273657289002558, "grad_norm": 0.37640881538391113, "learning_rate": 1.9997746033820005e-05, "loss": 0.9959, "step": 32},
    {"epoch": 0.3375959079283887, "grad_norm": 0.37813887000083923, "learning_rate": 1.9997575988372366e-05, "loss": 0.9708, "step": 33},
    {"epoch": 0.34782608695652173, "grad_norm": 0.3413299322128296, "learning_rate": 1.9997399760471072e-05, "loss": 0.9689, "step": 34},
    {"epoch": 0.35805626598465473, "grad_norm": 0.30490338802337646, "learning_rate": 1.9997217350225103e-05, "loss": 0.9774, "step": 35},
    {"epoch": 0.36828644501278773, "grad_norm": 0.27457210421562195, "learning_rate": 1.999702875774726e-05, "loss": 0.9989, "step": 36},
    {"epoch": 0.37851662404092073, "grad_norm": 0.26494237780570984, "learning_rate": 1.9996833983154166e-05, "loss": 0.9607, "step": 37},
    {"epoch": 0.3887468030690537, "grad_norm": 0.2736992835998535, "learning_rate": 1.9996633026566265e-05, "loss": 0.956, "step": 38},
    {"epoch": 0.3989769820971867, "grad_norm": 0.2966112494468689, "learning_rate": 1.999642588810784e-05, "loss": 0.9397, "step": 39},
    {"epoch": 0.4092071611253197, "grad_norm": 0.304120272397995, "learning_rate": 1.999621256790697e-05, "loss": 0.9692, "step": 40},
    {"epoch": 0.4194373401534527, "grad_norm": 0.3025428354740143, "learning_rate": 1.9995993066095584e-05, "loss": 0.9315, "step": 41},
    {"epoch": 0.4296675191815857, "grad_norm": 0.2758133113384247, "learning_rate": 1.9995767382809414e-05, "loss": 0.925, "step": 42},
    {"epoch": 0.4398976982097187, "grad_norm": 0.26389235258102417, "learning_rate": 1.999553551818802e-05, "loss": 0.926, "step": 43},
    {"epoch": 0.45012787723785164, "grad_norm": 0.24357713758945465, "learning_rate": 1.999529747237479e-05, "loss": 0.9375, "step": 44},
    {"epoch": 0.46035805626598464, "grad_norm": 0.2292446494102478, "learning_rate": 1.9995053245516927e-05, "loss": 0.9425, "step": 45},
    {"epoch": 0.47058823529411764, "grad_norm": 0.215789332985878, "learning_rate": 1.9994802837765464e-05, "loss": 0.9264, "step": 46},
    {"epoch": 0.48081841432225064, "grad_norm": 0.2154732346534729, "learning_rate": 1.999454624927525e-05, "loss": 0.9284, "step": 47},
    {"epoch": 0.49104859335038364, "grad_norm": 0.2164417803287506, "learning_rate": 1.9994283480204956e-05, "loss": 0.9339, "step": 48},
    {"epoch": 0.5012787723785166, "grad_norm": 0.21539105474948883, "learning_rate": 1.9994014530717082e-05, "loss": 0.9049, "step": 49},
    {"epoch": 0.5115089514066496, "grad_norm": 0.21105124056339264, "learning_rate": 1.9993739400977938e-05, "loss": 0.9105, "step": 50},
    {"epoch": 0.5217391304347826, "grad_norm": 0.21185389161109924, "learning_rate": 1.9993458091157668e-05, "loss": 0.927, "step": 51},
    {"epoch": 0.5319693094629157, "grad_norm": 0.19724711775779724, "learning_rate": 1.9993170601430233e-05, "loss": 0.8957, "step": 52},
    {"epoch": 0.5421994884910486, "grad_norm": 0.1938239187002182, "learning_rate": 1.999287693197341e-05, "loss": 0.9174, "step": 53},
    {"epoch": 0.5524296675191815, "grad_norm": 0.19070130586624146, "learning_rate": 1.9992577082968808e-05, "loss": 0.9111, "step": 54},
    {"epoch": 0.5626598465473146, "grad_norm": 0.1937451958656311, "learning_rate": 1.999227105460185e-05, "loss": 0.8977, "step": 55},
    {"epoch": 0.5728900255754475, "grad_norm": 0.19541557133197784, "learning_rate": 1.9991958847061786e-05, "loss": 0.8845, "step": 56},
    {"epoch": 0.5831202046035806, "grad_norm": 0.18689967691898346, "learning_rate": 1.999164046054168e-05, "loss": 0.8851, "step": 57},
    {"epoch": 0.5933503836317136, "grad_norm": 0.18149493634700775, "learning_rate": 1.999131589523842e-05, "loss": 0.8919, "step": 58},
    {"epoch": 0.6035805626598465, "grad_norm": 0.18779127299785614, "learning_rate": 1.999098515135272e-05, "loss": 0.898, "step": 59},
    {"epoch": 0.6138107416879796, "grad_norm": 0.17797918617725372, "learning_rate": 1.9990648229089106e-05, "loss": 0.9014, "step": 60},
    {"epoch": 0.6240409207161125, "grad_norm": 0.1641537845134735, "learning_rate": 1.999030512865593e-05, "loss": 0.8879, "step": 61},
    {"epoch": 0.6342710997442456, "grad_norm": 0.18192438781261444, "learning_rate": 1.9989955850265364e-05, "loss": 0.8735, "step": 62},
    {"epoch": 0.6445012787723785, "grad_norm": 0.17269515991210938, "learning_rate": 1.99896003941334e-05, "loss": 0.8808, "step": 63},
    {"epoch": 0.6547314578005116, "grad_norm": 0.17067378759384155, "learning_rate": 1.9989238760479853e-05, "loss": 0.8803, "step": 64},
    {"epoch": 0.6649616368286445, "grad_norm": 0.1700741946697235, "learning_rate": 1.9988870949528358e-05, "loss": 0.8805, "step": 65},
    {"epoch": 0.6751918158567775, "grad_norm": 0.16453766822814941, "learning_rate": 1.9988496961506356e-05, "loss": 0.8854, "step": 66},
    {"epoch": 0.6854219948849105, "grad_norm": 0.21169471740722656, "learning_rate": 1.998811679664513e-05, "loss": 0.8772, "step": 67},
    {"epoch": 0.6956521739130435, "grad_norm": 0.17419219017028809, "learning_rate": 1.998773045517977e-05, "loss": 0.8685, "step": 68},
    {"epoch": 0.7058823529411765, "grad_norm": 0.1648634821176529, "learning_rate": 1.998733793734918e-05, "loss": 0.8547, "step": 69},
    {"epoch": 0.7161125319693095, "grad_norm": 0.1565054953098297, "learning_rate": 1.9986939243396103e-05, "loss": 0.8598, "step": 70},
    {"epoch": 0.7263427109974424, "grad_norm": 0.1954575926065445, "learning_rate": 1.9986534373567085e-05, "loss": 0.8602, "step": 71},
    {"epoch": 0.7365728900255755, "grad_norm": 0.16202539205551147, "learning_rate": 1.998612332811249e-05, "loss": 0.8603, "step": 72},
    {"epoch": 0.7468030690537084, "grad_norm": 0.16558033227920532, "learning_rate": 1.9985706107286515e-05, "loss": 0.8624, "step": 73},
    {"epoch": 0.7570332480818415, "grad_norm": 0.16333763301372528, "learning_rate": 1.9985282711347164e-05, "loss": 0.8882, "step": 74},
    {"epoch": 0.7672634271099744, "grad_norm": 0.1662484109401703, "learning_rate": 1.9984853140556258e-05, "loss": 0.8548, "step": 75},
    {"epoch": 0.7774936061381074, "grad_norm": 0.15836550295352936, "learning_rate": 1.998441739517945e-05, "loss": 0.8556, "step": 76},
    {"epoch": 0.7877237851662404, "grad_norm": 0.15881623327732086, "learning_rate": 1.99839754754862e-05, "loss": 0.8519, "step": 77},
    {"epoch": 0.7979539641943734, "grad_norm": 0.1609843522310257, "learning_rate": 1.9983527381749787e-05, "loss": 0.8566, "step": 78},
    {"epoch": 0.8081841432225064, "grad_norm": 0.15333859622478485, "learning_rate": 1.998307311424731e-05, "loss": 0.847, "step": 79},
    {"epoch": 0.8184143222506394, "grad_norm": 0.15633417665958405, "learning_rate": 1.9982612673259686e-05, "loss": 0.8644, "step": 80},
    {"epoch": 0.8286445012787724, "grad_norm": 0.16742706298828125, "learning_rate": 1.9982146059071653e-05, "loss": 0.8628, "step": 81},
    {"epoch": 0.8388746803069054, "grad_norm": 0.160526841878891, "learning_rate": 1.9981673271971757e-05, "loss": 0.8198, "step": 82},
    {"epoch": 0.8491048593350383, "grad_norm": 0.1579267680644989, "learning_rate": 1.9981194312252375e-05, "loss": 0.8582, "step": 83},
    {"epoch": 0.8593350383631714, "grad_norm": 0.15127578377723694, "learning_rate": 1.9980709180209686e-05, "loss": 0.8562, "step": 84},
    {"epoch": 0.8695652173913043, "grad_norm": 0.1515190452337265, "learning_rate": 1.99802178761437e-05, "loss": 0.8541, "step": 85},
    {"epoch": 0.8797953964194374, "grad_norm": 0.1643359363079071, "learning_rate": 1.9979720400358233e-05, "loss": 0.8782, "step": 86},
    {"epoch": 0.8900255754475703, "grad_norm": 0.17986010015010834, "learning_rate": 1.9979216753160918e-05, "loss": 0.8377, "step": 87},
    {"epoch": 0.9002557544757033, "grad_norm": 0.1635770946741104, "learning_rate": 1.9978706934863215e-05, "loss": 0.8508, "step": 88},
    {"epoch": 0.9104859335038363, "grad_norm": 0.15602238476276398, "learning_rate": 1.9978190945780394e-05, "loss": 0.8412, "step": 89},
    {"epoch": 0.9207161125319693, "grad_norm": 0.15934546291828156, "learning_rate": 1.9977668786231536e-05, "loss": 0.8255, "step": 90},
    {"epoch": 0.9309462915601023, "grad_norm": 0.15885022282600403, "learning_rate": 1.997714045653954e-05, "loss": 0.8492, "step": 91},
    {"epoch": 0.9411764705882353, "grad_norm": 0.16822269558906555, "learning_rate": 1.9976605957031124e-05, "loss": 0.8354, "step": 92},
    {"epoch": 0.9514066496163683, "grad_norm": 0.1673271358013153, "learning_rate": 1.9976065288036824e-05, "loss": 0.8294, "step": 93},
    {"epoch": 0.9616368286445013, "grad_norm": 0.15639379620552063, "learning_rate": 1.9975518449890984e-05, "loss": 0.8586, "step": 94},
    {"epoch": 0.9718670076726342, "grad_norm": 0.16323138773441315, "learning_rate": 1.997496544293177e-05, "loss": 0.8451, "step": 95},
    {"epoch": 0.9820971867007673, "grad_norm": 0.15901896357536316, "learning_rate": 1.9974406267501153e-05, "loss": 0.8409, "step": 96},
    {"epoch": 0.9923273657289002, "grad_norm": 0.15179632604122162, "learning_rate": 1.9973840923944926e-05, "loss": 0.8358, "step": 97},
    {"epoch": 1.0, "grad_norm": 0.19052907824516296, "learning_rate": 1.9973269412612696e-05, "loss": 0.8236, "step": 98},
    {"epoch": 1.010230179028133, "grad_norm": 0.16732874512672424, "learning_rate": 1.997269173385788e-05, "loss": 0.8225, "step": 99},
    {"epoch": 1.020460358056266, "grad_norm": 0.17052046954631805, "learning_rate": 1.9972107888037718e-05, "loss": 0.8249, "step": 100},
    {"epoch": 1.030690537084399, "grad_norm": 0.16232553124427795, "learning_rate": 1.9971517875513254e-05, "loss": 0.8347, "step": 101},
    {"epoch": 1.040920716112532, "grad_norm": 0.17455045878887177, "learning_rate": 1.997092169664935e-05, "loss": 0.8411, "step": 102},
    {"epoch": 1.051150895140665, "grad_norm": 0.1684006005525589, "learning_rate": 1.9970319351814677e-05, "loss": 0.8197, "step": 103},
    {"epoch": 1.061381074168798, "grad_norm": 0.17193110287189484, "learning_rate": 1.9969710841381723e-05, "loss": 0.8164, "step": 104},
    {"epoch": 1.0716112531969308, "grad_norm": 0.18234021961688995, "learning_rate": 1.9969096165726794e-05, "loss": 0.819, "step": 105},
    {"epoch": 1.081841432225064, "grad_norm": 0.1758105605840683, "learning_rate": 1.9968475325229995e-05, "loss": 0.8101, "step": 106},
    {"epoch": 1.092071611253197, "grad_norm": 0.16415317356586456, "learning_rate": 1.9967848320275253e-05, "loss": 0.8394, "step": 107},
    {"epoch": 1.10230179028133, "grad_norm": 0.16018615663051605, "learning_rate": 1.9967215151250305e-05, "loss": 0.8166, "step": 108},
    {"epoch": 1.1125319693094629, "grad_norm": 0.1779460608959198, "learning_rate": 1.9966575818546702e-05, "loss": 0.8286, "step": 109},
    {"epoch": 1.1227621483375958, "grad_norm": 0.1844969242811203, "learning_rate": 1.9965930322559803e-05, "loss": 0.8079, "step": 110},
    {"epoch": 1.132992327365729, "grad_norm": 0.17768210172653198, "learning_rate": 1.9965278663688776e-05, "loss": 0.8237, "step": 111},
    {"epoch": 1.143222506393862, "grad_norm": 0.17491723597049713, "learning_rate": 1.996462084233661e-05, "loss": 0.825, "step": 112},
    {"epoch": 1.1534526854219949, "grad_norm": 0.17149733006954193, "learning_rate": 1.9963956858910098e-05, "loss": 0.8289, "step": 113},
    {"epoch": 1.1636828644501278, "grad_norm": 0.16866537928581238, "learning_rate": 1.9963286713819836e-05, "loss": 0.8151, "step": 114},
    {"epoch": 1.1739130434782608, "grad_norm": 0.1781005710363388, "learning_rate": 1.9962610407480248e-05, "loss": 0.8223, "step": 115},
    {"epoch": 1.184143222506394, "grad_norm": 0.17955420911312103, "learning_rate": 1.996192794030955e-05, "loss": 0.8317, "step": 116},
    {"epoch": 1.1943734015345269, "grad_norm": 0.1959114521741867, "learning_rate": 1.9961239312729787e-05, "loss": 0.7925, "step": 117},
    {"epoch": 1.2046035805626598, "grad_norm": 0.18740878999233246, "learning_rate": 1.9960544525166794e-05, "loss": 0.8141, "step": 118},
    {"epoch": 1.2148337595907928, "grad_norm": 0.17671266198158264, "learning_rate": 1.995984357805023e-05, "loss": 0.824, "step": 119},
    {"epoch": 1.2250639386189257, "grad_norm": 0.1796875149011612, "learning_rate": 1.9959136471813556e-05, "loss": 0.7951, "step": 120},
    {"epoch": 1.2352941176470589, "grad_norm": 0.17882168292999268, "learning_rate": 1.9958423206894043e-05, "loss": 0.8056, "step": 121},
    {"epoch": 1.2455242966751918, "grad_norm": 0.18344885110855103, "learning_rate": 1.9957703783732768e-05, "loss": 0.807, "step": 122},
    {"epoch": 1.2557544757033248, "grad_norm": 0.19720099866390228, "learning_rate": 1.995697820277462e-05, "loss": 0.8006, "step": 123},
    {"epoch": 1.265984654731458, "grad_norm": 0.18990914523601532, "learning_rate": 1.9956246464468294e-05, "loss": 0.7878, "step": 124},
    {"epoch": 1.2762148337595907, "grad_norm": 0.1771262288093567, "learning_rate": 1.99555085692663e-05, "loss": 0.8164, "step": 125},
    {"epoch": 1.2864450127877238, "grad_norm": 0.1780935823917389, "learning_rate": 1.995476451762494e-05, "loss": 0.8013, "step": 126},
    {"epoch": 1.2966751918158568, "grad_norm": 0.1821814477443695, "learning_rate": 1.995401431000434e-05, "loss": 0.8041, "step": 127},
    {"epoch": 1.3069053708439897, "grad_norm": 0.18167997896671295, "learning_rate": 1.9953257946868418e-05, "loss": 0.7855, "step": 128},
    {"epoch": 1.317135549872123, "grad_norm": 0.18690866231918335, "learning_rate": 1.995249542868491e-05, "loss": 0.7983, "step": 129},
    {"epoch": 1.3273657289002558, "grad_norm": 0.2091519981622696, "learning_rate": 1.995172675592535e-05, "loss": 0.8237, "step": 130},
    {"epoch": 1.3375959079283888, "grad_norm": 0.19291919469833374, "learning_rate": 1.9950951929065085e-05, "loss": 0.7869, "step": 131},
    {"epoch": 1.3478260869565217, "grad_norm": 0.19281232357025146, "learning_rate": 1.9950170948583262e-05, "loss": 0.8145, "step": 132},
    {"epoch": 1.3580562659846547, "grad_norm": 0.26430603861808777, "learning_rate": 1.994938381496284e-05, "loss": 0.7965, "step": 133},
    {"epoch": 1.3682864450127878, "grad_norm": 0.19186891615390778, "learning_rate": 1.994859052869057e-05, "loss": 0.7961, "step": 134},
    {"epoch": 1.3785166240409208, "grad_norm": 0.20330464839935303, "learning_rate": 1.9947791090257023e-05, "loss": 0.7844, "step": 135},
    {"epoch": 1.3887468030690537, "grad_norm": 0.20833194255828857, "learning_rate": 1.9946985500156567e-05, "loss": 0.7757, "step": 136},
    {"epoch": 1.3989769820971867, "grad_norm": 0.1985306292772293, "learning_rate": 1.9946173758887377e-05, "loss": 0.8061, "step": 137},
    {"epoch": 1.4092071611253196, "grad_norm": 0.19251206517219543, "learning_rate": 1.9945355866951426e-05, "loss": 0.804, "step": 138},
    {"epoch": 1.4194373401534528, "grad_norm": 0.2225429117679596, "learning_rate": 1.9944531824854497e-05, "loss": 0.787, "step": 139},
    {"epoch": 1.4296675191815857, "grad_norm": 0.1990213543176651, "learning_rate": 1.994370163310617e-05, "loss": 0.8052, "step": 140},
    {"epoch": 1.4398976982097187, "grad_norm": 0.20235106348991394, "learning_rate": 1.9942865292219837e-05, "loss": 0.8106, "step": 141},
    {"epoch": 1.4501278772378516, "grad_norm": 0.20018866658210754, "learning_rate": 1.9942022802712684e-05, "loss": 0.8016, "step": 142},
    {"epoch": 1.4603580562659846, "grad_norm": 0.2079232931137085, "learning_rate": 1.9941174165105702e-05, "loss": 0.788, "step": 143},
    {"epoch": 1.4705882352941178, "grad_norm": 0.2163192331790924, "learning_rate": 1.994031937992369e-05, "loss": 0.7913, "step": 144},
    {"epoch": 1.4808184143222507, "grad_norm": 0.20073741674423218, "learning_rate": 1.9939458447695235e-05, "loss": 0.7926, "step": 145},
    {"epoch": 1.4910485933503836, "grad_norm": 0.20287558436393738, "learning_rate": 1.993859136895274e-05, "loss": 0.8034, "step": 146},
    {"epoch": 1.5012787723785166, "grad_norm": 0.22918002307415009, "learning_rate": 1.9937718144232397e-05, "loss": 0.7924, "step": 147},
    {"epoch": 1.5115089514066495, "grad_norm": 0.20228266716003418, "learning_rate": 1.9936838774074207e-05, "loss": 0.7944, "step": 148},
    {"epoch": 1.5217391304347827, "grad_norm": 0.19842855632305145, "learning_rate": 1.993595325902197e-05, "loss": 0.8043, "step": 149},
    {"epoch": 1.5319693094629157, "grad_norm": 0.22039633989334106, "learning_rate": 1.9935061599623278e-05, "loss": 0.7794, "step": 150},
    {"epoch": 1.5421994884910486, "grad_norm": 0.2239089459180832, "learning_rate": 1.993416379642954e-05, "loss": 0.773, "step": 151},
    {"epoch": 1.5524296675191815, "grad_norm": 0.2272518277168274, "learning_rate": 1.9933259849995942e-05, "loss": 0.7771, "step": 152},
    {"epoch": 1.5626598465473145, "grad_norm": 0.20447060465812683, "learning_rate": 1.9932349760881483e-05, "loss": 0.8027, "step": 153},
    {"epoch": 1.5728900255754477, "grad_norm": 0.20220829546451569, "learning_rate": 1.9931433529648964e-05, "loss": 0.7845, "step": 154},
    {"epoch": 1.5831202046035806, "grad_norm": 0.2160012573003769, "learning_rate": 1.9930511156864974e-05, "loss": 0.7807, "step": 155},
    {"epoch": 1.5933503836317136, "grad_norm": 0.2220025658607483, "learning_rate": 1.99295826430999e-05, "loss": 0.7759, "step": 156},
    {"epoch": 1.6035805626598465, "grad_norm": 0.20744675397872925, "learning_rate": 1.9928647988927937e-05, "loss": 0.7777, "step": 157},
    {"epoch": 1.6138107416879794, "grad_norm": 0.2259499430656433, "learning_rate": 1.9927707194927067e-05, "loss": 0.7817, "step": 158},
    {"epoch": 1.6240409207161126, "grad_norm": 0.21047933399677277, "learning_rate": 1.992676026167907e-05, "loss": 0.7803, "step": 159},
    {"epoch": 1.6342710997442456, "grad_norm": 0.24594929814338684, "learning_rate": 1.9925807189769533e-05, "loss": 0.7687, "step": 160},
    {"epoch": 1.6445012787723785, "grad_norm": 0.22714115679264069, "learning_rate": 1.9924847979787826e-05, "loss": 0.7521, "step": 161},
    {"epoch": 1.6547314578005117, "grad_norm": 0.21901191771030426, "learning_rate": 1.992388263232712e-05, "loss": 0.7751, "step": 162},
    {"epoch": 1.6649616368286444, "grad_norm": 0.25662556290626526, "learning_rate": 1.992291114798438e-05, "loss": 0.7801, "step": 163},
    {"epoch": 1.6751918158567776, "grad_norm": 0.2206140011548996, "learning_rate": 1.992193352736037e-05, "loss": 0.7943, "step": 164},
    {"epoch": 1.6854219948849105, "grad_norm": 0.2689746916294098, "learning_rate": 1.9920949771059647e-05, "loss": 0.7697, "step": 165},
    {"epoch": 1.6956521739130435, "grad_norm": 0.22856131196022034, "learning_rate": 1.9919959879690553e-05, "loss": 0.7596, "step": 166},
    {"epoch": 1.7058823529411766, "grad_norm": 0.252189964056015, "learning_rate": 1.9918963853865243e-05, "loss": 0.7893, "step": 167},
    {"epoch": 1.7161125319693094, "grad_norm": 0.22657926380634308, "learning_rate": 1.9917961694199653e-05, "loss": 0.7633, "step": 168},
    {"epoch": 1.7263427109974425, "grad_norm": 0.20404541492462158, "learning_rate": 1.9916953401313505e-05, "loss": 0.7923, "step": 169},
    {"epoch": 1.7365728900255755, "grad_norm": 0.3168542683124542, "learning_rate": 1.9915938975830332e-05, "loss": 0.7906, "step": 170},
    {"epoch": 1.7468030690537084, "grad_norm": 0.2238597571849823, "learning_rate": 1.9914918418377444e-05, "loss": 0.7836, "step": 171},
    {"epoch": 1.7570332480818416, "grad_norm": 0.23487034440040588, "learning_rate": 1.9913891729585955e-05, "loss": 0.7762, "step": 172},
    {"epoch": 1.7672634271099743, "grad_norm": 0.24799533188343048, "learning_rate": 1.9912858910090758e-05, "loss": 0.7786, "step": 173},
    {"epoch": 1.7774936061381075, "grad_norm": 0.2436627745628357, "learning_rate": 1.9911819960530548e-05, "loss": 0.7832, "step": 174},
    {"epoch": 1.7877237851662404, "grad_norm": 0.25562822818756104, "learning_rate": 1.9910774881547803e-05, "loss": 0.771, "step": 175},
    {"epoch": 1.7979539641943734, "grad_norm": 0.2377200573682785, "learning_rate": 1.9909723673788797e-05, "loss": 0.7614, "step": 176},
    {"epoch": 1.8081841432225065, "grad_norm": 0.2661269009113312, "learning_rate": 1.9908666337903593e-05, "loss": 0.7688, "step": 177},
    {"epoch": 1.8184143222506393, "grad_norm": 0.23161008954048157, "learning_rate": 1.9907602874546042e-05, "loss": 0.7792, "step": 178},
    {"epoch": 1.8286445012787724, "grad_norm": 0.2372981458902359, "learning_rate": 1.990653328437378e-05, "loss": 0.7672, "step": 179},
    {"epoch": 1.8388746803069054, "grad_norm": 0.24873419106006622, "learning_rate": 1.9905457568048243e-05, "loss": 0.769, "step": 180},
    {"epoch": 1.8491048593350383, "grad_norm": 0.24799948930740356, "learning_rate": 1.9904375726234645e-05, "loss": 0.7794, "step": 181},
    {"epoch": 1.8593350383631715, "grad_norm": 0.262285977602005, "learning_rate": 1.9903287759601994e-05, "loss": 0.7763, "step": 182},
    {"epoch": 1.8695652173913042, "grad_norm": 0.2708827555179596, "learning_rate": 1.9902193668823085e-05, "loss": 0.7703, "step": 183},
    {"epoch": 1.8797953964194374, "grad_norm": 0.25064393877983093, "learning_rate": 1.9901093454574494e-05, "loss": 0.7911, "step": 184},
    {"epoch": 1.8900255754475703, "grad_norm": 0.25976258516311646, "learning_rate": 1.989998711753659e-05, "loss": 0.776, "step": 185},
    {"epoch": 1.9002557544757033, "grad_norm": 0.26402992010116577, "learning_rate": 1.9898874658393525e-05, "loss": 0.792, "step": 186},
    {"epoch": 1.9104859335038364, "grad_norm": 0.2455168217420578, "learning_rate": 1.9897756077833245e-05, "loss": 0.7744, "step": 187},
    {"epoch": 1.9207161125319692, "grad_norm": 0.26314792037010193, "learning_rate": 1.9896631376547467e-05, "loss": 0.7683, "step": 188},
    {"epoch": 1.9309462915601023, "grad_norm": 0.25260472297668457, "learning_rate": 1.9895500555231706e-05, "loss": 0.7905, "step": 189},
    {"epoch": 1.9411764705882353, "grad_norm": 0.2529143989086151, "learning_rate": 1.9894363614585252e-05, "loss": 0.7773, "step": 190},
    {"epoch": 1.9514066496163682, "grad_norm": 0.2417425513267517, "learning_rate": 1.989322055531119e-05, "loss": 0.7632, "step": 191},
    {"epoch": 1.9616368286445014, "grad_norm": 0.2735307216644287, "learning_rate": 1.9892071378116378e-05, "loss": 0.7788, "step": 192},
    {"epoch": 1.9718670076726341, "grad_norm": 0.2711434066295624, "learning_rate": 1.9890916083711463e-05, "loss": 0.7735, "step": 193},
    {"epoch": 1.9820971867007673, "grad_norm": 0.24282266199588776, "learning_rate": 1.9889754672810872e-05, "loss": 0.759, "step": 194},
    {"epoch": 1.9923273657289002, "grad_norm": 0.275905966758728, "learning_rate": 1.9888587146132817e-05, "loss": 0.8014, "step": 195},
    {"epoch": 2.0, "grad_norm": 0.2884292006492615, "learning_rate": 1.9887413504399295e-05, "loss": 0.7734, "step": 196},
    {"epoch": 2.010230179028133, "grad_norm": 0.28079572319984436, "learning_rate": 1.988623374833607e-05, "loss": 0.7619, "step": 197},
    {"epoch": 2.020460358056266, "grad_norm": 0.25205302238464355, "learning_rate": 1.988504787867271e-05, "loss": 0.7549, "step": 198},
    {"epoch": 2.030690537084399, "grad_norm": 0.2778927981853485, "learning_rate": 1.9883855896142547e-05, "loss": 0.7601, "step": 199},
    {"epoch": 2.040920716112532, "grad_norm": 0.29224029183387756, "learning_rate": 1.988265780148269e-05, "loss": 0.7593, "step": 200},
    {"epoch": 2.051150895140665, "grad_norm": 0.2718479037284851, "learning_rate": 1.9881453595434042e-05, "loss": 0.7704, "step": 201},
    {"epoch": 2.061381074168798, "grad_norm": 0.2963065803050995, "learning_rate": 1.988024327874128e-05, "loss": 0.7756, "step": 202},
    {"epoch": 2.071611253196931, "grad_norm": 0.29864153265953064, "learning_rate": 1.9879026852152857e-05, "loss": 0.7447, "step": 203},
    {"epoch": 2.081841432225064, "grad_norm": 0.25865793228149414, "learning_rate": 1.9877804316421e-05, "loss": 0.7593, "step": 204},
    {"epoch": 2.0920716112531967, "grad_norm": 0.33318352699279785, "learning_rate": 1.9876575672301726e-05, "loss": 0.755, "step": 205},
    {"epoch": 2.10230179028133, "grad_norm": 0.2629791796207428, "learning_rate": 1.9875340920554816e-05, "loss": 0.7556, "step": 206},
    {"epoch": 2.112531969309463, "grad_norm": 0.26791051030158997, "learning_rate": 1.9874100061943845e-05, "loss": 0.7565, "step": 207},
    {"epoch": 2.122762148337596, "grad_norm": 0.32693561911582947, "learning_rate": 1.9872853097236142e-05, "loss": 0.7498, "step": 208},
    {"epoch": 2.132992327365729, "grad_norm": 0.28684085607528687, "learning_rate": 1.987160002720283e-05, "loss": 0.7757, "step": 209},
    {"epoch": 2.1432225063938617, "grad_norm": 0.3658446669578552, "learning_rate": 1.9870340852618802e-05, "loss": 0.7541, "step": 210},
    {"epoch": 2.153452685421995, "grad_norm": 0.2634979784488678, "learning_rate": 1.9869075574262728e-05, "loss": 0.7626, "step": 211},
    {"epoch": 2.163682864450128, "grad_norm": 0.2918095290660858, "learning_rate": 1.9867804192917043e-05, "loss": 0.7437, "step": 212},
    {"epoch": 2.1739130434782608, "grad_norm": 0.3078654408454895, "learning_rate": 1.9866526709367967e-05, "loss": 0.7762, "step": 213},
    {"epoch": 2.184143222506394, "grad_norm": 0.4585474133491516, "learning_rate": 1.9865243124405493e-05, "loss": 0.7744, "step": 214},
    {"epoch": 2.1943734015345266, "grad_norm": 0.30667614936828613, "learning_rate": 1.986395343882338e-05, "loss": 0.7698, "step": 215},
    {"epoch": 2.20460358056266, "grad_norm": 0.32421836256980896, "learning_rate": 1.986265765341916e-05, "loss": 0.7602, "step": 216},
    {"epoch": 2.214833759590793, "grad_norm": 0.2798852324485779, "learning_rate": 1.986135576899415e-05, "loss": 0.7417, "step": 217},
    {"epoch": 2.2250639386189257, "grad_norm": 0.28992176055908203, "learning_rate": 1.9860047786353426e-05, "loss": 0.7729, "step": 218},
    {"epoch": 2.235294117647059, "grad_norm": 0.3212791085243225, "learning_rate": 1.985873370630583e-05, "loss": 0.759, "step": 219},
    {"epoch": 2.2455242966751916, "grad_norm": 0.27983346581459045, "learning_rate": 1.9857413529663993e-05, "loss": 0.7648, "step": 220},
    {"epoch": 2.2557544757033248, "grad_norm": 0.3009434640407562, "learning_rate": 1.98560872572443e-05, "loss": 0.7274, "step": 221},
    {"epoch": 2.265984654731458, "grad_norm": 0.28235509991645813, "learning_rate": 1.9854754889866915e-05, "loss": 0.7667, "step": 222},
    {"epoch": 2.2762148337595907, "grad_norm": 0.3221166729927063, "learning_rate": 1.9853416428355765e-05, "loss": 0.7402, "step": 223},
    {"epoch": 2.286445012787724, "grad_norm": 0.27435916662216187, "learning_rate": 1.9852071873538553e-05, "loss": 0.749, "step": 224},
    {"epoch": 2.296675191815857, "grad_norm": 0.27836698293685913, "learning_rate": 1.985072122624674e-05, "loss": 0.7444, "step": 225},
    {"epoch": 2.3069053708439897, "grad_norm": 0.287270724773407, "learning_rate": 1.984936448731556e-05, "loss": 0.762, "step": 226},
    {"epoch": 2.317135549872123, "grad_norm": 0.3191712200641632, "learning_rate": 1.9848001657584016e-05, "loss": 0.748, "step": 227},
    {"epoch": 2.3273657289002556, "grad_norm": 0.2816384732723236, "learning_rate": 1.9846632737894878e-05, "loss": 0.7445, "step": 228},
    {"epoch": 2.337595907928389, "grad_norm": 0.294630229473114, "learning_rate": 1.9845257729094672e-05, "loss": 0.7477, "step": 229},
    {"epoch": 2.3478260869565215, "grad_norm": 0.2944187819957733, "learning_rate": 1.9843876632033707e-05, "loss": 0.7443, "step": 230},
    {"epoch": 2.3580562659846547, "grad_norm": 0.26522430777549744, "learning_rate": 1.984248944756604e-05, "loss": 0.7632, "step": 231},
    {"epoch": 2.368286445012788, "grad_norm": 0.33316174149513245, "learning_rate": 1.98410961765495e-05, "loss": 0.7519, "step": 232},
    {"epoch": 2.3785166240409206, "grad_norm": 0.30234989523887634, "learning_rate": 1.983969681984568e-05, "loss": 0.7559, "step": 233},
    {"epoch": 2.3887468030690537, "grad_norm": 0.31727346777915955, "learning_rate": 1.9838291378319935e-05, "loss": 0.7484, "step": 234},
    {"epoch": 2.398976982097187, "grad_norm": 0.2959267497062683, "learning_rate": 1.9836879852841387e-05, "loss": 0.7378, "step": 235},
    {"epoch": 2.4092071611253196, "grad_norm": 0.31137242913246155, "learning_rate": 1.9835462244282912e-05, "loss": 0.7481, "step": 236},
    {"epoch": 2.419437340153453, "grad_norm": 0.3378095030784607, "learning_rate": 1.9834038553521155e-05, "loss": 0.7506, "step": 237},
    {"epoch": 2.4296675191815855, "grad_norm": 0.2913769781589508, "learning_rate": 1.9832608781436516e-05, "loss": 0.7432, "step": 238},
    {"epoch": 2.4398976982097187, "grad_norm": 0.4157828092575073, "learning_rate": 1.9831172928913163e-05, "loss": 0.7496, "step": 239},
    {"epoch": 2.4501278772378514, "grad_norm": 0.3153923451900482, "learning_rate": 1.982973099683902e-05, "loss": 0.7611, "step": 240},
    {"epoch": 2.4603580562659846, "grad_norm": 0.3100709915161133, "learning_rate": 1.9828282986105768e-05, "loss": 0.7417, "step": 241},
    {"epoch": 2.4705882352941178, "grad_norm": 0.355727881193161, "learning_rate": 1.9826828897608855e-05, "loss": 0.7421, "step": 242},
    {"epoch": 2.4808184143222505, "grad_norm": 0.31488820910453796, "learning_rate": 1.982536873224748e-05, "loss": 0.7603, "step": 243},
    {"epoch": 2.4910485933503836, "grad_norm": 0.27576926350593567, "learning_rate": 1.9823902490924603e-05, "loss": 0.7527, "step": 244},
    {"epoch": 2.501278772378517, "grad_norm": 0.3244059085845947, "learning_rate": 1.9822430174546938e-05, "loss": 0.7376, "step": 245},
    {"epoch": 2.5115089514066495, "grad_norm": 0.3047364354133606, "learning_rate": 1.982095178402496e-05, "loss": 0.7534, "step": 246},
    {"epoch": 2.5217391304347827, "grad_norm": 0.29374366998672485, "learning_rate": 1.9819467320272897e-05, "loss": 0.7414, "step": 247},
    {"epoch": 2.531969309462916, "grad_norm": 0.3449955880641937, "learning_rate": 1.981797678420874e-05, "loss": 0.7369, "step": 248},
    {"epoch": 2.5421994884910486, "grad_norm": 0.3467267155647278, "learning_rate": 1.9816480176754225e-05, "loss": 0.7434, "step": 249},
    {"epoch": 2.5524296675191813, "grad_norm": 0.3496444821357727, "learning_rate": 1.981497749883485e-05, "loss": 0.7639, "step": 250},
    {"epoch": 2.5626598465473145, "grad_norm": 0.29089784622192383, "learning_rate": 1.9813468751379856e-05, "loss": 0.7516, "step": 251},
    {"epoch": 2.5728900255754477, "grad_norm": 0.3429134786128998, "learning_rate": 1.981195393532226e-05, "loss": 0.7476, "step": 252},
    {"epoch": 2.5831202046035804, "grad_norm": 0.2926439642906189, "learning_rate": 1.9810433051598797e-05, "loss": 0.7456, "step": 253},
    {"epoch": 2.5933503836317136, "grad_norm": 0.3356344401836395, "learning_rate": 1.980890610114999e-05, "loss": 0.7559, "step": 254},
    {"epoch": 2.6035805626598467, "grad_norm": 0.34643828868865967, "learning_rate": 1.9807373084920093e-05, "loss": 0.7477, "step": 255},
    {"epoch": 2.6138107416879794, "grad_norm": 0.2760990858078003, "learning_rate": 1.980583400385712e-05, "loss": 0.778, "step": 256},
    {"epoch": 2.6240409207161126, "grad_norm": 0.38434210419654846, "learning_rate": 1.9804288858912825e-05, "loss": 0.7404, "step": 257},
    {"epoch": 2.634271099744246, "grad_norm": 0.3008972406387329, "learning_rate": 1.9802737651042717e-05, "loss": 0.726, "step": 258},
    {"epoch": 2.6445012787723785, "grad_norm": 0.30021610856056213, "learning_rate": 1.9801180381206065e-05, "loss": 0.7478, "step": 259},
    {"epoch": 2.6547314578005117, "grad_norm": 0.35837844014167786, "learning_rate": 1.979961705036587e-05, "loss": 0.7503, "step": 260},
    {"epoch": 2.6649616368286444, "grad_norm": 0.3149022161960602, "learning_rate": 1.979804765948889e-05, "loss": 0.7574, "step": 261},
    {"epoch": 2.6751918158567776, "grad_norm": 0.35966795682907104, "learning_rate": 1.979647220954563e-05, "loss": 0.761, "step": 262},
    {"epoch": 2.6854219948849103, "grad_norm": 0.3725556433200836, "learning_rate": 1.9794890701510337e-05, "loss": 0.7544, "step": 263},
    {"epoch": 2.6956521739130435, "grad_norm": 0.30502018332481384, "learning_rate": 1.9793303136361016e-05, "loss": 0.74, "step": 264},
    {"epoch": 2.7058823529411766, "grad_norm": 0.36470985412597656, "learning_rate": 1.97917095150794e-05, "loss": 0.7429, "step": 265},
    {"epoch": 2.7161125319693094, "grad_norm": 0.3593257665634155, "learning_rate": 1.9790109838650987e-05, "loss": 0.7362, "step": 266},
    {"epoch": 2.7263427109974425, "grad_norm": 0.3576882481575012, "learning_rate": 1.9788504108065002e-05, "loss": 0.7415, "step": 267},
    {"epoch": 2.7365728900255757, "grad_norm": 0.48933324217796326, "learning_rate": 1.978689232431442e-05, "loss": 0.7347, "step": 268},
    {"epoch": 2.7468030690537084, "grad_norm": 0.38572192192077637, "learning_rate": 1.9785274488395966e-05, "loss": 0.7409, "step": 269},
    {"epoch": 2.7570332480818416, "grad_norm": 0.4073234498500824, "learning_rate": 1.9783650601310097e-05, "loss": 0.7536, "step": 270},
    {"epoch": 2.7672634271099743, "grad_norm": 0.31672948598861694, "learning_rate": 1.9782020664061026e-05, "loss": 0.7461, "step": 271},
    {"epoch": 2.7774936061381075, "grad_norm": 0.3638211786746979, "learning_rate": 1.978038467765669e-05, "loss": 0.7545, "step": 272},
    {"epoch": 2.78772378516624, "grad_norm": 0.3304184377193451, "learning_rate": 1.977874264310877e-05, "loss": 0.75, "step": 273},
    {"epoch": 2.7979539641943734, "grad_norm": 0.3346056044101715, "learning_rate": 1.9777094561432707e-05, "loss": 0.77, "step": 274},
    {"epoch": 2.8081841432225065, "grad_norm": 0.33290547132492065, "learning_rate": 1.9775440433647662e-05, "loss": 0.7494, "step": 275},
    {"epoch": 2.8184143222506393, "grad_norm": 0.3400842547416687, "learning_rate": 1.9773780260776534e-05, "loss": 0.7658, "step": 276},
    {"epoch": 2.8286445012787724, "grad_norm": 0.36258232593536377, "learning_rate": 1.9772114043845968e-05, "loss": 0.7437, "step": 277},
    {"epoch": 2.8388746803069056, "grad_norm": 0.34615638852119446, "learning_rate": 1.9770441783886348e-05, "loss": 0.7733, "step": 278},
    {"epoch": 2.8491048593350383, "grad_norm": 0.3608517348766327, "learning_rate": 1.976876348193179e-05, "loss": 0.7451, "step": 279},
    {"epoch": 2.8593350383631715, "grad_norm": 0.345670610666275, "learning_rate": 1.9767079139020148e-05, "loss": 0.737, "step": 280},
    {"epoch": 2.869565217391304, "grad_norm": 0.3253295421600342, "learning_rate": 1.976538875619301e-05, "loss": 0.7394, "step": 281},
    {"epoch": 2.8797953964194374, "grad_norm": 0.40524566173553467, "learning_rate": 1.9763692334495706e-05, "loss": 0.7517, "step": 282},
    {"epoch": 2.89002557544757, "grad_norm": 0.4947255849838257, "learning_rate": 1.9761989874977287e-05, "loss": 0.744, "step": 283},
    {"epoch": 2.9002557544757033, "grad_norm": 0.5664758682250977, "learning_rate": 1.9760281378690554e-05, "loss": 0.7219, "step": 284},
    {"epoch": 2.9104859335038364, "grad_norm": 0.4397972822189331, "learning_rate": 1.9758566846692028e-05, "loss": 0.7301, "step": 285},
    {"epoch": 2.920716112531969, "grad_norm": 0.2917048931121826, "learning_rate": 1.975684628004197e-05, "loss": 0.7582, "step": 286},
    {"epoch": 2.9309462915601023, "grad_norm": 0.44785866141319275, "learning_rate": 1.975511967980437e-05, "loss": 0.7594, "step": 287},
    {"epoch": 2.9411764705882355, "grad_norm": 0.5528475046157837, "learning_rate": 1.9753387047046945e-05, "loss": 0.7289, "step": 288},
    {"epoch": 2.9514066496163682, "grad_norm": 0.5927398800849915, "learning_rate": 1.9751648382841152e-05, "loss": 0.7496, "step": 289},
    {"epoch": 2.9616368286445014, "grad_norm": 0.43449971079826355, "learning_rate": 1.974990368826217e-05, "loss": 0.7334, "step": 290},
    {"epoch": 2.971867007672634, "grad_norm": 0.410898357629776, "learning_rate": 1.9748152964388912e-05, "loss": 0.7367, "step": 291},
    {"epoch": 2.9820971867007673, "grad_norm": 0.6079134941101074, "learning_rate": 1.9746396212304018e-05, "loss": 0.7298, "step": 292},
    {"epoch": 2.9923273657289, "grad_norm": 0.37779679894447327, "learning_rate": 1.9744633433093852e-05, "loss": 0.7449, "step": 293},
    {"epoch": 3.0, "grad_norm": 0.38931500911712646, "learning_rate": 1.974286462784851e-05, "loss": 0.7486, "step": 294},
    {"epoch": 3.010230179028133, "grad_norm": 0.45001041889190674, "learning_rate": 1.9741089797661816e-05, "loss": 0.7467, "step": 295},
    {"epoch": 3.020460358056266, "grad_norm": 0.4439624845981598, "learning_rate": 1.9739308943631306e-05, "loss": 0.7217, "step": 296},
    {"epoch": 3.030690537084399, "grad_norm": 0.5038313865661621, "learning_rate": 1.9737522066858264e-05, "loss": 0.7262, "step": 297},
    {"epoch": 3.040920716112532, "grad_norm": 0.336954802274704, "learning_rate": 1.973572916844768e-05, "loss": 0.7372, "step": 298},
    {"epoch": 3.051150895140665, "grad_norm": 0.5147439241409302, "learning_rate": 1.9733930249508276e-05, "loss": 0.7407, "step": 299},
    {"epoch": 3.061381074168798, "grad_norm": 0.6436253190040588, "learning_rate": 1.973212531115249e-05, "loss": 0.7124, "step": 300},
    {"epoch": 3.071611253196931, "grad_norm": 0.322561651468277, "learning_rate": 1.9730314354496497e-05, "loss": 0.7462, "step": 301},
    {"epoch": 3.081841432225064, "grad_norm": 1.0073673725128174, "learning_rate": 1.9728497380660175e-05, "loss": 0.7351, "step": 302},
    {"epoch": 3.0920716112531967, "grad_norm": 0.8805359601974487, "learning_rate": 1.9726674390767135e-05, "loss": 0.7195, "step": 303},
    {"epoch": 3.10230179028133, "grad_norm": 0.7060922980308533, "learning_rate": 1.9724845385944705e-05, "loss": 0.7399, "step": 304},
    {"epoch": 3.112531969309463, "grad_norm": 1.5701392889022827, "learning_rate": 1.9723010367323937e-05, "loss": 0.7259, "step": 305},
    {"epoch": 3.122762148337596, "grad_norm": 0.5668526291847229, "learning_rate": 1.972116933603959e-05, "loss": 0.7188, "step": 306},
    {"epoch": 3.132992327365729, "grad_norm": 0.9209063053131104, "learning_rate": 1.9719322293230158e-05, "loss": 0.7248, "step": 307},
    {"epoch": 3.1432225063938617, "grad_norm": 0.6814517378807068, "learning_rate": 1.9717469240037835e-05, "loss": 0.7404, "step": 308},
    {"epoch": 3.153452685421995, "grad_norm": 1.6327886581420898, "learning_rate": 1.9715610177608547e-05, "loss": 0.7392, "step": 309},
    {"epoch": 3.163682864450128, "grad_norm": 0.8322291970252991, "learning_rate": 1.9713745107091924e-05, "loss": 0.7244, "step": 310},
    {"epoch": 3.1739130434782608, "grad_norm": 0.9853781461715698, "learning_rate": 1.971187402964132e-05, "loss": 0.7342, "step": 311},
    {"epoch": 3.184143222506394, "grad_norm": 0.869482159614563, "learning_rate": 1.97099969464138e-05, "loss": 0.7376, "step": 312},
    {"epoch": 3.1943734015345266, "grad_norm": 0.5311108827590942, "learning_rate": 1.9708113858570143e-05, "loss": 0.7339, "step": 313},
    {"epoch": 3.20460358056266, "grad_norm": 1.897441029548645, "learning_rate": 1.9706224767274844e-05, "loss": 0.7537, "step": 314},
    {"epoch": 3.214833759590793, "grad_norm": 1.1312215328216553, "learning_rate": 1.9704329673696104e-05, "loss": 0.7368, "step": 315},
    {"epoch": 3.2250639386189257, "grad_norm": 0.859919548034668, "learning_rate": 1.9702428579005844e-05, "loss": 0.7492, "step": 316},
    {"epoch": 3.235294117647059, "grad_norm": 0.8550412654876709, "learning_rate": 1.9700521484379687e-05, "loss": 0.7204, "step": 317},
    {"epoch": 3.2455242966751916, "grad_norm": 0.6723067164421082, "learning_rate": 1.9698608390996975e-05, "loss": 0.7349, "step": 318},
    {"epoch": 3.2557544757033248, "grad_norm": 0.7956928610801697, "learning_rate": 1.9696689300040758e-05, "loss": 0.738, "step": 319},
    {"epoch": 3.265984654731458, "grad_norm": 1.1205618381500244, "learning_rate": 1.9694764212697784e-05, "loss": 0.7219, "step": 320},
    {"epoch": 3.2762148337595907, "grad_norm": 0.4344213008880615, "learning_rate": 1.9692833130158528e-05, "loss": 0.7473, "step": 321},
    {"epoch": 3.286445012787724, "grad_norm": 0.8046640157699585, "learning_rate": 1.9690896053617156e-05, "loss": 0.736, "step": 322},
    {"epoch": 3.296675191815857, "grad_norm": 0.5893542170524597, "learning_rate": 1.968895298427155e-05, "loss": 0.7178, "step": 323},
    {"epoch": 3.3069053708439897, "grad_norm": 0.543119490146637, "learning_rate": 1.9687003923323292e-05, "loss": 0.7386, "step": 324},
    {"epoch": 3.317135549872123, "grad_norm": 0.9876613616943359, "learning_rate": 1.9685048871977678e-05, "loss": 0.732, "step": 325},
    {"epoch": 3.3273657289002556, "grad_norm": 0.42082035541534424, "learning_rate": 1.968308783144369e-05, "loss": 0.7272, "step": 326},
    {"epoch": 3.337595907928389, "grad_norm": 0.7916600108146667, "learning_rate": 1.9681120802934042e-05, "loss": 0.7303, "step": 327},
    {"epoch": 3.3478260869565215, "grad_norm": 0.6041877865791321, "learning_rate": 1.9679147787665128e-05, "loss": 0.7395, "step": 328},
    {"epoch": 3.3580562659846547, "grad_norm": 0.8722060918807983, "learning_rate": 1.9677168786857044e-05, "loss": 0.7226, "step": 329},
    {"epoch": 3.368286445012788, "grad_norm": 0.577777624130249, "learning_rate": 1.967518380173361e-05, "loss": 0.7301, "step": 330},
    {"epoch": 3.3785166240409206, "grad_norm": 0.5443961024284363, "learning_rate": 1.967319283352232e-05, "loss": 0.7355, "step": 331},
    {"epoch": 3.3887468030690537, "grad_norm": 0.695266842842102, "learning_rate": 1.967119588345438e-05, "loss": 0.7361, "step": 332},
    {"epoch": 3.398976982097187, "grad_norm": 0.41574743390083313, "learning_rate": 1.9669192952764698e-05, "loss": 0.7307, "step": 333},
    {"epoch": 3.4092071611253196, "grad_norm": 0.9063800573348999, "learning_rate": 1.9667184042691877e-05, "loss": 0.7374, "step": 334},
    {"epoch": 3.419437340153453, "grad_norm": 0.45360273122787476, "learning_rate": 1.9665169154478214e-05, "loss": 0.7449, "step": 335},
    {"epoch": 3.4296675191815855, "grad_norm": 0.6068630218505859, "learning_rate": 1.966314828936971e-05, "loss": 0.7381, "step": 336},
    {"epoch": 3.4398976982097187, "grad_norm": 0.5979428291320801, "learning_rate": 1.9661121448616057e-05, "loss": 0.736, "step": 337},
    {"epoch": 3.4501278772378514, "grad_norm": 0.5259372591972351, "learning_rate": 1.9659088633470646e-05, "loss": 0.7411, "step": 338},
    {"epoch": 3.4603580562659846, "grad_norm": 0.7314724326133728, "learning_rate": 1.965704984519055e-05, "loss": 0.7396, "step": 339},
    {"epoch": 3.4705882352941178, "grad_norm": 0.40047791600227356, "learning_rate": 1.9655005085036563e-05, "loss": 0.696, "step": 340},
    {"epoch": 3.4808184143222505, "grad_norm": 0.6751558780670166, "learning_rate": 1.965295435427314e-05, "loss": 0.7283, "step": 341},
    {"epoch": 3.4910485933503836, "grad_norm": 0.4402068853378296, "learning_rate": 1.965089765416845e-05, "loss": 0.7171, "step": 342},
    {"epoch": 3.501278772378517, "grad_norm": 0.8496968746185303, "learning_rate": 1.964883498599435e-05, "loss": 0.7121, "step": 343},
    {"epoch": 3.5115089514066495, "grad_norm": 0.42892828583717346, "learning_rate": 1.9646766351026378e-05, "loss": 0.7461, "step": 344},
    {"epoch": 3.5217391304347827, "grad_norm": 0.6416417956352234, "learning_rate": 1.964469175054377e-05, "loss": 0.7429, "step": 345},
    {"epoch": 3.531969309462916, "grad_norm": 0.361579030752182, "learning_rate": 1.9642611185829444e-05, "loss": 0.7302, "step": 346},
    {"epoch": 3.5421994884910486, "grad_norm": 0.5479419231414795, "learning_rate": 1.9640524658170023e-05, "loss": 0.7304, "step": 347},
    {"epoch": 3.5524296675191813, "grad_norm": 0.4338844418525696, "learning_rate": 1.9638432168855797e-05, "loss": 0.6999, "step": 348},
    {"epoch": 3.5626598465473145, "grad_norm": 0.5022692680358887, "learning_rate": 1.9636333719180755e-05, "loss": 0.7068, "step": 349},
    {"epoch": 3.5728900255754477, "grad_norm": 0.4946191608905792, "learning_rate": 1.9634229310442564e-05, "loss": 0.7336, "step": 350},
    {"epoch": 3.5831202046035804, "grad_norm": 0.5408332943916321, "learning_rate": 1.9632118943942587e-05, "loss": 0.741, "step": 351},
    {"epoch": 3.5933503836317136, "grad_norm": 0.7036914825439453, "learning_rate": 1.9630002620985856e-05, "loss": 0.7257, "step": 352},
    {"epoch": 3.6035805626598467, "grad_norm": 0.5046015977859497, "learning_rate": 1.9627880342881106e-05, "loss": 0.729, "step": 353},
    {"epoch": 3.6138107416879794, "grad_norm": 0.632416307926178, "learning_rate": 1.9625752110940736e-05, "loss": 0.7089, "step": 354},
    {"epoch": 3.6240409207161126, "grad_norm": 0.3495820164680481, "learning_rate": 1.962361792648084e-05, "loss": 0.7391, "step": 355},
    {"epoch": 3.634271099744246, "grad_norm": 0.9268649816513062, "learning_rate": 1.962147779082118e-05, "loss": 0.7361, "step": 356},
    {"epoch": 3.6445012787723785, "grad_norm": 0.5460264086723328, "learning_rate": 1.961933170528521e-05, "loss": 0.7213, "step": 357},
    {"epoch": 3.6547314578005117, "grad_norm": 0.6042870283126831, "learning_rate": 1.961717967120006e-05, "loss": 0.7295, "step": 358},
    {"epoch": 3.6649616368286444, "grad_norm": 0.4831192195415497, "learning_rate": 1.9615021689896533e-05, "loss": 0.7404, "step": 359},
    {"epoch": 3.6751918158567776, "grad_norm": 0.8810766935348511, "learning_rate": 1.9612857762709122e-05, "loss": 0.7192, "step": 360},
    {"epoch": 3.6854219948849103, "grad_norm": 0.4142047166824341, "learning_rate": 1.961068789097599e-05, "loss": 0.7352, "step": 361},
    {"epoch": 3.6956521739130435, "grad_norm": 0.6235917806625366, "learning_rate": 1.9608512076038964e-05, "loss": 0.7345, "step": 362},
    {"epoch": 3.7058823529411766, "grad_norm": 0.4514963626861572, "learning_rate": 1.9606330319243568e-05, "loss": 0.721, "step": 363},
    {"epoch": 3.7161125319693094, "grad_norm": 0.7020410299301147, "learning_rate": 1.9604142621938986e-05, "loss": 0.7126, "step": 364},
    {"epoch": 3.7263427109974425, "grad_norm": 0.3949349820613861, "learning_rate": 1.9601948985478078e-05, "loss": 0.7412, "step": 365},
    {"epoch": 3.7365728900255757, "grad_norm": 0.6317225098609924, "learning_rate": 1.9599749411217385e-05, "loss": 0.7418, "step": 366},
    {"epoch": 3.7468030690537084, "grad_norm": 0.41394850611686707, "learning_rate": 1.9597543900517106e-05, "loss": 0.7051, "step": 367},
    {"epoch": 3.7570332480818416, "grad_norm": 0.6289117336273193, "learning_rate": 1.959533245474112e-05, "loss": 0.7397, "step": 368},
    {"epoch": 3.7672634271099743, "grad_norm": 0.5273838639259338, "learning_rate": 1.9593115075256978e-05,
|
"loss": 0.7129, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 3.7774936061381075, |
|
"grad_norm": 0.4530670940876007, |
|
"learning_rate": 1.9590891763435892e-05, |
|
"loss": 0.7211, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 3.78772378516624, |
|
"grad_norm": 0.5219662189483643, |
|
"learning_rate": 1.9588662520652756e-05, |
|
"loss": 0.732, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 3.7979539641943734, |
|
"grad_norm": 0.4979133605957031, |
|
"learning_rate": 1.958642734828611e-05, |
|
"loss": 0.7375, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 3.8081841432225065, |
|
"grad_norm": 0.5304292440414429, |
|
"learning_rate": 1.958418624771819e-05, |
|
"loss": 0.7349, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 3.8184143222506393, |
|
"grad_norm": 0.4747787117958069, |
|
"learning_rate": 1.958193922033487e-05, |
|
"loss": 0.7338, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 3.8286445012787724, |
|
"grad_norm": 0.451325923204422, |
|
"learning_rate": 1.9579686267525704e-05, |
|
"loss": 0.7084, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 3.8388746803069056, |
|
"grad_norm": 0.43068253993988037, |
|
"learning_rate": 1.957742739068391e-05, |
|
"loss": 0.7348, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 3.8491048593350383, |
|
"grad_norm": 0.4494073987007141, |
|
"learning_rate": 1.9575162591206362e-05, |
|
"loss": 0.7466, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 3.8593350383631715, |
|
"grad_norm": 0.46458712220191956, |
|
"learning_rate": 1.9572891870493604e-05, |
|
"loss": 0.7257, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 3.869565217391304, |
|
"grad_norm": 0.4800684452056885, |
|
"learning_rate": 1.9570615229949844e-05, |
|
"loss": 0.7171, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 3.8797953964194374, |
|
"grad_norm": 0.4564310312271118, |
|
"learning_rate": 1.9568332670982934e-05, |
|
"loss": 0.7225, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 3.89002557544757, |
|
"grad_norm": 0.39550548791885376, |
|
"learning_rate": 1.956604419500441e-05, |
|
"loss": 0.7119, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 3.9002557544757033, |
|
"grad_norm": 0.4126419425010681, |
|
"learning_rate": 1.9563749803429447e-05, |
|
"loss": 0.7359, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 3.9104859335038364, |
|
"grad_norm": 0.4096467196941376, |
|
"learning_rate": 1.9561449497676888e-05, |
|
"loss": 0.7322, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 3.920716112531969, |
|
"grad_norm": 0.407622754573822, |
|
"learning_rate": 1.955914327916923e-05, |
|
"loss": 0.7202, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 3.9309462915601023, |
|
"grad_norm": 0.363885760307312, |
|
"learning_rate": 1.955683114933263e-05, |
|
"loss": 0.7171, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 3.9411764705882355, |
|
"grad_norm": 0.3362027704715729, |
|
"learning_rate": 1.9554513109596898e-05, |
|
"loss": 0.717, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 3.9514066496163682, |
|
"grad_norm": 0.3913104832172394, |
|
"learning_rate": 1.9552189161395496e-05, |
|
"loss": 0.7141, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 3.9616368286445014, |
|
"grad_norm": 0.3984425961971283, |
|
"learning_rate": 1.9549859306165543e-05, |
|
"loss": 0.7404, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 3.971867007672634, |
|
"grad_norm": 0.4172505736351013, |
|
"learning_rate": 1.9547523545347816e-05, |
|
"loss": 0.7402, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 3.9820971867007673, |
|
"grad_norm": 0.3991503119468689, |
|
"learning_rate": 1.954518188038673e-05, |
|
"loss": 0.7306, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 3.9923273657289, |
|
"grad_norm": 0.5286377668380737, |
|
"learning_rate": 1.9542834312730366e-05, |
|
"loss": 0.728, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 4.0, |
|
"grad_norm": 0.49067217111587524, |
|
"learning_rate": 1.9540480843830443e-05, |
|
"loss": 0.7302, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 4.010230179028133, |
|
"grad_norm": 0.7454357147216797, |
|
"learning_rate": 1.9538121475142345e-05, |
|
"loss": 0.7218, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 4.020460358056266, |
|
"grad_norm": 0.44008755683898926, |
|
"learning_rate": 1.9535756208125082e-05, |
|
"loss": 0.716, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 4.030690537084399, |
|
"grad_norm": 0.6294509172439575, |
|
"learning_rate": 1.9533385044241335e-05, |
|
"loss": 0.7321, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 4.040920716112532, |
|
"grad_norm": 0.46539950370788574, |
|
"learning_rate": 1.9531007984957408e-05, |
|
"loss": 0.742, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 4.051150895140665, |
|
"grad_norm": 0.6322250366210938, |
|
"learning_rate": 1.9528625031743278e-05, |
|
"loss": 0.7217, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 4.061381074168798, |
|
"grad_norm": 0.5787230134010315, |
|
"learning_rate": 1.952623618607254e-05, |
|
"loss": 0.7298, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 4.071611253196931, |
|
"grad_norm": 0.5471862554550171, |
|
"learning_rate": 1.952384144942245e-05, |
|
"loss": 0.7135, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 4.081841432225064, |
|
"grad_norm": 0.543489396572113, |
|
"learning_rate": 1.9521440823273902e-05, |
|
"loss": 0.7164, |
|
"step": 400 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 4000, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 42, |
|
"save_steps": 100, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": false |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 4.570007282502746e+18, |
|
"train_batch_size": 32, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |