{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.7138122673736808,
  "eval_steps": 500,
  "global_step": 14000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0005098659052669148,
      "grad_norm": 251.65310853808273,
      "learning_rate": 7.640067911714771e-08,
      "loss": 7.2611,
      "step": 10
    },
    {
      "epoch": 0.0010197318105338296,
      "grad_norm": 223.4280231492118,
      "learning_rate": 1.6129032258064518e-07,
      "loss": 7.5935,
      "step": 20
    },
    {
      "epoch": 0.0015295977158007444,
      "grad_norm": 193.4556196678711,
      "learning_rate": 2.461799660441426e-07,
      "loss": 7.1911,
      "step": 30
    },
    {
      "epoch": 0.002039463621067659,
      "grad_norm": 154.93275414899998,
      "learning_rate": 3.310696095076401e-07,
      "loss": 6.6369,
      "step": 40
    },
    {
      "epoch": 0.002549329526334574,
      "grad_norm": 179.53329242395242,
      "learning_rate": 4.1595925297113753e-07,
      "loss": 5.5123,
      "step": 50
    },
    {
      "epoch": 0.0030591954316014887,
      "grad_norm": 141.11077623847112,
      "learning_rate": 5.00848896434635e-07,
      "loss": 4.381,
      "step": 60
    },
    {
      "epoch": 0.0035690613368684035,
      "grad_norm": 46.48467601369476,
      "learning_rate": 5.857385398981324e-07,
      "loss": 3.1309,
      "step": 70
    },
    {
      "epoch": 0.004078927242135318,
      "grad_norm": 53.96813671274727,
      "learning_rate": 6.706281833616299e-07,
      "loss": 2.5806,
      "step": 80
    },
    {
      "epoch": 0.0045887931474022335,
      "grad_norm": 34.935060044045606,
      "learning_rate": 7.555178268251275e-07,
      "loss": 1.699,
      "step": 90
    },
    {
      "epoch": 0.005098659052669148,
      "grad_norm": 67.7244290501232,
      "learning_rate": 8.404074702886249e-07,
      "loss": 1.2787,
      "step": 100
    },
    {
      "epoch": 0.005608524957936063,
      "grad_norm": 48.186462961125564,
      "learning_rate": 9.252971137521223e-07,
      "loss": 1.104,
      "step": 110
    },
    {
      "epoch": 0.0061183908632029774,
      "grad_norm": 71.68206146928328,
      "learning_rate": 1.0101867572156197e-06,
      "loss": 0.9308,
      "step": 120
    },
    {
      "epoch": 0.006628256768469893,
      "grad_norm": 84.19717289430702,
      "learning_rate": 1.0950764006791174e-06,
      "loss": 0.9437,
      "step": 130
    },
    {
      "epoch": 0.007138122673736807,
      "grad_norm": 22.01714846636104,
      "learning_rate": 1.1799660441426147e-06,
      "loss": 0.9429,
      "step": 140
    },
    {
      "epoch": 0.007647988579003722,
      "grad_norm": 35.42757584454689,
      "learning_rate": 1.2648556876061122e-06,
      "loss": 0.865,
      "step": 150
    },
    {
      "epoch": 0.008157854484270637,
      "grad_norm": 55.96764262360704,
      "learning_rate": 1.3497453310696096e-06,
      "loss": 0.9103,
      "step": 160
    },
    {
      "epoch": 0.008667720389537552,
      "grad_norm": 29.05124237209439,
      "learning_rate": 1.434634974533107e-06,
      "loss": 0.8996,
      "step": 170
    },
    {
      "epoch": 0.009177586294804467,
      "grad_norm": 26.211170552849907,
      "learning_rate": 1.5195246179966044e-06,
      "loss": 0.9332,
      "step": 180
    },
    {
      "epoch": 0.00968745220007138,
      "grad_norm": 18.947652048653687,
      "learning_rate": 1.604414261460102e-06,
      "loss": 0.9318,
      "step": 190
    },
    {
      "epoch": 0.010197318105338296,
      "grad_norm": 30.129455976188698,
      "learning_rate": 1.6893039049235995e-06,
      "loss": 0.8478,
      "step": 200
    },
    {
      "epoch": 0.010707184010605211,
      "grad_norm": 82.99133324029893,
      "learning_rate": 1.774193548387097e-06,
      "loss": 0.832,
      "step": 210
    },
    {
      "epoch": 0.011217049915872126,
      "grad_norm": 26.23660369155024,
      "learning_rate": 1.8590831918505945e-06,
      "loss": 0.8242,
      "step": 220
    },
    {
      "epoch": 0.01172691582113904,
      "grad_norm": 23.197604257350026,
      "learning_rate": 1.943972835314092e-06,
      "loss": 0.8753,
      "step": 230
    },
    {
      "epoch": 0.012236781726405955,
      "grad_norm": 19.501104263510726,
      "learning_rate": 2.028862478777589e-06,
      "loss": 0.8768,
      "step": 240
    },
    {
      "epoch": 0.01274664763167287,
      "grad_norm": 24.897662656911052,
      "learning_rate": 2.113752122241087e-06,
      "loss": 0.869,
      "step": 250
    },
    {
      "epoch": 0.013256513536939785,
      "grad_norm": 21.333577680411484,
      "learning_rate": 2.1986417657045842e-06,
      "loss": 0.9116,
      "step": 260
    },
    {
      "epoch": 0.013766379442206699,
      "grad_norm": 62.90240224505213,
      "learning_rate": 2.2835314091680816e-06,
      "loss": 0.841,
      "step": 270
    },
    {
      "epoch": 0.014276245347473614,
      "grad_norm": 20.59553192116265,
      "learning_rate": 2.368421052631579e-06,
      "loss": 0.904,
      "step": 280
    },
    {
      "epoch": 0.01478611125274053,
      "grad_norm": 20.161810893917714,
      "learning_rate": 2.4533106960950766e-06,
      "loss": 0.8194,
      "step": 290
    },
    {
      "epoch": 0.015295977158007444,
      "grad_norm": 16.206444109540872,
      "learning_rate": 2.538200339558574e-06,
      "loss": 0.85,
      "step": 300
    },
    {
      "epoch": 0.015805843063274358,
      "grad_norm": 24.712582437250955,
      "learning_rate": 2.6230899830220713e-06,
      "loss": 0.858,
      "step": 310
    },
    {
      "epoch": 0.016315708968541273,
      "grad_norm": 11.762759748871197,
      "learning_rate": 2.707979626485569e-06,
      "loss": 0.916,
      "step": 320
    },
    {
      "epoch": 0.01682557487380819,
      "grad_norm": 12.553238322483976,
      "learning_rate": 2.7928692699490667e-06,
      "loss": 0.8499,
      "step": 330
    },
    {
      "epoch": 0.017335440779075104,
      "grad_norm": 16.760823082900814,
      "learning_rate": 2.877758913412564e-06,
      "loss": 0.9279,
      "step": 340
    },
    {
      "epoch": 0.01784530668434202,
      "grad_norm": 28.504528294646803,
      "learning_rate": 2.9626485568760614e-06,
      "loss": 0.9199,
      "step": 350
    },
    {
      "epoch": 0.018355172589608934,
      "grad_norm": 14.4930273261429,
      "learning_rate": 3.0475382003395587e-06,
      "loss": 0.8613,
      "step": 360
    },
    {
      "epoch": 0.01886503849487585,
      "grad_norm": 29.69282833969532,
      "learning_rate": 3.1324278438030564e-06,
      "loss": 0.9243,
      "step": 370
    },
    {
      "epoch": 0.01937490440014276,
      "grad_norm": 11.766751225603937,
      "learning_rate": 3.2173174872665538e-06,
      "loss": 0.8861,
      "step": 380
    },
    {
      "epoch": 0.019884770305409676,
      "grad_norm": 14.676218010280364,
      "learning_rate": 3.302207130730051e-06,
      "loss": 0.7766,
      "step": 390
    },
    {
      "epoch": 0.02039463621067659,
      "grad_norm": 24.371393167451448,
      "learning_rate": 3.3870967741935484e-06,
      "loss": 0.8223,
      "step": 400
    },
    {
      "epoch": 0.020904502115943507,
      "grad_norm": 43.948339877931836,
      "learning_rate": 3.471986417657046e-06,
      "loss": 0.8368,
      "step": 410
    },
    {
      "epoch": 0.021414368021210422,
      "grad_norm": 33.83725422441187,
      "learning_rate": 3.556876061120544e-06,
      "loss": 0.8302,
      "step": 420
    },
    {
      "epoch": 0.021924233926477337,
      "grad_norm": 20.093752196073595,
      "learning_rate": 3.641765704584041e-06,
      "loss": 0.904,
      "step": 430
    },
    {
      "epoch": 0.022434099831744252,
      "grad_norm": 17.199162746926195,
      "learning_rate": 3.7266553480475385e-06,
      "loss": 0.8352,
      "step": 440
    },
    {
      "epoch": 0.022943965737011168,
      "grad_norm": 16.80175387256325,
      "learning_rate": 3.811544991511036e-06,
      "loss": 0.8199,
      "step": 450
    },
    {
      "epoch": 0.02345383164227808,
      "grad_norm": 13.110708597973506,
      "learning_rate": 3.896434634974533e-06,
      "loss": 0.8551,
      "step": 460
    },
    {
      "epoch": 0.023963697547544995,
      "grad_norm": 14.670368305103178,
      "learning_rate": 3.981324278438031e-06,
      "loss": 0.8223,
      "step": 470
    },
    {
      "epoch": 0.02447356345281191,
      "grad_norm": 19.68234113783656,
      "learning_rate": 4.066213921901529e-06,
      "loss": 0.8651,
      "step": 480
    },
    {
      "epoch": 0.024983429358078825,
      "grad_norm": 12.331385026856294,
      "learning_rate": 4.1511035653650255e-06,
      "loss": 0.8172,
      "step": 490
    },
    {
      "epoch": 0.02549329526334574,
      "grad_norm": 20.223300847863285,
      "learning_rate": 4.235993208828523e-06,
      "loss": 0.9137,
      "step": 500
    },
    {
      "epoch": 0.026003161168612655,
      "grad_norm": 12.887254567357738,
      "learning_rate": 4.320882852292021e-06,
      "loss": 0.8477,
      "step": 510
    },
    {
      "epoch": 0.02651302707387957,
      "grad_norm": 31.28097745090668,
      "learning_rate": 4.405772495755518e-06,
      "loss": 0.8975,
      "step": 520
    },
    {
      "epoch": 0.027022892979146486,
      "grad_norm": 14.546358470787888,
      "learning_rate": 4.490662139219016e-06,
      "loss": 0.8026,
      "step": 530
    },
    {
      "epoch": 0.027532758884413398,
      "grad_norm": 12.893626716420755,
      "learning_rate": 4.575551782682513e-06,
      "loss": 0.8576,
      "step": 540
    },
    {
      "epoch": 0.028042624789680313,
      "grad_norm": 15.633191864977086,
      "learning_rate": 4.66044142614601e-06,
      "loss": 0.8412,
      "step": 550
    },
    {
      "epoch": 0.028552490694947228,
      "grad_norm": 17.734319032596773,
      "learning_rate": 4.745331069609508e-06,
      "loss": 0.8408,
      "step": 560
    },
    {
      "epoch": 0.029062356600214143,
      "grad_norm": 14.072877238199819,
      "learning_rate": 4.830220713073006e-06,
      "loss": 0.8334,
      "step": 570
    },
    {
      "epoch": 0.02957222250548106,
      "grad_norm": 15.606824188231757,
      "learning_rate": 4.915110356536503e-06,
      "loss": 0.827,
      "step": 580
    },
    {
      "epoch": 0.030082088410747974,
      "grad_norm": 38.3978369308503,
      "learning_rate": 5e-06,
      "loss": 0.8455,
      "step": 590
    },
    {
      "epoch": 0.03059195431601489,
      "grad_norm": 12.840980533572772,
      "learning_rate": 4.999996591164963e-06,
      "loss": 0.8228,
      "step": 600
    },
    {
      "epoch": 0.031101820221281804,
      "grad_norm": 30.338227470007126,
      "learning_rate": 4.999986364669145e-06,
      "loss": 0.7974,
      "step": 610
    },
    {
      "epoch": 0.031611686126548716,
      "grad_norm": 18.602004650159476,
      "learning_rate": 4.999969320540435e-06,
      "loss": 0.8005,
      "step": 620
    },
    {
      "epoch": 0.03212155203181563,
      "grad_norm": 55.38008193860322,
      "learning_rate": 4.999945458825315e-06,
      "loss": 0.8082,
      "step": 630
    },
    {
      "epoch": 0.032631417937082546,
      "grad_norm": 36.39600509819533,
      "learning_rate": 4.9999147795888545e-06,
      "loss": 0.7613,
      "step": 640
    },
    {
      "epoch": 0.03314128384234946,
      "grad_norm": 11.625306380518289,
      "learning_rate": 4.999877282914722e-06,
      "loss": 0.8314,
      "step": 650
    },
    {
      "epoch": 0.03365114974761638,
      "grad_norm": 14.287588770540392,
      "learning_rate": 4.999832968905171e-06,
      "loss": 0.8271,
      "step": 660
    },
    {
      "epoch": 0.03416101565288329,
      "grad_norm": 19.005445741004017,
      "learning_rate": 4.999781837681048e-06,
      "loss": 0.8386,
      "step": 670
    },
    {
      "epoch": 0.03467088155815021,
      "grad_norm": 26.674618201945194,
      "learning_rate": 4.999723889381793e-06,
      "loss": 0.7765,
      "step": 680
    },
    {
      "epoch": 0.03518074746341712,
      "grad_norm": 13.637855817556927,
      "learning_rate": 4.999659124165434e-06,
      "loss": 0.8344,
      "step": 690
    },
    {
      "epoch": 0.03569061336868404,
      "grad_norm": 21.01966042290558,
      "learning_rate": 4.999587542208591e-06,
      "loss": 0.7641,
      "step": 700
    },
    {
      "epoch": 0.03620047927395095,
      "grad_norm": 30.965125642767354,
      "learning_rate": 4.999509143706472e-06,
      "loss": 0.8969,
      "step": 710
    },
    {
      "epoch": 0.03671034517921787,
      "grad_norm": 14.067836644004847,
      "learning_rate": 4.999423928872876e-06,
      "loss": 0.8912,
      "step": 720
    },
    {
      "epoch": 0.03722021108448478,
      "grad_norm": 22.189111598465264,
      "learning_rate": 4.999331897940189e-06,
      "loss": 0.755,
      "step": 730
    },
    {
      "epoch": 0.0377300769897517,
      "grad_norm": 23.40728571879822,
      "learning_rate": 4.999233051159385e-06,
      "loss": 0.8145,
      "step": 740
    },
    {
      "epoch": 0.03823994289501861,
      "grad_norm": 11.14841713621582,
      "learning_rate": 4.999127388800028e-06,
      "loss": 0.805,
      "step": 750
    },
    {
      "epoch": 0.03874980880028552,
      "grad_norm": 23.128314203665813,
      "learning_rate": 4.999014911150264e-06,
      "loss": 0.796,
      "step": 760
    },
    {
      "epoch": 0.03925967470555244,
      "grad_norm": 26.492358982029966,
      "learning_rate": 4.998895618516829e-06,
      "loss": 0.8336,
      "step": 770
    },
    {
      "epoch": 0.03976954061081935,
      "grad_norm": 22.55100498684785,
      "learning_rate": 4.998769511225041e-06,
      "loss": 0.766,
      "step": 780
    },
    {
      "epoch": 0.04027940651608627,
      "grad_norm": 34.760159393788676,
      "learning_rate": 4.998636589618803e-06,
      "loss": 0.8454,
      "step": 790
    },
    {
      "epoch": 0.04078927242135318,
      "grad_norm": 14.886385472521404,
      "learning_rate": 4.998496854060603e-06,
      "loss": 0.7684,
      "step": 800
    },
    {
      "epoch": 0.0412991383266201,
      "grad_norm": 20.35215207377879,
      "learning_rate": 4.998350304931507e-06,
      "loss": 0.7806,
      "step": 810
    },
    {
      "epoch": 0.04180900423188701,
      "grad_norm": 53.835011257055584,
      "learning_rate": 4.998196942631166e-06,
      "loss": 0.7874,
      "step": 820
    },
    {
      "epoch": 0.04231887013715393,
      "grad_norm": 19.79335731744932,
      "learning_rate": 4.998036767577809e-06,
      "loss": 0.7609,
      "step": 830
    },
    {
      "epoch": 0.042828736042420844,
      "grad_norm": 41.363363492847824,
      "learning_rate": 4.997869780208244e-06,
      "loss": 0.8284,
      "step": 840
    },
    {
      "epoch": 0.04333860194768776,
      "grad_norm": 14.006984552833496,
      "learning_rate": 4.997695980977858e-06,
      "loss": 0.784,
      "step": 850
    },
    {
      "epoch": 0.043848467852954674,
      "grad_norm": 13.221811726431245,
      "learning_rate": 4.997515370360613e-06,
      "loss": 0.8128,
      "step": 860
    },
    {
      "epoch": 0.04435833375822159,
      "grad_norm": 38.14389153979424,
      "learning_rate": 4.997327948849044e-06,
      "loss": 0.7496,
      "step": 870
    },
    {
      "epoch": 0.044868199663488505,
      "grad_norm": 16.60716058264655,
      "learning_rate": 4.997133716954266e-06,
      "loss": 0.8008,
      "step": 880
    },
    {
      "epoch": 0.04537806556875542,
      "grad_norm": 10.131272882306668,
      "learning_rate": 4.996932675205961e-06,
      "loss": 0.7418,
      "step": 890
    },
    {
      "epoch": 0.045887931474022335,
      "grad_norm": 19.781623800186882,
      "learning_rate": 4.996724824152382e-06,
      "loss": 0.7843,
      "step": 900
    },
    {
      "epoch": 0.04639779737928925,
      "grad_norm": 11.205179068676344,
      "learning_rate": 4.996510164360355e-06,
      "loss": 0.801,
      "step": 910
    },
    {
      "epoch": 0.04690766328455616,
      "grad_norm": 7.133132254705291,
      "learning_rate": 4.996288696415271e-06,
      "loss": 0.7436,
      "step": 920
    },
    {
      "epoch": 0.047417529189823074,
      "grad_norm": 7.361016575983099,
      "learning_rate": 4.9960604209210885e-06,
      "loss": 0.7697,
      "step": 930
    },
    {
      "epoch": 0.04792739509508999,
      "grad_norm": 14.875145921702854,
      "learning_rate": 4.99582533850033e-06,
      "loss": 0.7774,
      "step": 940
    },
    {
      "epoch": 0.048437261000356904,
      "grad_norm": 11.641434144165558,
      "learning_rate": 4.995583449794081e-06,
      "loss": 0.7916,
      "step": 950
    },
    {
      "epoch": 0.04894712690562382,
      "grad_norm": 12.61376024781143,
      "learning_rate": 4.99533475546199e-06,
      "loss": 0.721,
      "step": 960
    },
    {
      "epoch": 0.049456992810890735,
      "grad_norm": 10.992135613925235,
      "learning_rate": 4.995079256182261e-06,
      "loss": 0.8025,
      "step": 970
    },
    {
      "epoch": 0.04996685871615765,
      "grad_norm": 23.077584592510522,
      "learning_rate": 4.99481695265166e-06,
      "loss": 0.7668,
      "step": 980
    },
    {
      "epoch": 0.050476724621424565,
      "grad_norm": 20.805552814430307,
      "learning_rate": 4.994547845585505e-06,
      "loss": 0.7682,
      "step": 990
    },
    {
      "epoch": 0.05098659052669148,
      "grad_norm": 37.749437539255986,
      "learning_rate": 4.994271935717671e-06,
      "loss": 0.8547,
      "step": 1000
    },
    {
      "epoch": 0.051496456431958396,
      "grad_norm": 26.52405401012962,
      "learning_rate": 4.99398922380058e-06,
      "loss": 0.8675,
      "step": 1010
    },
    {
      "epoch": 0.05200632233722531,
      "grad_norm": 11.386281013139724,
      "learning_rate": 4.99369971060521e-06,
      "loss": 0.7781,
      "step": 1020
    },
    {
      "epoch": 0.052516188242492226,
      "grad_norm": 23.97388675424806,
      "learning_rate": 4.993403396921082e-06,
      "loss": 0.7452,
      "step": 1030
    },
    {
      "epoch": 0.05302605414775914,
      "grad_norm": 26.25744423502963,
      "learning_rate": 4.993100283556262e-06,
      "loss": 0.7255,
      "step": 1040
    },
    {
      "epoch": 0.053535920053026056,
      "grad_norm": 30.57573685033861,
      "learning_rate": 4.992790371337363e-06,
      "loss": 0.8226,
      "step": 1050
    },
    {
      "epoch": 0.05404578595829297,
      "grad_norm": 11.6456271061302,
      "learning_rate": 4.992473661109535e-06,
      "loss": 0.7672,
      "step": 1060
    },
    {
      "epoch": 0.05455565186355989,
      "grad_norm": 15.587338850002327,
      "learning_rate": 4.99215015373647e-06,
      "loss": 0.7913,
      "step": 1070
    },
    {
      "epoch": 0.055065517768826795,
      "grad_norm": 8.306218641787943,
      "learning_rate": 4.991819850100394e-06,
      "loss": 0.8413,
      "step": 1080
    },
    {
      "epoch": 0.05557538367409371,
      "grad_norm": 13.813826007031592,
      "learning_rate": 4.991482751102066e-06,
      "loss": 0.7028,
      "step": 1090
    },
    {
      "epoch": 0.056085249579360626,
      "grad_norm": 39.13157110924138,
      "learning_rate": 4.99113885766078e-06,
      "loss": 0.7273,
      "step": 1100
    },
    {
      "epoch": 0.05659511548462754,
      "grad_norm": 14.998429544518908,
      "learning_rate": 4.990788170714355e-06,
      "loss": 0.7754,
      "step": 1110
    },
    {
      "epoch": 0.057104981389894456,
      "grad_norm": 37.8467854367974,
      "learning_rate": 4.99043069121914e-06,
      "loss": 0.7971,
      "step": 1120
    },
    {
      "epoch": 0.05761484729516137,
      "grad_norm": 9.645482089818124,
      "learning_rate": 4.990066420150005e-06,
      "loss": 0.7079,
      "step": 1130
    },
    {
      "epoch": 0.05812471320042829,
      "grad_norm": 36.873667812807035,
      "learning_rate": 4.989695358500342e-06,
      "loss": 0.7912,
      "step": 1140
    },
    {
      "epoch": 0.0586345791056952,
      "grad_norm": 19.38112557912608,
      "learning_rate": 4.989317507282061e-06,
      "loss": 0.809,
      "step": 1150
    },
    {
      "epoch": 0.05914444501096212,
      "grad_norm": 14.049014149225295,
      "learning_rate": 4.988932867525588e-06,
      "loss": 0.6839,
      "step": 1160
    },
    {
      "epoch": 0.05965431091622903,
      "grad_norm": 12.306775901881515,
      "learning_rate": 4.988541440279862e-06,
      "loss": 0.8302,
      "step": 1170
    },
    {
      "epoch": 0.06016417682149595,
      "grad_norm": 18.42396715041709,
      "learning_rate": 4.988143226612333e-06,
      "loss": 0.7578,
      "step": 1180
    },
    {
      "epoch": 0.06067404272676286,
      "grad_norm": 33.067925037551674,
      "learning_rate": 4.987738227608954e-06,
      "loss": 0.7718,
      "step": 1190
    },
    {
      "epoch": 0.06118390863202978,
      "grad_norm": 19.216201729152147,
      "learning_rate": 4.987326444374189e-06,
      "loss": 0.7885,
      "step": 1200
    },
    {
      "epoch": 0.06169377453729669,
      "grad_norm": 9.210294599142257,
      "learning_rate": 4.986907878030994e-06,
      "loss": 0.6944,
      "step": 1210
    },
    {
      "epoch": 0.06220364044256361,
      "grad_norm": 8.483381230394919,
      "learning_rate": 4.986482529720831e-06,
      "loss": 0.7622,
      "step": 1220
    },
    {
      "epoch": 0.06271350634783052,
      "grad_norm": 22.65788410832388,
      "learning_rate": 4.986050400603653e-06,
      "loss": 0.7881,
      "step": 1230
    },
    {
      "epoch": 0.06322337225309743,
      "grad_norm": 12.272586735224095,
      "learning_rate": 4.985611491857906e-06,
      "loss": 0.7109,
      "step": 1240
    },
    {
      "epoch": 0.06373323815836435,
      "grad_norm": 24.66263007900279,
      "learning_rate": 4.9851658046805226e-06,
      "loss": 0.715,
      "step": 1250
    },
    {
      "epoch": 0.06424310406363126,
      "grad_norm": 14.842375084400794,
      "learning_rate": 4.9847133402869235e-06,
      "loss": 0.7369,
      "step": 1260
    },
    {
      "epoch": 0.06475296996889818,
      "grad_norm": 22.216116344429377,
      "learning_rate": 4.984254099911009e-06,
      "loss": 0.6989,
      "step": 1270
    },
    {
      "epoch": 0.06526283587416509,
      "grad_norm": 45.589598576972115,
      "learning_rate": 4.98378808480516e-06,
      "loss": 0.7822,
      "step": 1280
    },
    {
      "epoch": 0.06577270177943201,
      "grad_norm": 86.77969204698395,
      "learning_rate": 4.98331529624023e-06,
      "loss": 0.7097,
      "step": 1290
    },
    {
      "epoch": 0.06628256768469892,
      "grad_norm": 17.69167242840336,
      "learning_rate": 4.982835735505545e-06,
      "loss": 0.8053,
      "step": 1300
    },
    {
      "epoch": 0.06679243358996584,
      "grad_norm": 13.284491837077573,
      "learning_rate": 4.982349403908902e-06,
      "loss": 0.7353,
      "step": 1310
    },
    {
      "epoch": 0.06730229949523275,
      "grad_norm": 8.48209924744741,
      "learning_rate": 4.98185630277656e-06,
      "loss": 0.7611,
      "step": 1320
    },
    {
      "epoch": 0.06781216540049967,
      "grad_norm": 8.856486864478638,
      "learning_rate": 4.981356433453238e-06,
      "loss": 0.6889,
      "step": 1330
    },
    {
      "epoch": 0.06832203130576658,
      "grad_norm": 107.59240891987359,
      "learning_rate": 4.9808497973021146e-06,
      "loss": 0.8094,
      "step": 1340
    },
    {
      "epoch": 0.0688318972110335,
      "grad_norm": 17.91017719005489,
      "learning_rate": 4.98033639570482e-06,
      "loss": 0.7585,
      "step": 1350
    },
    {
      "epoch": 0.06934176311630041,
      "grad_norm": 10.058369133706213,
      "learning_rate": 4.979816230061436e-06,
      "loss": 0.7193,
      "step": 1360
    },
    {
      "epoch": 0.06985162902156733,
      "grad_norm": 20.144328160083482,
      "learning_rate": 4.97928930179049e-06,
      "loss": 0.7395,
      "step": 1370
    },
    {
      "epoch": 0.07036149492683424,
      "grad_norm": 8.706394083724875,
      "learning_rate": 4.978755612328951e-06,
      "loss": 0.7464,
      "step": 1380
    },
    {
      "epoch": 0.07087136083210116,
      "grad_norm": 15.894421245682398,
      "learning_rate": 4.978215163132226e-06,
      "loss": 0.7485,
      "step": 1390
    },
    {
      "epoch": 0.07138122673736808,
      "grad_norm": 62.536082721075196,
      "learning_rate": 4.977667955674158e-06,
      "loss": 0.7638,
      "step": 1400
    },
    {
      "epoch": 0.07189109264263499,
      "grad_norm": 13.081976664557795,
      "learning_rate": 4.977113991447017e-06,
      "loss": 0.731,
      "step": 1410
    },
    {
      "epoch": 0.0724009585479019,
      "grad_norm": 15.993828063445294,
      "learning_rate": 4.976553271961503e-06,
      "loss": 0.843,
      "step": 1420
    },
    {
      "epoch": 0.07291082445316882,
      "grad_norm": 18.85801856028921,
      "learning_rate": 4.975985798746736e-06,
      "loss": 0.7829,
      "step": 1430
    },
    {
      "epoch": 0.07342069035843574,
      "grad_norm": 15.059259870020968,
      "learning_rate": 4.975411573350252e-06,
      "loss": 0.7585,
      "step": 1440
    },
    {
      "epoch": 0.07393055626370265,
      "grad_norm": 37.30959187853398,
      "learning_rate": 4.974830597338004e-06,
      "loss": 0.7719,
      "step": 1450
    },
    {
      "epoch": 0.07444042216896957,
      "grad_norm": 16.804542039630977,
      "learning_rate": 4.974242872294354e-06,
      "loss": 0.7128,
      "step": 1460
    },
    {
      "epoch": 0.07495028807423648,
      "grad_norm": 11.815174351719946,
      "learning_rate": 4.973648399822068e-06,
      "loss": 0.7417,
      "step": 1470
    },
    {
      "epoch": 0.0754601539795034,
      "grad_norm": 13.040163493202366,
      "learning_rate": 4.9730471815423124e-06,
      "loss": 0.8031,
      "step": 1480
    },
    {
      "epoch": 0.07597001988477031,
      "grad_norm": 12.047839519102967,
      "learning_rate": 4.972439219094649e-06,
      "loss": 0.7246,
      "step": 1490
    },
    {
      "epoch": 0.07647988579003721,
      "grad_norm": 17.021489876135128,
      "learning_rate": 4.971824514137035e-06,
      "loss": 0.681,
      "step": 1500
    },
    {
      "epoch": 0.07698975169530413,
      "grad_norm": 9.685468993709286,
      "learning_rate": 4.971203068345811e-06,
      "loss": 0.732,
      "step": 1510
    },
    {
      "epoch": 0.07749961760057104,
      "grad_norm": 15.944959721940311,
      "learning_rate": 4.970574883415704e-06,
      "loss": 0.7806,
      "step": 1520
    },
    {
      "epoch": 0.07800948350583796,
      "grad_norm": 24.268530969836814,
      "learning_rate": 4.969939961059814e-06,
      "loss": 0.7435,
      "step": 1530
    },
    {
      "epoch": 0.07851934941110487,
      "grad_norm": 13.030992822878428,
      "learning_rate": 4.969298303009621e-06,
      "loss": 0.7274,
      "step": 1540
    },
    {
      "epoch": 0.07902921531637179,
      "grad_norm": 13.947966567662132,
      "learning_rate": 4.968649911014967e-06,
      "loss": 0.6783,
      "step": 1550
    },
    {
      "epoch": 0.0795390812216387,
      "grad_norm": 14.570495534169975,
      "learning_rate": 4.9679947868440625e-06,
      "loss": 0.7491,
      "step": 1560
    },
    {
      "epoch": 0.08004894712690562,
      "grad_norm": 7.132526560379979,
      "learning_rate": 4.967332932283476e-06,
      "loss": 0.7223,
      "step": 1570
    },
    {
      "epoch": 0.08055881303217254,
      "grad_norm": 7.875957907008374,
      "learning_rate": 4.966664349138129e-06,
      "loss": 0.7368,
      "step": 1580
    },
    {
      "epoch": 0.08106867893743945,
      "grad_norm": 14.20041503911161,
      "learning_rate": 4.9659890392312935e-06,
      "loss": 0.7542,
      "step": 1590
    },
    {
      "epoch": 0.08157854484270637,
      "grad_norm": 37.488065773340224,
      "learning_rate": 4.965307004404586e-06,
      "loss": 0.7465,
      "step": 1600
    },
    {
      "epoch": 0.08208841074797328,
      "grad_norm": 14.65121446955515,
      "learning_rate": 4.964618246517962e-06,
      "loss": 0.7168,
      "step": 1610
    },
    {
      "epoch": 0.0825982766532402,
      "grad_norm": 35.950774694044625,
      "learning_rate": 4.96392276744971e-06,
      "loss": 0.6847,
      "step": 1620
    },
    {
      "epoch": 0.08310814255850711,
      "grad_norm": 25.12291831378265,
      "learning_rate": 4.9632205690964505e-06,
      "loss": 0.6943,
      "step": 1630
    },
    {
      "epoch": 0.08361800846377403,
      "grad_norm": 8.204862149437655,
      "learning_rate": 4.962511653373124e-06,
      "loss": 0.699,
      "step": 1640
    },
    {
      "epoch": 0.08412787436904094,
      "grad_norm": 7.350705825407557,
      "learning_rate": 4.961796022212994e-06,
      "loss": 0.7053,
      "step": 1650
    },
    {
      "epoch": 0.08463774027430786,
      "grad_norm": 11.340599409165993,
      "learning_rate": 4.961073677567634e-06,
      "loss": 0.6997,
      "step": 1660
    },
    {
      "epoch": 0.08514760617957477,
      "grad_norm": 14.015749677589326,
      "learning_rate": 4.960344621406927e-06,
      "loss": 0.8495,
      "step": 1670
    },
    {
      "epoch": 0.08565747208484169,
      "grad_norm": 18.004132825340353,
      "learning_rate": 4.959608855719059e-06,
      "loss": 0.7093,
      "step": 1680
    },
    {
      "epoch": 0.0861673379901086,
      "grad_norm": 12.254937804304983,
      "learning_rate": 4.958866382510515e-06,
      "loss": 0.758,
      "step": 1690
    },
    {
      "epoch": 0.08667720389537552,
      "grad_norm": 16.92387976586447,
      "learning_rate": 4.958117203806067e-06,
      "loss": 0.6934,
      "step": 1700
    },
    {
      "epoch": 0.08718706980064243,
      "grad_norm": 10.301187293252587,
      "learning_rate": 4.957361321648777e-06,
      "loss": 0.6859,
      "step": 1710
    },
    {
      "epoch": 0.08769693570590935,
      "grad_norm": 26.036964612219144,
      "learning_rate": 4.956598738099988e-06,
      "loss": 0.751,
      "step": 1720
    },
    {
      "epoch": 0.08820680161117626,
      "grad_norm": 19.235345452442886,
      "learning_rate": 4.955829455239316e-06,
      "loss": 0.7173,
      "step": 1730
    },
    {
      "epoch": 0.08871666751644318,
      "grad_norm": 12.24883597729765,
      "learning_rate": 4.95505347516465e-06,
      "loss": 0.742,
      "step": 1740
    },
    {
      "epoch": 0.0892265334217101,
      "grad_norm": 14.574956559044802,
      "learning_rate": 4.954270799992138e-06,
      "loss": 0.7574,
      "step": 1750
    },
    {
      "epoch": 0.08973639932697701,
      "grad_norm": 34.43135113831238,
      "learning_rate": 4.953481431856189e-06,
      "loss": 0.7639,
      "step": 1760
    },
    {
      "epoch": 0.09024626523224392,
      "grad_norm": 11.963547537782372,
      "learning_rate": 4.952685372909465e-06,
      "loss": 0.697,
      "step": 1770
    },
    {
      "epoch": 0.09075613113751084,
      "grad_norm": 14.879992437433783,
      "learning_rate": 4.951882625322871e-06,
      "loss": 0.7327,
      "step": 1780
    },
    {
      "epoch": 0.09126599704277776,
      "grad_norm": 10.687966849821434,
      "learning_rate": 4.951073191285555e-06,
      "loss": 0.7173,
      "step": 1790
    },
    {
      "epoch": 0.09177586294804467,
      "grad_norm": 173.65543370635663,
      "learning_rate": 4.9502570730048995e-06,
      "loss": 0.7795,
      "step": 1800
    },
    {
      "epoch": 0.09228572885331159,
      "grad_norm": 33.83810505997628,
      "learning_rate": 4.949434272706514e-06,
      "loss": 0.6531,
      "step": 1810
    },
    {
      "epoch": 0.0927955947585785,
      "grad_norm": 15.988283096618108,
      "learning_rate": 4.9486047926342294e-06,
      "loss": 0.6574,
      "step": 1820
    },
    {
      "epoch": 0.0933054606638454,
      "grad_norm": 14.479744812619012,
      "learning_rate": 4.947768635050098e-06,
      "loss": 0.7244,
      "step": 1830
    },
    {
      "epoch": 0.09381532656911232,
      "grad_norm": 12.06402074877087,
      "learning_rate": 4.946925802234373e-06,
      "loss": 0.7125,
      "step": 1840
    },
    {
      "epoch": 0.09432519247437923,
      "grad_norm": 8.125406898256518,
      "learning_rate": 4.946076296485522e-06,
      "loss": 0.6822,
      "step": 1850
    },
    {
      "epoch": 0.09483505837964615,
      "grad_norm": 20.671526831422415,
      "learning_rate": 4.945220120120203e-06,
      "loss": 0.6637,
      "step": 1860
    },
    {
      "epoch": 0.09534492428491306,
      "grad_norm": 17.716989185929332,
      "learning_rate": 4.9443572754732675e-06,
      "loss": 0.6626,
      "step": 1870
    },
    {
      "epoch": 0.09585479019017998,
      "grad_norm": 159.8354306714128,
      "learning_rate": 4.943487764897749e-06,
      "loss": 0.7197,
      "step": 1880
    },
    {
      "epoch": 0.0963646560954469,
      "grad_norm": 12.61619164444456,
      "learning_rate": 4.942611590764866e-06,
      "loss": 0.6883,
      "step": 1890
    },
    {
      "epoch": 0.09687452200071381,
      "grad_norm": 11.46693117898443,
      "learning_rate": 4.941728755464003e-06,
      "loss": 0.7083,
      "step": 1900
    },
    {
      "epoch": 0.09738438790598072,
      "grad_norm": 11.479207415736344,
      "learning_rate": 4.940839261402711e-06,
      "loss": 0.7645,
      "step": 1910
    },
    {
      "epoch": 0.09789425381124764,
      "grad_norm": 16.330332090944445,
      "learning_rate": 4.939943111006702e-06,
      "loss": 0.6721,
      "step": 1920
    },
    {
      "epoch": 0.09840411971651455,
      "grad_norm": 24.904337194527546,
      "learning_rate": 4.93904030671984e-06,
      "loss": 0.7213,
      "step": 1930
    },
    {
      "epoch": 0.09891398562178147,
      "grad_norm": 13.987479654668915,
      "learning_rate": 4.938130851004131e-06,
      "loss": 0.8048,
      "step": 1940
    },
    {
      "epoch": 0.09942385152704838,
      "grad_norm": 27.075097531744582,
      "learning_rate": 4.937214746339726e-06,
      "loss": 0.7305,
      "step": 1950
    },
    {
      "epoch": 0.0999337174323153,
      "grad_norm": 8.923073340927486,
      "learning_rate": 4.936291995224902e-06,
      "loss": 0.7636,
      "step": 1960
    },
    {
      "epoch": 0.10044358333758222,
      "grad_norm": 7.439882170995172,
      "learning_rate": 4.935362600176064e-06,
      "loss": 0.7253,
      "step": 1970
    },
    {
      "epoch": 0.10095344924284913,
      "grad_norm": 4.931935226467838,
      "learning_rate": 4.934426563727739e-06,
      "loss": 0.719,
      "step": 1980
    },
    {
      "epoch": 0.10146331514811605,
      "grad_norm": 13.417900875214697,
      "learning_rate": 4.933483888432558e-06,
      "loss": 0.7065,
      "step": 1990
    },
    {
      "epoch": 0.10197318105338296,
      "grad_norm": 12.076546088417926,
      "learning_rate": 4.932534576861263e-06,
      "loss": 0.7119,
      "step": 2000
    },
    {
      "epoch": 0.10248304695864988,
      "grad_norm": 9.339538376806033,
      "learning_rate": 4.931578631602691e-06,
      "loss": 0.7747,
      "step": 2010
    },
    {
      "epoch": 0.10299291286391679,
      "grad_norm": 9.290207456660328,
      "learning_rate": 4.930616055263768e-06,
      "loss": 0.7296,
      "step": 2020
    },
    {
      "epoch": 0.1035027787691837,
      "grad_norm": 13.938471789842117,
      "learning_rate": 4.9296468504695075e-06,
      "loss": 0.716,
      "step": 2030
    },
    {
      "epoch": 0.10401264467445062,
      "grad_norm": 7.794410431885056,
      "learning_rate": 4.928671019862995e-06,
      "loss": 0.7138,
      "step": 2040
    },
    {
      "epoch": 0.10452251057971754,
      "grad_norm": 7.090928909024373,
      "learning_rate": 4.927688566105388e-06,
      "loss": 0.7331,
      "step": 2050
    },
    {
      "epoch": 0.10503237648498445,
      "grad_norm": 17.67001545795707,
      "learning_rate": 4.926699491875905e-06,
      "loss": 0.758,
      "step": 2060
    },
    {
      "epoch": 0.10554224239025137,
      "grad_norm": 18.577244712263273,
      "learning_rate": 4.925703799871818e-06,
      "loss": 0.7324,
      "step": 2070
    },
    {
      "epoch": 0.10605210829551828,
      "grad_norm": 11.69929635491638,
      "learning_rate": 4.924701492808447e-06,
      "loss": 0.7171,
      "step": 2080
    },
    {
      "epoch": 0.1065619742007852,
      "grad_norm": 7.939657680078957,
      "learning_rate": 4.923692573419152e-06,
      "loss": 0.7254,
      "step": 2090
    },
    {
      "epoch": 0.10707184010605211,
      "grad_norm": 13.41877483458534,
      "learning_rate": 4.922677044455324e-06,
      "loss": 0.8326,
      "step": 2100
    },
    {
      "epoch": 0.10758170601131903,
      "grad_norm": 9.545281303803277,
      "learning_rate": 4.921654908686381e-06,
      "loss": 0.7448,
      "step": 2110
    },
    {
      "epoch": 0.10809157191658594,
      "grad_norm": 7.3352190302911,
      "learning_rate": 4.920626168899755e-06,
      "loss": 0.7348,
      "step": 2120
    },
    {
      "epoch": 0.10860143782185286,
      "grad_norm": 26.685965241526155,
      "learning_rate": 4.91959082790089e-06,
      "loss": 0.7507,
      "step": 2130
    },
    {
      "epoch": 0.10911130372711977,
      "grad_norm": 12.909643886403847,
      "learning_rate": 4.918548888513232e-06,
      "loss": 0.651,
      "step": 2140
    },
    {
      "epoch": 0.10962116963238669,
      "grad_norm": 15.646611507641264,
      "learning_rate": 4.91750035357822e-06,
      "loss": 0.6946,
      "step": 2150
    },
    {
      "epoch": 0.11013103553765359,
      "grad_norm": 12.998595411207628,
      "learning_rate": 4.9164452259552805e-06,
      "loss": 0.7434,
      "step": 2160
    },
    {
      "epoch": 0.1106409014429205,
      "grad_norm": 11.660405346251727,
      "learning_rate": 4.9153835085218175e-06,
      "loss": 0.7105,
      "step": 2170
    },
    {
      "epoch": 0.11115076734818742,
      "grad_norm": 14.143309967401331,
      "learning_rate": 4.9143152041732074e-06,
      "loss": 0.7133,
      "step": 2180
    },
    {
      "epoch": 0.11166063325345434,
      "grad_norm": 19.03028150637859,
      "learning_rate": 4.91324031582279e-06,
      "loss": 0.7891,
      "step": 2190
    },
    {
      "epoch": 0.11217049915872125,
      "grad_norm": 16.352679932832064,
      "learning_rate": 4.9121588464018555e-06,
      "loss": 0.7343,
      "step": 2200
    },
    {
      "epoch": 0.11268036506398817,
      "grad_norm": 13.050194377945338,
      "learning_rate": 4.911070798859647e-06,
      "loss": 0.6784,
      "step": 2210
    },
    {
      "epoch": 0.11319023096925508,
      "grad_norm": 16.676160812772732,
      "learning_rate": 4.909976176163345e-06,
      "loss": 0.7425,
      "step": 2220
    },
    {
      "epoch": 0.113700096874522,
      "grad_norm": 24.842180760788374,
      "learning_rate": 4.908874981298058e-06,
      "loss": 0.7591,
      "step": 2230
    },
    {
      "epoch": 0.11420996277978891,
      "grad_norm": 27.043355467028597,
      "learning_rate": 4.90776721726682e-06,
      "loss": 0.7038,
      "step": 2240
    },
    {
      "epoch": 0.11471982868505583,
      "grad_norm": 17.602292546113226,
      "learning_rate": 4.90665288709058e-06,
      "loss": 0.7011,
      "step": 2250
    },
    {
      "epoch": 0.11522969459032274,
      "grad_norm": 9.491025673003486,
      "learning_rate": 4.905531993808191e-06,
      "loss": 0.6496,
      "step": 2260
    },
    {
      "epoch": 0.11573956049558966,
      "grad_norm": 10.245512707949505,
      "learning_rate": 4.904404540476405e-06,
      "loss": 0.7504,
      "step": 2270
    },
    {
      "epoch": 0.11624942640085657,
      "grad_norm": 12.898093672630402,
      "learning_rate": 4.903270530169865e-06,
      "loss": 0.7097,
      "step": 2280
    },
    {
      "epoch": 0.11675929230612349,
      "grad_norm": 6.129617674226962,
      "learning_rate": 4.902129965981094e-06,
      "loss": 0.6947,
      "step": 2290
    },
    {
      "epoch": 0.1172691582113904,
      "grad_norm": 11.65520326626469,
      "learning_rate": 4.900982851020487e-06,
      "loss": 0.7449,
      "step": 2300
    },
    {
      "epoch": 0.11777902411665732,
      "grad_norm": 13.834178026043013,
      "learning_rate": 4.899829188416306e-06,
      "loss": 0.6752,
      "step": 2310
    },
    {
      "epoch": 0.11828889002192423,
      "grad_norm": 12.848364562587328,
      "learning_rate": 4.898668981314667e-06,
      "loss": 0.6874,
      "step": 2320
    },
    {
      "epoch": 0.11879875592719115,
      "grad_norm": 20.406006817456138,
      "learning_rate": 4.897502232879533e-06,
      "loss": 0.7308,
      "step": 2330
    },
    {
      "epoch": 0.11930862183245806,
      "grad_norm": 8.308281701307099,
      "learning_rate": 4.896328946292706e-06,
      "loss": 0.6662,
      "step": 2340
    },
    {
      "epoch": 0.11981848773772498,
      "grad_norm": 34.003846931457545,
      "learning_rate": 4.895149124753821e-06,
      "loss": 0.7199,
      "step": 2350
    },
    {
      "epoch": 0.1203283536429919,
      "grad_norm": 17.039906833615753,
      "learning_rate": 4.893962771480329e-06,
      "loss": 0.7301,
      "step": 2360
    },
    {
      "epoch": 0.12083821954825881,
      "grad_norm": 7.361968459417762,
      "learning_rate": 4.892769889707497e-06,
      "loss": 0.7211,
      "step": 2370
    },
    {
      "epoch": 0.12134808545352573,
      "grad_norm": 11.789122532853575,
      "learning_rate": 4.891570482688395e-06,
      "loss": 0.6497,
      "step": 2380
    },
    {
      "epoch": 0.12185795135879264,
      "grad_norm": 30.87304515963633,
      "learning_rate": 4.890364553693886e-06,
      "loss": 0.7441,
      "step": 2390
    },
    {
      "epoch": 0.12236781726405956,
      "grad_norm": 29.669298108318465,
      "learning_rate": 4.889152106012623e-06,
      "loss": 0.7157,
      "step": 2400
    },
    {
      "epoch": 0.12287768316932647,
      "grad_norm": 11.71514161705899,
      "learning_rate": 4.88793314295103e-06,
      "loss": 0.6329,
      "step": 2410
    },
    {
      "epoch": 0.12338754907459339,
      "grad_norm": 20.714383976038274,
      "learning_rate": 4.886707667833306e-06,
      "loss": 0.7138,
      "step": 2420
    },
    {
      "epoch": 0.1238974149798603,
      "grad_norm": 6.276871926336472,
      "learning_rate": 4.885475684001401e-06,
      "loss": 0.6407,
      "step": 2430
    },
    {
      "epoch": 0.12440728088512722,
      "grad_norm": 4.832640606709299,
      "learning_rate": 4.884237194815023e-06,
      "loss": 0.6979,
      "step": 2440
    },
    {
      "epoch": 0.12491714679039413,
      "grad_norm": 9.349322693892326,
      "learning_rate": 4.882992203651613e-06,
      "loss": 0.7813,
      "step": 2450
    },
    {
      "epoch": 0.12542701269566103,
      "grad_norm": 7.413743472965248,
      "learning_rate": 4.881740713906348e-06,
      "loss": 0.7791,
      "step": 2460
    },
    {
      "epoch": 0.12593687860092795,
      "grad_norm": 5.503594248421507,
      "learning_rate": 4.880482728992126e-06,
      "loss": 0.8045,
      "step": 2470
    },
    {
      "epoch": 0.12644674450619486,
      "grad_norm": 12.070817085771614,
      "learning_rate": 4.8792182523395555e-06,
      "loss": 0.6917,
      "step": 2480
    },
    {
      "epoch": 0.12695661041146178,
      "grad_norm": 15.300633215523083,
      "learning_rate": 4.877947287396952e-06,
      "loss": 0.6394,
      "step": 2490
    },
    {
      "epoch": 0.1274664763167287,
      "grad_norm": 19.470260448454457,
      "learning_rate": 4.876669837630324e-06,
      "loss": 0.6972,
      "step": 2500
    },
    {
      "epoch": 0.1279763422219956,
      "grad_norm": 17.203397243947155,
      "learning_rate": 4.875385906523361e-06,
      "loss": 0.7206,
      "step": 2510
    },
    {
      "epoch": 0.12848620812726252,
      "grad_norm": 14.59124108252345,
      "learning_rate": 4.874095497577434e-06,
      "loss": 0.6691,
      "step": 2520
    },
    {
      "epoch": 0.12899607403252944,
      "grad_norm": 6.680251131013468,
      "learning_rate": 4.872798614311574e-06,
      "loss": 0.6992,
      "step": 2530
    },
    {
      "epoch": 0.12950593993779635,
      "grad_norm": 9.142831606794571,
      "learning_rate": 4.87149526026247e-06,
      "loss": 0.7222,
      "step": 2540
    },
    {
      "epoch": 0.13001580584306327,
      "grad_norm": 11.653155354293201,
      "learning_rate": 4.870185438984458e-06,
      "loss": 0.7009,
      "step": 2550
    },
    {
      "epoch": 0.13052567174833019,
      "grad_norm": 6.406235878833066,
      "learning_rate": 4.868869154049509e-06,
      "loss": 0.6904,
      "step": 2560
    },
    {
      "epoch": 0.1310355376535971,
      "grad_norm": 8.058447568972042,
      "learning_rate": 4.867546409047221e-06,
      "loss": 0.7581,
      "step": 2570
    },
    {
      "epoch": 0.13154540355886402,
      "grad_norm": 6.710720353699215,
      "learning_rate": 4.866217207584811e-06,
      "loss": 0.6432,
      "step": 2580
    },
    {
      "epoch": 0.13205526946413093,
      "grad_norm": 7.407470629761652,
      "learning_rate": 4.864881553287101e-06,
      "loss": 0.6199,
      "step": 2590
    },
    {
      "epoch": 0.13256513536939785,
      "grad_norm": 11.56809676811302,
      "learning_rate": 4.863539449796511e-06,
      "loss": 0.7045,
      "step": 2600
    },
    {
      "epoch": 0.13307500127466476,
      "grad_norm": 8.306230590884352,
      "learning_rate": 4.86219090077305e-06,
      "loss": 0.671,
      "step": 2610
    },
    {
      "epoch": 0.13358486717993168,
      "grad_norm": 13.957448529489147,
      "learning_rate": 4.8608359098943014e-06,
      "loss": 0.6508,
      "step": 2620
    },
    {
      "epoch": 0.1340947330851986,
      "grad_norm": 7.784662376461,
      "learning_rate": 4.859474480855417e-06,
      "loss": 0.6498,
      "step": 2630
    },
    {
      "epoch": 0.1346045989904655,
      "grad_norm": 29.944496551875012,
      "learning_rate": 4.858106617369108e-06,
      "loss": 0.7051,
      "step": 2640
    },
    {
      "epoch": 0.13511446489573242,
      "grad_norm": 10.71685346194294,
      "learning_rate": 4.85673232316563e-06,
      "loss": 0.7297,
      "step": 2650
    },
    {
      "epoch": 0.13562433080099934,
      "grad_norm": 49.808581866792125,
      "learning_rate": 4.855351601992777e-06,
      "loss": 0.6998,
      "step": 2660
    },
    {
      "epoch": 0.13613419670626625,
      "grad_norm": 13.335763810361753,
      "learning_rate": 4.853964457615871e-06,
      "loss": 0.6777,
      "step": 2670
    },
    {
      "epoch": 0.13664406261153317,
      "grad_norm": 9.028936515529828,
      "learning_rate": 4.852570893817747e-06,
      "loss": 0.6742,
      "step": 2680
    },
    {
      "epoch": 0.13715392851680008,
      "grad_norm": 11.470738059837727,
      "learning_rate": 4.851170914398749e-06,
      "loss": 0.671,
      "step": 2690
    },
    {
      "epoch": 0.137663794422067,
      "grad_norm": 32.73029360165739,
      "learning_rate": 4.849764523176716e-06,
      "loss": 0.7377,
      "step": 2700
    },
    {
      "epoch": 0.1381736603273339,
      "grad_norm": 12.819394755469444,
      "learning_rate": 4.848351723986974e-06,
      "loss": 0.7482,
      "step": 2710
    },
    {
      "epoch": 0.13868352623260083,
      "grad_norm": 7.937408615887619,
      "learning_rate": 4.84693252068232e-06,
      "loss": 0.6308,
      "step": 2720
    },
    {
      "epoch": 0.13919339213786774,
      "grad_norm": 7.448706904228568,
      "learning_rate": 4.845506917133021e-06,
      "loss": 0.6944,
      "step": 2730
    },
    {
      "epoch": 0.13970325804313466,
      "grad_norm": 7.180411136542497,
      "learning_rate": 4.844074917226792e-06,
      "loss": 0.6722,
      "step": 2740
    },
    {
      "epoch": 0.14021312394840157,
      "grad_norm": 11.468772906657533,
      "learning_rate": 4.842636524868796e-06,
      "loss": 0.6626,
      "step": 2750
    },
    {
      "epoch": 0.1407229898536685,
      "grad_norm": 6.371452808310344,
      "learning_rate": 4.8411917439816245e-06,
      "loss": 0.6499,
      "step": 2760
    },
    {
      "epoch": 0.1412328557589354,
      "grad_norm": 19.49041066130283,
      "learning_rate": 4.839740578505297e-06,
      "loss": 0.7403,
      "step": 2770
    },
    {
      "epoch": 0.14174272166420232,
      "grad_norm": 12.99945610718753,
      "learning_rate": 4.838283032397237e-06,
      "loss": 0.7243,
      "step": 2780
    },
    {
      "epoch": 0.14225258756946924,
      "grad_norm": 17.642109740820253,
      "learning_rate": 4.8368191096322734e-06,
      "loss": 0.7556,
      "step": 2790
    },
    {
      "epoch": 0.14276245347473615,
      "grad_norm": 3.9040981516739803,
      "learning_rate": 4.835348814202624e-06,
      "loss": 0.7256,
      "step": 2800
    },
    {
      "epoch": 0.14327231938000307,
      "grad_norm": 71.87809054881508,
      "learning_rate": 4.833872150117883e-06,
      "loss": 0.7095,
      "step": 2810
    },
    {
      "epoch": 0.14378218528526998,
      "grad_norm": 6.094260739225736,
      "learning_rate": 4.832389121405013e-06,
      "loss": 0.7223,
      "step": 2820
    },
    {
      "epoch": 0.1442920511905369,
      "grad_norm": 8.473852645006467,
      "learning_rate": 4.830899732108337e-06,
      "loss": 0.7042,
      "step": 2830
    },
    {
      "epoch": 0.1448019170958038,
      "grad_norm": 7.816652193027694,
      "learning_rate": 4.829403986289519e-06,
      "loss": 0.7272,
      "step": 2840
    },
    {
      "epoch": 0.14531178300107073,
      "grad_norm": 17.989801416107305,
      "learning_rate": 4.827901888027561e-06,
      "loss": 0.6549,
      "step": 2850
    },
    {
      "epoch": 0.14582164890633764,
      "grad_norm": 13.395657682163579,
      "learning_rate": 4.826393441418785e-06,
      "loss": 0.7444,
      "step": 2860
    },
    {
      "epoch": 0.14633151481160456,
      "grad_norm": 8.540968081865213,
      "learning_rate": 4.824878650576829e-06,
      "loss": 0.7088,
      "step": 2870
    },
    {
      "epoch": 0.14684138071687147,
      "grad_norm": 8.779541567306202,
      "learning_rate": 4.823357519632631e-06,
      "loss": 0.6694,
      "step": 2880
    },
    {
      "epoch": 0.1473512466221384,
      "grad_norm": 8.84843067718907,
      "learning_rate": 4.821830052734418e-06,
      "loss": 0.6761,
      "step": 2890
    },
    {
      "epoch": 0.1478611125274053,
      "grad_norm": 8.526698581121803,
      "learning_rate": 4.820296254047695e-06,
      "loss": 0.7178,
      "step": 2900
    },
    {
      "epoch": 0.14837097843267222,
      "grad_norm": 13.165812013075216,
      "learning_rate": 4.8187561277552376e-06,
      "loss": 0.7361,
      "step": 2910
    },
    {
      "epoch": 0.14888084433793913,
      "grad_norm": 17.809515537069476,
      "learning_rate": 4.817209678057073e-06,
      "loss": 0.7227,
      "step": 2920
    },
    {
      "epoch": 0.14939071024320605,
      "grad_norm": 5.14603155282455,
      "learning_rate": 4.815656909170476e-06,
      "loss": 0.6763,
      "step": 2930
    },
    {
      "epoch": 0.14990057614847296,
      "grad_norm": 11.830391658586644,
      "learning_rate": 4.814097825329953e-06,
      "loss": 0.7027,
      "step": 2940
    },
    {
      "epoch": 0.15041044205373988,
      "grad_norm": 13.309313102975352,
      "learning_rate": 4.81253243078723e-06,
      "loss": 0.6576,
      "step": 2950
    },
    {
      "epoch": 0.1509203079590068,
      "grad_norm": 14.121127903581213,
      "learning_rate": 4.810960729811247e-06,
      "loss": 0.7173,
      "step": 2960
    },
    {
      "epoch": 0.1514301738642737,
      "grad_norm": 12.247498251382844,
      "learning_rate": 4.8093827266881375e-06,
      "loss": 0.6516,
      "step": 2970
    },
    {
      "epoch": 0.15194003976954062,
      "grad_norm": 11.887753993952547,
      "learning_rate": 4.807798425721224e-06,
      "loss": 0.6781,
      "step": 2980
    },
    {
      "epoch": 0.1524499056748075,
      "grad_norm": 10.393760480263255,
      "learning_rate": 4.806207831231004e-06,
      "loss": 0.6878,
      "step": 2990
    },
    {
      "epoch": 0.15295977158007443,
      "grad_norm": 15.180506006866409,
      "learning_rate": 4.804610947555135e-06,
      "loss": 0.6866,
      "step": 3000
    },
    {
      "epoch": 0.15346963748534134,
      "grad_norm": 8.098256391581655,
      "learning_rate": 4.803007779048428e-06,
      "loss": 0.6332,
      "step": 3010
    },
    {
      "epoch": 0.15397950339060826,
      "grad_norm": 18.217729986739197,
      "learning_rate": 4.801398330082834e-06,
      "loss": 0.7395,
      "step": 3020
    },
    {
      "epoch": 0.15448936929587517,
      "grad_norm": 8.190425991582323,
      "learning_rate": 4.7997826050474284e-06,
      "loss": 0.6646,
      "step": 3030
    },
    {
      "epoch": 0.1549992352011421,
      "grad_norm": 11.396659175805825,
      "learning_rate": 4.798160608348404e-06,
      "loss": 0.7352,
      "step": 3040
    },
    {
      "epoch": 0.155509101106409,
      "grad_norm": 7.578872856762376,
      "learning_rate": 4.796532344409055e-06,
      "loss": 0.6937,
      "step": 3050
    },
    {
      "epoch": 0.15601896701167592,
      "grad_norm": 12.65465529170778,
      "learning_rate": 4.794897817669769e-06,
      "loss": 0.7079,
      "step": 3060
    },
    {
      "epoch": 0.15652883291694283,
      "grad_norm": 8.332077761823149,
      "learning_rate": 4.7932570325880114e-06,
      "loss": 0.6553,
      "step": 3070
    },
    {
      "epoch": 0.15703869882220975,
      "grad_norm": 7.493874145117778,
      "learning_rate": 4.791609993638315e-06,
      "loss": 0.6221,
      "step": 3080
    },
    {
      "epoch": 0.15754856472747666,
      "grad_norm": 9.006760792815905,
      "learning_rate": 4.789956705312266e-06,
      "loss": 0.7162,
      "step": 3090
    },
    {
      "epoch": 0.15805843063274358,
      "grad_norm": 6.626245426845903,
      "learning_rate": 4.7882971721184955e-06,
      "loss": 0.6945,
      "step": 3100
    },
    {
      "epoch": 0.1585682965380105,
      "grad_norm": 5.003198013650634,
      "learning_rate": 4.786631398582663e-06,
      "loss": 0.6242,
      "step": 3110
    },
    {
      "epoch": 0.1590781624432774,
      "grad_norm": 70.12439147426733,
      "learning_rate": 4.784959389247445e-06,
      "loss": 0.694,
      "step": 3120
    },
    {
      "epoch": 0.15958802834854433,
      "grad_norm": 21.090289465260433,
      "learning_rate": 4.7832811486725275e-06,
      "loss": 0.7578,
      "step": 3130
    },
    {
      "epoch": 0.16009789425381124,
      "grad_norm": 11.217345377559456,
      "learning_rate": 4.781596681434584e-06,
      "loss": 0.6675,
      "step": 3140
    },
    {
      "epoch": 0.16060776015907816,
      "grad_norm": 27.493496228858692,
      "learning_rate": 4.779905992127273e-06,
      "loss": 0.6608,
      "step": 3150
    },
    {
      "epoch": 0.16111762606434507,
      "grad_norm": 7.606733875884907,
      "learning_rate": 4.778209085361217e-06,
      "loss": 0.759,
      "step": 3160
    },
    {
      "epoch": 0.16162749196961199,
      "grad_norm": 6.704916972776797,
      "learning_rate": 4.776505965763999e-06,
      "loss": 0.6207,
      "step": 3170
    },
    {
      "epoch": 0.1621373578748789,
      "grad_norm": 6.560606605224566,
      "learning_rate": 4.77479663798014e-06,
      "loss": 0.7267,
      "step": 3180
    },
    {
      "epoch": 0.16264722378014582,
      "grad_norm": 6.665804741801049,
      "learning_rate": 4.773081106671094e-06,
      "loss": 0.7096,
      "step": 3190
    },
    {
      "epoch": 0.16315708968541273,
      "grad_norm": 19.678768608251875,
      "learning_rate": 4.771359376515231e-06,
      "loss": 0.7245,
      "step": 3200
    },
    {
      "epoch": 0.16366695559067965,
      "grad_norm": 7.586591364128146,
      "learning_rate": 4.769631452207828e-06,
      "loss": 0.6273,
      "step": 3210
    },
    {
      "epoch": 0.16417682149594656,
      "grad_norm": 17.479097758456334,
      "learning_rate": 4.76789733846105e-06,
      "loss": 0.6805,
      "step": 3220
    },
    {
      "epoch": 0.16468668740121348,
      "grad_norm": 6.709811820698195,
      "learning_rate": 4.766157040003944e-06,
      "loss": 0.5998,
      "step": 3230
    },
    {
      "epoch": 0.1651965533064804,
      "grad_norm": 10.454340440990537,
      "learning_rate": 4.7644105615824226e-06,
      "loss": 0.6798,
      "step": 3240
    },
    {
      "epoch": 0.1657064192117473,
      "grad_norm": 51.363584768392315,
      "learning_rate": 4.7626579079592504e-06,
      "loss": 0.645,
      "step": 3250
    },
    {
      "epoch": 0.16621628511701422,
      "grad_norm": 8.002930949956061,
      "learning_rate": 4.760899083914035e-06,
      "loss": 0.6527,
      "step": 3260
    },
    {
      "epoch": 0.16672615102228114,
      "grad_norm": 9.644972860936063,
      "learning_rate": 4.759134094243206e-06,
      "loss": 0.696,
      "step": 3270
    },
    {
      "epoch": 0.16723601692754805,
      "grad_norm": 7.7053569212568265,
      "learning_rate": 4.757362943760013e-06,
      "loss": 0.6,
      "step": 3280
    },
    {
      "epoch": 0.16774588283281497,
      "grad_norm": 18.816000872116042,
      "learning_rate": 4.755585637294503e-06,
      "loss": 0.6497,
      "step": 3290
    },
    {
      "epoch": 0.16825574873808188,
      "grad_norm": 9.174567143984415,
      "learning_rate": 4.753802179693512e-06,
      "loss": 0.6686,
      "step": 3300
    },
    {
      "epoch": 0.1687656146433488,
      "grad_norm": 18.969088719334287,
      "learning_rate": 4.7520125758206495e-06,
      "loss": 0.6925,
      "step": 3310
    },
    {
      "epoch": 0.16927548054861571,
      "grad_norm": 5.900876067555642,
      "learning_rate": 4.750216830556287e-06,
      "loss": 0.6485,
      "step": 3320
    },
    {
      "epoch": 0.16978534645388263,
      "grad_norm": 6.471858950566868,
      "learning_rate": 4.748414948797545e-06,
      "loss": 0.695,
      "step": 3330
    },
    {
      "epoch": 0.17029521235914954,
      "grad_norm": 15.140328131097245,
      "learning_rate": 4.746606935458277e-06,
      "loss": 0.6557,
      "step": 3340
    },
    {
      "epoch": 0.17080507826441646,
      "grad_norm": 17.19040083808326,
      "learning_rate": 4.744792795469058e-06,
      "loss": 0.7159,
      "step": 3350
    },
    {
      "epoch": 0.17131494416968338,
      "grad_norm": 9.99786870874283,
      "learning_rate": 4.742972533777172e-06,
      "loss": 0.6673,
      "step": 3360
    },
    {
      "epoch": 0.1718248100749503,
      "grad_norm": 7.203764779303473,
      "learning_rate": 4.741146155346596e-06,
      "loss": 0.6452,
      "step": 3370
    },
    {
      "epoch": 0.1723346759802172,
      "grad_norm": 6.320161749809006,
      "learning_rate": 4.739313665157988e-
| "loss": 0.6395, | |
| "step": 3380 | |
| }, | |
| { | |
| "epoch": 0.17284454188548412, | |
| "grad_norm": 11.183461388166837, | |
| "learning_rate": 4.737475068208673e-06, | |
| "loss": 0.6887, | |
| "step": 3390 | |
| }, | |
| { | |
| "epoch": 0.17335440779075104, | |
| "grad_norm": 22.535580807371765, | |
| "learning_rate": 4.7356303695126315e-06, | |
| "loss": 0.6866, | |
| "step": 3400 | |
| }, | |
| { | |
| "epoch": 0.17386427369601795, | |
| "grad_norm": 9.11855222839424, | |
| "learning_rate": 4.733779574100482e-06, | |
| "loss": 0.6396, | |
| "step": 3410 | |
| }, | |
| { | |
| "epoch": 0.17437413960128487, | |
| "grad_norm": 16.74772610855082, | |
| "learning_rate": 4.7319226870194676e-06, | |
| "loss": 0.675, | |
| "step": 3420 | |
| }, | |
| { | |
| "epoch": 0.17488400550655178, | |
| "grad_norm": 10.312136704375964, | |
| "learning_rate": 4.730059713333448e-06, | |
| "loss": 0.7076, | |
| "step": 3430 | |
| }, | |
| { | |
| "epoch": 0.1753938714118187, | |
| "grad_norm": 23.686147104032347, | |
| "learning_rate": 4.728190658122878e-06, | |
| "loss": 0.5949, | |
| "step": 3440 | |
| }, | |
| { | |
| "epoch": 0.1759037373170856, | |
| "grad_norm": 12.583921383761359, | |
| "learning_rate": 4.726315526484799e-06, | |
| "loss": 0.6345, | |
| "step": 3450 | |
| }, | |
| { | |
| "epoch": 0.17641360322235253, | |
| "grad_norm": 8.080013852290937, | |
| "learning_rate": 4.724434323532822e-06, | |
| "loss": 0.6328, | |
| "step": 3460 | |
| }, | |
| { | |
| "epoch": 0.17692346912761944, | |
| "grad_norm": 19.18676141068983, | |
| "learning_rate": 4.722547054397114e-06, | |
| "loss": 0.6573, | |
| "step": 3470 | |
| }, | |
| { | |
| "epoch": 0.17743333503288636, | |
| "grad_norm": 13.275647879472974, | |
| "learning_rate": 4.720653724224389e-06, | |
| "loss": 0.6805, | |
| "step": 3480 | |
| }, | |
| { | |
| "epoch": 0.17794320093815327, | |
| "grad_norm": 14.808744880492414, | |
| "learning_rate": 4.718754338177887e-06, | |
| "loss": 0.611, | |
| "step": 3490 | |
| }, | |
| { | |
| "epoch": 0.1784530668434202, | |
| "grad_norm": 8.849696816700126, | |
| "learning_rate": 4.716848901437361e-06, | |
| "loss": 0.6432, | |
| "step": 3500 | |
| }, | |
| { | |
| "epoch": 0.1789629327486871, | |
| "grad_norm": 7.567552114191146, | |
| "learning_rate": 4.714937419199067e-06, | |
| "loss": 0.6534, | |
| "step": 3510 | |
| }, | |
| { | |
| "epoch": 0.17947279865395402, | |
| "grad_norm": 13.33853831817063, | |
| "learning_rate": 4.713019896675749e-06, | |
| "loss": 0.7478, | |
| "step": 3520 | |
| }, | |
| { | |
| "epoch": 0.17998266455922093, | |
| "grad_norm": 7.1558035745268365, | |
| "learning_rate": 4.711096339096619e-06, | |
| "loss": 0.6852, | |
| "step": 3530 | |
| }, | |
| { | |
| "epoch": 0.18049253046448785, | |
| "grad_norm": 11.762580657805662, | |
| "learning_rate": 4.709166751707351e-06, | |
| "loss": 0.6843, | |
| "step": 3540 | |
| }, | |
| { | |
| "epoch": 0.18100239636975476, | |
| "grad_norm": 14.327525728374841, | |
| "learning_rate": 4.7072311397700605e-06, | |
| "loss": 0.6476, | |
| "step": 3550 | |
| }, | |
| { | |
| "epoch": 0.18151226227502168, | |
| "grad_norm": 17.965119556017292, | |
| "learning_rate": 4.705289508563293e-06, | |
| "loss": 0.6633, | |
| "step": 3560 | |
| }, | |
| { | |
| "epoch": 0.1820221281802886, | |
| "grad_norm": 11.95934033677814, | |
| "learning_rate": 4.703341863382009e-06, | |
| "loss": 0.6615, | |
| "step": 3570 | |
| }, | |
| { | |
| "epoch": 0.1825319940855555, | |
| "grad_norm": 6.200151009583062, | |
| "learning_rate": 4.701388209537569e-06, | |
| "loss": 0.651, | |
| "step": 3580 | |
| }, | |
| { | |
| "epoch": 0.18304185999082243, | |
| "grad_norm": 10.837831286173747, | |
| "learning_rate": 4.69942855235772e-06, | |
| "loss": 0.702, | |
| "step": 3590 | |
| }, | |
| { | |
| "epoch": 0.18355172589608934, | |
| "grad_norm": 12.987261700429512, | |
| "learning_rate": 4.697462897186581e-06, | |
| "loss": 0.6598, | |
| "step": 3600 | |
| }, | |
| { | |
| "epoch": 0.18406159180135626, | |
| "grad_norm": 1953.3292817352285, | |
| "learning_rate": 4.695491249384628e-06, | |
| "loss": 0.6659, | |
| "step": 3610 | |
| }, | |
| { | |
| "epoch": 0.18457145770662317, | |
| "grad_norm": 6.83565071579364, | |
| "learning_rate": 4.693513614328676e-06, | |
| "loss": 0.6841, | |
| "step": 3620 | |
| }, | |
| { | |
| "epoch": 0.1850813236118901, | |
| "grad_norm": 12.146874651168142, | |
| "learning_rate": 4.691529997411873e-06, | |
| "loss": 0.738, | |
| "step": 3630 | |
| }, | |
| { | |
| "epoch": 0.185591189517157, | |
| "grad_norm": 22.745683956578276, | |
| "learning_rate": 4.689540404043677e-06, | |
| "loss": 0.6929, | |
| "step": 3640 | |
| }, | |
| { | |
| "epoch": 0.1861010554224239, | |
| "grad_norm": 11.50571746380886, | |
| "learning_rate": 4.687544839649842e-06, | |
| "loss": 0.6619, | |
| "step": 3650 | |
| }, | |
| { | |
| "epoch": 0.1866109213276908, | |
| "grad_norm": 13.388716539489481, | |
| "learning_rate": 4.685543309672411e-06, | |
| "loss": 0.6279, | |
| "step": 3660 | |
| }, | |
| { | |
| "epoch": 0.18712078723295772, | |
| "grad_norm": 12.157108250773055, | |
| "learning_rate": 4.683535819569691e-06, | |
| "loss": 0.705, | |
| "step": 3670 | |
| }, | |
| { | |
| "epoch": 0.18763065313822463, | |
| "grad_norm": 12.180479581675126, | |
| "learning_rate": 4.681522374816244e-06, | |
| "loss": 0.6288, | |
| "step": 3680 | |
| }, | |
| { | |
| "epoch": 0.18814051904349155, | |
| "grad_norm": 16.82193055581822, | |
| "learning_rate": 4.679502980902871e-06, | |
| "loss": 0.6532, | |
| "step": 3690 | |
| }, | |
| { | |
| "epoch": 0.18865038494875846, | |
| "grad_norm": 16.649758754627584, | |
| "learning_rate": 4.6774776433365965e-06, | |
| "loss": 0.5991, | |
| "step": 3700 | |
| }, | |
| { | |
| "epoch": 0.18916025085402538, | |
| "grad_norm": 9.18637457024438, | |
| "learning_rate": 4.6754463676406545e-06, | |
| "loss": 0.6731, | |
| "step": 3710 | |
| }, | |
| { | |
| "epoch": 0.1896701167592923, | |
| "grad_norm": 36.51851079033295, | |
| "learning_rate": 4.6734091593544705e-06, | |
| "loss": 0.6507, | |
| "step": 3720 | |
| }, | |
| { | |
| "epoch": 0.1901799826645592, | |
| "grad_norm": 23.088525840764767, | |
| "learning_rate": 4.671366024033651e-06, | |
| "loss": 0.6136, | |
| "step": 3730 | |
| }, | |
| { | |
| "epoch": 0.19068984856982613, | |
| "grad_norm": 11.393600238288462, | |
| "learning_rate": 4.669316967249966e-06, | |
| "loss": 0.721, | |
| "step": 3740 | |
| }, | |
| { | |
| "epoch": 0.19119971447509304, | |
| "grad_norm": 23.092054098850237, | |
| "learning_rate": 4.667261994591331e-06, | |
| "loss": 0.6668, | |
| "step": 3750 | |
| }, | |
| { | |
| "epoch": 0.19170958038035996, | |
| "grad_norm": 9.043611501137145, | |
| "learning_rate": 4.665201111661797e-06, | |
| "loss": 0.7159, | |
| "step": 3760 | |
| }, | |
| { | |
| "epoch": 0.19221944628562687, | |
| "grad_norm": 6.873039056267214, | |
| "learning_rate": 4.663134324081533e-06, | |
| "loss": 0.6576, | |
| "step": 3770 | |
| }, | |
| { | |
| "epoch": 0.1927293121908938, | |
| "grad_norm": 18.365114545851974, | |
| "learning_rate": 4.6610616374868066e-06, | |
| "loss": 0.7423, | |
| "step": 3780 | |
| }, | |
| { | |
| "epoch": 0.1932391780961607, | |
| "grad_norm": 7.330732108249972, | |
| "learning_rate": 4.658983057529978e-06, | |
| "loss": 0.552, | |
| "step": 3790 | |
| }, | |
| { | |
| "epoch": 0.19374904400142762, | |
| "grad_norm": 44.58519185418861, | |
| "learning_rate": 4.656898589879475e-06, | |
| "loss": 0.6274, | |
| "step": 3800 | |
| }, | |
| { | |
| "epoch": 0.19425890990669453, | |
| "grad_norm": 10.959535571320709, | |
| "learning_rate": 4.654808240219782e-06, | |
| "loss": 0.678, | |
| "step": 3810 | |
| }, | |
| { | |
| "epoch": 0.19476877581196145, | |
| "grad_norm": 9.443844486690475, | |
| "learning_rate": 4.652712014251426e-06, | |
| "loss": 0.6216, | |
| "step": 3820 | |
| }, | |
| { | |
| "epoch": 0.19527864171722836, | |
| "grad_norm": 29.311966069946706, | |
| "learning_rate": 4.650609917690957e-06, | |
| "loss": 0.5818, | |
| "step": 3830 | |
| }, | |
| { | |
| "epoch": 0.19578850762249528, | |
| "grad_norm": 10.093424824180262, | |
| "learning_rate": 4.648501956270936e-06, | |
| "loss": 0.687, | |
| "step": 3840 | |
| }, | |
| { | |
| "epoch": 0.1962983735277622, | |
| "grad_norm": 10.636454112575786, | |
| "learning_rate": 4.646388135739915e-06, | |
| "loss": 0.6343, | |
| "step": 3850 | |
| }, | |
| { | |
| "epoch": 0.1968082394330291, | |
| "grad_norm": 23.62252499935202, | |
| "learning_rate": 4.64426846186243e-06, | |
| "loss": 0.5967, | |
| "step": 3860 | |
| }, | |
| { | |
| "epoch": 0.19731810533829602, | |
| "grad_norm": 5.867497909475541, | |
| "learning_rate": 4.642142940418973e-06, | |
| "loss": 0.7358, | |
| "step": 3870 | |
| }, | |
| { | |
| "epoch": 0.19782797124356294, | |
| "grad_norm": 16.58867776820823, | |
| "learning_rate": 4.640011577205987e-06, | |
| "loss": 0.6752, | |
| "step": 3880 | |
| }, | |
| { | |
| "epoch": 0.19833783714882985, | |
| "grad_norm": 10.374371049382303, | |
| "learning_rate": 4.637874378035845e-06, | |
| "loss": 0.691, | |
| "step": 3890 | |
| }, | |
| { | |
| "epoch": 0.19884770305409677, | |
| "grad_norm": 7.507499494510421, | |
| "learning_rate": 4.635731348736832e-06, | |
| "loss": 0.6807, | |
| "step": 3900 | |
| }, | |
| { | |
| "epoch": 0.19935756895936368, | |
| "grad_norm": 52.904799589576946, | |
| "learning_rate": 4.633582495153137e-06, | |
| "loss": 0.6904, | |
| "step": 3910 | |
| }, | |
| { | |
| "epoch": 0.1998674348646306, | |
| "grad_norm": 8.485499924274286, | |
| "learning_rate": 4.631427823144829e-06, | |
| "loss": 0.6324, | |
| "step": 3920 | |
| }, | |
| { | |
| "epoch": 0.20037730076989752, | |
| "grad_norm": 11.64397173810255, | |
| "learning_rate": 4.6292673385878466e-06, | |
| "loss": 0.6987, | |
| "step": 3930 | |
| }, | |
| { | |
| "epoch": 0.20088716667516443, | |
| "grad_norm": 19.05962502430355, | |
| "learning_rate": 4.6271010473739754e-06, | |
| "loss": 0.766, | |
| "step": 3940 | |
| }, | |
| { | |
| "epoch": 0.20139703258043135, | |
| "grad_norm": 12.18391244127119, | |
| "learning_rate": 4.624928955410841e-06, | |
| "loss": 0.6047, | |
| "step": 3950 | |
| }, | |
| { | |
| "epoch": 0.20190689848569826, | |
| "grad_norm": 8.895134290225426, | |
| "learning_rate": 4.622751068621886e-06, | |
| "loss": 0.6302, | |
| "step": 3960 | |
| }, | |
| { | |
| "epoch": 0.20241676439096518, | |
| "grad_norm": 30.39435655960845, | |
| "learning_rate": 4.620567392946355e-06, | |
| "loss": 0.644, | |
| "step": 3970 | |
| }, | |
| { | |
| "epoch": 0.2029266302962321, | |
| "grad_norm": 7.3427483079553175, | |
| "learning_rate": 4.618377934339279e-06, | |
| "loss": 0.6704, | |
| "step": 3980 | |
| }, | |
| { | |
| "epoch": 0.203436496201499, | |
| "grad_norm": 18.877200527683843, | |
| "learning_rate": 4.616182698771463e-06, | |
| "loss": 0.6056, | |
| "step": 3990 | |
| }, | |
| { | |
| "epoch": 0.20394636210676592, | |
| "grad_norm": 11.482887115059379, | |
| "learning_rate": 4.613981692229462e-06, | |
| "loss": 0.6122, | |
| "step": 4000 | |
| }, | |
| { | |
| "epoch": 0.20445622801203284, | |
| "grad_norm": 7.986812774045366, | |
| "learning_rate": 4.611774920715572e-06, | |
| "loss": 0.6509, | |
| "step": 4010 | |
| }, | |
| { | |
| "epoch": 0.20496609391729975, | |
| "grad_norm": 10.669994016142317, | |
| "learning_rate": 4.609562390247808e-06, | |
| "loss": 0.652, | |
| "step": 4020 | |
| }, | |
| { | |
| "epoch": 0.20547595982256667, | |
| "grad_norm": 8.22738123199278, | |
| "learning_rate": 4.607344106859891e-06, | |
| "loss": 0.6581, | |
| "step": 4030 | |
| }, | |
| { | |
| "epoch": 0.20598582572783358, | |
| "grad_norm": 8.962593320055948, | |
| "learning_rate": 4.605120076601231e-06, | |
| "loss": 0.6999, | |
| "step": 4040 | |
| }, | |
| { | |
| "epoch": 0.2064956916331005, | |
| "grad_norm": 14.157563821046, | |
| "learning_rate": 4.602890305536911e-06, | |
| "loss": 0.6804, | |
| "step": 4050 | |
| }, | |
| { | |
| "epoch": 0.2070055575383674, | |
| "grad_norm": 10.129186014658032, | |
| "learning_rate": 4.6006547997476666e-06, | |
| "loss": 0.706, | |
| "step": 4060 | |
| }, | |
| { | |
| "epoch": 0.20751542344363433, | |
| "grad_norm": 7.004517518261975, | |
| "learning_rate": 4.598413565329876e-06, | |
| "loss": 0.6189, | |
| "step": 4070 | |
| }, | |
| { | |
| "epoch": 0.20802528934890124, | |
| "grad_norm": 9.96490109965192, | |
| "learning_rate": 4.596166608395535e-06, | |
| "loss": 0.6288, | |
| "step": 4080 | |
| }, | |
| { | |
| "epoch": 0.20853515525416816, | |
| "grad_norm": 122.00941188724634, | |
| "learning_rate": 4.593913935072251e-06, | |
| "loss": 0.6688, | |
| "step": 4090 | |
| }, | |
| { | |
| "epoch": 0.20904502115943507, | |
| "grad_norm": 5.837271689245049, | |
| "learning_rate": 4.591655551503215e-06, | |
| "loss": 0.6697, | |
| "step": 4100 | |
| }, | |
| { | |
| "epoch": 0.209554887064702, | |
| "grad_norm": 7.591682709402776, | |
| "learning_rate": 4.589391463847194e-06, | |
| "loss": 0.7342, | |
| "step": 4110 | |
| }, | |
| { | |
| "epoch": 0.2100647529699689, | |
| "grad_norm": 8.603671132021212, | |
| "learning_rate": 4.58712167827851e-06, | |
| "loss": 0.6465, | |
| "step": 4120 | |
| }, | |
| { | |
| "epoch": 0.21057461887523582, | |
| "grad_norm": 11.385898364821118, | |
| "learning_rate": 4.584846200987022e-06, | |
| "loss": 0.669, | |
| "step": 4130 | |
| }, | |
| { | |
| "epoch": 0.21108448478050273, | |
| "grad_norm": 13.24419242506655, | |
| "learning_rate": 4.582565038178109e-06, | |
| "loss": 0.6474, | |
| "step": 4140 | |
| }, | |
| { | |
| "epoch": 0.21159435068576965, | |
| "grad_norm": 12.744833317770105, | |
| "learning_rate": 4.58027819607266e-06, | |
| "loss": 0.665, | |
| "step": 4150 | |
| }, | |
| { | |
| "epoch": 0.21210421659103657, | |
| "grad_norm": 8.386511453662118, | |
| "learning_rate": 4.577985680907049e-06, | |
| "loss": 0.6657, | |
| "step": 4160 | |
| }, | |
| { | |
| "epoch": 0.21261408249630348, | |
| "grad_norm": 16.427408003447585, | |
| "learning_rate": 4.575687498933119e-06, | |
| "loss": 0.6532, | |
| "step": 4170 | |
| }, | |
| { | |
| "epoch": 0.2131239484015704, | |
| "grad_norm": 7.603221921574408, | |
| "learning_rate": 4.573383656418169e-06, | |
| "loss": 0.707, | |
| "step": 4180 | |
| }, | |
| { | |
| "epoch": 0.2136338143068373, | |
| "grad_norm": 10.770116825808305, | |
| "learning_rate": 4.571074159644936e-06, | |
| "loss": 0.6435, | |
| "step": 4190 | |
| }, | |
| { | |
| "epoch": 0.21414368021210423, | |
| "grad_norm": 26.871687440222775, | |
| "learning_rate": 4.568759014911573e-06, | |
| "loss": 0.621, | |
| "step": 4200 | |
| }, | |
| { | |
| "epoch": 0.21465354611737114, | |
| "grad_norm": 6.392324082965119, | |
| "learning_rate": 4.566438228531638e-06, | |
| "loss": 0.6996, | |
| "step": 4210 | |
| }, | |
| { | |
| "epoch": 0.21516341202263806, | |
| "grad_norm": 18.486331848454444, | |
| "learning_rate": 4.564111806834073e-06, | |
| "loss": 0.5856, | |
| "step": 4220 | |
| }, | |
| { | |
| "epoch": 0.21567327792790497, | |
| "grad_norm": 7.730881101997795, | |
| "learning_rate": 4.5617797561631885e-06, | |
| "loss": 0.7293, | |
| "step": 4230 | |
| }, | |
| { | |
| "epoch": 0.2161831438331719, | |
| "grad_norm": 13.315083409768397, | |
| "learning_rate": 4.559442082878645e-06, | |
| "loss": 0.6832, | |
| "step": 4240 | |
| }, | |
| { | |
| "epoch": 0.2166930097384388, | |
| "grad_norm": 36.886853297901304, | |
| "learning_rate": 4.557098793355436e-06, | |
| "loss": 0.5822, | |
| "step": 4250 | |
| }, | |
| { | |
| "epoch": 0.21720287564370572, | |
| "grad_norm": 11.837773372638708, | |
| "learning_rate": 4.554749893983874e-06, | |
| "loss": 0.5926, | |
| "step": 4260 | |
| }, | |
| { | |
| "epoch": 0.21771274154897263, | |
| "grad_norm": 17.923474859429835, | |
| "learning_rate": 4.552395391169564e-06, | |
| "loss": 0.6853, | |
| "step": 4270 | |
| }, | |
| { | |
| "epoch": 0.21822260745423955, | |
| "grad_norm": 14.002965124115262, | |
| "learning_rate": 4.550035291333398e-06, | |
| "loss": 0.5624, | |
| "step": 4280 | |
| }, | |
| { | |
| "epoch": 0.21873247335950646, | |
| "grad_norm": 6.554152865278325, | |
| "learning_rate": 4.547669600911527e-06, | |
| "loss": 0.7063, | |
| "step": 4290 | |
| }, | |
| { | |
| "epoch": 0.21924233926477338, | |
| "grad_norm": 14.860982258088095, | |
| "learning_rate": 4.545298326355351e-06, | |
| "loss": 0.634, | |
| "step": 4300 | |
| }, | |
| { | |
| "epoch": 0.21975220517004027, | |
| "grad_norm": 8.1627141188177, | |
| "learning_rate": 4.542921474131497e-06, | |
| "loss": 0.6895, | |
| "step": 4310 | |
| }, | |
| { | |
| "epoch": 0.22026207107530718, | |
| "grad_norm": 6.129164499117784, | |
| "learning_rate": 4.540539050721801e-06, | |
| "loss": 0.6775, | |
| "step": 4320 | |
| }, | |
| { | |
| "epoch": 0.2207719369805741, | |
| "grad_norm": 9.514850934457902, | |
| "learning_rate": 4.538151062623296e-06, | |
| "loss": 0.5793, | |
| "step": 4330 | |
| }, | |
| { | |
| "epoch": 0.221281802885841, | |
| "grad_norm": 6.705922905219092, | |
| "learning_rate": 4.535757516348186e-06, | |
| "loss": 0.6321, | |
| "step": 4340 | |
| }, | |
| { | |
| "epoch": 0.22179166879110793, | |
| "grad_norm": 8.845979013568288, | |
| "learning_rate": 4.533358418423837e-06, | |
| "loss": 0.6229, | |
| "step": 4350 | |
| }, | |
| { | |
| "epoch": 0.22230153469637484, | |
| "grad_norm": 25.37797825124637, | |
| "learning_rate": 4.530953775392749e-06, | |
| "loss": 0.6514, | |
| "step": 4360 | |
| }, | |
| { | |
| "epoch": 0.22281140060164176, | |
| "grad_norm": 14.002012381654973, | |
| "learning_rate": 4.52854359381255e-06, | |
| "loss": 0.6388, | |
| "step": 4370 | |
| }, | |
| { | |
| "epoch": 0.22332126650690867, | |
| "grad_norm": 7.685017242976436, | |
| "learning_rate": 4.5261278802559675e-06, | |
| "loss": 0.6719, | |
| "step": 4380 | |
| }, | |
| { | |
| "epoch": 0.2238311324121756, | |
| "grad_norm": 8.792948491297658, | |
| "learning_rate": 4.523706641310817e-06, | |
| "loss": 0.629, | |
| "step": 4390 | |
| }, | |
| { | |
| "epoch": 0.2243409983174425, | |
| "grad_norm": 10.547436698801492, | |
| "learning_rate": 4.521279883579982e-06, | |
| "loss": 0.5841, | |
| "step": 4400 | |
| }, | |
| { | |
| "epoch": 0.22485086422270942, | |
| "grad_norm": 11.439199546981827, | |
| "learning_rate": 4.518847613681397e-06, | |
| "loss": 0.7102, | |
| "step": 4410 | |
| }, | |
| { | |
| "epoch": 0.22536073012797633, | |
| "grad_norm": 5.762383327340069, | |
| "learning_rate": 4.516409838248026e-06, | |
| "loss": 0.6329, | |
| "step": 4420 | |
| }, | |
| { | |
| "epoch": 0.22587059603324325, | |
| "grad_norm": 7.5671046403384965, | |
| "learning_rate": 4.513966563927849e-06, | |
| "loss": 0.635, | |
| "step": 4430 | |
| }, | |
| { | |
| "epoch": 0.22638046193851016, | |
| "grad_norm": 15.470629027815528, | |
| "learning_rate": 4.511517797383841e-06, | |
| "loss": 0.6468, | |
| "step": 4440 | |
| }, | |
| { | |
| "epoch": 0.22689032784377708, | |
| "grad_norm": 20.905186400076417, | |
| "learning_rate": 4.509063545293954e-06, | |
| "loss": 0.6701, | |
| "step": 4450 | |
| }, | |
| { | |
| "epoch": 0.227400193749044, | |
| "grad_norm": 13.868107533971003, | |
| "learning_rate": 4.506603814351103e-06, | |
| "loss": 0.5854, | |
| "step": 4460 | |
| }, | |
| { | |
| "epoch": 0.2279100596543109, | |
| "grad_norm": 16.533672160629905, | |
| "learning_rate": 4.5041386112631394e-06, | |
| "loss": 0.6443, | |
| "step": 4470 | |
| }, | |
| { | |
| "epoch": 0.22841992555957782, | |
| "grad_norm": 9.198456027284987, | |
| "learning_rate": 4.501667942752841e-06, | |
| "loss": 0.5938, | |
| "step": 4480 | |
| }, | |
| { | |
| "epoch": 0.22892979146484474, | |
| "grad_norm": 9.416687423454851, | |
| "learning_rate": 4.499191815557888e-06, | |
| "loss": 0.6675, | |
| "step": 4490 | |
| }, | |
| { | |
| "epoch": 0.22943965737011165, | |
| "grad_norm": 72.40302034695415, | |
| "learning_rate": 4.496710236430848e-06, | |
| "loss": 0.6253, | |
| "step": 4500 | |
| }, | |
| { | |
| "epoch": 0.22994952327537857, | |
| "grad_norm": 8.499089870026229, | |
| "learning_rate": 4.4942232121391565e-06, | |
| "loss": 0.6272, | |
| "step": 4510 | |
| }, | |
| { | |
| "epoch": 0.23045938918064549, | |
| "grad_norm": 38.011204918994885, | |
| "learning_rate": 4.4917307494650975e-06, | |
| "loss": 0.5957, | |
| "step": 4520 | |
| }, | |
| { | |
| "epoch": 0.2309692550859124, | |
| "grad_norm": 15.172657432433963, | |
| "learning_rate": 4.489232855205787e-06, | |
| "loss": 0.6854, | |
| "step": 4530 | |
| }, | |
| { | |
| "epoch": 0.23147912099117932, | |
| "grad_norm": 19.75927037161284, | |
| "learning_rate": 4.4867295361731515e-06, | |
| "loss": 0.6695, | |
| "step": 4540 | |
| }, | |
| { | |
| "epoch": 0.23198898689644623, | |
| "grad_norm": 13.9619388763187, | |
| "learning_rate": 4.484220799193913e-06, | |
| "loss": 0.6195, | |
| "step": 4550 | |
| }, | |
| { | |
| "epoch": 0.23249885280171315, | |
| "grad_norm": 7.928460498876682, | |
| "learning_rate": 4.481706651109567e-06, | |
| "loss": 0.5898, | |
| "step": 4560 | |
| }, | |
| { | |
| "epoch": 0.23300871870698006, | |
| "grad_norm": 8.027758710964898, | |
| "learning_rate": 4.479187098776368e-06, | |
| "loss": 0.6324, | |
| "step": 4570 | |
| }, | |
| { | |
| "epoch": 0.23351858461224698, | |
| "grad_norm": 7.7963017806518, | |
| "learning_rate": 4.476662149065306e-06, | |
| "loss": 0.6704, | |
| "step": 4580 | |
| }, | |
| { | |
| "epoch": 0.2340284505175139, | |
| "grad_norm": 8.785758250481065, | |
| "learning_rate": 4.474131808862089e-06, | |
| "loss": 0.7118, | |
| "step": 4590 | |
| }, | |
| { | |
| "epoch": 0.2345383164227808, | |
| "grad_norm": 372.06481432872613, | |
| "learning_rate": 4.471596085067129e-06, | |
| "loss": 0.6486, | |
| "step": 4600 | |
| }, | |
| { | |
| "epoch": 0.23504818232804772, | |
| "grad_norm": 5.977610469289572, | |
| "learning_rate": 4.469054984595517e-06, | |
| "loss": 0.6187, | |
| "step": 4610 | |
| }, | |
| { | |
| "epoch": 0.23555804823331464, | |
| "grad_norm": 15.41921778689407, | |
| "learning_rate": 4.466508514377006e-06, | |
| "loss": 0.637, | |
| "step": 4620 | |
| }, | |
| { | |
| "epoch": 0.23606791413858155, | |
| "grad_norm": 9.985726301822796, | |
| "learning_rate": 4.463956681355993e-06, | |
| "loss": 0.6474, | |
| "step": 4630 | |
| }, | |
| { | |
| "epoch": 0.23657778004384847, | |
| "grad_norm": 8.63927014722445, | |
| "learning_rate": 4.461399492491502e-06, | |
| "loss": 0.6374, | |
| "step": 4640 | |
| }, | |
| { | |
| "epoch": 0.23708764594911538, | |
| "grad_norm": 10.214411222798754, | |
| "learning_rate": 4.458836954757161e-06, | |
| "loss": 0.6459, | |
| "step": 4650 | |
| }, | |
| { | |
| "epoch": 0.2375975118543823, | |
| "grad_norm": 11.80289354020228, | |
| "learning_rate": 4.456269075141183e-06, | |
| "loss": 0.6386, | |
| "step": 4660 | |
| }, | |
| { | |
| "epoch": 0.2381073777596492, | |
| "grad_norm": 6.858411227021501, | |
| "learning_rate": 4.4536958606463506e-06, | |
| "loss": 0.6957, | |
| "step": 4670 | |
| }, | |
| { | |
| "epoch": 0.23861724366491613, | |
| "grad_norm": 10.1818081988257, | |
| "learning_rate": 4.451117318289996e-06, | |
| "loss": 0.663, | |
| "step": 4680 | |
| }, | |
| { | |
| "epoch": 0.23912710957018304, | |
| "grad_norm": 4.4783201565409945, | |
| "learning_rate": 4.448533455103979e-06, | |
| "loss": 0.6272, | |
| "step": 4690 | |
| }, | |
| { | |
| "epoch": 0.23963697547544996, | |
| "grad_norm": 13.274713770638163, | |
| "learning_rate": 4.445944278134671e-06, | |
| "loss": 0.7047, | |
| "step": 4700 | |
| }, | |
| { | |
| "epoch": 0.24014684138071687, | |
| "grad_norm": 13.898990696837775, | |
| "learning_rate": 4.4433497944429325e-06, | |
| "loss": 0.6115, | |
| "step": 4710 | |
| }, | |
| { | |
| "epoch": 0.2406567072859838, | |
| "grad_norm": 7.610144075363796, | |
| "learning_rate": 4.440750011104098e-06, | |
| "loss": 0.5935, | |
| "step": 4720 | |
| }, | |
| { | |
| "epoch": 0.2411665731912507, | |
| "grad_norm": 7.152607433595776, | |
| "learning_rate": 4.438144935207953e-06, | |
| "loss": 0.5946, | |
| "step": 4730 | |
| }, | |
| { | |
| "epoch": 0.24167643909651762, | |
| "grad_norm": 10.716962757277734, | |
| "learning_rate": 4.435534573858717e-06, | |
| "loss": 0.6335, | |
| "step": 4740 | |
| }, | |
| { | |
| "epoch": 0.24218630500178454, | |
| "grad_norm": 4.684618729043833, | |
| "learning_rate": 4.432918934175023e-06, | |
| "loss": 0.6108, | |
| "step": 4750 | |
| }, | |
| { | |
| "epoch": 0.24269617090705145, | |
| "grad_norm": 8.294094418125644, | |
| "learning_rate": 4.430298023289897e-06, | |
| "loss": 0.6291, | |
| "step": 4760 | |
| }, | |
| { | |
| "epoch": 0.24320603681231837, | |
| "grad_norm": 17.780227528515248, | |
| "learning_rate": 4.427671848350744e-06, | |
| "loss": 0.6159, | |
| "step": 4770 | |
| }, | |
| { | |
| "epoch": 0.24371590271758528, | |
| "grad_norm": 5.890151944674759, | |
| "learning_rate": 4.425040416519319e-06, | |
| "loss": 0.5393, | |
| "step": 4780 | |
| }, | |
| { | |
| "epoch": 0.2442257686228522, | |
| "grad_norm": 7.1241490526375255, | |
| "learning_rate": 4.422403734971718e-06, | |
| "loss": 0.5862, | |
| "step": 4790 | |
| }, | |
| { | |
| "epoch": 0.2447356345281191, | |
| "grad_norm": 7.856600008931303, | |
| "learning_rate": 4.419761810898349e-06, | |
| "loss": 0.6156, | |
| "step": 4800 | |
| }, | |
| { | |
| "epoch": 0.24524550043338603, | |
| "grad_norm": 13.581267616482041, | |
| "learning_rate": 4.4171146515039206e-06, | |
| "loss": 0.6246, | |
| "step": 4810 | |
| }, | |
| { | |
| "epoch": 0.24575536633865294, | |
| "grad_norm": 5.113231100197785, | |
| "learning_rate": 4.414462264007414e-06, | |
| "loss": 0.6797, | |
| "step": 4820 | |
| }, | |
| { | |
| "epoch": 0.24626523224391986, | |
| "grad_norm": 11.231112402680115, | |
| "learning_rate": 4.4118046556420725e-06, | |
| "loss": 0.6074, | |
| "step": 4830 | |
| }, | |
| { | |
| "epoch": 0.24677509814918677, | |
| "grad_norm": 8.052570264788237, | |
| "learning_rate": 4.409141833655375e-06, | |
| "loss": 0.5878, | |
| "step": 4840 | |
| }, | |
| { | |
| "epoch": 0.2472849640544537, | |
| "grad_norm": 6.660041473213604, | |
| "learning_rate": 4.406473805309016e-06, | |
| "loss": 0.6666, | |
| "step": 4850 | |
| }, | |
| { | |
| "epoch": 0.2477948299597206, | |
| "grad_norm": 7.323309513429638, | |
| "learning_rate": 4.403800577878892e-06, | |
| "loss": 0.5889, | |
| "step": 4860 | |
| }, | |
| { | |
| "epoch": 0.24830469586498752, | |
| "grad_norm": 20.81071641709809, | |
| "learning_rate": 4.401122158655076e-06, | |
| "loss": 0.6094, | |
| "step": 4870 | |
| }, | |
| { | |
| "epoch": 0.24881456177025443, | |
| "grad_norm": 10.74009093392373, | |
| "learning_rate": 4.3984385549418e-06, | |
| "loss": 0.6416, | |
| "step": 4880 | |
| }, | |
| { | |
| "epoch": 0.24932442767552135, | |
| "grad_norm": 6.980032132546093, | |
| "learning_rate": 4.395749774057432e-06, | |
| "loss": 0.6539, | |
| "step": 4890 | |
| }, | |
| { | |
| "epoch": 0.24983429358078826, | |
| "grad_norm": 82.35304909424602, | |
| "learning_rate": 4.393055823334461e-06, | |
| "loss": 0.6577, | |
| "step": 4900 | |
| }, | |
| { | |
| "epoch": 0.25034415948605515, | |
| "grad_norm": 68.9873160694173, | |
| "learning_rate": 4.390356710119476e-06, | |
| "loss": 0.6415, | |
| "step": 4910 | |
| }, | |
| { | |
| "epoch": 0.25085402539132207, | |
| "grad_norm": 8.53782382775481, | |
| "learning_rate": 4.38765244177314e-06, | |
| "loss": 0.6141, | |
| "step": 4920 | |
| }, | |
| { | |
| "epoch": 0.251363891296589, | |
| "grad_norm": 10.124372790395688, | |
| "learning_rate": 4.3849430256701765e-06, | |
| "loss": 0.601, | |
| "step": 4930 | |
| }, | |
| { | |
| "epoch": 0.2518737572018559, | |
| "grad_norm": 7.77404112045145, | |
| "learning_rate": 4.38222846919935e-06, | |
| "loss": 0.6295, | |
| "step": 4940 | |
| }, | |
| { | |
| "epoch": 0.2523836231071228, | |
| "grad_norm": 15.321610414707543, | |
| "learning_rate": 4.379508779763438e-06, | |
| "loss": 0.719, | |
| "step": 4950 | |
| }, | |
| { | |
| "epoch": 0.2528934890123897, | |
| "grad_norm": 8.797795586013368, | |
| "learning_rate": 4.376783964779221e-06, | |
| "loss": 0.6559, | |
| "step": 4960 | |
| }, | |
| { | |
| "epoch": 0.25340335491765664, | |
| "grad_norm": 14.17555125096599, | |
| "learning_rate": 4.3740540316774535e-06, | |
| "loss": 0.6231, | |
| "step": 4970 | |
| }, | |
| { | |
| "epoch": 0.25391322082292356, | |
| "grad_norm": 9.755110695106975, | |
| "learning_rate": 4.3713189879028485e-06, | |
| "loss": 0.5748, | |
| "step": 4980 | |
| }, | |
| { | |
| "epoch": 0.2544230867281905, | |
| "grad_norm": 6.830509512931204, | |
| "learning_rate": 4.3685788409140564e-06, | |
| "loss": 0.6683, | |
| "step": 4990 | |
| }, | |
| { | |
| "epoch": 0.2549329526334574, | |
| "grad_norm": 22.277722816391652, | |
| "learning_rate": 4.365833598183645e-06, | |
| "loss": 0.6583, | |
| "step": 5000 | |
| }, | |
| { | |
| "epoch": 0.2554428185387243, | |
| "grad_norm": 8.773826215610796, | |
| "learning_rate": 4.363083267198079e-06, | |
| "loss": 0.6008, | |
| "step": 5010 | |
| }, | |
| { | |
| "epoch": 0.2559526844439912, | |
| "grad_norm": 7.782050291060473, | |
| "learning_rate": 4.360327855457696e-06, | |
| "loss": 0.5959, | |
| "step": 5020 | |
| }, | |
| { | |
| "epoch": 0.25646255034925813, | |
| "grad_norm": 6.610786664149157, | |
| "learning_rate": 4.357567370476693e-06, | |
| "loss": 0.5854, | |
| "step": 5030 | |
| }, | |
| { | |
| "epoch": 0.25697241625452505, | |
| "grad_norm": 9.898788342889924, | |
| "learning_rate": 4.354801819783099e-06, | |
| "loss": 0.5716, | |
| "step": 5040 | |
| }, | |
| { | |
| "epoch": 0.25748228215979196, | |
| "grad_norm": 7.0022547012783445, | |
| "learning_rate": 4.35203121091876e-06, | |
| "loss": 0.5536, | |
| "step": 5050 | |
| }, | |
| { | |
| "epoch": 0.2579921480650589, | |
| "grad_norm": 8.798700085731559, | |
| "learning_rate": 4.349255551439314e-06, | |
| "loss": 0.6537, | |
| "step": 5060 | |
| }, | |
| { | |
| "epoch": 0.2585020139703258, | |
| "grad_norm": 7.284575104853222, | |
| "learning_rate": 4.346474848914174e-06, | |
| "loss": 0.6439, | |
| "step": 5070 | |
| }, | |
| { | |
| "epoch": 0.2590118798755927, | |
| "grad_norm": 12.366489512249572, | |
| "learning_rate": 4.343689110926504e-06, | |
| "loss": 0.6505, | |
| "step": 5080 | |
| }, | |
| { | |
| "epoch": 0.2595217457808596, | |
| "grad_norm": 18.160906934404036, | |
| "learning_rate": 4.340898345073202e-06, | |
| "loss": 0.7467, | |
| "step": 5090 | |
| }, | |
| { | |
| "epoch": 0.26003161168612654, | |
| "grad_norm": 6.074259845271382, | |
| "learning_rate": 4.338102558964876e-06, | |
| "loss": 0.683, | |
| "step": 5100 | |
| }, | |
| { | |
| "epoch": 0.26054147759139346, | |
| "grad_norm": 20.71328456392585, | |
| "learning_rate": 4.335301760225824e-06, | |
| "loss": 0.6004, | |
| "step": 5110 | |
| }, | |
| { | |
| "epoch": 0.26105134349666037, | |
| "grad_norm": 27.825218691728683, | |
| "learning_rate": 4.3324959564940165e-06, | |
| "loss": 0.5665, | |
| "step": 5120 | |
| }, | |
| { | |
| "epoch": 0.2615612094019273, | |
| "grad_norm": 8.576618170922808, | |
| "learning_rate": 4.329685155421069e-06, | |
| "loss": 0.6139, | |
| "step": 5130 | |
| }, | |
| { | |
| "epoch": 0.2620710753071942, | |
| "grad_norm": 5.191563874692266, | |
| "learning_rate": 4.326869364672229e-06, | |
| "loss": 0.6224, | |
| "step": 5140 | |
| }, | |
| { | |
| "epoch": 0.2625809412124611, | |
| "grad_norm": 7.644239450475295, | |
| "learning_rate": 4.324048591926349e-06, | |
| "loss": 0.6289, | |
| "step": 5150 | |
| }, | |
| { | |
| "epoch": 0.26309080711772803, | |
| "grad_norm": 7.166245372410849, | |
| "learning_rate": 4.321222844875869e-06, | |
| "loss": 0.6658, | |
| "step": 5160 | |
| }, | |
| { | |
| "epoch": 0.26360067302299495, | |
| "grad_norm": 5.350234410317429, | |
| "learning_rate": 4.318392131226791e-06, | |
| "loss": 0.6947, | |
| "step": 5170 | |
| }, | |
| { | |
| "epoch": 0.26411053892826186, | |
| "grad_norm": 5.755572274000322, | |
| "learning_rate": 4.315556458698665e-06, | |
| "loss": 0.6358, | |
| "step": 5180 | |
| }, | |
| { | |
| "epoch": 0.2646204048335288, | |
| "grad_norm": 38.17798848705542, | |
| "learning_rate": 4.312715835024565e-06, | |
| "loss": 0.6454, | |
| "step": 5190 | |
| }, | |
| { | |
| "epoch": 0.2651302707387957, | |
| "grad_norm": 11.241404253833712, | |
| "learning_rate": 4.309870267951061e-06, | |
| "loss": 0.7519, | |
| "step": 5200 | |
| }, | |
| { | |
| "epoch": 0.2656401366440626, | |
| "grad_norm": 22.28328130251413, | |
| "learning_rate": 4.30701976523821e-06, | |
| "loss": 0.6471, | |
| "step": 5210 | |
| }, | |
| { | |
| "epoch": 0.2661500025493295, | |
| "grad_norm": 9.842249709147955, | |
| "learning_rate": 4.3041643346595285e-06, | |
| "loss": 0.5849, | |
| "step": 5220 | |
| }, | |
| { | |
| "epoch": 0.26665986845459644, | |
| "grad_norm": 9.637874026517801, | |
| "learning_rate": 4.3013039840019675e-06, | |
| "loss": 0.6153, | |
| "step": 5230 | |
| }, | |
| { | |
| "epoch": 0.26716973435986335, | |
| "grad_norm": 4.617183170224206, | |
| "learning_rate": 4.298438721065899e-06, | |
| "loss": 0.6211, | |
| "step": 5240 | |
| }, | |
| { | |
| "epoch": 0.26767960026513027, | |
| "grad_norm": 10.65168368583943, | |
| "learning_rate": 4.295568553665089e-06, | |
| "loss": 0.6364, | |
| "step": 5250 | |
| }, | |
| { | |
| "epoch": 0.2681894661703972, | |
| "grad_norm": 10.613247539596093, | |
| "learning_rate": 4.292693489626681e-06, | |
| "loss": 0.6323, | |
| "step": 5260 | |
| }, | |
| { | |
| "epoch": 0.2686993320756641, | |
| "grad_norm": 8.49272818687361, | |
| "learning_rate": 4.289813536791168e-06, | |
| "loss": 0.575, | |
| "step": 5270 | |
| }, | |
| { | |
| "epoch": 0.269209197980931, | |
| "grad_norm": 22.6704390447368, | |
| "learning_rate": 4.2869287030123786e-06, | |
| "loss": 0.6592, | |
| "step": 5280 | |
| }, | |
| { | |
| "epoch": 0.26971906388619793, | |
| "grad_norm": 7.121342392959829, | |
| "learning_rate": 4.284038996157451e-06, | |
| "loss": 0.6357, | |
| "step": 5290 | |
| }, | |
| { | |
| "epoch": 0.27022892979146484, | |
| "grad_norm": 24.88598041603986, | |
| "learning_rate": 4.2811444241068115e-06, | |
| "loss": 0.5871, | |
| "step": 5300 | |
| }, | |
| { | |
| "epoch": 0.27073879569673176, | |
| "grad_norm": 5.368482850926287, | |
| "learning_rate": 4.278244994754155e-06, | |
| "loss": 0.5931, | |
| "step": 5310 | |
| }, | |
| { | |
| "epoch": 0.2712486616019987, | |
| "grad_norm": 5.180107118782333, | |
| "learning_rate": 4.275340716006424e-06, | |
| "loss": 0.566, | |
| "step": 5320 | |
| }, | |
| { | |
| "epoch": 0.2717585275072656, | |
| "grad_norm": 9.352302095228602, | |
| "learning_rate": 4.272431595783783e-06, | |
| "loss": 0.6013, | |
| "step": 5330 | |
| }, | |
| { | |
| "epoch": 0.2722683934125325, | |
| "grad_norm": 5.016831399628619, | |
| "learning_rate": 4.269517642019601e-06, | |
| "loss": 0.7052, | |
| "step": 5340 | |
| }, | |
| { | |
| "epoch": 0.2727782593177994, | |
| "grad_norm": 11.577872049295284, | |
| "learning_rate": 4.2665988626604285e-06, | |
| "loss": 0.623, | |
| "step": 5350 | |
| }, | |
| { | |
| "epoch": 0.27328812522306634, | |
| "grad_norm": 12.110900648060454, | |
| "learning_rate": 4.2636752656659745e-06, | |
| "loss": 0.6656, | |
| "step": 5360 | |
| }, | |
| { | |
| "epoch": 0.27379799112833325, | |
| "grad_norm": 6.509298716020258, | |
| "learning_rate": 4.260746859009087e-06, | |
| "loss": 0.621, | |
| "step": 5370 | |
| }, | |
| { | |
| "epoch": 0.27430785703360017, | |
| "grad_norm": 8.948515262505117, | |
| "learning_rate": 4.257813650675732e-06, | |
| "loss": 0.6315, | |
| "step": 5380 | |
| }, | |
| { | |
| "epoch": 0.2748177229388671, | |
| "grad_norm": 6.910625223428903, | |
| "learning_rate": 4.254875648664965e-06, | |
| "loss": 0.6059, | |
| "step": 5390 | |
| }, | |
| { | |
| "epoch": 0.275327588844134, | |
| "grad_norm": 9.995100150449323, | |
| "learning_rate": 4.251932860988921e-06, | |
| "loss": 0.5617, | |
| "step": 5400 | |
| }, | |
| { | |
| "epoch": 0.2758374547494009, | |
| "grad_norm": 8.682632389217074, | |
| "learning_rate": 4.24898529567278e-06, | |
| "loss": 0.6523, | |
| "step": 5410 | |
| }, | |
| { | |
| "epoch": 0.2763473206546678, | |
| "grad_norm": 8.280215194644136, | |
| "learning_rate": 4.246032960754753e-06, | |
| "loss": 0.6414, | |
| "step": 5420 | |
| }, | |
| { | |
| "epoch": 0.27685718655993474, | |
| "grad_norm": 10.10137835136439, | |
| "learning_rate": 4.243075864286059e-06, | |
| "loss": 0.5732, | |
| "step": 5430 | |
| }, | |
| { | |
| "epoch": 0.27736705246520166, | |
| "grad_norm": 10.8290914906575, | |
| "learning_rate": 4.2401140143309e-06, | |
| "loss": 0.5839, | |
| "step": 5440 | |
| }, | |
| { | |
| "epoch": 0.2778769183704686, | |
| "grad_norm": 6.645724769592051, | |
| "learning_rate": 4.237147418966444e-06, | |
| "loss": 0.5774, | |
| "step": 5450 | |
| }, | |
| { | |
| "epoch": 0.2783867842757355, | |
| "grad_norm": 72.81404017838632, | |
| "learning_rate": 4.234176086282797e-06, | |
| "loss": 0.5864, | |
| "step": 5460 | |
| }, | |
| { | |
| "epoch": 0.2788966501810024, | |
| "grad_norm": 5.314994274785593, | |
| "learning_rate": 4.231200024382987e-06, | |
| "loss": 0.6449, | |
| "step": 5470 | |
| }, | |
| { | |
| "epoch": 0.2794065160862693, | |
| "grad_norm": 31.29101037439269, | |
| "learning_rate": 4.228219241382936e-06, | |
| "loss": 0.5623, | |
| "step": 5480 | |
| }, | |
| { | |
| "epoch": 0.27991638199153623, | |
| "grad_norm": 25.923064324099325, | |
| "learning_rate": 4.2252337454114426e-06, | |
| "loss": 0.6535, | |
| "step": 5490 | |
| }, | |
| { | |
| "epoch": 0.28042624789680315, | |
| "grad_norm": 22.96804242136826, | |
| "learning_rate": 4.2222435446101555e-06, | |
| "loss": 0.6227, | |
| "step": 5500 | |
| }, | |
| { | |
| "epoch": 0.28093611380207006, | |
| "grad_norm": 9.39528145131879, | |
| "learning_rate": 4.219248647133559e-06, | |
| "loss": 0.5898, | |
| "step": 5510 | |
| }, | |
| { | |
| "epoch": 0.281445979707337, | |
| "grad_norm": 14.895333434529494, | |
| "learning_rate": 4.216249061148939e-06, | |
| "loss": 0.6934, | |
| "step": 5520 | |
| }, | |
| { | |
| "epoch": 0.2819558456126039, | |
| "grad_norm": 17.44854189744529, | |
| "learning_rate": 4.213244794836373e-06, | |
| "loss": 0.6571, | |
| "step": 5530 | |
| }, | |
| { | |
| "epoch": 0.2824657115178708, | |
| "grad_norm": 6.045811920782836, | |
| "learning_rate": 4.210235856388699e-06, | |
| "loss": 0.6137, | |
| "step": 5540 | |
| }, | |
| { | |
| "epoch": 0.2829755774231377, | |
| "grad_norm": 6.494290828572964, | |
| "learning_rate": 4.2072222540114965e-06, | |
| "loss": 0.641, | |
| "step": 5550 | |
| }, | |
| { | |
| "epoch": 0.28348544332840464, | |
| "grad_norm": 8.506416783659864, | |
| "learning_rate": 4.204203995923064e-06, | |
| "loss": 0.5892, | |
| "step": 5560 | |
| }, | |
| { | |
| "epoch": 0.28399530923367156, | |
| "grad_norm": 10.074725916553763, | |
| "learning_rate": 4.201181090354396e-06, | |
| "loss": 0.6319, | |
| "step": 5570 | |
| }, | |
| { | |
| "epoch": 0.28450517513893847, | |
| "grad_norm": 12.321876819156738, | |
| "learning_rate": 4.198153545549164e-06, | |
| "loss": 0.5932, | |
| "step": 5580 | |
| }, | |
| { | |
| "epoch": 0.2850150410442054, | |
| "grad_norm": 8.281688914683107, | |
| "learning_rate": 4.195121369763687e-06, | |
| "loss": 0.6197, | |
| "step": 5590 | |
| }, | |
| { | |
| "epoch": 0.2855249069494723, | |
| "grad_norm": 10.71683516397999, | |
| "learning_rate": 4.192084571266915e-06, | |
| "loss": 0.6103, | |
| "step": 5600 | |
| }, | |
| { | |
| "epoch": 0.2860347728547392, | |
| "grad_norm": 5.796597428515388, | |
| "learning_rate": 4.189043158340403e-06, | |
| "loss": 0.6021, | |
| "step": 5610 | |
| }, | |
| { | |
| "epoch": 0.28654463876000613, | |
| "grad_norm": 10.228637952287452, | |
| "learning_rate": 4.185997139278292e-06, | |
| "loss": 0.5835, | |
| "step": 5620 | |
| }, | |
| { | |
| "epoch": 0.28705450466527305, | |
| "grad_norm": 12.590399688941602, | |
| "learning_rate": 4.182946522387283e-06, | |
| "loss": 0.6131, | |
| "step": 5630 | |
| }, | |
| { | |
| "epoch": 0.28756437057053996, | |
| "grad_norm": 11.901456061427117, | |
| "learning_rate": 4.179891315986617e-06, | |
| "loss": 0.5923, | |
| "step": 5640 | |
| }, | |
| { | |
| "epoch": 0.2880742364758069, | |
| "grad_norm": 4.729735791734294, | |
| "learning_rate": 4.1768315284080475e-06, | |
| "loss": 0.5941, | |
| "step": 5650 | |
| }, | |
| { | |
| "epoch": 0.2885841023810738, | |
| "grad_norm": 41.878376647383455, | |
| "learning_rate": 4.173767167995825e-06, | |
| "loss": 0.6108, | |
| "step": 5660 | |
| }, | |
| { | |
| "epoch": 0.2890939682863407, | |
| "grad_norm": 8.57145358299838, | |
| "learning_rate": 4.170698243106668e-06, | |
| "loss": 0.6225, | |
| "step": 5670 | |
| }, | |
| { | |
| "epoch": 0.2896038341916076, | |
| "grad_norm": 57.68706708658153, | |
| "learning_rate": 4.1676247621097445e-06, | |
| "loss": 0.5881, | |
| "step": 5680 | |
| }, | |
| { | |
| "epoch": 0.29011370009687454, | |
| "grad_norm": 14.15474838073681, | |
| "learning_rate": 4.164546733386644e-06, | |
| "loss": 0.6116, | |
| "step": 5690 | |
| }, | |
| { | |
| "epoch": 0.29062356600214145, | |
| "grad_norm": 12.984062057200392, | |
| "learning_rate": 4.161464165331363e-06, | |
| "loss": 0.6717, | |
| "step": 5700 | |
| }, | |
| { | |
| "epoch": 0.29113343190740837, | |
| "grad_norm": 10.624920059707007, | |
| "learning_rate": 4.158377066350273e-06, | |
| "loss": 0.5929, | |
| "step": 5710 | |
| }, | |
| { | |
| "epoch": 0.2916432978126753, | |
| "grad_norm": 30.251451662052922, | |
| "learning_rate": 4.1552854448621025e-06, | |
| "loss": 0.6397, | |
| "step": 5720 | |
| }, | |
| { | |
| "epoch": 0.2921531637179422, | |
| "grad_norm": 28.098357274037717, | |
| "learning_rate": 4.152189309297914e-06, | |
| "loss": 0.5996, | |
| "step": 5730 | |
| }, | |
| { | |
| "epoch": 0.2926630296232091, | |
| "grad_norm": 15.49814773098564, | |
| "learning_rate": 4.14908866810108e-06, | |
| "loss": 0.6222, | |
| "step": 5740 | |
| }, | |
| { | |
| "epoch": 0.29317289552847603, | |
| "grad_norm": 10.342825730948135, | |
| "learning_rate": 4.14598352972726e-06, | |
| "loss": 0.5807, | |
| "step": 5750 | |
| }, | |
| { | |
| "epoch": 0.29368276143374294, | |
| "grad_norm": 6.7777361109333825, | |
| "learning_rate": 4.142873902644378e-06, | |
| "loss": 0.6487, | |
| "step": 5760 | |
| }, | |
| { | |
| "epoch": 0.29419262733900986, | |
| "grad_norm": 55.36024537902725, | |
| "learning_rate": 4.139759795332597e-06, | |
| "loss": 0.6478, | |
| "step": 5770 | |
| }, | |
| { | |
| "epoch": 0.2947024932442768, | |
| "grad_norm": 7.415282240745671, | |
| "learning_rate": 4.1366412162843015e-06, | |
| "loss": 0.5682, | |
| "step": 5780 | |
| }, | |
| { | |
| "epoch": 0.2952123591495437, | |
| "grad_norm": 16.320079976157704, | |
| "learning_rate": 4.133518174004068e-06, | |
| "loss": 0.5993, | |
| "step": 5790 | |
| }, | |
| { | |
| "epoch": 0.2957222250548106, | |
| "grad_norm": 14.575740442925616, | |
| "learning_rate": 4.130390677008644e-06, | |
| "loss": 0.6656, | |
| "step": 5800 | |
| }, | |
| { | |
| "epoch": 0.2962320909600775, | |
| "grad_norm": 23.443828943004323, | |
| "learning_rate": 4.127258733826929e-06, | |
| "loss": 0.5881, | |
| "step": 5810 | |
| }, | |
| { | |
| "epoch": 0.29674195686534444, | |
| "grad_norm": 8.338215537290909, | |
| "learning_rate": 4.1241223529999425e-06, | |
| "loss": 0.6088, | |
| "step": 5820 | |
| }, | |
| { | |
| "epoch": 0.29725182277061135, | |
| "grad_norm": 3.9193795434016976, | |
| "learning_rate": 4.12098154308081e-06, | |
| "loss": 0.6227, | |
| "step": 5830 | |
| }, | |
| { | |
| "epoch": 0.29776168867587827, | |
| "grad_norm": 34.505152074166546, | |
| "learning_rate": 4.117836312634734e-06, | |
| "loss": 0.6023, | |
| "step": 5840 | |
| }, | |
| { | |
| "epoch": 0.2982715545811452, | |
| "grad_norm": 10.358330475944964, | |
| "learning_rate": 4.114686670238971e-06, | |
| "loss": 0.5958, | |
| "step": 5850 | |
| }, | |
| { | |
| "epoch": 0.2987814204864121, | |
| "grad_norm": 10.171023349901475, | |
| "learning_rate": 4.111532624482811e-06, | |
| "loss": 0.6487, | |
| "step": 5860 | |
| }, | |
| { | |
| "epoch": 0.299291286391679, | |
| "grad_norm": 14.306133179044457, | |
| "learning_rate": 4.108374183967549e-06, | |
| "loss": 0.6554, | |
| "step": 5870 | |
| }, | |
| { | |
| "epoch": 0.2998011522969459, | |
| "grad_norm": 15.567524939241922, | |
| "learning_rate": 4.10521135730647e-06, | |
| "loss": 0.6807, | |
| "step": 5880 | |
| }, | |
| { | |
| "epoch": 0.30031101820221284, | |
| "grad_norm": 38.31191365953914, | |
| "learning_rate": 4.1020441531248165e-06, | |
| "loss": 0.5854, | |
| "step": 5890 | |
| }, | |
| { | |
| "epoch": 0.30082088410747976, | |
| "grad_norm": 19.86350978038444, | |
| "learning_rate": 4.09887258005977e-06, | |
| "loss": 0.5814, | |
| "step": 5900 | |
| }, | |
| { | |
| "epoch": 0.3013307500127467, | |
| "grad_norm": 15.358100181159678, | |
| "learning_rate": 4.095696646760425e-06, | |
| "loss": 0.6046, | |
| "step": 5910 | |
| }, | |
| { | |
| "epoch": 0.3018406159180136, | |
| "grad_norm": 15.721665747424774, | |
| "learning_rate": 4.09251636188777e-06, | |
| "loss": 0.656, | |
| "step": 5920 | |
| }, | |
| { | |
| "epoch": 0.3023504818232805, | |
| "grad_norm": 4.81030272554478, | |
| "learning_rate": 4.0893317341146545e-06, | |
| "loss": 0.6183, | |
| "step": 5930 | |
| }, | |
| { | |
| "epoch": 0.3028603477285474, | |
| "grad_norm": 22.743414607219858, | |
| "learning_rate": 4.086142772125779e-06, | |
| "loss": 0.6162, | |
| "step": 5940 | |
| }, | |
| { | |
| "epoch": 0.30337021363381433, | |
| "grad_norm": 339.9381590434665, | |
| "learning_rate": 4.082949484617656e-06, | |
| "loss": 0.5904, | |
| "step": 5950 | |
| }, | |
| { | |
| "epoch": 0.30388007953908125, | |
| "grad_norm": 9.861498398640471, | |
| "learning_rate": 4.079751880298601e-06, | |
| "loss": 0.6336, | |
| "step": 5960 | |
| }, | |
| { | |
| "epoch": 0.3043899454443481, | |
| "grad_norm": 11.112861647101575, | |
| "learning_rate": 4.076549967888697e-06, | |
| "loss": 0.6418, | |
| "step": 5970 | |
| }, | |
| { | |
| "epoch": 0.304899811349615, | |
| "grad_norm": 9.57530538775308, | |
| "learning_rate": 4.073343756119778e-06, | |
| "loss": 0.5854, | |
| "step": 5980 | |
| }, | |
| { | |
| "epoch": 0.30540967725488194, | |
| "grad_norm": 27.895928908583112, | |
| "learning_rate": 4.070133253735399e-06, | |
| "loss": 0.5991, | |
| "step": 5990 | |
| }, | |
| { | |
| "epoch": 0.30591954316014885, | |
| "grad_norm": 5.340717821375443, | |
| "learning_rate": 4.066918469490822e-06, | |
| "loss": 0.6264, | |
| "step": 6000 | |
| }, | |
| { | |
| "epoch": 0.30642940906541577, | |
| "grad_norm": 26.112994712108293, | |
| "learning_rate": 4.063699412152979e-06, | |
| "loss": 0.5982, | |
| "step": 6010 | |
| }, | |
| { | |
| "epoch": 0.3069392749706827, | |
| "grad_norm": 14.195337759571771, | |
| "learning_rate": 4.060476090500462e-06, | |
| "loss": 0.6299, | |
| "step": 6020 | |
| }, | |
| { | |
| "epoch": 0.3074491408759496, | |
| "grad_norm": 5.544117867891527, | |
| "learning_rate": 4.057248513323484e-06, | |
| "loss": 0.6132, | |
| "step": 6030 | |
| }, | |
| { | |
| "epoch": 0.3079590067812165, | |
| "grad_norm": 7.769017752910073, | |
| "learning_rate": 4.054016689423871e-06, | |
| "loss": 0.6122, | |
| "step": 6040 | |
| }, | |
| { | |
| "epoch": 0.30846887268648343, | |
| "grad_norm": 5.614632316438232, | |
| "learning_rate": 4.050780627615025e-06, | |
| "loss": 0.6126, | |
| "step": 6050 | |
| }, | |
| { | |
| "epoch": 0.30897873859175035, | |
| "grad_norm": 25.512901060709, | |
| "learning_rate": 4.047540336721909e-06, | |
| "loss": 0.5766, | |
| "step": 6060 | |
| }, | |
| { | |
| "epoch": 0.30948860449701726, | |
| "grad_norm": 20.54637693246705, | |
| "learning_rate": 4.044295825581013e-06, | |
| "loss": 0.5903, | |
| "step": 6070 | |
| }, | |
| { | |
| "epoch": 0.3099984704022842, | |
| "grad_norm": 4.685213134791145, | |
| "learning_rate": 4.041047103040343e-06, | |
| "loss": 0.59, | |
| "step": 6080 | |
| }, | |
| { | |
| "epoch": 0.3105083363075511, | |
| "grad_norm": 14.698272645628181, | |
| "learning_rate": 4.0377941779593835e-06, | |
| "loss": 0.5881, | |
| "step": 6090 | |
| }, | |
| { | |
| "epoch": 0.311018202212818, | |
| "grad_norm": 11.911269514103715, | |
| "learning_rate": 4.034537059209085e-06, | |
| "loss": 0.5514, | |
| "step": 6100 | |
| }, | |
| { | |
| "epoch": 0.3115280681180849, | |
| "grad_norm": 16.715726537650962, | |
| "learning_rate": 4.03127575567183e-06, | |
| "loss": 0.5983, | |
| "step": 6110 | |
| }, | |
| { | |
| "epoch": 0.31203793402335184, | |
| "grad_norm": 4.360422453535343, | |
| "learning_rate": 4.028010276241416e-06, | |
| "loss": 0.5862, | |
| "step": 6120 | |
| }, | |
| { | |
| "epoch": 0.31254779992861875, | |
| "grad_norm": 3.997756637077826, | |
| "learning_rate": 4.0247406298230285e-06, | |
| "loss": 0.617, | |
| "step": 6130 | |
| }, | |
| { | |
| "epoch": 0.31305766583388567, | |
| "grad_norm": 4.76148406920648, | |
| "learning_rate": 4.021466825333215e-06, | |
| "loss": 0.5919, | |
| "step": 6140 | |
| }, | |
| { | |
| "epoch": 0.3135675317391526, | |
| "grad_norm": 13.942277089049702, | |
| "learning_rate": 4.018188871699861e-06, | |
| "loss": 0.6223, | |
| "step": 6150 | |
| }, | |
| { | |
| "epoch": 0.3140773976444195, | |
| "grad_norm": 13.662148648384237, | |
| "learning_rate": 4.014906777862172e-06, | |
| "loss": 0.6097, | |
| "step": 6160 | |
| }, | |
| { | |
| "epoch": 0.3145872635496864, | |
| "grad_norm": 5.804025458086976, | |
| "learning_rate": 4.01162055277064e-06, | |
| "loss": 0.6503, | |
| "step": 6170 | |
| }, | |
| { | |
| "epoch": 0.31509712945495333, | |
| "grad_norm": 16.82069434398167, | |
| "learning_rate": 4.008330205387024e-06, | |
| "loss": 0.6452, | |
| "step": 6180 | |
| }, | |
| { | |
| "epoch": 0.31560699536022024, | |
| "grad_norm": 16.53797565480657, | |
| "learning_rate": 4.005035744684325e-06, | |
| "loss": 0.5796, | |
| "step": 6190 | |
| }, | |
| { | |
| "epoch": 0.31611686126548716, | |
| "grad_norm": 8.015851023261215, | |
| "learning_rate": 4.0017371796467635e-06, | |
| "loss": 0.6059, | |
| "step": 6200 | |
| }, | |
| { | |
| "epoch": 0.3166267271707541, | |
| "grad_norm": 6.7882106521077805, | |
| "learning_rate": 3.998434519269749e-06, | |
| "loss": 0.5796, | |
| "step": 6210 | |
| }, | |
| { | |
| "epoch": 0.317136593076021, | |
| "grad_norm": 10.623587735462868, | |
| "learning_rate": 3.9951277725598604e-06, | |
| "loss": 0.6933, | |
| "step": 6220 | |
| }, | |
| { | |
| "epoch": 0.3176464589812879, | |
| "grad_norm": 7.334096120730939, | |
| "learning_rate": 3.991816948534823e-06, | |
| "loss": 0.5632, | |
| "step": 6230 | |
| }, | |
| { | |
| "epoch": 0.3181563248865548, | |
| "grad_norm": 16.456732933661677, | |
| "learning_rate": 3.988502056223477e-06, | |
| "loss": 0.5692, | |
| "step": 6240 | |
| }, | |
| { | |
| "epoch": 0.31866619079182174, | |
| "grad_norm": 137.52656755217927, | |
| "learning_rate": 3.98518310466576e-06, | |
| "loss": 0.5884, | |
| "step": 6250 | |
| }, | |
| { | |
| "epoch": 0.31917605669708865, | |
| "grad_norm": 25.065009101683582, | |
| "learning_rate": 3.98186010291268e-06, | |
| "loss": 0.5745, | |
| "step": 6260 | |
| }, | |
| { | |
| "epoch": 0.31968592260235557, | |
| "grad_norm": 8.153973542505792, | |
| "learning_rate": 3.978533060026288e-06, | |
| "loss": 0.6411, | |
| "step": 6270 | |
| }, | |
| { | |
| "epoch": 0.3201957885076225, | |
| "grad_norm": 22.665329895940015, | |
| "learning_rate": 3.975201985079655e-06, | |
| "loss": 0.5821, | |
| "step": 6280 | |
| }, | |
| { | |
| "epoch": 0.3207056544128894, | |
| "grad_norm": 8.578356130956463, | |
| "learning_rate": 3.971866887156851e-06, | |
| "loss": 0.5774, | |
| "step": 6290 | |
| }, | |
| { | |
| "epoch": 0.3212155203181563, | |
| "grad_norm": 10.787741255106576, | |
| "learning_rate": 3.968527775352914e-06, | |
| "loss": 0.6307, | |
| "step": 6300 | |
| }, | |
| { | |
| "epoch": 0.3217253862234232, | |
| "grad_norm": 8.963049497956545, | |
| "learning_rate": 3.965184658773828e-06, | |
| "loss": 0.6402, | |
| "step": 6310 | |
| }, | |
| { | |
| "epoch": 0.32223525212869014, | |
| "grad_norm": 8.266683813731634, | |
| "learning_rate": 3.961837546536501e-06, | |
| "loss": 0.5837, | |
| "step": 6320 | |
| }, | |
| { | |
| "epoch": 0.32274511803395706, | |
| "grad_norm": 9.180307846998755, | |
| "learning_rate": 3.958486447768736e-06, | |
| "loss": 0.5806, | |
| "step": 6330 | |
| }, | |
| { | |
| "epoch": 0.32325498393922397, | |
| "grad_norm": 25.651525690537756, | |
| "learning_rate": 3.955131371609206e-06, | |
| "loss": 0.6398, | |
| "step": 6340 | |
| }, | |
| { | |
| "epoch": 0.3237648498444909, | |
| "grad_norm": 7.1700263175610806, | |
| "learning_rate": 3.951772327207432e-06, | |
| "loss": 0.6487, | |
| "step": 6350 | |
| }, | |
| { | |
| "epoch": 0.3242747157497578, | |
| "grad_norm": 6.599676709961129, | |
| "learning_rate": 3.948409323723756e-06, | |
| "loss": 0.6023, | |
| "step": 6360 | |
| }, | |
| { | |
| "epoch": 0.3247845816550247, | |
| "grad_norm": 20.95163227633846, | |
| "learning_rate": 3.945042370329319e-06, | |
| "loss": 0.5539, | |
| "step": 6370 | |
| }, | |
| { | |
| "epoch": 0.32529444756029163, | |
| "grad_norm": 7.088550847545342, | |
| "learning_rate": 3.9416714762060325e-06, | |
| "loss": 0.6551, | |
| "step": 6380 | |
| }, | |
| { | |
| "epoch": 0.32580431346555855, | |
| "grad_norm": 15.30777318161728, | |
| "learning_rate": 3.938296650546552e-06, | |
| "loss": 0.5995, | |
| "step": 6390 | |
| }, | |
| { | |
| "epoch": 0.32631417937082546, | |
| "grad_norm": 13.542125007259056, | |
| "learning_rate": 3.934917902554257e-06, | |
| "loss": 0.569, | |
| "step": 6400 | |
| }, | |
| { | |
| "epoch": 0.3268240452760924, | |
| "grad_norm": 19.839669107198333, | |
| "learning_rate": 3.931535241443225e-06, | |
| "loss": 0.5796, | |
| "step": 6410 | |
| }, | |
| { | |
| "epoch": 0.3273339111813593, | |
| "grad_norm": 9.379304814298935, | |
| "learning_rate": 3.9281486764382e-06, | |
| "loss": 0.5959, | |
| "step": 6420 | |
| }, | |
| { | |
| "epoch": 0.3278437770866262, | |
| "grad_norm": 4.964025222773356, | |
| "learning_rate": 3.924758216774579e-06, | |
| "loss": 0.6092, | |
| "step": 6430 | |
| }, | |
| { | |
| "epoch": 0.3283536429918931, | |
| "grad_norm": 8.647009136839792, | |
| "learning_rate": 3.921363871698372e-06, | |
| "loss": 0.6594, | |
| "step": 6440 | |
| }, | |
| { | |
| "epoch": 0.32886350889716004, | |
| "grad_norm": 7.0288166471256535, | |
| "learning_rate": 3.917965650466192e-06, | |
| "loss": 0.6147, | |
| "step": 6450 | |
| }, | |
| { | |
| "epoch": 0.32937337480242695, | |
| "grad_norm": 13.463141621042343, | |
| "learning_rate": 3.914563562345218e-06, | |
| "loss": 0.6147, | |
| "step": 6460 | |
| }, | |
| { | |
| "epoch": 0.32988324070769387, | |
| "grad_norm": 10.452423068036081, | |
| "learning_rate": 3.911157616613176e-06, | |
| "loss": 0.6291, | |
| "step": 6470 | |
| }, | |
| { | |
| "epoch": 0.3303931066129608, | |
| "grad_norm": 22.28017669172328, | |
| "learning_rate": 3.9077478225583115e-06, | |
| "loss": 0.58, | |
| "step": 6480 | |
| }, | |
| { | |
| "epoch": 0.3309029725182277, | |
| "grad_norm": 7.096911056626139, | |
| "learning_rate": 3.904334189479366e-06, | |
| "loss": 0.6304, | |
| "step": 6490 | |
| }, | |
| { | |
| "epoch": 0.3314128384234946, | |
| "grad_norm": 7.513660732062853, | |
| "learning_rate": 3.900916726685547e-06, | |
| "loss": 0.5736, | |
| "step": 6500 | |
| }, | |
| { | |
| "epoch": 0.33192270432876153, | |
| "grad_norm": 10.702933677647772, | |
| "learning_rate": 3.897495443496511e-06, | |
| "loss": 0.6478, | |
| "step": 6510 | |
| }, | |
| { | |
| "epoch": 0.33243257023402845, | |
| "grad_norm": 7.712509198957455, | |
| "learning_rate": 3.894070349242328e-06, | |
| "loss": 0.6366, | |
| "step": 6520 | |
| }, | |
| { | |
| "epoch": 0.33294243613929536, | |
| "grad_norm": 12.4801385321312, | |
| "learning_rate": 3.890641453263463e-06, | |
| "loss": 0.6557, | |
| "step": 6530 | |
| }, | |
| { | |
| "epoch": 0.3334523020445623, | |
| "grad_norm": 11.838342547088189, | |
| "learning_rate": 3.887208764910749e-06, | |
| "loss": 0.6099, | |
| "step": 6540 | |
| }, | |
| { | |
| "epoch": 0.3339621679498292, | |
| "grad_norm": 9.848289485775439, | |
| "learning_rate": 3.8837722935453615e-06, | |
| "loss": 0.625, | |
| "step": 6550 | |
| }, | |
| { | |
| "epoch": 0.3344720338550961, | |
| "grad_norm": 20.69381486392122, | |
| "learning_rate": 3.880332048538789e-06, | |
| "loss": 0.5759, | |
| "step": 6560 | |
| }, | |
| { | |
| "epoch": 0.334981899760363, | |
| "grad_norm": 6.137002587152166, | |
| "learning_rate": 3.876888039272818e-06, | |
| "loss": 0.6291, | |
| "step": 6570 | |
| }, | |
| { | |
| "epoch": 0.33549176566562994, | |
| "grad_norm": 8.858939897615745, | |
| "learning_rate": 3.8734402751394925e-06, | |
| "loss": 0.6023, | |
| "step": 6580 | |
| }, | |
| { | |
| "epoch": 0.33600163157089685, | |
| "grad_norm": 5.371446280698542, | |
| "learning_rate": 3.869988765541101e-06, | |
| "loss": 0.5707, | |
| "step": 6590 | |
| }, | |
| { | |
| "epoch": 0.33651149747616377, | |
| "grad_norm": 13.449325689930731, | |
| "learning_rate": 3.866533519890145e-06, | |
| "loss": 0.5676, | |
| "step": 6600 | |
| }, | |
| { | |
| "epoch": 0.3370213633814307, | |
| "grad_norm": 7.58335388392582, | |
| "learning_rate": 3.863074547609314e-06, | |
| "loss": 0.5677, | |
| "step": 6610 | |
| }, | |
| { | |
| "epoch": 0.3375312292866976, | |
| "grad_norm": 6.048763372455439, | |
| "learning_rate": 3.859611858131461e-06, | |
| "loss": 0.6223, | |
| "step": 6620 | |
| }, | |
| { | |
| "epoch": 0.3380410951919645, | |
| "grad_norm": 8.43049059804318, | |
| "learning_rate": 3.8561454608995765e-06, | |
| "loss": 0.6523, | |
| "step": 6630 | |
| }, | |
| { | |
| "epoch": 0.33855096109723143, | |
| "grad_norm": 14.456986021929183, | |
| "learning_rate": 3.852675365366762e-06, | |
| "loss": 0.5357, | |
| "step": 6640 | |
| }, | |
| { | |
| "epoch": 0.33906082700249834, | |
| "grad_norm": 8.698736924699832, | |
| "learning_rate": 3.849201580996201e-06, | |
| "loss": 0.6119, | |
| "step": 6650 | |
| }, | |
| { | |
| "epoch": 0.33957069290776526, | |
| "grad_norm": 12.227061026125032, | |
| "learning_rate": 3.845724117261142e-06, | |
| "loss": 0.6333, | |
| "step": 6660 | |
| }, | |
| { | |
| "epoch": 0.3400805588130322, | |
| "grad_norm": 7.964324332305248, | |
| "learning_rate": 3.8422429836448665e-06, | |
| "loss": 0.632, | |
| "step": 6670 | |
| }, | |
| { | |
| "epoch": 0.3405904247182991, | |
| "grad_norm": 9.02347624995148, | |
| "learning_rate": 3.83875818964066e-06, | |
| "loss": 0.5645, | |
| "step": 6680 | |
| }, | |
| { | |
| "epoch": 0.341100290623566, | |
| "grad_norm": 11.481259913748172, | |
| "learning_rate": 3.835269744751796e-06, | |
| "loss": 0.6003, | |
| "step": 6690 | |
| }, | |
| { | |
| "epoch": 0.3416101565288329, | |
| "grad_norm": 13.058847885580379, | |
| "learning_rate": 3.831777658491497e-06, | |
| "loss": 0.6493, | |
| "step": 6700 | |
| }, | |
| { | |
| "epoch": 0.34212002243409984, | |
| "grad_norm": 36.57172275862477, | |
| "learning_rate": 3.828281940382923e-06, | |
| "loss": 0.6713, | |
| "step": 6710 | |
| }, | |
| { | |
| "epoch": 0.34262988833936675, | |
| "grad_norm": 17.121028748445738, | |
| "learning_rate": 3.824782599959134e-06, | |
| "loss": 0.6546, | |
| "step": 6720 | |
| }, | |
| { | |
| "epoch": 0.34313975424463367, | |
| "grad_norm": 4.667001744714278, | |
| "learning_rate": 3.8212796467630685e-06, | |
| "loss": 0.5635, | |
| "step": 6730 | |
| }, | |
| { | |
| "epoch": 0.3436496201499006, | |
| "grad_norm": 6.6063555790808595, | |
| "learning_rate": 3.8177730903475195e-06, | |
| "loss": 0.5727, | |
| "step": 6740 | |
| }, | |
| { | |
| "epoch": 0.3441594860551675, | |
| "grad_norm": 9.254337862184316, | |
| "learning_rate": 3.8142629402751047e-06, | |
| "loss": 0.6035, | |
| "step": 6750 | |
| }, | |
| { | |
| "epoch": 0.3446693519604344, | |
| "grad_norm": 7.310130343220906, | |
| "learning_rate": 3.8107492061182418e-06, | |
| "loss": 0.6365, | |
| "step": 6760 | |
| }, | |
| { | |
| "epoch": 0.3451792178657013, | |
| "grad_norm": 7.347731976740374, | |
| "learning_rate": 3.8072318974591233e-06, | |
| "loss": 0.5842, | |
| "step": 6770 | |
| }, | |
| { | |
| "epoch": 0.34568908377096824, | |
| "grad_norm": 5.516178192770456, | |
| "learning_rate": 3.803711023889688e-06, | |
| "loss": 0.5975, | |
| "step": 6780 | |
| }, | |
| { | |
| "epoch": 0.34619894967623516, | |
| "grad_norm": 9.877992028377761, | |
| "learning_rate": 3.800186595011599e-06, | |
| "loss": 0.6027, | |
| "step": 6790 | |
| }, | |
| { | |
| "epoch": 0.3467088155815021, | |
| "grad_norm": 8.263217660967262, | |
| "learning_rate": 3.7966586204362137e-06, | |
| "loss": 0.5963, | |
| "step": 6800 | |
| }, | |
| { | |
| "epoch": 0.347218681486769, | |
| "grad_norm": 22.684240498659832, | |
| "learning_rate": 3.793127109784558e-06, | |
| "loss": 0.6034, | |
| "step": 6810 | |
| }, | |
| { | |
| "epoch": 0.3477285473920359, | |
| "grad_norm": 11.932025654487864, | |
| "learning_rate": 3.789592072687302e-06, | |
| "loss": 0.6842, | |
| "step": 6820 | |
| }, | |
| { | |
| "epoch": 0.3482384132973028, | |
| "grad_norm": 5.99767425388713, | |
| "learning_rate": 3.7860535187847326e-06, | |
| "loss": 0.5786, | |
| "step": 6830 | |
| }, | |
| { | |
| "epoch": 0.34874827920256973, | |
| "grad_norm": 11.542318476320958, | |
| "learning_rate": 3.782511457726725e-06, | |
| "loss": 0.6377, | |
| "step": 6840 | |
| }, | |
| { | |
| "epoch": 0.34925814510783665, | |
| "grad_norm": 8.06618580589015, | |
| "learning_rate": 3.7789658991727242e-06, | |
| "loss": 0.5589, | |
| "step": 6850 | |
| }, | |
| { | |
| "epoch": 0.34976801101310356, | |
| "grad_norm": 6.172507472614161, | |
| "learning_rate": 3.775416852791707e-06, | |
| "loss": 0.5888, | |
| "step": 6860 | |
| }, | |
| { | |
| "epoch": 0.3502778769183705, | |
| "grad_norm": 13.480236409716568, | |
| "learning_rate": 3.7718643282621648e-06, | |
| "loss": 0.5631, | |
| "step": 6870 | |
| }, | |
| { | |
| "epoch": 0.3507877428236374, | |
| "grad_norm": 8.245968547125749, | |
| "learning_rate": 3.768308335272075e-06, | |
| "loss": 0.5725, | |
| "step": 6880 | |
| }, | |
| { | |
| "epoch": 0.3512976087289043, | |
| "grad_norm": 5.113284315645673, | |
| "learning_rate": 3.7647488835188705e-06, | |
| "loss": 0.5578, | |
| "step": 6890 | |
| }, | |
| { | |
| "epoch": 0.3518074746341712, | |
| "grad_norm": 21.845139952027658, | |
| "learning_rate": 3.76118598270942e-06, | |
| "loss": 0.6442, | |
| "step": 6900 | |
| }, | |
| { | |
| "epoch": 0.35231734053943814, | |
| "grad_norm": 5.808958140251627, | |
| "learning_rate": 3.7576196425599944e-06, | |
| "loss": 0.5623, | |
| "step": 6910 | |
| }, | |
| { | |
| "epoch": 0.35282720644470505, | |
| "grad_norm": 12.8925779686756, | |
| "learning_rate": 3.7540498727962483e-06, | |
| "loss": 0.617, | |
| "step": 6920 | |
| }, | |
| { | |
| "epoch": 0.35333707234997197, | |
| "grad_norm": 6.351102780495482, | |
| "learning_rate": 3.7504766831531848e-06, | |
| "loss": 0.6059, | |
| "step": 6930 | |
| }, | |
| { | |
| "epoch": 0.3538469382552389, | |
| "grad_norm": 15.781230743083418, | |
| "learning_rate": 3.746900083375137e-06, | |
| "loss": 0.602, | |
| "step": 6940 | |
| }, | |
| { | |
| "epoch": 0.3543568041605058, | |
| "grad_norm": 21.198227388450018, | |
| "learning_rate": 3.7433200832157333e-06, | |
| "loss": 0.7074, | |
| "step": 6950 | |
| }, | |
| { | |
| "epoch": 0.3548666700657727, | |
| "grad_norm": 7.172725885534685, | |
| "learning_rate": 3.7397366924378797e-06, | |
| "loss": 0.6029, | |
| "step": 6960 | |
| }, | |
| { | |
| "epoch": 0.35537653597103963, | |
| "grad_norm": 7.408637085264887, | |
| "learning_rate": 3.736149920813726e-06, | |
| "loss": 0.6019, | |
| "step": 6970 | |
| }, | |
| { | |
| "epoch": 0.35588640187630655, | |
| "grad_norm": 7.798488548214566, | |
| "learning_rate": 3.7325597781246426e-06, | |
| "loss": 0.583, | |
| "step": 6980 | |
| }, | |
| { | |
| "epoch": 0.35639626778157346, | |
| "grad_norm": 9.20496366906781, | |
| "learning_rate": 3.728966274161192e-06, | |
| "loss": 0.554, | |
| "step": 6990 | |
| }, | |
| { | |
| "epoch": 0.3569061336868404, | |
| "grad_norm": 10.6846708940764, | |
| "learning_rate": 3.725369418723106e-06, | |
| "loss": 0.5573, | |
| "step": 7000 | |
| }, | |
| { | |
| "epoch": 0.3574159995921073, | |
| "grad_norm": 6.499329020438048, | |
| "learning_rate": 3.721769221619252e-06, | |
| "loss": 0.5126, | |
| "step": 7010 | |
| }, | |
| { | |
| "epoch": 0.3579258654973742, | |
| "grad_norm": 4.232004126510205, | |
| "learning_rate": 3.718165692667613e-06, | |
| "loss": 0.5866, | |
| "step": 7020 | |
| }, | |
| { | |
| "epoch": 0.3584357314026411, | |
| "grad_norm": 6.522779753834059, | |
| "learning_rate": 3.714558841695258e-06, | |
| "loss": 0.6243, | |
| "step": 7030 | |
| }, | |
| { | |
| "epoch": 0.35894559730790804, | |
| "grad_norm": 7.842308577918714, | |
| "learning_rate": 3.710948678538314e-06, | |
| "loss": 0.5961, | |
| "step": 7040 | |
| }, | |
| { | |
| "epoch": 0.35945546321317495, | |
| "grad_norm": 4.155939637670406, | |
| "learning_rate": 3.7073352130419436e-06, | |
| "loss": 0.6546, | |
| "step": 7050 | |
| }, | |
| { | |
| "epoch": 0.35996532911844187, | |
| "grad_norm": 15.06151436559477, | |
| "learning_rate": 3.703718455060311e-06, | |
| "loss": 0.6156, | |
| "step": 7060 | |
| }, | |
| { | |
| "epoch": 0.3604751950237088, | |
| "grad_norm": 10.46655056093518, | |
| "learning_rate": 3.7000984144565617e-06, | |
| "loss": 0.6271, | |
| "step": 7070 | |
| }, | |
| { | |
| "epoch": 0.3609850609289757, | |
| "grad_norm": 10.530339938427321, | |
| "learning_rate": 3.6964751011027933e-06, | |
| "loss": 0.5611, | |
| "step": 7080 | |
| }, | |
| { | |
| "epoch": 0.3614949268342426, | |
| "grad_norm": 11.596468152577025, | |
| "learning_rate": 3.6928485248800273e-06, | |
| "loss": 0.5413, | |
| "step": 7090 | |
| }, | |
| { | |
| "epoch": 0.36200479273950953, | |
| "grad_norm": 7.438067886146539, | |
| "learning_rate": 3.689218695678184e-06, | |
| "loss": 0.5483, | |
| "step": 7100 | |
| }, | |
| { | |
| "epoch": 0.36251465864477644, | |
| "grad_norm": 5.0417594250753055, | |
| "learning_rate": 3.685585623396055e-06, | |
| "loss": 0.5861, | |
| "step": 7110 | |
| }, | |
| { | |
| "epoch": 0.36302452455004336, | |
| "grad_norm": 6.086227491760784, | |
| "learning_rate": 3.681949317941275e-06, | |
| "loss": 0.5121, | |
| "step": 7120 | |
| }, | |
| { | |
| "epoch": 0.3635343904553103, | |
| "grad_norm": 8.564698930366653, | |
| "learning_rate": 3.6783097892302967e-06, | |
| "loss": 0.6275, | |
| "step": 7130 | |
| }, | |
| { | |
| "epoch": 0.3640442563605772, | |
| "grad_norm": 6.506669188127755, | |
| "learning_rate": 3.6746670471883616e-06, | |
| "loss": 0.5806, | |
| "step": 7140 | |
| }, | |
| { | |
| "epoch": 0.3645541222658441, | |
| "grad_norm": 7.390307218413792, | |
| "learning_rate": 3.671021101749476e-06, | |
| "loss": 0.5798, | |
| "step": 7150 | |
| }, | |
| { | |
| "epoch": 0.365063988171111, | |
| "grad_norm": 10.553770888956274, | |
| "learning_rate": 3.6673719628563808e-06, | |
| "loss": 0.5747, | |
| "step": 7160 | |
| }, | |
| { | |
| "epoch": 0.36557385407637794, | |
| "grad_norm": 24.14877566048891, | |
| "learning_rate": 3.6637196404605257e-06, | |
| "loss": 0.5573, | |
| "step": 7170 | |
| }, | |
| { | |
| "epoch": 0.36608371998164485, | |
| "grad_norm": 13.172173489692812, | |
| "learning_rate": 3.660064144522043e-06, | |
| "loss": 0.5897, | |
| "step": 7180 | |
| }, | |
| { | |
| "epoch": 0.36659358588691177, | |
| "grad_norm": 6.2257304828288955, | |
| "learning_rate": 3.656405485009719e-06, | |
| "loss": 0.5682, | |
| "step": 7190 | |
| }, | |
| { | |
| "epoch": 0.3671034517921787, | |
| "grad_norm": 12.945880033725963, | |
| "learning_rate": 3.652743671900967e-06, | |
| "loss": 0.5714, | |
| "step": 7200 | |
| }, | |
| { | |
| "epoch": 0.3676133176974456, | |
| "grad_norm": 7.236111657688771, | |
| "learning_rate": 3.6490787151817986e-06, | |
| "loss": 0.5455, | |
| "step": 7210 | |
| }, | |
| { | |
| "epoch": 0.3681231836027125, | |
| "grad_norm": 8.565857394457222, | |
| "learning_rate": 3.6454106248468024e-06, | |
| "loss": 0.6042, | |
| "step": 7220 | |
| }, | |
| { | |
| "epoch": 0.3686330495079794, | |
| "grad_norm": 5.938790795920517, | |
| "learning_rate": 3.6417394108991096e-06, | |
| "loss": 0.5757, | |
| "step": 7230 | |
| }, | |
| { | |
| "epoch": 0.36914291541324634, | |
| "grad_norm": 16.58193027443637, | |
| "learning_rate": 3.6380650833503705e-06, | |
| "loss": 0.5971, | |
| "step": 7240 | |
| }, | |
| { | |
| "epoch": 0.36965278131851326, | |
| "grad_norm": 11.450948063607477, | |
| "learning_rate": 3.6343876522207253e-06, | |
| "loss": 0.6397, | |
| "step": 7250 | |
| }, | |
| { | |
| "epoch": 0.3701626472237802, | |
| "grad_norm": 9.41451555929652, | |
| "learning_rate": 3.6307071275387807e-06, | |
| "loss": 0.5914, | |
| "step": 7260 | |
| }, | |
| { | |
| "epoch": 0.3706725131290471, | |
| "grad_norm": 12.429290516145963, | |
| "learning_rate": 3.6270235193415754e-06, | |
| "loss": 0.5679, | |
| "step": 7270 | |
| }, | |
| { | |
| "epoch": 0.371182379034314, | |
| "grad_norm": 13.72537185817539, | |
| "learning_rate": 3.6233368376745616e-06, | |
| "loss": 0.6502, | |
| "step": 7280 | |
| }, | |
| { | |
| "epoch": 0.37169224493958086, | |
| "grad_norm": 9.211876210956733, | |
| "learning_rate": 3.6196470925915705e-06, | |
| "loss": 0.6053, | |
| "step": 7290 | |
| }, | |
| { | |
| "epoch": 0.3722021108448478, | |
| "grad_norm": 6.674342793228114, | |
| "learning_rate": 3.6159542941547883e-06, | |
| "loss": 0.57, | |
| "step": 7300 | |
| }, | |
| { | |
| "epoch": 0.3727119767501147, | |
| "grad_norm": 5.06011137522497, | |
| "learning_rate": 3.6122584524347267e-06, | |
| "loss": 0.6127, | |
| "step": 7310 | |
| }, | |
| { | |
| "epoch": 0.3732218426553816, | |
| "grad_norm": 5.50419488018807, | |
| "learning_rate": 3.608559577510198e-06, | |
| "loss": 0.5693, | |
| "step": 7320 | |
| }, | |
| { | |
| "epoch": 0.3737317085606485, | |
| "grad_norm": 11.98832355243555, | |
| "learning_rate": 3.604857679468285e-06, | |
| "loss": 0.5676, | |
| "step": 7330 | |
| }, | |
| { | |
| "epoch": 0.37424157446591544, | |
| "grad_norm": 25.368564304510052, | |
| "learning_rate": 3.601152768404317e-06, | |
| "loss": 0.5902, | |
| "step": 7340 | |
| }, | |
| { | |
| "epoch": 0.37475144037118235, | |
| "grad_norm": 13.241496574133155, | |
| "learning_rate": 3.597444854421837e-06, | |
| "loss": 0.6013, | |
| "step": 7350 | |
| }, | |
| { | |
| "epoch": 0.37526130627644927, | |
| "grad_norm": 9.605414734958668, | |
| "learning_rate": 3.59373394763258e-06, | |
| "loss": 0.5892, | |
| "step": 7360 | |
| }, | |
| { | |
| "epoch": 0.3757711721817162, | |
| "grad_norm": 10.50506861161194, | |
| "learning_rate": 3.5900200581564403e-06, | |
| "loss": 0.634, | |
| "step": 7370 | |
| }, | |
| { | |
| "epoch": 0.3762810380869831, | |
| "grad_norm": 6.366109630402361, | |
| "learning_rate": 3.586303196121447e-06, | |
| "loss": 0.5825, | |
| "step": 7380 | |
| }, | |
| { | |
| "epoch": 0.37679090399225, | |
| "grad_norm": 6.938308539769015, | |
| "learning_rate": 3.5825833716637364e-06, | |
| "loss": 0.6521, | |
| "step": 7390 | |
| }, | |
| { | |
| "epoch": 0.37730076989751693, | |
| "grad_norm": 26.03998674798817, | |
| "learning_rate": 3.5788605949275237e-06, | |
| "loss": 0.5358, | |
| "step": 7400 | |
| }, | |
| { | |
| "epoch": 0.37781063580278385, | |
| "grad_norm": 11.548585198622813, | |
| "learning_rate": 3.5751348760650722e-06, | |
| "loss": 0.6379, | |
| "step": 7410 | |
| }, | |
| { | |
| "epoch": 0.37832050170805076, | |
| "grad_norm": 5.318555598784323, | |
| "learning_rate": 3.5714062252366723e-06, | |
| "loss": 0.6551, | |
| "step": 7420 | |
| }, | |
| { | |
| "epoch": 0.3788303676133177, | |
| "grad_norm": 5.478810952612762, | |
| "learning_rate": 3.5676746526106084e-06, | |
| "loss": 0.6603, | |
| "step": 7430 | |
| }, | |
| { | |
| "epoch": 0.3793402335185846, | |
| "grad_norm": 9.875012211939511, | |
| "learning_rate": 3.5639401683631314e-06, | |
| "loss": 0.647, | |
| "step": 7440 | |
| }, | |
| { | |
| "epoch": 0.3798500994238515, | |
| "grad_norm": 13.66351116178773, | |
| "learning_rate": 3.5602027826784356e-06, | |
| "loss": 0.6437, | |
| "step": 7450 | |
| }, | |
| { | |
| "epoch": 0.3803599653291184, | |
| "grad_norm": 3.3576915666147977, | |
| "learning_rate": 3.556462505748625e-06, | |
| "loss": 0.5802, | |
| "step": 7460 | |
| }, | |
| { | |
| "epoch": 0.38086983123438534, | |
| "grad_norm": 6.045967456335718, | |
| "learning_rate": 3.5527193477736903e-06, | |
| "loss": 0.5692, | |
| "step": 7470 | |
| }, | |
| { | |
| "epoch": 0.38137969713965225, | |
| "grad_norm": 5.119077954410416, | |
| "learning_rate": 3.548973318961477e-06, | |
| "loss": 0.5656, | |
| "step": 7480 | |
| }, | |
| { | |
| "epoch": 0.38188956304491917, | |
| "grad_norm": 6.179648411829707, | |
| "learning_rate": 3.5452244295276604e-06, | |
| "loss": 0.6491, | |
| "step": 7490 | |
| }, | |
| { | |
| "epoch": 0.3823994289501861, | |
| "grad_norm": 16.80155654981562, | |
| "learning_rate": 3.541472689695718e-06, | |
| "loss": 0.588, | |
| "step": 7500 | |
| }, | |
| { | |
| "epoch": 0.382909294855453, | |
| "grad_norm": 14.227034706280053, | |
| "learning_rate": 3.537718109696899e-06, | |
| "loss": 0.5874, | |
| "step": 7510 | |
| }, | |
| { | |
| "epoch": 0.3834191607607199, | |
| "grad_norm": 6.082648099096069, | |
| "learning_rate": 3.5339606997701975e-06, | |
| "loss": 0.5422, | |
| "step": 7520 | |
| }, | |
| { | |
| "epoch": 0.38392902666598683, | |
| "grad_norm": 22.201688715390624, | |
| "learning_rate": 3.530200470162328e-06, | |
| "loss": 0.6293, | |
| "step": 7530 | |
| }, | |
| { | |
| "epoch": 0.38443889257125374, | |
| "grad_norm": 4.117638443340211, | |
| "learning_rate": 3.5264374311276904e-06, | |
| "loss": 0.5174, | |
| "step": 7540 | |
| }, | |
| { | |
| "epoch": 0.38494875847652066, | |
| "grad_norm": 18.482761872963568, | |
| "learning_rate": 3.5226715929283507e-06, | |
| "loss": 0.618, | |
| "step": 7550 | |
| }, | |
| { | |
| "epoch": 0.3854586243817876, | |
| "grad_norm": 6.392047812297191, | |
| "learning_rate": 3.518902965834003e-06, | |
| "loss": 0.594, | |
| "step": 7560 | |
| }, | |
| { | |
| "epoch": 0.3859684902870545, | |
| "grad_norm": 8.55473811171792, | |
| "learning_rate": 3.5151315601219517e-06, | |
| "loss": 0.5521, | |
| "step": 7570 | |
| }, | |
| { | |
| "epoch": 0.3864783561923214, | |
| "grad_norm": 9.050740419704756, | |
| "learning_rate": 3.5113573860770755e-06, | |
| "loss": 0.582, | |
| "step": 7580 | |
| }, | |
| { | |
| "epoch": 0.3869882220975883, | |
| "grad_norm": 25.688592583626388, | |
| "learning_rate": 3.5075804539918047e-06, | |
| "loss": 0.7061, | |
| "step": 7590 | |
| }, | |
| { | |
| "epoch": 0.38749808800285523, | |
| "grad_norm": 7.353214023143235, | |
| "learning_rate": 3.5038007741660895e-06, | |
| "loss": 0.5931, | |
| "step": 7600 | |
| }, | |
| { | |
| "epoch": 0.38800795390812215, | |
| "grad_norm": 14.032425976466067, | |
| "learning_rate": 3.5000183569073743e-06, | |
| "loss": 0.6448, | |
| "step": 7610 | |
| }, | |
| { | |
| "epoch": 0.38851781981338906, | |
| "grad_norm": 4.8340970822759575, | |
| "learning_rate": 3.4962332125305686e-06, | |
| "loss": 0.5511, | |
| "step": 7620 | |
| }, | |
| { | |
| "epoch": 0.389027685718656, | |
| "grad_norm": 10.049457628084676, | |
| "learning_rate": 3.492445351358018e-06, | |
| "loss": 0.5893, | |
| "step": 7630 | |
| }, | |
| { | |
| "epoch": 0.3895375516239229, | |
| "grad_norm": 6.418594454021383, | |
| "learning_rate": 3.488654783719477e-06, | |
| "loss": 0.5619, | |
| "step": 7640 | |
| }, | |
| { | |
| "epoch": 0.3900474175291898, | |
| "grad_norm": 9.276784267515474, | |
| "learning_rate": 3.484861519952083e-06, | |
| "loss": 0.5815, | |
| "step": 7650 | |
| }, | |
| { | |
| "epoch": 0.3905572834344567, | |
| "grad_norm": 16.507690557273907, | |
| "learning_rate": 3.4810655704003237e-06, | |
| "loss": 0.5685, | |
| "step": 7660 | |
| }, | |
| { | |
| "epoch": 0.39106714933972364, | |
| "grad_norm": 9.18372974531889, | |
| "learning_rate": 3.4772669454160113e-06, | |
| "loss": 0.5539, | |
| "step": 7670 | |
| }, | |
| { | |
| "epoch": 0.39157701524499056, | |
| "grad_norm": 9.95648456172559, | |
| "learning_rate": 3.473465655358255e-06, | |
| "loss": 0.5408, | |
| "step": 7680 | |
| }, | |
| { | |
| "epoch": 0.39208688115025747, | |
| "grad_norm": 7.154378316140561, | |
| "learning_rate": 3.469661710593431e-06, | |
| "loss": 0.5849, | |
| "step": 7690 | |
| }, | |
| { | |
| "epoch": 0.3925967470555244, | |
| "grad_norm": 4.296446896204838, | |
| "learning_rate": 3.465855121495156e-06, | |
| "loss": 0.4575, | |
| "step": 7700 | |
| }, | |
| { | |
| "epoch": 0.3931066129607913, | |
| "grad_norm": 11.971727859126235, | |
| "learning_rate": 3.4620458984442564e-06, | |
| "loss": 0.553, | |
| "step": 7710 | |
| }, | |
| { | |
| "epoch": 0.3936164788660582, | |
| "grad_norm": 8.056036290422684, | |
| "learning_rate": 3.4582340518287444e-06, | |
| "loss": 0.5746, | |
| "step": 7720 | |
| }, | |
| { | |
| "epoch": 0.39412634477132513, | |
| "grad_norm": 3.9615637901197744, | |
| "learning_rate": 3.4544195920437834e-06, | |
| "loss": 0.5716, | |
| "step": 7730 | |
| }, | |
| { | |
| "epoch": 0.39463621067659205, | |
| "grad_norm": 10.049166058251805, | |
| "learning_rate": 3.4506025294916656e-06, | |
| "loss": 0.585, | |
| "step": 7740 | |
| }, | |
| { | |
| "epoch": 0.39514607658185896, | |
| "grad_norm": 10.449434153164734, | |
| "learning_rate": 3.44678287458178e-06, | |
| "loss": 0.6304, | |
| "step": 7750 | |
| }, | |
| { | |
| "epoch": 0.3956559424871259, | |
| "grad_norm": 6.927454525401204, | |
| "learning_rate": 3.4429606377305847e-06, | |
| "loss": 0.6219, | |
| "step": 7760 | |
| }, | |
| { | |
| "epoch": 0.3961658083923928, | |
| "grad_norm": 4.870437160417047, | |
| "learning_rate": 3.439135829361581e-06, | |
| "loss": 0.5676, | |
| "step": 7770 | |
| }, | |
| { | |
| "epoch": 0.3966756742976597, | |
| "grad_norm": 17.89544614300908, | |
| "learning_rate": 3.435308459905281e-06, | |
| "loss": 0.6855, | |
| "step": 7780 | |
| }, | |
| { | |
| "epoch": 0.3971855402029266, | |
| "grad_norm": 12.771844660858063, | |
| "learning_rate": 3.4314785397991814e-06, | |
| "loss": 0.5657, | |
| "step": 7790 | |
| }, | |
| { | |
| "epoch": 0.39769540610819354, | |
| "grad_norm": 7.7960650699904805, | |
| "learning_rate": 3.4276460794877343e-06, | |
| "loss": 0.5664, | |
| "step": 7800 | |
| }, | |
| { | |
| "epoch": 0.39820527201346045, | |
| "grad_norm": 7.228542637831989, | |
| "learning_rate": 3.4238110894223205e-06, | |
| "loss": 0.6863, | |
| "step": 7810 | |
| }, | |
| { | |
| "epoch": 0.39871513791872737, | |
| "grad_norm": 7.760845660983971, | |
| "learning_rate": 3.419973580061218e-06, | |
| "loss": 0.5669, | |
| "step": 7820 | |
| }, | |
| { | |
| "epoch": 0.3992250038239943, | |
| "grad_norm": 12.392495438781795, | |
| "learning_rate": 3.4161335618695774e-06, | |
| "loss": 0.6402, | |
| "step": 7830 | |
| }, | |
| { | |
| "epoch": 0.3997348697292612, | |
| "grad_norm": 26.611025449613667, | |
| "learning_rate": 3.4122910453193885e-06, | |
| "loss": 0.5825, | |
| "step": 7840 | |
| }, | |
| { | |
| "epoch": 0.4002447356345281, | |
| "grad_norm": 18.091448185422944, | |
| "learning_rate": 3.4084460408894553e-06, | |
| "loss": 0.6152, | |
| "step": 7850 | |
| }, | |
| { | |
| "epoch": 0.40075460153979503, | |
| "grad_norm": 15.72425633199789, | |
| "learning_rate": 3.4045985590653667e-06, | |
| "loss": 0.611, | |
| "step": 7860 | |
| }, | |
| { | |
| "epoch": 0.40126446744506195, | |
| "grad_norm": 10.798561704841136, | |
| "learning_rate": 3.4007486103394678e-06, | |
| "loss": 0.6159, | |
| "step": 7870 | |
| }, | |
| { | |
| "epoch": 0.40177433335032886, | |
| "grad_norm": 3.307220226809657, | |
| "learning_rate": 3.3968962052108288e-06, | |
| "loss": 0.5643, | |
| "step": 7880 | |
| }, | |
| { | |
| "epoch": 0.4022841992555958, | |
| "grad_norm": 13.995794219992154, | |
| "learning_rate": 3.3930413541852235e-06, | |
| "loss": 0.6081, | |
| "step": 7890 | |
| }, | |
| { | |
| "epoch": 0.4027940651608627, | |
| "grad_norm": 7.517626739286846, | |
| "learning_rate": 3.389184067775091e-06, | |
| "loss": 0.5687, | |
| "step": 7900 | |
| }, | |
| { | |
| "epoch": 0.4033039310661296, | |
| "grad_norm": 8.606794051353527, | |
| "learning_rate": 3.385324356499513e-06, | |
| "loss": 0.5603, | |
| "step": 7910 | |
| }, | |
| { | |
| "epoch": 0.4038137969713965, | |
| "grad_norm": 9.684832646327461, | |
| "learning_rate": 3.381462230884187e-06, | |
| "loss": 0.5539, | |
| "step": 7920 | |
| }, | |
| { | |
| "epoch": 0.40432366287666344, | |
| "grad_norm": 4.1687295402302444, | |
| "learning_rate": 3.377597701461391e-06, | |
| "loss": 0.538, | |
| "step": 7930 | |
| }, | |
| { | |
| "epoch": 0.40483352878193035, | |
| "grad_norm": 27.225147980543973, | |
| "learning_rate": 3.3737307787699593e-06, | |
| "loss": 0.5857, | |
| "step": 7940 | |
| }, | |
| { | |
| "epoch": 0.40534339468719727, | |
| "grad_norm": 4.638533674818879, | |
| "learning_rate": 3.3698614733552537e-06, | |
| "loss": 0.5602, | |
| "step": 7950 | |
| }, | |
| { | |
| "epoch": 0.4058532605924642, | |
| "grad_norm": 10.400432513347486, | |
| "learning_rate": 3.3659897957691334e-06, | |
| "loss": 0.6512, | |
| "step": 7960 | |
| }, | |
| { | |
| "epoch": 0.4063631264977311, | |
| "grad_norm": 5.130756363065307, | |
| "learning_rate": 3.3621157565699265e-06, | |
| "loss": 0.5803, | |
| "step": 7970 | |
| }, | |
| { | |
| "epoch": 0.406872992402998, | |
| "grad_norm": 7.271116073711001, | |
| "learning_rate": 3.3582393663224012e-06, | |
| "loss": 0.5683, | |
| "step": 7980 | |
| }, | |
| { | |
| "epoch": 0.40738285830826493, | |
| "grad_norm": 7.359012484464728, | |
| "learning_rate": 3.3543606355977377e-06, | |
| "loss": 0.5638, | |
| "step": 7990 | |
| }, | |
| { | |
| "epoch": 0.40789272421353184, | |
| "grad_norm": 7.415959535609534, | |
| "learning_rate": 3.350479574973498e-06, | |
| "loss": 0.6123, | |
| "step": 8000 | |
| }, | |
| { | |
| "epoch": 0.40840259011879876, | |
| "grad_norm": 7.811269462367596, | |
| "learning_rate": 3.3465961950335994e-06, | |
| "loss": 0.5595, | |
| "step": 8010 | |
| }, | |
| { | |
| "epoch": 0.4089124560240657, | |
| "grad_norm": 7.989979255664991, | |
| "learning_rate": 3.342710506368282e-06, | |
| "loss": 0.5159, | |
| "step": 8020 | |
| }, | |
| { | |
| "epoch": 0.4094223219293326, | |
| "grad_norm": 7.869848589658554, | |
| "learning_rate": 3.3388225195740844e-06, | |
| "loss": 0.5517, | |
| "step": 8030 | |
| }, | |
| { | |
| "epoch": 0.4099321878345995, | |
| "grad_norm": 13.414951394387845, | |
| "learning_rate": 3.33493224525381e-06, | |
| "loss": 0.5044, | |
| "step": 8040 | |
| }, | |
| { | |
| "epoch": 0.4104420537398664, | |
| "grad_norm": 7.250013552370468, | |
| "learning_rate": 3.3310396940165025e-06, | |
| "loss": 0.6043, | |
| "step": 8050 | |
| }, | |
| { | |
| "epoch": 0.41095191964513333, | |
| "grad_norm": 29.849356149503027, | |
| "learning_rate": 3.327144876477413e-06, | |
| "loss": 0.6256, | |
| "step": 8060 | |
| }, | |
| { | |
| "epoch": 0.41146178555040025, | |
| "grad_norm": 6.196410170936335, | |
| "learning_rate": 3.3232478032579746e-06, | |
| "loss": 0.5695, | |
| "step": 8070 | |
| }, | |
| { | |
| "epoch": 0.41197165145566716, | |
| "grad_norm": 15.64304736056121, | |
| "learning_rate": 3.319348484985771e-06, | |
| "loss": 0.5448, | |
| "step": 8080 | |
| }, | |
| { | |
| "epoch": 0.4124815173609341, | |
| "grad_norm": 5.370956738872842, | |
| "learning_rate": 3.3154469322945083e-06, | |
| "loss": 0.5642, | |
| "step": 8090 | |
| }, | |
| { | |
| "epoch": 0.412991383266201, | |
| "grad_norm": 10.279572349290994, | |
| "learning_rate": 3.311543155823985e-06, | |
| "loss": 0.5895, | |
| "step": 8100 | |
| }, | |
| { | |
| "epoch": 0.4135012491714679, | |
| "grad_norm": 10.29244692765525, | |
| "learning_rate": 3.3076371662200666e-06, | |
| "loss": 0.5625, | |
| "step": 8110 | |
| }, | |
| { | |
| "epoch": 0.4140111150767348, | |
| "grad_norm": 10.44654118728628, | |
| "learning_rate": 3.303728974134653e-06, | |
| "loss": 0.5469, | |
| "step": 8120 | |
| }, | |
| { | |
| "epoch": 0.41452098098200174, | |
| "grad_norm": 4.83706374349343, | |
| "learning_rate": 3.2998185902256475e-06, | |
| "loss": 0.5821, | |
| "step": 8130 | |
| }, | |
| { | |
| "epoch": 0.41503084688726866, | |
| "grad_norm": 11.49003309549299, | |
| "learning_rate": 3.295906025156935e-06, | |
| "loss": 0.6202, | |
| "step": 8140 | |
| }, | |
| { | |
| "epoch": 0.41554071279253557, | |
| "grad_norm": 10.702272759661504, | |
| "learning_rate": 3.291991289598347e-06, | |
| "loss": 0.5714, | |
| "step": 8150 | |
| }, | |
| { | |
| "epoch": 0.4160505786978025, | |
| "grad_norm": 45.2248551956823, | |
| "learning_rate": 3.288074394225632e-06, | |
| "loss": 0.6211, | |
| "step": 8160 | |
| }, | |
| { | |
| "epoch": 0.4165604446030694, | |
| "grad_norm": 7.4598902579327335, | |
| "learning_rate": 3.284155349720431e-06, | |
| "loss": 0.6028, | |
| "step": 8170 | |
| }, | |
| { | |
| "epoch": 0.4170703105083363, | |
| "grad_norm": 6.274399449869339, | |
| "learning_rate": 3.2802341667702448e-06, | |
| "loss": 0.6018, | |
| "step": 8180 | |
| }, | |
| { | |
| "epoch": 0.41758017641360323, | |
| "grad_norm": 3.9008731665441356, | |
| "learning_rate": 3.276310856068406e-06, | |
| "loss": 0.6355, | |
| "step": 8190 | |
| }, | |
| { | |
| "epoch": 0.41809004231887015, | |
| "grad_norm": 13.84695131835591, | |
| "learning_rate": 3.27238542831405e-06, | |
| "loss": 0.5286, | |
| "step": 8200 | |
| }, | |
| { | |
| "epoch": 0.41859990822413706, | |
| "grad_norm": 9.644587842783794, | |
| "learning_rate": 3.2684578942120853e-06, | |
| "loss": 0.5411, | |
| "step": 8210 | |
| }, | |
| { | |
| "epoch": 0.419109774129404, | |
| "grad_norm": 9.161992322868674, | |
| "learning_rate": 3.2645282644731648e-06, | |
| "loss": 0.6141, | |
| "step": 8220 | |
| }, | |
| { | |
| "epoch": 0.4196196400346709, | |
| "grad_norm": 6.903980241843122, | |
| "learning_rate": 3.2605965498136554e-06, | |
| "loss": 0.5841, | |
| "step": 8230 | |
| }, | |
| { | |
| "epoch": 0.4201295059399378, | |
| "grad_norm": 6.035862909552343, | |
| "learning_rate": 3.2566627609556117e-06, | |
| "loss": 0.5653, | |
| "step": 8240 | |
| }, | |
| { | |
| "epoch": 0.4206393718452047, | |
| "grad_norm": 8.105812505508382, | |
| "learning_rate": 3.252726908626742e-06, | |
| "loss": 0.6354, | |
| "step": 8250 | |
| }, | |
| { | |
| "epoch": 0.42114923775047164, | |
| "grad_norm": 11.937584745156768, | |
| "learning_rate": 3.248789003560385e-06, | |
| "loss": 0.5163, | |
| "step": 8260 | |
| }, | |
| { | |
| "epoch": 0.42165910365573855, | |
| "grad_norm": 21.76832746956224, | |
| "learning_rate": 3.2448490564954744e-06, | |
| "loss": 0.5452, | |
| "step": 8270 | |
| }, | |
| { | |
| "epoch": 0.42216896956100547, | |
| "grad_norm": 18.051485655686268, | |
| "learning_rate": 3.2409070781765147e-06, | |
| "loss": 0.5664, | |
| "step": 8280 | |
| }, | |
| { | |
| "epoch": 0.4226788354662724, | |
| "grad_norm": 8.423685743223801, | |
| "learning_rate": 3.236963079353548e-06, | |
| "loss": 0.561, | |
| "step": 8290 | |
| }, | |
| { | |
| "epoch": 0.4231887013715393, | |
| "grad_norm": 8.624624976972216, | |
| "learning_rate": 3.2330170707821283e-06, | |
| "loss": 0.5843, | |
| "step": 8300 | |
| }, | |
| { | |
| "epoch": 0.4236985672768062, | |
| "grad_norm": 6.933716611383655, | |
| "learning_rate": 3.229069063223289e-06, | |
| "loss": 0.5982, | |
| "step": 8310 | |
| }, | |
| { | |
| "epoch": 0.42420843318207313, | |
| "grad_norm": 6.308662920804664, | |
| "learning_rate": 3.225119067443515e-06, | |
| "loss": 0.5909, | |
| "step": 8320 | |
| }, | |
| { | |
| "epoch": 0.42471829908734005, | |
| "grad_norm": 6.275184288671406, | |
| "learning_rate": 3.2211670942147144e-06, | |
| "loss": 0.5669, | |
| "step": 8330 | |
| }, | |
| { | |
| "epoch": 0.42522816499260696, | |
| "grad_norm": 8.569157246885606, | |
| "learning_rate": 3.2172131543141865e-06, | |
| "loss": 0.5746, | |
| "step": 8340 | |
| }, | |
| { | |
| "epoch": 0.4257380308978739, | |
| "grad_norm": 6.307071145921682, | |
| "learning_rate": 3.2132572585245946e-06, | |
| "loss": 0.6255, | |
| "step": 8350 | |
| }, | |
| { | |
| "epoch": 0.4262478968031408, | |
| "grad_norm": 9.047786484848844, | |
| "learning_rate": 3.209299417633936e-06, | |
| "loss": 0.608, | |
| "step": 8360 | |
| }, | |
| { | |
| "epoch": 0.4267577627084077, | |
| "grad_norm": 7.183817587841396, | |
| "learning_rate": 3.2053396424355105e-06, | |
| "loss": 0.6324, | |
| "step": 8370 | |
| }, | |
| { | |
| "epoch": 0.4272676286136746, | |
| "grad_norm": 4.004382030972596, | |
| "learning_rate": 3.201377943727896e-06, | |
| "loss": 0.5876, | |
| "step": 8380 | |
| }, | |
| { | |
| "epoch": 0.42777749451894154, | |
| "grad_norm": 24.284714914627546, | |
| "learning_rate": 3.197414332314914e-06, | |
| "loss": 0.4996, | |
| "step": 8390 | |
| }, | |
| { | |
| "epoch": 0.42828736042420845, | |
| "grad_norm": 14.820952554168857, | |
| "learning_rate": 3.193448819005603e-06, | |
| "loss": 0.6185, | |
| "step": 8400 | |
| }, | |
| { | |
| "epoch": 0.42879722632947537, | |
| "grad_norm": 6.093851744759424, | |
| "learning_rate": 3.189481414614186e-06, | |
| "loss": 0.5609, | |
| "step": 8410 | |
| }, | |
| { | |
| "epoch": 0.4293070922347423, | |
| "grad_norm": 7.259173190794586, | |
| "learning_rate": 3.1855121299600454e-06, | |
| "loss": 0.6312, | |
| "step": 8420 | |
| }, | |
| { | |
| "epoch": 0.4298169581400092, | |
| "grad_norm": 8.653922058289407, | |
| "learning_rate": 3.1815409758676917e-06, | |
| "loss": 0.6608, | |
| "step": 8430 | |
| }, | |
| { | |
| "epoch": 0.4303268240452761, | |
| "grad_norm": 5.552195512575827, | |
| "learning_rate": 3.1775679631667306e-06, | |
| "loss": 0.584, | |
| "step": 8440 | |
| }, | |
| { | |
| "epoch": 0.43083668995054303, | |
| "grad_norm": 3.743477278951455, | |
| "learning_rate": 3.1735931026918393e-06, | |
| "loss": 0.6055, | |
| "step": 8450 | |
| }, | |
| { | |
| "epoch": 0.43134655585580994, | |
| "grad_norm": 17.16382374151614, | |
| "learning_rate": 3.1696164052827318e-06, | |
| "loss": 0.5615, | |
| "step": 8460 | |
| }, | |
| { | |
| "epoch": 0.43185642176107686, | |
| "grad_norm": 7.525184001694853, | |
| "learning_rate": 3.165637881784133e-06, | |
| "loss": 0.5091, | |
| "step": 8470 | |
| }, | |
| { | |
| "epoch": 0.4323662876663438, | |
| "grad_norm": 7.806074822426972, | |
| "learning_rate": 3.161657543045747e-06, | |
| "loss": 0.6274, | |
| "step": 8480 | |
| }, | |
| { | |
| "epoch": 0.4328761535716107, | |
| "grad_norm": 10.128093552583556, | |
| "learning_rate": 3.1576753999222275e-06, | |
| "loss": 0.5862, | |
| "step": 8490 | |
| }, | |
| { | |
| "epoch": 0.4333860194768776, | |
| "grad_norm": 10.241501484791744, | |
| "learning_rate": 3.1536914632731512e-06, | |
| "loss": 0.5975, | |
| "step": 8500 | |
| }, | |
| { | |
| "epoch": 0.4338958853821445, | |
| "grad_norm": 6.190804768770443, | |
| "learning_rate": 3.1497057439629836e-06, | |
| "loss": 0.5558, | |
| "step": 8510 | |
| }, | |
| { | |
| "epoch": 0.43440575128741143, | |
| "grad_norm": 16.180652163795123, | |
| "learning_rate": 3.1457182528610526e-06, | |
| "loss": 0.6185, | |
| "step": 8520 | |
| }, | |
| { | |
| "epoch": 0.43491561719267835, | |
| "grad_norm": 6.5378974213106416, | |
| "learning_rate": 3.1417290008415167e-06, | |
| "loss": 0.5623, | |
| "step": 8530 | |
| }, | |
| { | |
| "epoch": 0.43542548309794527, | |
| "grad_norm": 17.793095921414608, | |
| "learning_rate": 3.1377379987833395e-06, | |
| "loss": 0.5711, | |
| "step": 8540 | |
| }, | |
| { | |
| "epoch": 0.4359353490032122, | |
| "grad_norm": 5.698719713245101, | |
| "learning_rate": 3.133745257570253e-06, | |
| "loss": 0.6352, | |
| "step": 8550 | |
| }, | |
| { | |
| "epoch": 0.4364452149084791, | |
| "grad_norm": 12.893025105305172, | |
| "learning_rate": 3.1297507880907357e-06, | |
| "loss": 0.4856, | |
| "step": 8560 | |
| }, | |
| { | |
| "epoch": 0.436955080813746, | |
| "grad_norm": 8.399321065682123, | |
| "learning_rate": 3.1257546012379775e-06, | |
| "loss": 0.5586, | |
| "step": 8570 | |
| }, | |
| { | |
| "epoch": 0.4374649467190129, | |
| "grad_norm": 4.831675625103787, | |
| "learning_rate": 3.121756707909851e-06, | |
| "loss": 0.5308, | |
| "step": 8580 | |
| }, | |
| { | |
| "epoch": 0.43797481262427984, | |
| "grad_norm": 5.955633182788592, | |
| "learning_rate": 3.1177571190088833e-06, | |
| "loss": 0.606, | |
| "step": 8590 | |
| }, | |
| { | |
| "epoch": 0.43848467852954676, | |
| "grad_norm": 13.753186960996212, | |
| "learning_rate": 3.1137558454422266e-06, | |
| "loss": 0.5694, | |
| "step": 8600 | |
| }, | |
| { | |
| "epoch": 0.43899454443481367, | |
| "grad_norm": 10.973002877580813, | |
| "learning_rate": 3.1097528981216245e-06, | |
| "loss": 0.5468, | |
| "step": 8610 | |
| }, | |
| { | |
| "epoch": 0.43950441034008053, | |
| "grad_norm": 6.520010650488079, | |
| "learning_rate": 3.105748287963388e-06, | |
| "loss": 0.6218, | |
| "step": 8620 | |
| }, | |
| { | |
| "epoch": 0.44001427624534745, | |
| "grad_norm": 4.663602488972492, | |
| "learning_rate": 3.1017420258883607e-06, | |
| "loss": 0.6092, | |
| "step": 8630 | |
| }, | |
| { | |
| "epoch": 0.44052414215061436, | |
| "grad_norm": 6.0674522796006665, | |
| "learning_rate": 3.0977341228218916e-06, | |
| "loss": 0.5371, | |
| "step": 8640 | |
| }, | |
| { | |
| "epoch": 0.4410340080558813, | |
| "grad_norm": 6.258508973891013, | |
| "learning_rate": 3.0937245896938062e-06, | |
| "loss": 0.4981, | |
| "step": 8650 | |
| }, | |
| { | |
| "epoch": 0.4415438739611482, | |
| "grad_norm": 7.511397618456565, | |
| "learning_rate": 3.089713437438373e-06, | |
| "loss": 0.556, | |
| "step": 8660 | |
| }, | |
| { | |
| "epoch": 0.4420537398664151, | |
| "grad_norm": 10.901260436805492, | |
| "learning_rate": 3.085700676994277e-06, | |
| "loss": 0.5178, | |
| "step": 8670 | |
| }, | |
| { | |
| "epoch": 0.442563605771682, | |
| "grad_norm": 4.032271770877195, | |
| "learning_rate": 3.0816863193045898e-06, | |
| "loss": 0.5299, | |
| "step": 8680 | |
| }, | |
| { | |
| "epoch": 0.44307347167694894, | |
| "grad_norm": 4.682645234637967, | |
| "learning_rate": 3.0776703753167382e-06, | |
| "loss": 0.5409, | |
| "step": 8690 | |
| }, | |
| { | |
| "epoch": 0.44358333758221585, | |
| "grad_norm": 6.869333096460406, | |
| "learning_rate": 3.0736528559824736e-06, | |
| "loss": 0.5774, | |
| "step": 8700 | |
| }, | |
| { | |
| "epoch": 0.44409320348748277, | |
| "grad_norm": 5.646834228989253, | |
| "learning_rate": 3.0696337722578444e-06, | |
| "loss": 0.5627, | |
| "step": 8710 | |
| }, | |
| { | |
| "epoch": 0.4446030693927497, | |
| "grad_norm": 5.011538749911393, | |
| "learning_rate": 3.0656131351031663e-06, | |
| "loss": 0.5484, | |
| "step": 8720 | |
| }, | |
| { | |
| "epoch": 0.4451129352980166, | |
| "grad_norm": 39.470423402922066, | |
| "learning_rate": 3.06159095548299e-06, | |
| "loss": 0.5335, | |
| "step": 8730 | |
| }, | |
| { | |
| "epoch": 0.4456228012032835, | |
| "grad_norm": 5.114310793982849, | |
| "learning_rate": 3.057567244366072e-06, | |
| "loss": 0.5413, | |
| "step": 8740 | |
| }, | |
| { | |
| "epoch": 0.44613266710855043, | |
| "grad_norm": 13.21974253835841, | |
| "learning_rate": 3.053542012725347e-06, | |
| "loss": 0.6133, | |
| "step": 8750 | |
| }, | |
| { | |
| "epoch": 0.44664253301381734, | |
| "grad_norm": 6.883499098740761, | |
| "learning_rate": 3.049515271537896e-06, | |
| "loss": 0.5629, | |
| "step": 8760 | |
| }, | |
| { | |
| "epoch": 0.44715239891908426, | |
| "grad_norm": 8.128398135255848, | |
| "learning_rate": 3.045487031784916e-06, | |
| "loss": 0.5635, | |
| "step": 8770 | |
| }, | |
| { | |
| "epoch": 0.4476622648243512, | |
| "grad_norm": 14.008364726787159, | |
| "learning_rate": 3.041457304451691e-06, | |
| "loss": 0.569, | |
| "step": 8780 | |
| }, | |
| { | |
| "epoch": 0.4481721307296181, | |
| "grad_norm": 11.22813055032139, | |
| "learning_rate": 3.0374261005275606e-06, | |
| "loss": 0.5666, | |
| "step": 8790 | |
| }, | |
| { | |
| "epoch": 0.448681996634885, | |
| "grad_norm": 6.139480470965199, | |
| "learning_rate": 3.033393431005893e-06, | |
| "loss": 0.5756, | |
| "step": 8800 | |
| }, | |
| { | |
| "epoch": 0.4491918625401519, | |
| "grad_norm": 8.549581288367204, | |
| "learning_rate": 3.0293593068840514e-06, | |
| "loss": 0.5939, | |
| "step": 8810 | |
| }, | |
| { | |
| "epoch": 0.44970172844541884, | |
| "grad_norm": 12.60039243357793, | |
| "learning_rate": 3.0253237391633684e-06, | |
| "loss": 0.5794, | |
| "step": 8820 | |
| }, | |
| { | |
| "epoch": 0.45021159435068575, | |
| "grad_norm": 6.017339635112227, | |
| "learning_rate": 3.02128673884911e-06, | |
| "loss": 0.5692, | |
| "step": 8830 | |
| }, | |
| { | |
| "epoch": 0.45072146025595267, | |
| "grad_norm": 10.959547011184434, | |
| "learning_rate": 3.017248316950452e-06, | |
| "loss": 0.5534, | |
| "step": 8840 | |
| }, | |
| { | |
| "epoch": 0.4512313261612196, | |
| "grad_norm": 9.79220916497208, | |
| "learning_rate": 3.0132084844804444e-06, | |
| "loss": 0.5757, | |
| "step": 8850 | |
| }, | |
| { | |
| "epoch": 0.4517411920664865, | |
| "grad_norm": 6.681505188215735, | |
| "learning_rate": 3.0091672524559855e-06, | |
| "loss": 0.5547, | |
| "step": 8860 | |
| }, | |
| { | |
| "epoch": 0.4522510579717534, | |
| "grad_norm": 20.40660151851915, | |
| "learning_rate": 3.0051246318977913e-06, | |
| "loss": 0.577, | |
| "step": 8870 | |
| }, | |
| { | |
| "epoch": 0.4527609238770203, | |
| "grad_norm": 10.223009988923437, | |
| "learning_rate": 3.0010806338303615e-06, | |
| "loss": 0.6965, | |
| "step": 8880 | |
| }, | |
| { | |
| "epoch": 0.45327078978228724, | |
| "grad_norm": 7.19787970543309, | |
| "learning_rate": 2.997035269281955e-06, | |
| "loss": 0.6269, | |
| "step": 8890 | |
| }, | |
| { | |
| "epoch": 0.45378065568755416, | |
| "grad_norm": 9.996599464517434, | |
| "learning_rate": 2.9929885492845555e-06, | |
| "loss": 0.5932, | |
| "step": 8900 | |
| }, | |
| { | |
| "epoch": 0.4542905215928211, | |
| "grad_norm": 6.990713058959951, | |
| "learning_rate": 2.9889404848738434e-06, | |
| "loss": 0.5632, | |
| "step": 8910 | |
| }, | |
| { | |
| "epoch": 0.454800387498088, | |
| "grad_norm": 39.445505834540185, | |
| "learning_rate": 2.9848910870891667e-06, | |
| "loss": 0.5992, | |
| "step": 8920 | |
| }, | |
| { | |
| "epoch": 0.4553102534033549, | |
| "grad_norm": 9.930318139540027, | |
| "learning_rate": 2.980840366973508e-06, | |
| "loss": 0.6362, | |
| "step": 8930 | |
| }, | |
| { | |
| "epoch": 0.4558201193086218, | |
| "grad_norm": 6.1911156271130565, | |
| "learning_rate": 2.9767883355734567e-06, | |
| "loss": 0.5946, | |
| "step": 8940 | |
| }, | |
| { | |
| "epoch": 0.45632998521388873, | |
| "grad_norm": 18.274373612330432, | |
| "learning_rate": 2.9727350039391782e-06, | |
| "loss": 0.6217, | |
| "step": 8950 | |
| }, | |
| { | |
| "epoch": 0.45683985111915565, | |
| "grad_norm": 5.521799462230959, | |
| "learning_rate": 2.968680383124384e-06, | |
| "loss": 0.5643, | |
| "step": 8960 | |
| }, | |
| { | |
| "epoch": 0.45734971702442256, | |
| "grad_norm": 4.417246498865753, | |
| "learning_rate": 2.9646244841862996e-06, | |
| "loss": 0.5954, | |
| "step": 8970 | |
| }, | |
| { | |
| "epoch": 0.4578595829296895, | |
| "grad_norm": 7.1357660764881095, | |
| "learning_rate": 2.9605673181856386e-06, | |
| "loss": 0.5944, | |
| "step": 8980 | |
| }, | |
| { | |
| "epoch": 0.4583694488349564, | |
| "grad_norm": 5.689317447969378, | |
| "learning_rate": 2.956508896186569e-06, | |
| "loss": 0.5749, | |
| "step": 8990 | |
| }, | |
| { | |
| "epoch": 0.4588793147402233, | |
| "grad_norm": 15.300454136222234, | |
| "learning_rate": 2.9524492292566824e-06, | |
| "loss": 0.5954, | |
| "step": 9000 | |
| }, | |
| { | |
| "epoch": 0.4593891806454902, | |
| "grad_norm": 4.717762951430906, | |
| "learning_rate": 2.948388328466968e-06, | |
| "loss": 0.5539, | |
| "step": 9010 | |
| }, | |
| { | |
| "epoch": 0.45989904655075714, | |
| "grad_norm": 5.159362880082, | |
| "learning_rate": 2.944326204891777e-06, | |
| "loss": 0.5472, | |
| "step": 9020 | |
| }, | |
| { | |
| "epoch": 0.46040891245602406, | |
| "grad_norm": 7.114476434079598, | |
| "learning_rate": 2.940262869608798e-06, | |
| "loss": 0.6114, | |
| "step": 9030 | |
| }, | |
| { | |
| "epoch": 0.46091877836129097, | |
| "grad_norm": 19.8649933302076, | |
| "learning_rate": 2.9361983336990217e-06, | |
| "loss": 0.5137, | |
| "step": 9040 | |
| }, | |
| { | |
| "epoch": 0.4614286442665579, | |
| "grad_norm": 5.383648367474294, | |
| "learning_rate": 2.932132608246715e-06, | |
| "loss": 0.5745, | |
| "step": 9050 | |
| }, | |
| { | |
| "epoch": 0.4619385101718248, | |
| "grad_norm": 4.573473908276397, | |
| "learning_rate": 2.928065704339388e-06, | |
| "loss": 0.5508, | |
| "step": 9060 | |
| }, | |
| { | |
| "epoch": 0.4624483760770917, | |
| "grad_norm": 7.809278030311417, | |
| "learning_rate": 2.9239976330677627e-06, | |
| "loss": 0.6065, | |
| "step": 9070 | |
| }, | |
| { | |
| "epoch": 0.46295824198235863, | |
| "grad_norm": 7.3170190230299745, | |
| "learning_rate": 2.919928405525748e-06, | |
| "loss": 0.6124, | |
| "step": 9080 | |
| }, | |
| { | |
| "epoch": 0.46346810788762555, | |
| "grad_norm": 9.676008788916786, | |
| "learning_rate": 2.9158580328104036e-06, | |
| "loss": 0.5445, | |
| "step": 9090 | |
| }, | |
| { | |
| "epoch": 0.46397797379289246, | |
| "grad_norm": 6.891950872153487, | |
| "learning_rate": 2.9117865260219117e-06, | |
| "loss": 0.6456, | |
| "step": 9100 | |
| }, | |
| { | |
| "epoch": 0.4644878396981594, | |
| "grad_norm": 5.263756940922855, | |
| "learning_rate": 2.907713896263551e-06, | |
| "loss": 0.5612, | |
| "step": 9110 | |
| }, | |
| { | |
| "epoch": 0.4649977056034263, | |
| "grad_norm": 5.733645589519304, | |
| "learning_rate": 2.903640154641657e-06, | |
| "loss": 0.6166, | |
| "step": 9120 | |
| }, | |
| { | |
| "epoch": 0.4655075715086932, | |
| "grad_norm": 5.622571999492135, | |
| "learning_rate": 2.899565312265602e-06, | |
| "loss": 0.5459, | |
| "step": 9130 | |
| }, | |
| { | |
| "epoch": 0.4660174374139601, | |
| "grad_norm": 7.365458300779295, | |
| "learning_rate": 2.895489380247758e-06, | |
| "loss": 0.5388, | |
| "step": 9140 | |
| }, | |
| { | |
| "epoch": 0.46652730331922704, | |
| "grad_norm": 8.659176612642387, | |
| "learning_rate": 2.891412369703469e-06, | |
| "loss": 0.5395, | |
| "step": 9150 | |
| }, | |
| { | |
| "epoch": 0.46703716922449395, | |
| "grad_norm": 7.9795925132888685, | |
| "learning_rate": 2.887334291751019e-06, | |
| "loss": 0.5713, | |
| "step": 9160 | |
| }, | |
| { | |
| "epoch": 0.46754703512976087, | |
| "grad_norm": 11.811189784886455, | |
| "learning_rate": 2.883255157511605e-06, | |
| "loss": 0.5767, | |
| "step": 9170 | |
| }, | |
| { | |
| "epoch": 0.4680569010350278, | |
| "grad_norm": 7.117984988872768, | |
| "learning_rate": 2.8791749781093036e-06, | |
| "loss": 0.5649, | |
| "step": 9180 | |
| }, | |
| { | |
| "epoch": 0.4685667669402947, | |
| "grad_norm": 4.290368892982683, | |
| "learning_rate": 2.8750937646710416e-06, | |
| "loss": 0.5301, | |
| "step": 9190 | |
| }, | |
| { | |
| "epoch": 0.4690766328455616, | |
| "grad_norm": 17.98069195716884, | |
| "learning_rate": 2.8710115283265655e-06, | |
| "loss": 0.6331, | |
| "step": 9200 | |
| }, | |
| { | |
| "epoch": 0.46958649875082853, | |
| "grad_norm": 8.243633625731393, | |
| "learning_rate": 2.866928280208412e-06, | |
| "loss": 0.5107, | |
| "step": 9210 | |
| }, | |
| { | |
| "epoch": 0.47009636465609544, | |
| "grad_norm": 6.677199266110991, | |
| "learning_rate": 2.8628440314518752e-06, | |
| "loss": 0.5939, | |
| "step": 9220 | |
| }, | |
| { | |
| "epoch": 0.47060623056136236, | |
| "grad_norm": 13.017880763698141, | |
| "learning_rate": 2.8587587931949806e-06, | |
| "loss": 0.5262, | |
| "step": 9230 | |
| }, | |
| { | |
| "epoch": 0.4711160964666293, | |
| "grad_norm": 7.3415953210088105, | |
| "learning_rate": 2.854672576578451e-06, | |
| "loss": 0.5276, | |
| "step": 9240 | |
| }, | |
| { | |
| "epoch": 0.4716259623718962, | |
| "grad_norm": 10.96227824447763, | |
| "learning_rate": 2.850585392745676e-06, | |
| "loss": 0.5208, | |
| "step": 9250 | |
| }, | |
| { | |
| "epoch": 0.4721358282771631, | |
| "grad_norm": 13.046761791051441, | |
| "learning_rate": 2.8464972528426847e-06, | |
| "loss": 0.5542, | |
| "step": 9260 | |
| }, | |
| { | |
| "epoch": 0.47264569418243, | |
| "grad_norm": 9.203867781595244, | |
| "learning_rate": 2.842408168018112e-06, | |
| "loss": 0.6764, | |
| "step": 9270 | |
| }, | |
| { | |
| "epoch": 0.47315556008769694, | |
| "grad_norm": 8.204302843434839, | |
| "learning_rate": 2.8383181494231714e-06, | |
| "loss": 0.6044, | |
| "step": 9280 | |
| }, | |
| { | |
| "epoch": 0.47366542599296385, | |
| "grad_norm": 17.257229733535443, | |
| "learning_rate": 2.8342272082116214e-06, | |
| "loss": 0.5799, | |
| "step": 9290 | |
| }, | |
| { | |
| "epoch": 0.47417529189823077, | |
| "grad_norm": 48.134097471584894, | |
| "learning_rate": 2.830135355539737e-06, | |
| "loss": 0.5445, | |
| "step": 9300 | |
| }, | |
| { | |
| "epoch": 0.4746851578034977, | |
| "grad_norm": 19.275589909403493, | |
| "learning_rate": 2.826042602566279e-06, | |
| "loss": 0.5852, | |
| "step": 9310 | |
| }, | |
| { | |
| "epoch": 0.4751950237087646, | |
| "grad_norm": 11.641780387376368, | |
| "learning_rate": 2.821948960452463e-06, | |
| "loss": 0.5544, | |
| "step": 9320 | |
| }, | |
| { | |
| "epoch": 0.4757048896140315, | |
| "grad_norm": 26.514172353901436, | |
| "learning_rate": 2.817854440361929e-06, | |
| "loss": 0.5607, | |
| "step": 9330 | |
| }, | |
| { | |
| "epoch": 0.4762147555192984, | |
| "grad_norm": 8.912788994240413, | |
| "learning_rate": 2.8137590534607123e-06, | |
| "loss": 0.599, | |
| "step": 9340 | |
| }, | |
| { | |
| "epoch": 0.47672462142456534, | |
| "grad_norm": 4.7043228206878105, | |
| "learning_rate": 2.8096628109172125e-06, | |
| "loss": 0.5597, | |
| "step": 9350 | |
| }, | |
| { | |
| "epoch": 0.47723448732983226, | |
| "grad_norm": 10.056552729096786, | |
| "learning_rate": 2.8055657239021605e-06, | |
| "loss": 0.5544, | |
| "step": 9360 | |
| }, | |
| { | |
| "epoch": 0.4777443532350992, | |
| "grad_norm": 11.358614652667871, | |
| "learning_rate": 2.8014678035885913e-06, | |
| "loss": 0.5654, | |
| "step": 9370 | |
| }, | |
| { | |
| "epoch": 0.4782542191403661, | |
| "grad_norm": 11.285505664189369, | |
| "learning_rate": 2.7973690611518124e-06, | |
| "loss": 0.6263, | |
| "step": 9380 | |
| }, | |
| { | |
| "epoch": 0.478764085045633, | |
| "grad_norm": 14.490746002092285, | |
| "learning_rate": 2.7932695077693745e-06, | |
| "loss": 0.5964, | |
| "step": 9390 | |
| }, | |
| { | |
| "epoch": 0.4792739509508999, | |
| "grad_norm": 16.56839590898341, | |
| "learning_rate": 2.7891691546210374e-06, | |
| "loss": 0.5001, | |
| "step": 9400 | |
| }, | |
| { | |
| "epoch": 0.47978381685616683, | |
| "grad_norm": 6.355268739093313, | |
| "learning_rate": 2.7850680128887424e-06, | |
| "loss": 0.5557, | |
| "step": 9410 | |
| }, | |
| { | |
| "epoch": 0.48029368276143375, | |
| "grad_norm": 4.861294379529152, | |
| "learning_rate": 2.780966093756584e-06, | |
| "loss": 0.5566, | |
| "step": 9420 | |
| }, | |
| { | |
| "epoch": 0.48080354866670066, | |
| "grad_norm": 12.792971199282182, | |
| "learning_rate": 2.7768634084107736e-06, | |
| "loss": 0.5651, | |
| "step": 9430 | |
| }, | |
| { | |
| "epoch": 0.4813134145719676, | |
| "grad_norm": 6.746606350811939, | |
| "learning_rate": 2.772759968039614e-06, | |
| "loss": 0.5201, | |
| "step": 9440 | |
| }, | |
| { | |
| "epoch": 0.4818232804772345, | |
| "grad_norm": 29.540322079081925, | |
| "learning_rate": 2.7686557838334644e-06, | |
| "loss": 0.6562, | |
| "step": 9450 | |
| }, | |
| { | |
| "epoch": 0.4823331463825014, | |
| "grad_norm": 9.069525088118954, | |
| "learning_rate": 2.764550866984716e-06, | |
| "loss": 0.5443, | |
| "step": 9460 | |
| }, | |
| { | |
| "epoch": 0.4828430122877683, | |
| "grad_norm": 9.551367204892276, | |
| "learning_rate": 2.760445228687755e-06, | |
| "loss": 0.5846, | |
| "step": 9470 | |
| }, | |
| { | |
| "epoch": 0.48335287819303524, | |
| "grad_norm": 9.76185721527148, | |
| "learning_rate": 2.7563388801389386e-06, | |
| "loss": 0.5927, | |
| "step": 9480 | |
| }, | |
| { | |
| "epoch": 0.48386274409830216, | |
| "grad_norm": 21.04589411137331, | |
| "learning_rate": 2.752231832536556e-06, | |
| "loss": 0.5547, | |
| "step": 9490 | |
| }, | |
| { | |
| "epoch": 0.48437261000356907, | |
| "grad_norm": 9.066899550488897, | |
| "learning_rate": 2.7481240970808074e-06, | |
| "loss": 0.572, | |
| "step": 9500 | |
| }, | |
| { | |
| "epoch": 0.484882475908836, | |
| "grad_norm": 5.241832407340177, | |
| "learning_rate": 2.744015684973766e-06, | |
| "loss": 0.6231, | |
| "step": 9510 | |
| }, | |
| { | |
| "epoch": 0.4853923418141029, | |
| "grad_norm": 8.390016323543929, | |
| "learning_rate": 2.739906607419351e-06, | |
| "loss": 0.6537, | |
| "step": 9520 | |
| }, | |
| { | |
| "epoch": 0.4859022077193698, | |
| "grad_norm": 4.679399687028415, | |
| "learning_rate": 2.7357968756232963e-06, | |
| "loss": 0.5742, | |
| "step": 9530 | |
| }, | |
| { | |
| "epoch": 0.48641207362463673, | |
| "grad_norm": 12.2527696919582, | |
| "learning_rate": 2.7316865007931208e-06, | |
| "loss": 0.5758, | |
| "step": 9540 | |
| }, | |
| { | |
| "epoch": 0.48692193952990365, | |
| "grad_norm": 11.787110553359827, | |
| "learning_rate": 2.727575494138096e-06, | |
| "loss": 0.5877, | |
| "step": 9550 | |
| }, | |
| { | |
| "epoch": 0.48743180543517056, | |
| "grad_norm": 6.5067870783524, | |
| "learning_rate": 2.7234638668692166e-06, | |
| "loss": 0.5406, | |
| "step": 9560 | |
| }, | |
| { | |
| "epoch": 0.4879416713404375, | |
| "grad_norm": 22.58855874717976, | |
| "learning_rate": 2.7193516301991703e-06, | |
| "loss": 0.5888, | |
| "step": 9570 | |
| }, | |
| { | |
| "epoch": 0.4884515372457044, | |
| "grad_norm": 5.338386745561283, | |
| "learning_rate": 2.7152387953423047e-06, | |
| "loss": 0.5745, | |
| "step": 9580 | |
| }, | |
| { | |
| "epoch": 0.4889614031509713, | |
| "grad_norm": 3.459193769024109, | |
| "learning_rate": 2.711125373514602e-06, | |
| "loss": 0.4953, | |
| "step": 9590 | |
| }, | |
| { | |
| "epoch": 0.4894712690562382, | |
| "grad_norm": 6.446548850190915, | |
| "learning_rate": 2.7070113759336424e-06, | |
| "loss": 0.5437, | |
| "step": 9600 | |
| }, | |
| { | |
| "epoch": 0.48998113496150514, | |
| "grad_norm": 17.79215951866651, | |
| "learning_rate": 2.7028968138185783e-06, | |
| "loss": 0.5767, | |
| "step": 9610 | |
| }, | |
| { | |
| "epoch": 0.49049100086677205, | |
| "grad_norm": 2.9805861974926042, | |
| "learning_rate": 2.6987816983900995e-06, | |
| "loss": 0.5674, | |
| "step": 9620 | |
| }, | |
| { | |
| "epoch": 0.49100086677203897, | |
| "grad_norm": 6.129586543666673, | |
| "learning_rate": 2.6946660408704062e-06, | |
| "loss": 0.5525, | |
| "step": 9630 | |
| }, | |
| { | |
| "epoch": 0.4915107326773059, | |
| "grad_norm": 7.83623462595385, | |
| "learning_rate": 2.6905498524831763e-06, | |
| "loss": 0.6184, | |
| "step": 9640 | |
| }, | |
| { | |
| "epoch": 0.4920205985825728, | |
| "grad_norm": 58.25334222306575, | |
| "learning_rate": 2.6864331444535347e-06, | |
| "loss": 0.558, | |
| "step": 9650 | |
| }, | |
| { | |
| "epoch": 0.4925304644878397, | |
| "grad_norm": 6.3215473826681965, | |
| "learning_rate": 2.682315928008026e-06, | |
| "loss": 0.5026, | |
| "step": 9660 | |
| }, | |
| { | |
| "epoch": 0.49304033039310663, | |
| "grad_norm": 8.52558919954231, | |
| "learning_rate": 2.6781982143745776e-06, | |
| "loss": 0.5422, | |
| "step": 9670 | |
| }, | |
| { | |
| "epoch": 0.49355019629837354, | |
| "grad_norm": 20.2309368399216, | |
| "learning_rate": 2.6740800147824764e-06, | |
| "loss": 0.5206, | |
| "step": 9680 | |
| }, | |
| { | |
| "epoch": 0.49406006220364046, | |
| "grad_norm": 9.45042168411588, | |
| "learning_rate": 2.669961340462332e-06, | |
| "loss": 0.6294, | |
| "step": 9690 | |
| }, | |
| { | |
| "epoch": 0.4945699281089074, | |
| "grad_norm": 4.1312591938210685, | |
| "learning_rate": 2.6658422026460493e-06, | |
| "loss": 0.5323, | |
| "step": 9700 | |
| }, | |
| { | |
| "epoch": 0.4950797940141743, | |
| "grad_norm": 12.26862074644364, | |
| "learning_rate": 2.6617226125667977e-06, | |
| "loss": 0.6065, | |
| "step": 9710 | |
| }, | |
| { | |
| "epoch": 0.4955896599194412, | |
| "grad_norm": 28.97413747857921, | |
| "learning_rate": 2.65760258145898e-06, | |
| "loss": 0.5238, | |
| "step": 9720 | |
| }, | |
| { | |
| "epoch": 0.4960995258247081, | |
| "grad_norm": 46.611911684006444, | |
| "learning_rate": 2.653482120558201e-06, | |
| "loss": 0.6087, | |
| "step": 9730 | |
| }, | |
| { | |
| "epoch": 0.49660939172997504, | |
| "grad_norm": 24.265978020472545, | |
| "learning_rate": 2.6493612411012377e-06, | |
| "loss": 0.4993, | |
| "step": 9740 | |
| }, | |
| { | |
| "epoch": 0.49711925763524195, | |
| "grad_norm": 61.9664610260044, | |
| "learning_rate": 2.645239954326009e-06, | |
| "loss": 0.583, | |
| "step": 9750 | |
| }, | |
| { | |
| "epoch": 0.49762912354050887, | |
| "grad_norm": 11.421201901607155, | |
| "learning_rate": 2.641118271471543e-06, | |
| "loss": 0.5282, | |
| "step": 9760 | |
| }, | |
| { | |
| "epoch": 0.4981389894457758, | |
| "grad_norm": 8.469805395004254, | |
| "learning_rate": 2.6369962037779513e-06, | |
| "loss": 0.5381, | |
| "step": 9770 | |
| }, | |
| { | |
| "epoch": 0.4986488553510427, | |
| "grad_norm": 6.825974444005084, | |
| "learning_rate": 2.632873762486392e-06, | |
| "loss": 0.5972, | |
| "step": 9780 | |
| }, | |
| { | |
| "epoch": 0.4991587212563096, | |
| "grad_norm": 4.502610701034782, | |
| "learning_rate": 2.6287509588390424e-06, | |
| "loss": 0.5137, | |
| "step": 9790 | |
| }, | |
| { | |
| "epoch": 0.4996685871615765, | |
| "grad_norm": 10.021546011417461, | |
| "learning_rate": 2.6246278040790696e-06, | |
| "loss": 0.5858, | |
| "step": 9800 | |
| }, | |
| { | |
| "epoch": 0.5001784530668434, | |
| "grad_norm": 10.603678720209555, | |
| "learning_rate": 2.620504309450596e-06, | |
| "loss": 0.5989, | |
| "step": 9810 | |
| }, | |
| { | |
| "epoch": 0.5006883189721103, | |
| "grad_norm": 31.57403616528839, | |
| "learning_rate": 2.6163804861986735e-06, | |
| "loss": 0.5516, | |
| "step": 9820 | |
| }, | |
| { | |
| "epoch": 0.5011981848773772, | |
| "grad_norm": 11.014820450677353, | |
| "learning_rate": 2.6122563455692467e-06, | |
| "loss": 0.5738, | |
| "step": 9830 | |
| }, | |
| { | |
| "epoch": 0.5017080507826441, | |
| "grad_norm": 13.980415539710211, | |
| "learning_rate": 2.608131898809129e-06, | |
| "loss": 0.6012, | |
| "step": 9840 | |
| }, | |
| { | |
| "epoch": 0.502217916687911, | |
| "grad_norm": 8.212468333614668, | |
| "learning_rate": 2.6040071571659676e-06, | |
| "loss": 0.5443, | |
| "step": 9850 | |
| }, | |
| { | |
| "epoch": 0.502727782593178, | |
| "grad_norm": 14.081381389861937, | |
| "learning_rate": 2.5998821318882127e-06, | |
| "loss": 0.5794, | |
| "step": 9860 | |
| }, | |
| { | |
| "epoch": 0.5032376484984449, | |
| "grad_norm": 9.109868034850592, | |
| "learning_rate": 2.595756834225089e-06, | |
| "loss": 0.5533, | |
| "step": 9870 | |
| }, | |
| { | |
| "epoch": 0.5037475144037118, | |
| "grad_norm": 5.125784400942763, | |
| "learning_rate": 2.5916312754265636e-06, | |
| "loss": 0.5506, | |
| "step": 9880 | |
| }, | |
| { | |
| "epoch": 0.5042573803089787, | |
| "grad_norm": 9.57153158237963, | |
| "learning_rate": 2.587505466743317e-06, | |
| "loss": 0.5881, | |
| "step": 9890 | |
| }, | |
| { | |
| "epoch": 0.5047672462142456, | |
| "grad_norm": 8.591015024768001, | |
| "learning_rate": 2.583379419426709e-06, | |
| "loss": 0.5838, | |
| "step": 9900 | |
| }, | |
| { | |
| "epoch": 0.5052771121195125, | |
| "grad_norm": 4.456326119029988, | |
| "learning_rate": 2.5792531447287515e-06, | |
| "loss": 0.5543, | |
| "step": 9910 | |
| }, | |
| { | |
| "epoch": 0.5057869780247795, | |
| "grad_norm": 5.4644311436455535, | |
| "learning_rate": 2.575126653902078e-06, | |
| "loss": 0.4771, | |
| "step": 9920 | |
| }, | |
| { | |
| "epoch": 0.5062968439300464, | |
| "grad_norm": 7.427951980380359, | |
| "learning_rate": 2.570999958199908e-06, | |
| "loss": 0.565, | |
| "step": 9930 | |
| }, | |
| { | |
| "epoch": 0.5068067098353133, | |
| "grad_norm": 7.106568576861007, | |
| "learning_rate": 2.566873068876022e-06, | |
| "loss": 0.5849, | |
| "step": 9940 | |
| }, | |
| { | |
| "epoch": 0.5073165757405802, | |
| "grad_norm": 9.110513479717502, | |
| "learning_rate": 2.5627459971847264e-06, | |
| "loss": 0.5208, | |
| "step": 9950 | |
| }, | |
| { | |
| "epoch": 0.5078264416458471, | |
| "grad_norm": 10.449121049647742, | |
| "learning_rate": 2.558618754380829e-06, | |
| "loss": 0.5625, | |
| "step": 9960 | |
| }, | |
| { | |
| "epoch": 0.508336307551114, | |
| "grad_norm": 5.528616833916277, | |
| "learning_rate": 2.5544913517196006e-06, | |
| "loss": 0.6019, | |
| "step": 9970 | |
| }, | |
| { | |
| "epoch": 0.508846173456381, | |
| "grad_norm": 5.062026973579031, | |
| "learning_rate": 2.5503638004567487e-06, | |
| "loss": 0.5975, | |
| "step": 9980 | |
| }, | |
| { | |
| "epoch": 0.5093560393616479, | |
| "grad_norm": 5.98683821910533, | |
| "learning_rate": 2.546236111848387e-06, | |
| "loss": 0.5156, | |
| "step": 9990 | |
| }, | |
| { | |
| "epoch": 0.5098659052669148, | |
| "grad_norm": 11.616172837788515, | |
| "learning_rate": 2.5421082971510024e-06, | |
| "loss": 0.6184, | |
| "step": 10000 | |
| }, | |
| { | |
| "epoch": 0.5103757711721817, | |
| "grad_norm": 21.09149624644589, | |
| "learning_rate": 2.537980367621427e-06, | |
| "loss": 0.5898, | |
| "step": 10010 | |
| }, | |
| { | |
| "epoch": 0.5108856370774486, | |
| "grad_norm": 4.710577802867508, | |
| "learning_rate": 2.533852334516805e-06, | |
| "loss": 0.5942, | |
| "step": 10020 | |
| }, | |
| { | |
| "epoch": 0.5113955029827155, | |
| "grad_norm": 5.51514736515005, | |
| "learning_rate": 2.5297242090945638e-06, | |
| "loss": 0.5705, | |
| "step": 10030 | |
| }, | |
| { | |
| "epoch": 0.5119053688879824, | |
| "grad_norm": 5.627220384979693, | |
| "learning_rate": 2.5255960026123825e-06, | |
| "loss": 0.5193, | |
| "step": 10040 | |
| }, | |
| { | |
| "epoch": 0.5124152347932494, | |
| "grad_norm": 69.00517336322274, | |
| "learning_rate": 2.5214677263281613e-06, | |
| "loss": 0.609, | |
| "step": 10050 | |
| }, | |
| { | |
| "epoch": 0.5129251006985163, | |
| "grad_norm": 7.2529107552093555, | |
| "learning_rate": 2.5173393914999894e-06, | |
| "loss": 0.5935, | |
| "step": 10060 | |
| }, | |
| { | |
| "epoch": 0.5134349666037832, | |
| "grad_norm": 10.759484594224745, | |
| "learning_rate": 2.5132110093861174e-06, | |
| "loss": 0.5138, | |
| "step": 10070 | |
| }, | |
| { | |
| "epoch": 0.5139448325090501, | |
| "grad_norm": 3.920463168617169, | |
| "learning_rate": 2.5090825912449233e-06, | |
| "loss": 0.5137, | |
| "step": 10080 | |
| }, | |
| { | |
| "epoch": 0.514454698414317, | |
| "grad_norm": 3.913288130629234, | |
| "learning_rate": 2.504954148334886e-06, | |
| "loss": 0.5385, | |
| "step": 10090 | |
| }, | |
| { | |
| "epoch": 0.5149645643195839, | |
| "grad_norm": 15.125679483487403, | |
| "learning_rate": 2.500825691914549e-06, | |
| "loss": 0.5652, | |
| "step": 10100 | |
| }, | |
| { | |
| "epoch": 0.5154744302248508, | |
| "grad_norm": 27.642563009181146, | |
| "learning_rate": 2.4966972332424934e-06, | |
| "loss": 0.6017, | |
| "step": 10110 | |
| }, | |
| { | |
| "epoch": 0.5159842961301178, | |
| "grad_norm": 9.924339952473618, | |
| "learning_rate": 2.492568783577308e-06, | |
| "loss": 0.5479, | |
| "step": 10120 | |
| }, | |
| { | |
| "epoch": 0.5164941620353847, | |
| "grad_norm": 8.10524709991328, | |
| "learning_rate": 2.4884403541775553e-06, | |
| "loss": 0.5492, | |
| "step": 10130 | |
| }, | |
| { | |
| "epoch": 0.5170040279406516, | |
| "grad_norm": 197.39062095068655, | |
| "learning_rate": 2.4843119563017426e-06, | |
| "loss": 0.5094, | |
| "step": 10140 | |
| }, | |
| { | |
| "epoch": 0.5175138938459185, | |
| "grad_norm": 10.76373884142598, | |
| "learning_rate": 2.480183601208293e-06, | |
| "loss": 0.6102, | |
| "step": 10150 | |
| }, | |
| { | |
| "epoch": 0.5180237597511854, | |
| "grad_norm": 4.331181284635521, | |
| "learning_rate": 2.4760553001555105e-06, | |
| "loss": 0.5191, | |
| "step": 10160 | |
| }, | |
| { | |
| "epoch": 0.5185336256564523, | |
| "grad_norm": 5.8217667885042985, | |
| "learning_rate": 2.4719270644015533e-06, | |
| "loss": 0.5496, | |
| "step": 10170 | |
| }, | |
| { | |
| "epoch": 0.5190434915617193, | |
| "grad_norm": 19.08094034857763, | |
| "learning_rate": 2.4677989052044022e-06, | |
| "loss": 0.6322, | |
| "step": 10180 | |
| }, | |
| { | |
| "epoch": 0.5195533574669862, | |
| "grad_norm": 5.2461850193376485, | |
| "learning_rate": 2.4636708338218267e-06, | |
| "loss": 0.5394, | |
| "step": 10190 | |
| }, | |
| { | |
| "epoch": 0.5200632233722531, | |
| "grad_norm": 20.81830527042792, | |
| "learning_rate": 2.4595428615113596e-06, | |
| "loss": 0.5523, | |
| "step": 10200 | |
| }, | |
| { | |
| "epoch": 0.52057308927752, | |
| "grad_norm": 19.96864700282037, | |
| "learning_rate": 2.4554149995302605e-06, | |
| "loss": 0.5756, | |
| "step": 10210 | |
| }, | |
| { | |
| "epoch": 0.5210829551827869, | |
| "grad_norm": 7.162960743105341, | |
| "learning_rate": 2.451287259135491e-06, | |
| "loss": 0.5989, | |
| "step": 10220 | |
| }, | |
| { | |
| "epoch": 0.5215928210880538, | |
| "grad_norm": 9.571220402814106, | |
| "learning_rate": 2.4471596515836797e-06, | |
| "loss": 0.5182, | |
| "step": 10230 | |
| }, | |
| { | |
| "epoch": 0.5221026869933207, | |
| "grad_norm": 15.182064264052597, | |
| "learning_rate": 2.4430321881310928e-06, | |
| "loss": 0.6155, | |
| "step": 10240 | |
| }, | |
| { | |
| "epoch": 0.5226125528985877, | |
| "grad_norm": 12.09544309621859, | |
| "learning_rate": 2.4389048800336056e-06, | |
| "loss": 0.4815, | |
| "step": 10250 | |
| }, | |
| { | |
| "epoch": 0.5231224188038546, | |
| "grad_norm": 12.673582745109133, | |
| "learning_rate": 2.434777738546666e-06, | |
| "loss": 0.5715, | |
| "step": 10260 | |
| }, | |
| { | |
| "epoch": 0.5236322847091215, | |
| "grad_norm": 4.490508455233823, | |
| "learning_rate": 2.4306507749252715e-06, | |
| "loss": 0.5596, | |
| "step": 10270 | |
| }, | |
| { | |
| "epoch": 0.5241421506143884, | |
| "grad_norm": 23.00387868404976, | |
| "learning_rate": 2.426524000423931e-06, | |
| "loss": 0.5911, | |
| "step": 10280 | |
| }, | |
| { | |
| "epoch": 0.5246520165196553, | |
| "grad_norm": 9.313144992336463, | |
| "learning_rate": 2.4223974262966395e-06, | |
| "loss": 0.5439, | |
| "step": 10290 | |
| }, | |
| { | |
| "epoch": 0.5251618824249222, | |
| "grad_norm": 7.04600299886626, | |
| "learning_rate": 2.4182710637968466e-06, | |
| "loss": 0.541, | |
| "step": 10300 | |
| }, | |
| { | |
| "epoch": 0.5256717483301891, | |
| "grad_norm": 28.8589127862858, | |
| "learning_rate": 2.414144924177422e-06, | |
| "loss": 0.5036, | |
| "step": 10310 | |
| }, | |
| { | |
| "epoch": 0.5261816142354561, | |
| "grad_norm": 13.107283050966476, | |
| "learning_rate": 2.4100190186906304e-06, | |
| "loss": 0.5215, | |
| "step": 10320 | |
| }, | |
| { | |
| "epoch": 0.526691480140723, | |
| "grad_norm": 7.642985447128399, | |
| "learning_rate": 2.4058933585880958e-06, | |
| "loss": 0.4943, | |
| "step": 10330 | |
| }, | |
| { | |
| "epoch": 0.5272013460459899, | |
| "grad_norm": 10.878127942510975, | |
| "learning_rate": 2.401767955120776e-06, | |
| "loss": 0.573, | |
| "step": 10340 | |
| }, | |
| { | |
| "epoch": 0.5277112119512568, | |
| "grad_norm": 7.41098358575074, | |
| "learning_rate": 2.3976428195389236e-06, | |
| "loss": 0.5247, | |
| "step": 10350 | |
| }, | |
| { | |
| "epoch": 0.5282210778565237, | |
| "grad_norm": 5.924014795722462, | |
| "learning_rate": 2.393517963092066e-06, | |
| "loss": 0.5586, | |
| "step": 10360 | |
| }, | |
| { | |
| "epoch": 0.5287309437617906, | |
| "grad_norm": 4.518669209117697, | |
| "learning_rate": 2.3893933970289677e-06, | |
| "loss": 0.515, | |
| "step": 10370 | |
| }, | |
| { | |
| "epoch": 0.5292408096670576, | |
| "grad_norm": 6.223131311678184, | |
| "learning_rate": 2.3852691325975996e-06, | |
| "loss": 0.6371, | |
| "step": 10380 | |
| }, | |
| { | |
| "epoch": 0.5297506755723245, | |
| "grad_norm": 8.75630273349942, | |
| "learning_rate": 2.3811451810451132e-06, | |
| "loss": 0.5759, | |
| "step": 10390 | |
| }, | |
| { | |
| "epoch": 0.5302605414775914, | |
| "grad_norm": 8.600063322715169, | |
| "learning_rate": 2.377021553617803e-06, | |
| "loss": 0.5489, | |
| "step": 10400 | |
| }, | |
| { | |
| "epoch": 0.5307704073828583, | |
| "grad_norm": 4.174747365549438, | |
| "learning_rate": 2.372898261561082e-06, | |
| "loss": 0.5865, | |
| "step": 10410 | |
| }, | |
| { | |
| "epoch": 0.5312802732881252, | |
| "grad_norm": 4.916435449099151, | |
| "learning_rate": 2.3687753161194498e-06, | |
| "loss": 0.5744, | |
| "step": 10420 | |
| }, | |
| { | |
| "epoch": 0.5317901391933921, | |
| "grad_norm": 5.548881930046863, | |
| "learning_rate": 2.3646527285364565e-06, | |
| "loss": 0.5161, | |
| "step": 10430 | |
| }, | |
| { | |
| "epoch": 0.532300005098659, | |
| "grad_norm": 4.378987795858091, | |
| "learning_rate": 2.3605305100546807e-06, | |
| "loss": 0.5833, | |
| "step": 10440 | |
| }, | |
| { | |
| "epoch": 0.532809871003926, | |
| "grad_norm": 18.165930076107532, | |
| "learning_rate": 2.356408671915692e-06, | |
| "loss": 0.5813, | |
| "step": 10450 | |
| }, | |
| { | |
| "epoch": 0.5333197369091929, | |
| "grad_norm": 15.010025469079643, | |
| "learning_rate": 2.352287225360024e-06, | |
| "loss": 0.5461, | |
| "step": 10460 | |
| }, | |
| { | |
| "epoch": 0.5338296028144598, | |
| "grad_norm": 4.72191705604572, | |
| "learning_rate": 2.3481661816271413e-06, | |
| "loss": 0.577, | |
| "step": 10470 | |
| }, | |
| { | |
| "epoch": 0.5343394687197267, | |
| "grad_norm": 7.029303864129285, | |
| "learning_rate": 2.3440455519554096e-06, | |
| "loss": 0.5542, | |
| "step": 10480 | |
| }, | |
| { | |
| "epoch": 0.5348493346249936, | |
| "grad_norm": 5.617285824429663, | |
| "learning_rate": 2.339925347582069e-06, | |
| "loss": 0.4906, | |
| "step": 10490 | |
| }, | |
| { | |
| "epoch": 0.5353592005302605, | |
| "grad_norm": 5.705927749325273, | |
| "learning_rate": 2.3358055797431945e-06, | |
| "loss": 0.6121, | |
| "step": 10500 | |
| }, | |
| { | |
| "epoch": 0.5358690664355275, | |
| "grad_norm": 6.538622967992609, | |
| "learning_rate": 2.3316862596736752e-06, | |
| "loss": 0.5855, | |
| "step": 10510 | |
| }, | |
| { | |
| "epoch": 0.5363789323407944, | |
| "grad_norm": 17.530699610788833, | |
| "learning_rate": 2.327567398607175e-06, | |
| "loss": 0.5381, | |
| "step": 10520 | |
| }, | |
| { | |
| "epoch": 0.5368887982460613, | |
| "grad_norm": 5.89952620967862, | |
| "learning_rate": 2.3234490077761097e-06, | |
| "loss": 0.5461, | |
| "step": 10530 | |
| }, | |
| { | |
| "epoch": 0.5373986641513282, | |
| "grad_norm": 5.255170494186333, | |
| "learning_rate": 2.3193310984116113e-06, | |
| "loss": 0.5675, | |
| "step": 10540 | |
| }, | |
| { | |
| "epoch": 0.5379085300565951, | |
| "grad_norm": 5.275971519411612, | |
| "learning_rate": 2.3152136817434983e-06, | |
| "loss": 0.572, | |
| "step": 10550 | |
| }, | |
| { | |
| "epoch": 0.538418395961862, | |
| "grad_norm": 14.685487630845353, | |
| "learning_rate": 2.311096769000247e-06, | |
| "loss": 0.5969, | |
| "step": 10560 | |
| }, | |
| { | |
| "epoch": 0.5389282618671289, | |
| "grad_norm": 4.955813987538444, | |
| "learning_rate": 2.306980371408957e-06, | |
| "loss": 0.5584, | |
| "step": 10570 | |
| }, | |
| { | |
| "epoch": 0.5394381277723959, | |
| "grad_norm": 7.380894299801214, | |
| "learning_rate": 2.302864500195326e-06, | |
| "loss": 0.4963, | |
| "step": 10580 | |
| }, | |
| { | |
| "epoch": 0.5399479936776628, | |
| "grad_norm": 4.890069171450678, | |
| "learning_rate": 2.2987491665836137e-06, | |
| "loss": 0.525, | |
| "step": 10590 | |
| }, | |
| { | |
| "epoch": 0.5404578595829297, | |
| "grad_norm": 11.506877076778487, | |
| "learning_rate": 2.2946343817966147e-06, | |
| "loss": 0.5496, | |
| "step": 10600 | |
| }, | |
| { | |
| "epoch": 0.5409677254881966, | |
| "grad_norm": 7.716243227450342, | |
| "learning_rate": 2.2905201570556294e-06, | |
| "loss": 0.5583, | |
| "step": 10610 | |
| }, | |
| { | |
| "epoch": 0.5414775913934635, | |
| "grad_norm": 5.818018263936738, | |
| "learning_rate": 2.2864065035804253e-06, | |
| "loss": 0.5293, | |
| "step": 10620 | |
| }, | |
| { | |
| "epoch": 0.5419874572987304, | |
| "grad_norm": 9.928565196052132, | |
| "learning_rate": 2.2822934325892178e-06, | |
| "loss": 0.5897, | |
| "step": 10630 | |
| }, | |
| { | |
| "epoch": 0.5424973232039974, | |
| "grad_norm": 11.33679136308585, | |
| "learning_rate": 2.2781809552986296e-06, | |
| "loss": 0.5679, | |
| "step": 10640 | |
| }, | |
| { | |
| "epoch": 0.5430071891092643, | |
| "grad_norm": 7.70962478429198, | |
| "learning_rate": 2.2740690829236672e-06, | |
| "loss": 0.5523, | |
| "step": 10650 | |
| }, | |
| { | |
| "epoch": 0.5435170550145312, | |
| "grad_norm": 7.917410798743483, | |
| "learning_rate": 2.269957826677685e-06, | |
| "loss": 0.5672, | |
| "step": 10660 | |
| }, | |
| { | |
| "epoch": 0.5440269209197981, | |
| "grad_norm": 6.7827283040354205, | |
| "learning_rate": 2.2658471977723593e-06, | |
| "loss": 0.5437, | |
| "step": 10670 | |
| }, | |
| { | |
| "epoch": 0.544536786825065, | |
| "grad_norm": 4.342938474458968, | |
| "learning_rate": 2.2617372074176565e-06, | |
| "loss": 0.5387, | |
| "step": 10680 | |
| }, | |
| { | |
| "epoch": 0.5450466527303319, | |
| "grad_norm": 9.43212832651628, | |
| "learning_rate": 2.2576278668217967e-06, | |
| "loss": 0.5197, | |
| "step": 10690 | |
| }, | |
| { | |
| "epoch": 0.5455565186355988, | |
| "grad_norm": 8.004745774649368, | |
| "learning_rate": 2.2535191871912337e-06, | |
| "loss": 0.5429, | |
| "step": 10700 | |
| }, | |
| { | |
| "epoch": 0.5460663845408658, | |
| "grad_norm": 6.872851460413676, | |
| "learning_rate": 2.2494111797306146e-06, | |
| "loss": 0.5419, | |
| "step": 10710 | |
| }, | |
| { | |
| "epoch": 0.5465762504461327, | |
| "grad_norm": 7.289325384183292, | |
| "learning_rate": 2.2453038556427557e-06, | |
| "loss": 0.6115, | |
| "step": 10720 | |
| }, | |
| { | |
| "epoch": 0.5470861163513996, | |
| "grad_norm": 13.306562120222383, | |
| "learning_rate": 2.2411972261286107e-06, | |
| "loss": 0.5227, | |
| "step": 10730 | |
| }, | |
| { | |
| "epoch": 0.5475959822566665, | |
| "grad_norm": 8.891170592948066, | |
| "learning_rate": 2.2370913023872357e-06, | |
| "loss": 0.6406, | |
| "step": 10740 | |
| }, | |
| { | |
| "epoch": 0.5481058481619334, | |
| "grad_norm": 8.565050748681404, | |
| "learning_rate": 2.2329860956157655e-06, | |
| "loss": 0.563, | |
| "step": 10750 | |
| }, | |
| { | |
| "epoch": 0.5486157140672003, | |
| "grad_norm": 10.560003193382766, | |
| "learning_rate": 2.2288816170093767e-06, | |
| "loss": 0.5297, | |
| "step": 10760 | |
| }, | |
| { | |
| "epoch": 0.5491255799724672, | |
| "grad_norm": 21.790652278804174, | |
| "learning_rate": 2.224777877761264e-06, | |
| "loss": 0.5178, | |
| "step": 10770 | |
| }, | |
| { | |
| "epoch": 0.5496354458777342, | |
| "grad_norm": 4.762684973385444, | |
| "learning_rate": 2.2206748890626004e-06, | |
| "loss": 0.6076, | |
| "step": 10780 | |
| }, | |
| { | |
| "epoch": 0.5501453117830011, | |
| "grad_norm": 11.67968286241827, | |
| "learning_rate": 2.216572662102518e-06, | |
| "loss": 0.636, | |
| "step": 10790 | |
| }, | |
| { | |
| "epoch": 0.550655177688268, | |
| "grad_norm": 32.813770920064506, | |
| "learning_rate": 2.212471208068068e-06, | |
| "loss": 0.5275, | |
| "step": 10800 | |
| }, | |
| { | |
| "epoch": 0.5511650435935349, | |
| "grad_norm": 15.057043570306027, | |
| "learning_rate": 2.2083705381441933e-06, | |
| "loss": 0.5409, | |
| "step": 10810 | |
| }, | |
| { | |
| "epoch": 0.5516749094988018, | |
| "grad_norm": 5.890492850664067, | |
| "learning_rate": 2.204270663513702e-06, | |
| "loss": 0.5254, | |
| "step": 10820 | |
| }, | |
| { | |
| "epoch": 0.5521847754040687, | |
| "grad_norm": 7.960866990199008, | |
| "learning_rate": 2.200171595357229e-06, | |
| "loss": 0.529, | |
| "step": 10830 | |
| }, | |
| { | |
| "epoch": 0.5526946413093357, | |
| "grad_norm": 12.87627099794233, | |
| "learning_rate": 2.196073344853213e-06, | |
| "loss": 0.493, | |
| "step": 10840 | |
| }, | |
| { | |
| "epoch": 0.5532045072146026, | |
| "grad_norm": 13.807869879260597, | |
| "learning_rate": 2.1919759231778616e-06, | |
| "loss": 0.5958, | |
| "step": 10850 | |
| }, | |
| { | |
| "epoch": 0.5537143731198695, | |
| "grad_norm": 8.732910844238024, | |
| "learning_rate": 2.187879341505123e-06, | |
| "loss": 0.5315, | |
| "step": 10860 | |
| }, | |
| { | |
| "epoch": 0.5542242390251364, | |
| "grad_norm": 10.627749896245922, | |
| "learning_rate": 2.1837836110066544e-06, | |
| "loss": 0.6201, | |
| "step": 10870 | |
| }, | |
| { | |
| "epoch": 0.5547341049304033, | |
| "grad_norm": 12.410053602064153, | |
| "learning_rate": 2.17968874285179e-06, | |
| "loss": 0.661, | |
| "step": 10880 | |
| }, | |
| { | |
| "epoch": 0.5552439708356702, | |
| "grad_norm": 9.702490596647241, | |
| "learning_rate": 2.175594748207516e-06, | |
| "loss": 0.5263, | |
| "step": 10890 | |
| }, | |
| { | |
| "epoch": 0.5557538367409371, | |
| "grad_norm": 10.737235352322436, | |
| "learning_rate": 2.1715016382384314e-06, | |
| "loss": 0.5643, | |
| "step": 10900 | |
| }, | |
| { | |
| "epoch": 0.5562637026462041, | |
| "grad_norm": 5.205601904965876, | |
| "learning_rate": 2.1674094241067275e-06, | |
| "loss": 0.4741, | |
| "step": 10910 | |
| }, | |
| { | |
| "epoch": 0.556773568551471, | |
| "grad_norm": 9.54609904611359, | |
| "learning_rate": 2.1633181169721518e-06, | |
| "loss": 0.5626, | |
| "step": 10920 | |
| }, | |
| { | |
| "epoch": 0.5572834344567379, | |
| "grad_norm": 17.84606744633947, | |
| "learning_rate": 2.159227727991974e-06, | |
| "loss": 0.5571, | |
| "step": 10930 | |
| }, | |
| { | |
| "epoch": 0.5577933003620048, | |
| "grad_norm": 21.19914314000095, | |
| "learning_rate": 2.155138268320966e-06, | |
| "loss": 0.5913, | |
| "step": 10940 | |
| }, | |
| { | |
| "epoch": 0.5583031662672717, | |
| "grad_norm": 11.748809523593113, | |
| "learning_rate": 2.151049749111361e-06, | |
| "loss": 0.6247, | |
| "step": 10950 | |
| }, | |
| { | |
| "epoch": 0.5588130321725386, | |
| "grad_norm": 9.896026330514129, | |
| "learning_rate": 2.146962181512829e-06, | |
| "loss": 0.5989, | |
| "step": 10960 | |
| }, | |
| { | |
| "epoch": 0.5593228980778056, | |
| "grad_norm": 7.035140414909101, | |
| "learning_rate": 2.142875576672446e-06, | |
| "loss": 0.5488, | |
| "step": 10970 | |
| }, | |
| { | |
| "epoch": 0.5598327639830725, | |
| "grad_norm": 6.072140050707628, | |
| "learning_rate": 2.13878994573466e-06, | |
| "loss": 0.5407, | |
| "step": 10980 | |
| }, | |
| { | |
| "epoch": 0.5603426298883394, | |
| "grad_norm": 7.288317100033059, | |
| "learning_rate": 2.1347052998412667e-06, | |
| "loss": 0.5561, | |
| "step": 10990 | |
| }, | |
| { | |
| "epoch": 0.5608524957936063, | |
| "grad_norm": 5.819210689950646, | |
| "learning_rate": 2.1306216501313705e-06, | |
| "loss": 0.5428, | |
| "step": 11000 | |
| }, | |
| { | |
| "epoch": 0.5613623616988732, | |
| "grad_norm": 12.776136561779468, | |
| "learning_rate": 2.1265390077413643e-06, | |
| "loss": 0.5003, | |
| "step": 11010 | |
| }, | |
| { | |
| "epoch": 0.5618722276041401, | |
| "grad_norm": 18.983352626504544, | |
| "learning_rate": 2.1224573838048894e-06, | |
| "loss": 0.5445, | |
| "step": 11020 | |
| }, | |
| { | |
| "epoch": 0.562382093509407, | |
| "grad_norm": 15.491629082640607, | |
| "learning_rate": 2.1183767894528135e-06, | |
| "loss": 0.5391, | |
| "step": 11030 | |
| }, | |
| { | |
| "epoch": 0.562891959414674, | |
| "grad_norm": 9.763365375568759, | |
| "learning_rate": 2.114297235813196e-06, | |
| "loss": 0.5338, | |
| "step": 11040 | |
| }, | |
| { | |
| "epoch": 0.5634018253199409, | |
| "grad_norm": 6.44544801800945, | |
| "learning_rate": 2.110218734011255e-06, | |
| "loss": 0.5031, | |
| "step": 11050 | |
| }, | |
| { | |
| "epoch": 0.5639116912252078, | |
| "grad_norm": 4.539572048783182, | |
| "learning_rate": 2.106141295169344e-06, | |
| "loss": 0.539, | |
| "step": 11060 | |
| }, | |
| { | |
| "epoch": 0.5644215571304747, | |
| "grad_norm": 7.702392114659815, | |
| "learning_rate": 2.1020649304069144e-06, | |
| "loss": 0.5167, | |
| "step": 11070 | |
| }, | |
| { | |
| "epoch": 0.5649314230357416, | |
| "grad_norm": 6.263165760093371, | |
| "learning_rate": 2.0979896508404917e-06, | |
| "loss": 0.524, | |
| "step": 11080 | |
| }, | |
| { | |
| "epoch": 0.5654412889410085, | |
| "grad_norm": 8.742193198862491, | |
| "learning_rate": 2.0939154675836407e-06, | |
| "loss": 0.5162, | |
| "step": 11090 | |
| }, | |
| { | |
| "epoch": 0.5659511548462755, | |
| "grad_norm": 7.658537397707099, | |
| "learning_rate": 2.0898423917469344e-06, | |
| "loss": 0.5552, | |
| "step": 11100 | |
| }, | |
| { | |
| "epoch": 0.5664610207515424, | |
| "grad_norm": 7.432799918637242, | |
| "learning_rate": 2.085770434437931e-06, | |
| "loss": 0.4958, | |
| "step": 11110 | |
| }, | |
| { | |
| "epoch": 0.5669708866568093, | |
| "grad_norm": 8.525440793917197, | |
| "learning_rate": 2.0816996067611315e-06, | |
| "loss": 0.5474, | |
| "step": 11120 | |
| }, | |
| { | |
| "epoch": 0.5674807525620762, | |
| "grad_norm": 12.292033213542174, | |
| "learning_rate": 2.0776299198179624e-06, | |
| "loss": 0.6679, | |
| "step": 11130 | |
| }, | |
| { | |
| "epoch": 0.5679906184673431, | |
| "grad_norm": 11.936598755773637, | |
| "learning_rate": 2.0735613847067355e-06, | |
| "loss": 0.5626, | |
| "step": 11140 | |
| }, | |
| { | |
| "epoch": 0.56850048437261, | |
| "grad_norm": 15.717441916004525, | |
| "learning_rate": 2.0694940125226224e-06, | |
| "loss": 0.5813, | |
| "step": 11150 | |
| }, | |
| { | |
| "epoch": 0.5690103502778769, | |
| "grad_norm": 4.026163345202539, | |
| "learning_rate": 2.0654278143576263e-06, | |
| "loss": 0.5576, | |
| "step": 11160 | |
| }, | |
| { | |
| "epoch": 0.5695202161831439, | |
| "grad_norm": 6.045823319943218, | |
| "learning_rate": 2.0613628013005437e-06, | |
| "loss": 0.595, | |
| "step": 11170 | |
| }, | |
| { | |
| "epoch": 0.5700300820884108, | |
| "grad_norm": 22.41401539076487, | |
| "learning_rate": 2.0572989844369427e-06, | |
| "loss": 0.5276, | |
| "step": 11180 | |
| }, | |
| { | |
| "epoch": 0.5705399479936777, | |
| "grad_norm": 5.04830021515491, | |
| "learning_rate": 2.053236374849128e-06, | |
| "loss": 0.5411, | |
| "step": 11190 | |
| }, | |
| { | |
| "epoch": 0.5710498138989446, | |
| "grad_norm": 8.227143620060716, | |
| "learning_rate": 2.049174983616113e-06, | |
| "loss": 0.5622, | |
| "step": 11200 | |
| }, | |
| { | |
| "epoch": 0.5715596798042115, | |
| "grad_norm": 4.747071324681422, | |
| "learning_rate": 2.045114821813588e-06, | |
| "loss": 0.5133, | |
| "step": 11210 | |
| }, | |
| { | |
| "epoch": 0.5720695457094784, | |
| "grad_norm": 4.548869894399742, | |
| "learning_rate": 2.0410559005138893e-06, | |
| "loss": 0.5411, | |
| "step": 11220 | |
| }, | |
| { | |
| "epoch": 0.5725794116147453, | |
| "grad_norm": 4.694113174753521, | |
| "learning_rate": 2.0369982307859728e-06, | |
| "loss": 0.5564, | |
| "step": 11230 | |
| }, | |
| { | |
| "epoch": 0.5730892775200123, | |
| "grad_norm": 7.208504892989224, | |
| "learning_rate": 2.032941823695378e-06, | |
| "loss": 0.4987, | |
| "step": 11240 | |
| }, | |
| { | |
| "epoch": 0.5735991434252792, | |
| "grad_norm": 6.550131307780764, | |
| "learning_rate": 2.0288866903042055e-06, | |
| "loss": 0.5533, | |
| "step": 11250 | |
| }, | |
| { | |
| "epoch": 0.5741090093305461, | |
| "grad_norm": 14.482125263295131, | |
| "learning_rate": 2.024832841671077e-06, | |
| "loss": 0.5762, | |
| "step": 11260 | |
| }, | |
| { | |
| "epoch": 0.574618875235813, | |
| "grad_norm": 5.509057904355494, | |
| "learning_rate": 2.0207802888511155e-06, | |
| "loss": 0.564, | |
| "step": 11270 | |
| }, | |
| { | |
| "epoch": 0.5751287411410799, | |
| "grad_norm": 11.023848829252348, | |
| "learning_rate": 2.0167290428959082e-06, | |
| "loss": 0.5716, | |
| "step": 11280 | |
| }, | |
| { | |
| "epoch": 0.5756386070463468, | |
| "grad_norm": 12.049873053442631, | |
| "learning_rate": 2.0126791148534777e-06, | |
| "loss": 0.5457, | |
| "step": 11290 | |
| }, | |
| { | |
| "epoch": 0.5761484729516138, | |
| "grad_norm": 4.3956366771791515, | |
| "learning_rate": 2.0086305157682546e-06, | |
| "loss": 0.4937, | |
| "step": 11300 | |
| }, | |
| { | |
| "epoch": 0.5766583388568807, | |
| "grad_norm": 5.635414557925949, | |
| "learning_rate": 2.004583256681042e-06, | |
| "loss": 0.5618, | |
| "step": 11310 | |
| }, | |
| { | |
| "epoch": 0.5771682047621476, | |
| "grad_norm": 4.7330346651271755, | |
| "learning_rate": 2.0005373486289932e-06, | |
| "loss": 0.5553, | |
| "step": 11320 | |
| }, | |
| { | |
| "epoch": 0.5776780706674145, | |
| "grad_norm": 5.922877822891385, | |
| "learning_rate": 1.9964928026455715e-06, | |
| "loss": 0.4771, | |
| "step": 11330 | |
| }, | |
| { | |
| "epoch": 0.5781879365726814, | |
| "grad_norm": 5.231113774110676, | |
| "learning_rate": 1.9924496297605315e-06, | |
| "loss": 0.5578, | |
| "step": 11340 | |
| }, | |
| { | |
| "epoch": 0.5786978024779483, | |
| "grad_norm": 5.08380380468843, | |
| "learning_rate": 1.988407840999881e-06, | |
| "loss": 0.4714, | |
| "step": 11350 | |
| }, | |
| { | |
| "epoch": 0.5792076683832152, | |
| "grad_norm": 6.004819919545154, | |
| "learning_rate": 1.984367447385851e-06, | |
| "loss": 0.5764, | |
| "step": 11360 | |
| }, | |
| { | |
| "epoch": 0.5797175342884822, | |
| "grad_norm": 18.64442586475368, | |
| "learning_rate": 1.9803284599368704e-06, | |
| "loss": 0.5513, | |
| "step": 11370 | |
| }, | |
| { | |
| "epoch": 0.5802274001937491, | |
| "grad_norm": 11.25732697341979, | |
| "learning_rate": 1.976290889667533e-06, | |
| "loss": 0.5753, | |
| "step": 11380 | |
| }, | |
| { | |
| "epoch": 0.580737266099016, | |
| "grad_norm": 13.524467497890853, | |
| "learning_rate": 1.9722547475885685e-06, | |
| "loss": 0.5732, | |
| "step": 11390 | |
| }, | |
| { | |
| "epoch": 0.5812471320042829, | |
| "grad_norm": 11.958430536712548, | |
| "learning_rate": 1.9682200447068095e-06, | |
| "loss": 0.5608, | |
| "step": 11400 | |
| }, | |
| { | |
| "epoch": 0.5817569979095498, | |
| "grad_norm": 4.929859531783645, | |
| "learning_rate": 1.9641867920251655e-06, | |
| "loss": 0.5411, | |
| "step": 11410 | |
| }, | |
| { | |
| "epoch": 0.5822668638148167, | |
| "grad_norm": 6.115864848385566, | |
| "learning_rate": 1.9601550005425925e-06, | |
| "loss": 0.5141, | |
| "step": 11420 | |
| }, | |
| { | |
| "epoch": 0.5827767297200837, | |
| "grad_norm": 3.8065996292234425, | |
| "learning_rate": 1.9561246812540572e-06, | |
| "loss": 0.5874, | |
| "step": 11430 | |
| }, | |
| { | |
| "epoch": 0.5832865956253506, | |
| "grad_norm": 8.306882889332991, | |
| "learning_rate": 1.952095845150518e-06, | |
| "loss": 0.5053, | |
| "step": 11440 | |
| }, | |
| { | |
| "epoch": 0.5837964615306175, | |
| "grad_norm": 10.01015622593652, | |
| "learning_rate": 1.9480685032188816e-06, | |
| "loss": 0.5359, | |
| "step": 11450 | |
| }, | |
| { | |
| "epoch": 0.5843063274358844, | |
| "grad_norm": 10.699493523122232, | |
| "learning_rate": 1.9440426664419855e-06, | |
| "loss": 0.5721, | |
| "step": 11460 | |
| }, | |
| { | |
| "epoch": 0.5848161933411513, | |
| "grad_norm": 3.2992193035520776, | |
| "learning_rate": 1.940018345798561e-06, | |
| "loss": 0.4852, | |
| "step": 11470 | |
| }, | |
| { | |
| "epoch": 0.5853260592464182, | |
| "grad_norm": 42.64445686580835, | |
| "learning_rate": 1.935995552263202e-06, | |
| "loss": 0.5427, | |
| "step": 11480 | |
| }, | |
| { | |
| "epoch": 0.5858359251516851, | |
| "grad_norm": 4.99320155932582, | |
| "learning_rate": 1.931974296806342e-06, | |
| "loss": 0.5109, | |
| "step": 11490 | |
| }, | |
| { | |
| "epoch": 0.5863457910569521, | |
| "grad_norm": 11.084567601573044, | |
| "learning_rate": 1.9279545903942174e-06, | |
| "loss": 0.5371, | |
| "step": 11500 | |
| }, | |
| { | |
| "epoch": 0.586855656962219, | |
| "grad_norm": 10.31727590216634, | |
| "learning_rate": 1.923936443988841e-06, | |
| "loss": 0.5611, | |
| "step": 11510 | |
| }, | |
| { | |
| "epoch": 0.5873655228674859, | |
| "grad_norm": 12.632248744614495, | |
| "learning_rate": 1.9199198685479732e-06, | |
| "loss": 0.5298, | |
| "step": 11520 | |
| }, | |
| { | |
| "epoch": 0.5878753887727528, | |
| "grad_norm": 7.57910307013232, | |
| "learning_rate": 1.9159048750250855e-06, | |
| "loss": 0.5541, | |
| "step": 11530 | |
| }, | |
| { | |
| "epoch": 0.5883852546780197, | |
| "grad_norm": 7.842350564520856, | |
| "learning_rate": 1.9118914743693407e-06, | |
| "loss": 0.5822, | |
| "step": 11540 | |
| }, | |
| { | |
| "epoch": 0.5888951205832866, | |
| "grad_norm": 14.98917864774212, | |
| "learning_rate": 1.907879677525554e-06, | |
| "loss": 0.5409, | |
| "step": 11550 | |
| }, | |
| { | |
| "epoch": 0.5894049864885536, | |
| "grad_norm": 14.362958611737447, | |
| "learning_rate": 1.9038694954341697e-06, | |
| "loss": 0.4998, | |
| "step": 11560 | |
| }, | |
| { | |
| "epoch": 0.5899148523938205, | |
| "grad_norm": 5.82283027332385, | |
| "learning_rate": 1.8998609390312251e-06, | |
| "loss": 0.4923, | |
| "step": 11570 | |
| }, | |
| { | |
| "epoch": 0.5904247182990874, | |
| "grad_norm": 10.272349364887027, | |
| "learning_rate": 1.8958540192483277e-06, | |
| "loss": 0.5519, | |
| "step": 11580 | |
| }, | |
| { | |
| "epoch": 0.5909345842043543, | |
| "grad_norm": 12.109395292903596, | |
| "learning_rate": 1.8918487470126207e-06, | |
| "loss": 0.5581, | |
| "step": 11590 | |
| }, | |
| { | |
| "epoch": 0.5914444501096212, | |
| "grad_norm": 7.524591927450444, | |
| "learning_rate": 1.8878451332467529e-06, | |
| "loss": 0.5133, | |
| "step": 11600 | |
| }, | |
| { | |
| "epoch": 0.5919543160148881, | |
| "grad_norm": 16.959598592402315, | |
| "learning_rate": 1.8838431888688528e-06, | |
| "loss": 0.5905, | |
| "step": 11610 | |
| }, | |
| { | |
| "epoch": 0.592464181920155, | |
| "grad_norm": 6.178426939909682, | |
| "learning_rate": 1.879842924792493e-06, | |
| "loss": 0.5712, | |
| "step": 11620 | |
| }, | |
| { | |
| "epoch": 0.592974047825422, | |
| "grad_norm": 6.552338716939495, | |
| "learning_rate": 1.8758443519266667e-06, | |
| "loss": 0.5146, | |
| "step": 11630 | |
| }, | |
| { | |
| "epoch": 0.5934839137306889, | |
| "grad_norm": 5.554779498177808, | |
| "learning_rate": 1.8718474811757553e-06, | |
| "loss": 0.5211, | |
| "step": 11640 | |
| }, | |
| { | |
| "epoch": 0.5939937796359558, | |
| "grad_norm": 14.887120778210514, | |
| "learning_rate": 1.8678523234394956e-06, | |
| "loss": 0.5599, | |
| "step": 11650 | |
| }, | |
| { | |
| "epoch": 0.5945036455412227, | |
| "grad_norm": 3.108872975693166, | |
| "learning_rate": 1.863858889612956e-06, | |
| "loss": 0.4967, | |
| "step": 11660 | |
| }, | |
| { | |
| "epoch": 0.5950135114464896, | |
| "grad_norm": 4.95066312376142, | |
| "learning_rate": 1.8598671905865002e-06, | |
| "loss": 0.5689, | |
| "step": 11670 | |
| }, | |
| { | |
| "epoch": 0.5955233773517565, | |
| "grad_norm": 3.8170420467015793, | |
| "learning_rate": 1.8558772372457647e-06, | |
| "loss": 0.5409, | |
| "step": 11680 | |
| }, | |
| { | |
| "epoch": 0.5960332432570234, | |
| "grad_norm": 9.995945349423792, | |
| "learning_rate": 1.8518890404716227e-06, | |
| "loss": 0.5085, | |
| "step": 11690 | |
| }, | |
| { | |
| "epoch": 0.5965431091622904, | |
| "grad_norm": 7.49096178498184, | |
| "learning_rate": 1.8479026111401594e-06, | |
| "loss": 0.5287, | |
| "step": 11700 | |
| }, | |
| { | |
| "epoch": 0.5970529750675573, | |
| "grad_norm": 5.778438270552736, | |
| "learning_rate": 1.8439179601226376e-06, | |
| "loss": 0.5124, | |
| "step": 11710 | |
| }, | |
| { | |
| "epoch": 0.5975628409728242, | |
| "grad_norm": 4.567825321093821, | |
| "learning_rate": 1.8399350982854717e-06, | |
| "loss": 0.4978, | |
| "step": 11720 | |
| }, | |
| { | |
| "epoch": 0.5980727068780911, | |
| "grad_norm": 11.475875662289322, | |
| "learning_rate": 1.835954036490198e-06, | |
| "loss": 0.5369, | |
| "step": 11730 | |
| }, | |
| { | |
| "epoch": 0.598582572783358, | |
| "grad_norm": 5.961118047678783, | |
| "learning_rate": 1.8319747855934416e-06, | |
| "loss": 0.5744, | |
| "step": 11740 | |
| }, | |
| { | |
| "epoch": 0.5990924386886249, | |
| "grad_norm": 6.930421840026854, | |
| "learning_rate": 1.8279973564468906e-06, | |
| "loss": 0.5138, | |
| "step": 11750 | |
| }, | |
| { | |
| "epoch": 0.5996023045938919, | |
| "grad_norm": 6.394493063704984, | |
| "learning_rate": 1.8240217598972665e-06, | |
| "loss": 0.6055, | |
| "step": 11760 | |
| }, | |
| { | |
| "epoch": 0.6001121704991588, | |
| "grad_norm": 6.464271673435689, | |
| "learning_rate": 1.8200480067862888e-06, | |
| "loss": 0.5635, | |
| "step": 11770 | |
| }, | |
| { | |
| "epoch": 0.6006220364044257, | |
| "grad_norm": 6.33418337132712, | |
| "learning_rate": 1.8160761079506553e-06, | |
| "loss": 0.546, | |
| "step": 11780 | |
| }, | |
| { | |
| "epoch": 0.6011319023096926, | |
| "grad_norm": 10.636087327366742, | |
| "learning_rate": 1.812106074222002e-06, | |
| "loss": 0.5233, | |
| "step": 11790 | |
| }, | |
| { | |
| "epoch": 0.6016417682149595, | |
| "grad_norm": 8.78934513556952, | |
| "learning_rate": 1.8081379164268826e-06, | |
| "loss": 0.574, | |
| "step": 11800 | |
| }, | |
| { | |
| "epoch": 0.6021516341202264, | |
| "grad_norm": 32.344538634188154, | |
| "learning_rate": 1.804171645386732e-06, | |
| "loss": 0.5374, | |
| "step": 11810 | |
| }, | |
| { | |
| "epoch": 0.6026615000254933, | |
| "grad_norm": 6.569258991721041, | |
| "learning_rate": 1.800207271917842e-06, | |
| "loss": 0.549, | |
| "step": 11820 | |
| }, | |
| { | |
| "epoch": 0.6031713659307603, | |
| "grad_norm": 7.410474971488895, | |
| "learning_rate": 1.7962448068313298e-06, | |
| "loss": 0.5449, | |
| "step": 11830 | |
| }, | |
| { | |
| "epoch": 0.6036812318360272, | |
| "grad_norm": 13.567886275688778, | |
| "learning_rate": 1.7922842609331053e-06, | |
| "loss": 0.5348, | |
| "step": 11840 | |
| }, | |
| { | |
| "epoch": 0.6041910977412941, | |
| "grad_norm": 22.176157368753856, | |
| "learning_rate": 1.788325645023848e-06, | |
| "loss": 0.5003, | |
| "step": 11850 | |
| }, | |
| { | |
| "epoch": 0.604700963646561, | |
| "grad_norm": 5.704122016801913, | |
| "learning_rate": 1.7843689698989715e-06, | |
| "loss": 0.4972, | |
| "step": 11860 | |
| }, | |
| { | |
| "epoch": 0.6052108295518279, | |
| "grad_norm": 10.267907792805214, | |
| "learning_rate": 1.7804142463486e-06, | |
| "loss": 0.51, | |
| "step": 11870 | |
| }, | |
| { | |
| "epoch": 0.6057206954570948, | |
| "grad_norm": 6.572061790414582, | |
| "learning_rate": 1.776461485157531e-06, | |
| "loss": 0.5938, | |
| "step": 11880 | |
| }, | |
| { | |
| "epoch": 0.6062305613623618, | |
| "grad_norm": 6.2343504746004, | |
| "learning_rate": 1.7725106971052147e-06, | |
| "loss": 0.5394, | |
| "step": 11890 | |
| }, | |
| { | |
| "epoch": 0.6067404272676287, | |
| "grad_norm": 12.18768143965922, | |
| "learning_rate": 1.7685618929657193e-06, | |
| "loss": 0.5751, | |
| "step": 11900 | |
| }, | |
| { | |
| "epoch": 0.6072502931728956, | |
| "grad_norm": 7.201216852229459, | |
| "learning_rate": 1.7646150835077014e-06, | |
| "loss": 0.5611, | |
| "step": 11910 | |
| }, | |
| { | |
| "epoch": 0.6077601590781625, | |
| "grad_norm": 6.707647871833833, | |
| "learning_rate": 1.7606702794943803e-06, | |
| "loss": 0.539, | |
| "step": 11920 | |
| }, | |
| { | |
| "epoch": 0.6082700249834293, | |
| "grad_norm": 16.03213196409904, | |
| "learning_rate": 1.756727491683503e-06, | |
| "loss": 0.5695, | |
| "step": 11930 | |
| }, | |
| { | |
| "epoch": 0.6087798908886962, | |
| "grad_norm": 7.653859604284126, | |
| "learning_rate": 1.7527867308273211e-06, | |
| "loss": 0.5502, | |
| "step": 11940 | |
| }, | |
| { | |
| "epoch": 0.6092897567939631, | |
| "grad_norm": 18.450018356779395, | |
| "learning_rate": 1.7488480076725584e-06, | |
| "loss": 0.5902, | |
| "step": 11950 | |
| }, | |
| { | |
| "epoch": 0.60979962269923, | |
| "grad_norm": 8.81328827050954, | |
| "learning_rate": 1.7449113329603787e-06, | |
| "loss": 0.5659, | |
| "step": 11960 | |
| }, | |
| { | |
| "epoch": 0.610309488604497, | |
| "grad_norm": 16.666299004092103, | |
| "learning_rate": 1.7409767174263643e-06, | |
| "loss": 0.5547, | |
| "step": 11970 | |
| }, | |
| { | |
| "epoch": 0.6108193545097639, | |
| "grad_norm": 8.923550983437284, | |
| "learning_rate": 1.7370441718004771e-06, | |
| "loss": 0.6135, | |
| "step": 11980 | |
| }, | |
| { | |
| "epoch": 0.6113292204150308, | |
| "grad_norm": 6.030797934178173, | |
| "learning_rate": 1.733113706807038e-06, | |
| "loss": 0.563, | |
| "step": 11990 | |
| }, | |
| { | |
| "epoch": 0.6118390863202977, | |
| "grad_norm": 6.60626139740419, | |
| "learning_rate": 1.7291853331646917e-06, | |
| "loss": 0.5256, | |
| "step": 12000 | |
| }, | |
| { | |
| "epoch": 0.6123489522255646, | |
| "grad_norm": 7.553040351819789, | |
| "learning_rate": 1.7252590615863809e-06, | |
| "loss": 0.5619, | |
| "step": 12010 | |
| }, | |
| { | |
| "epoch": 0.6128588181308315, | |
| "grad_norm": 12.276519778238537, | |
| "learning_rate": 1.7213349027793153e-06, | |
| "loss": 0.5427, | |
| "step": 12020 | |
| }, | |
| { | |
| "epoch": 0.6133686840360985, | |
| "grad_norm": 6.7982241497229134, | |
| "learning_rate": 1.7174128674449422e-06, | |
| "loss": 0.4984, | |
| "step": 12030 | |
| }, | |
| { | |
| "epoch": 0.6138785499413654, | |
| "grad_norm": 34.987861391211595, | |
| "learning_rate": 1.7134929662789204e-06, | |
| "loss": 0.535, | |
| "step": 12040 | |
| }, | |
| { | |
| "epoch": 0.6143884158466323, | |
| "grad_norm": 6.391665999406623, | |
| "learning_rate": 1.709575209971085e-06, | |
| "loss": 0.4856, | |
| "step": 12050 | |
| }, | |
| { | |
| "epoch": 0.6148982817518992, | |
| "grad_norm": 10.174572831746715, | |
| "learning_rate": 1.7056596092054245e-06, | |
| "loss": 0.5267, | |
| "step": 12060 | |
| }, | |
| { | |
| "epoch": 0.6154081476571661, | |
| "grad_norm": 3.8316603741601885, | |
| "learning_rate": 1.7017461746600506e-06, | |
| "loss": 0.56, | |
| "step": 12070 | |
| }, | |
| { | |
| "epoch": 0.615918013562433, | |
| "grad_norm": 4.2956097081697315, | |
| "learning_rate": 1.697834917007163e-06, | |
| "loss": 0.5564, | |
| "step": 12080 | |
| }, | |
| { | |
| "epoch": 0.6164278794677, | |
| "grad_norm": 12.793184931761772, | |
| "learning_rate": 1.6939258469130288e-06, | |
| "loss": 0.5483, | |
| "step": 12090 | |
| }, | |
| { | |
| "epoch": 0.6169377453729669, | |
| "grad_norm": 11.38804280431213, | |
| "learning_rate": 1.6900189750379469e-06, | |
| "loss": 0.5347, | |
| "step": 12100 | |
| }, | |
| { | |
| "epoch": 0.6174476112782338, | |
| "grad_norm": 8.594475973751882, | |
| "learning_rate": 1.6861143120362239e-06, | |
| "loss": 0.5411, | |
| "step": 12110 | |
| }, | |
| { | |
| "epoch": 0.6179574771835007, | |
| "grad_norm": 7.3766297715792115, | |
| "learning_rate": 1.6822118685561403e-06, | |
| "loss": 0.5243, | |
| "step": 12120 | |
| }, | |
| { | |
| "epoch": 0.6184673430887676, | |
| "grad_norm": 13.124098897523416, | |
| "learning_rate": 1.6783116552399258e-06, | |
| "loss": 0.5176, | |
| "step": 12130 | |
| }, | |
| { | |
| "epoch": 0.6189772089940345, | |
| "grad_norm": 5.497804698316182, | |
| "learning_rate": 1.6744136827237283e-06, | |
| "loss": 0.5455, | |
| "step": 12140 | |
| }, | |
| { | |
| "epoch": 0.6194870748993014, | |
| "grad_norm": 7.212115337101796, | |
| "learning_rate": 1.670517961637582e-06, | |
| "loss": 0.5101, | |
| "step": 12150 | |
| }, | |
| { | |
| "epoch": 0.6199969408045684, | |
| "grad_norm": 8.604412741667838, | |
| "learning_rate": 1.666624502605385e-06, | |
| "loss": 0.5701, | |
| "step": 12160 | |
| }, | |
| { | |
| "epoch": 0.6205068067098353, | |
| "grad_norm": 19.511180309160327, | |
| "learning_rate": 1.6627333162448638e-06, | |
| "loss": 0.5202, | |
| "step": 12170 | |
| }, | |
| { | |
| "epoch": 0.6210166726151022, | |
| "grad_norm": 8.939814436037937, | |
| "learning_rate": 1.6588444131675486e-06, | |
| "loss": 0.5488, | |
| "step": 12180 | |
| }, | |
| { | |
| "epoch": 0.6215265385203691, | |
| "grad_norm": 8.046046108782505, | |
| "learning_rate": 1.6549578039787436e-06, | |
| "loss": 0.5954, | |
| "step": 12190 | |
| }, | |
| { | |
| "epoch": 0.622036404425636, | |
| "grad_norm": 15.08482992814336, | |
| "learning_rate": 1.6510734992774953e-06, | |
| "loss": 0.5213, | |
| "step": 12200 | |
| }, | |
| { | |
| "epoch": 0.6225462703309029, | |
| "grad_norm": 8.290379527495224, | |
| "learning_rate": 1.647191509656567e-06, | |
| "loss": 0.5287, | |
| "step": 12210 | |
| }, | |
| { | |
| "epoch": 0.6230561362361698, | |
| "grad_norm": 5.2768775686017495, | |
| "learning_rate": 1.6433118457024094e-06, | |
| "loss": 0.5833, | |
| "step": 12220 | |
| }, | |
| { | |
| "epoch": 0.6235660021414368, | |
| "grad_norm": 7.166817676607632, | |
| "learning_rate": 1.6394345179951293e-06, | |
| "loss": 0.464, | |
| "step": 12230 | |
| }, | |
| { | |
| "epoch": 0.6240758680467037, | |
| "grad_norm": 7.212770160890743, | |
| "learning_rate": 1.6355595371084627e-06, | |
| "loss": 0.5278, | |
| "step": 12240 | |
| }, | |
| { | |
| "epoch": 0.6245857339519706, | |
| "grad_norm": 10.940639389400307, | |
| "learning_rate": 1.6316869136097467e-06, | |
| "loss": 0.541, | |
| "step": 12250 | |
| }, | |
| { | |
| "epoch": 0.6250955998572375, | |
| "grad_norm": 7.45936713616669, | |
| "learning_rate": 1.6278166580598897e-06, | |
| "loss": 0.4824, | |
| "step": 12260 | |
| }, | |
| { | |
| "epoch": 0.6256054657625044, | |
| "grad_norm": 5.728147171292839, | |
| "learning_rate": 1.6239487810133404e-06, | |
| "loss": 0.5447, | |
| "step": 12270 | |
| }, | |
| { | |
| "epoch": 0.6261153316677713, | |
| "grad_norm": 3.2085803535811, | |
| "learning_rate": 1.6200832930180643e-06, | |
| "loss": 0.489, | |
| "step": 12280 | |
| }, | |
| { | |
| "epoch": 0.6266251975730383, | |
| "grad_norm": 11.435384897927873, | |
| "learning_rate": 1.6162202046155085e-06, | |
| "loss": 0.5655, | |
| "step": 12290 | |
| }, | |
| { | |
| "epoch": 0.6271350634783052, | |
| "grad_norm": 5.80944804251215, | |
| "learning_rate": 1.6123595263405783e-06, | |
| "loss": 0.5259, | |
| "step": 12300 | |
| }, | |
| { | |
| "epoch": 0.6276449293835721, | |
| "grad_norm": 8.788085075044139, | |
| "learning_rate": 1.6085012687216078e-06, | |
| "loss": 0.4623, | |
| "step": 12310 | |
| }, | |
| { | |
| "epoch": 0.628154795288839, | |
| "grad_norm": 8.084105701558203, | |
| "learning_rate": 1.6046454422803253e-06, | |
| "loss": 0.5811, | |
| "step": 12320 | |
| }, | |
| { | |
| "epoch": 0.6286646611941059, | |
| "grad_norm": 6.232198726308961, | |
| "learning_rate": 1.6007920575318334e-06, | |
| "loss": 0.5777, | |
| "step": 12330 | |
| }, | |
| { | |
| "epoch": 0.6291745270993728, | |
| "grad_norm": 11.130383595276275, | |
| "learning_rate": 1.5969411249845737e-06, | |
| "loss": 0.5393, | |
| "step": 12340 | |
| }, | |
| { | |
| "epoch": 0.6296843930046397, | |
| "grad_norm": 5.992969945305373, | |
| "learning_rate": 1.5930926551403025e-06, | |
| "loss": 0.4707, | |
| "step": 12350 | |
| }, | |
| { | |
| "epoch": 0.6301942589099067, | |
| "grad_norm": 5.463490867539239, | |
| "learning_rate": 1.5892466584940574e-06, | |
| "loss": 0.564, | |
| "step": 12360 | |
| }, | |
| { | |
| "epoch": 0.6307041248151736, | |
| "grad_norm": 5.145508622723425, | |
| "learning_rate": 1.5854031455341332e-06, | |
| "loss": 0.5216, | |
| "step": 12370 | |
| }, | |
| { | |
| "epoch": 0.6312139907204405, | |
| "grad_norm": 6.430541005929227, | |
| "learning_rate": 1.5815621267420526e-06, | |
| "loss": 0.5294, | |
| "step": 12380 | |
| }, | |
| { | |
| "epoch": 0.6317238566257074, | |
| "grad_norm": 15.519128813172943, | |
| "learning_rate": 1.5777236125925333e-06, | |
| "loss": 0.4695, | |
| "step": 12390 | |
| }, | |
| { | |
| "epoch": 0.6322337225309743, | |
| "grad_norm": 16.999741625919526, | |
| "learning_rate": 1.5738876135534659e-06, | |
| "loss": 0.5651, | |
| "step": 12400 | |
| }, | |
| { | |
| "epoch": 0.6327435884362412, | |
| "grad_norm": 16.449734306702062, | |
| "learning_rate": 1.5700541400858793e-06, | |
| "loss": 0.5825, | |
| "step": 12410 | |
| }, | |
| { | |
| "epoch": 0.6332534543415081, | |
| "grad_norm": 8.678524232749545, | |
| "learning_rate": 1.5662232026439172e-06, | |
| "loss": 0.5247, | |
| "step": 12420 | |
| }, | |
| { | |
| "epoch": 0.6337633202467751, | |
| "grad_norm": 5.544135324516816, | |
| "learning_rate": 1.5623948116748074e-06, | |
| "loss": 0.4806, | |
| "step": 12430 | |
| }, | |
| { | |
| "epoch": 0.634273186152042, | |
| "grad_norm": 8.135829474616635, | |
| "learning_rate": 1.5585689776188321e-06, | |
| "loss": 0.5846, | |
| "step": 12440 | |
| }, | |
| { | |
| "epoch": 0.6347830520573089, | |
| "grad_norm": 4.438831891658201, | |
| "learning_rate": 1.5547457109093004e-06, | |
| "loss": 0.445, | |
| "step": 12450 | |
| }, | |
| { | |
| "epoch": 0.6352929179625758, | |
| "grad_norm": 5.302529484772549, | |
| "learning_rate": 1.5509250219725207e-06, | |
| "loss": 0.4947, | |
| "step": 12460 | |
| }, | |
| { | |
| "epoch": 0.6358027838678427, | |
| "grad_norm": 7.124606505211159, | |
| "learning_rate": 1.5471069212277729e-06, | |
| "loss": 0.5015, | |
| "step": 12470 | |
| }, | |
| { | |
| "epoch": 0.6363126497731096, | |
| "grad_norm": 18.000002055899103, | |
| "learning_rate": 1.5432914190872757e-06, | |
| "loss": 0.5355, | |
| "step": 12480 | |
| }, | |
| { | |
| "epoch": 0.6368225156783766, | |
| "grad_norm": 10.878967475009329, | |
| "learning_rate": 1.539478525956164e-06, | |
| "loss": 0.5542, | |
| "step": 12490 | |
| }, | |
| { | |
| "epoch": 0.6373323815836435, | |
| "grad_norm": 12.088166395329571, | |
| "learning_rate": 1.5356682522324578e-06, | |
| "loss": 0.4835, | |
| "step": 12500 | |
| }, | |
| { | |
| "epoch": 0.6378422474889104, | |
| "grad_norm": 35.076987710234555, | |
| "learning_rate": 1.5318606083070305e-06, | |
| "loss": 0.5917, | |
| "step": 12510 | |
| }, | |
| { | |
| "epoch": 0.6383521133941773, | |
| "grad_norm": 13.112714878589523, | |
| "learning_rate": 1.5280556045635881e-06, | |
| "loss": 0.5474, | |
| "step": 12520 | |
| }, | |
| { | |
| "epoch": 0.6388619792994442, | |
| "grad_norm": 6.40859233326716, | |
| "learning_rate": 1.5242532513786334e-06, | |
| "loss": 0.5124, | |
| "step": 12530 | |
| }, | |
| { | |
| "epoch": 0.6393718452047111, | |
| "grad_norm": 8.147985062950728, | |
| "learning_rate": 1.5204535591214428e-06, | |
| "loss": 0.5748, | |
| "step": 12540 | |
| }, | |
| { | |
| "epoch": 0.639881711109978, | |
| "grad_norm": 5.978197749841948, | |
| "learning_rate": 1.516656538154035e-06, | |
| "loss": 0.5753, | |
| "step": 12550 | |
| }, | |
| { | |
| "epoch": 0.640391577015245, | |
| "grad_norm": 5.3901524765660085, | |
| "learning_rate": 1.5128621988311448e-06, | |
| "loss": 0.4921, | |
| "step": 12560 | |
| }, | |
| { | |
| "epoch": 0.6409014429205119, | |
| "grad_norm": 8.021974547296901, | |
| "learning_rate": 1.5090705515001949e-06, | |
| "loss": 0.5782, | |
| "step": 12570 | |
| }, | |
| { | |
| "epoch": 0.6414113088257788, | |
| "grad_norm": 4.898265970748594, | |
| "learning_rate": 1.5052816065012635e-06, | |
| "loss": 0.5026, | |
| "step": 12580 | |
| }, | |
| { | |
| "epoch": 0.6419211747310457, | |
| "grad_norm": 7.58277321463348, | |
| "learning_rate": 1.501495374167063e-06, | |
| "loss": 0.5916, | |
| "step": 12590 | |
| }, | |
| { | |
| "epoch": 0.6424310406363126, | |
| "grad_norm": 5.643643219733153, | |
| "learning_rate": 1.497711864822905e-06, | |
| "loss": 0.5112, | |
| "step": 12600 | |
| }, | |
| { | |
| "epoch": 0.6429409065415795, | |
| "grad_norm": 25.832450519018945, | |
| "learning_rate": 1.4939310887866775e-06, | |
| "loss": 0.5532, | |
| "step": 12610 | |
| }, | |
| { | |
| "epoch": 0.6434507724468465, | |
| "grad_norm": 11.500383214893693, | |
| "learning_rate": 1.4901530563688154e-06, | |
| "loss": 0.5108, | |
| "step": 12620 | |
| }, | |
| { | |
| "epoch": 0.6439606383521134, | |
| "grad_norm": 13.803826215250618, | |
| "learning_rate": 1.4863777778722682e-06, | |
| "loss": 0.5415, | |
| "step": 12630 | |
| }, | |
| { | |
| "epoch": 0.6444705042573803, | |
| "grad_norm": 23.185859827096497, | |
| "learning_rate": 1.482605263592478e-06, | |
| "loss": 0.5105, | |
| "step": 12640 | |
| }, | |
| { | |
| "epoch": 0.6449803701626472, | |
| "grad_norm": 3.1925340110943874, | |
| "learning_rate": 1.4788355238173473e-06, | |
| "loss": 0.5143, | |
| "step": 12650 | |
| }, | |
| { | |
| "epoch": 0.6454902360679141, | |
| "grad_norm": 8.434282532315683, | |
| "learning_rate": 1.4750685688272143e-06, | |
| "loss": 0.5254, | |
| "step": 12660 | |
| }, | |
| { | |
| "epoch": 0.646000101973181, | |
| "grad_norm": 5.696693313005259, | |
| "learning_rate": 1.4713044088948197e-06, | |
| "loss": 0.4906, | |
| "step": 12670 | |
| }, | |
| { | |
| "epoch": 0.6465099678784479, | |
| "grad_norm": 4.446483290889001, | |
| "learning_rate": 1.4675430542852848e-06, | |
| "loss": 0.5744, | |
| "step": 12680 | |
| }, | |
| { | |
| "epoch": 0.6470198337837149, | |
| "grad_norm": 13.981158431091856, | |
| "learning_rate": 1.4637845152560804e-06, | |
| "loss": 0.5317, | |
| "step": 12690 | |
| }, | |
| { | |
| "epoch": 0.6475296996889818, | |
| "grad_norm": 9.908338337938533, | |
| "learning_rate": 1.4600288020569959e-06, | |
| "loss": 0.5559, | |
| "step": 12700 | |
| }, | |
| { | |
| "epoch": 0.6480395655942487, | |
| "grad_norm": 10.346360874512909, | |
| "learning_rate": 1.4562759249301185e-06, | |
| "loss": 0.6283, | |
| "step": 12710 | |
| }, | |
| { | |
| "epoch": 0.6485494314995156, | |
| "grad_norm": 5.455903851149757, | |
| "learning_rate": 1.4525258941097985e-06, | |
| "loss": 0.5419, | |
| "step": 12720 | |
| }, | |
| { | |
| "epoch": 0.6490592974047825, | |
| "grad_norm": 6.911006067636452, | |
| "learning_rate": 1.4487787198226244e-06, | |
| "loss": 0.5488, | |
| "step": 12730 | |
| }, | |
| { | |
| "epoch": 0.6495691633100494, | |
| "grad_norm": 5.265599029986363, | |
| "learning_rate": 1.4450344122873985e-06, | |
| "loss": 0.4896, | |
| "step": 12740 | |
| }, | |
| { | |
| "epoch": 0.6500790292153164, | |
| "grad_norm": 11.776370208991526, | |
| "learning_rate": 1.4412929817150982e-06, | |
| "loss": 0.5506, | |
| "step": 12750 | |
| }, | |
| { | |
| "epoch": 0.6505888951205833, | |
| "grad_norm": 18.5005112032705, | |
| "learning_rate": 1.437554438308863e-06, | |
| "loss": 0.4875, | |
| "step": 12760 | |
| }, | |
| { | |
| "epoch": 0.6510987610258502, | |
| "grad_norm": 8.771512470871057, | |
| "learning_rate": 1.4338187922639506e-06, | |
| "loss": 0.5322, | |
| "step": 12770 | |
| }, | |
| { | |
| "epoch": 0.6516086269311171, | |
| "grad_norm": 27.262326684678758, | |
| "learning_rate": 1.430086053767726e-06, | |
| "loss": 0.5692, | |
| "step": 12780 | |
| }, | |
| { | |
| "epoch": 0.652118492836384, | |
| "grad_norm": 8.73658779249368, | |
| "learning_rate": 1.4263562329996194e-06, | |
| "loss": 0.5229, | |
| "step": 12790 | |
| }, | |
| { | |
| "epoch": 0.6526283587416509, | |
| "grad_norm": 4.696525831770103, | |
| "learning_rate": 1.422629340131106e-06, | |
| "loss": 0.5722, | |
| "step": 12800 | |
| }, | |
| { | |
| "epoch": 0.6531382246469178, | |
| "grad_norm": 13.98465385717505, | |
| "learning_rate": 1.4189053853256757e-06, | |
| "loss": 0.5333, | |
| "step": 12810 | |
| }, | |
| { | |
| "epoch": 0.6536480905521848, | |
| "grad_norm": 44.135847073923244, | |
| "learning_rate": 1.4151843787388062e-06, | |
| "loss": 0.584, | |
| "step": 12820 | |
| }, | |
| { | |
| "epoch": 0.6541579564574517, | |
| "grad_norm": 17.496468258150834, | |
| "learning_rate": 1.4114663305179382e-06, | |
| "loss": 0.5256, | |
| "step": 12830 | |
| }, | |
| { | |
| "epoch": 0.6546678223627186, | |
| "grad_norm": 3.5798846929402095, | |
| "learning_rate": 1.4077512508024382e-06, | |
| "loss": 0.4815, | |
| "step": 12840 | |
| }, | |
| { | |
| "epoch": 0.6551776882679855, | |
| "grad_norm": 27.319618611904193, | |
| "learning_rate": 1.4040391497235845e-06, | |
| "loss": 0.5426, | |
| "step": 12850 | |
| }, | |
| { | |
| "epoch": 0.6556875541732524, | |
| "grad_norm": 3.9535275012824753, | |
| "learning_rate": 1.4003300374045283e-06, | |
| "loss": 0.5097, | |
| "step": 12860 | |
| }, | |
| { | |
| "epoch": 0.6561974200785193, | |
| "grad_norm": 2.874465898696785, | |
| "learning_rate": 1.396623923960271e-06, | |
| "loss": 0.5031, | |
| "step": 12870 | |
| }, | |
| { | |
| "epoch": 0.6567072859837862, | |
| "grad_norm": 5.723509444766761, | |
| "learning_rate": 1.3929208194976362e-06, | |
| "loss": 0.5727, | |
| "step": 12880 | |
| }, | |
| { | |
| "epoch": 0.6572171518890532, | |
| "grad_norm": 10.234880244026453, | |
| "learning_rate": 1.3892207341152416e-06, | |
| "loss": 0.5734, | |
| "step": 12890 | |
| }, | |
| { | |
| "epoch": 0.6577270177943201, | |
| "grad_norm": 11.590413083459318, | |
| "learning_rate": 1.385523677903472e-06, | |
| "loss": 0.5068, | |
| "step": 12900 | |
| }, | |
| { | |
| "epoch": 0.658236883699587, | |
| "grad_norm": 20.26688405326, | |
| "learning_rate": 1.38182966094445e-06, | |
| "loss": 0.601, | |
| "step": 12910 | |
| }, | |
| { | |
| "epoch": 0.6587467496048539, | |
| "grad_norm": 5.873427482586327, | |
| "learning_rate": 1.3781386933120133e-06, | |
| "loss": 0.4823, | |
| "step": 12920 | |
| }, | |
| { | |
| "epoch": 0.6592566155101208, | |
| "grad_norm": 7.732319317412305, | |
| "learning_rate": 1.3744507850716804e-06, | |
| "loss": 0.517, | |
| "step": 12930 | |
| }, | |
| { | |
| "epoch": 0.6597664814153877, | |
| "grad_norm": 5.717087164447723, | |
| "learning_rate": 1.3707659462806284e-06, | |
| "loss": 0.5071, | |
| "step": 12940 | |
| }, | |
| { | |
| "epoch": 0.6602763473206547, | |
| "grad_norm": 12.546369227920732, | |
| "learning_rate": 1.367084186987663e-06, | |
| "loss": 0.5283, | |
| "step": 12950 | |
| }, | |
| { | |
| "epoch": 0.6607862132259216, | |
| "grad_norm": 9.522985256100982, | |
| "learning_rate": 1.3634055172331926e-06, | |
| "loss": 0.5291, | |
| "step": 12960 | |
| }, | |
| { | |
| "epoch": 0.6612960791311885, | |
| "grad_norm": 6.0967021531973025, | |
| "learning_rate": 1.3597299470491986e-06, | |
| "loss": 0.6057, | |
| "step": 12970 | |
| }, | |
| { | |
| "epoch": 0.6618059450364554, | |
| "grad_norm": 15.313091190736756, | |
| "learning_rate": 1.356057486459214e-06, | |
| "loss": 0.5561, | |
| "step": 12980 | |
| }, | |
| { | |
| "epoch": 0.6623158109417223, | |
| "grad_norm": 7.740759385070269, | |
| "learning_rate": 1.352388145478285e-06, | |
| "loss": 0.5671, | |
| "step": 12990 | |
| }, | |
| { | |
| "epoch": 0.6628256768469892, | |
| "grad_norm": 11.418720457485895, | |
| "learning_rate": 1.3487219341129566e-06, | |
| "loss": 0.4986, | |
| "step": 13000 | |
| }, | |
| { | |
| "epoch": 0.6633355427522561, | |
| "grad_norm": 16.280698683867406, | |
| "learning_rate": 1.3450588623612353e-06, | |
| "loss": 0.5557, | |
| "step": 13010 | |
| }, | |
| { | |
| "epoch": 0.6638454086575231, | |
| "grad_norm": 5.81925433785274, | |
| "learning_rate": 1.3413989402125682e-06, | |
| "loss": 0.5501, | |
| "step": 13020 | |
| }, | |
| { | |
| "epoch": 0.66435527456279, | |
| "grad_norm": 5.939874542788059, | |
| "learning_rate": 1.3377421776478111e-06, | |
| "loss": 0.5353, | |
| "step": 13030 | |
| }, | |
| { | |
| "epoch": 0.6648651404680569, | |
| "grad_norm": 8.305569539285225, | |
| "learning_rate": 1.3340885846392032e-06, | |
| "loss": 0.5852, | |
| "step": 13040 | |
| }, | |
| { | |
| "epoch": 0.6653750063733238, | |
| "grad_norm": 5.767160133744185, | |
| "learning_rate": 1.3304381711503444e-06, | |
| "loss": 0.5129, | |
| "step": 13050 | |
| }, | |
| { | |
| "epoch": 0.6658848722785907, | |
| "grad_norm": 6.875680453072486, | |
| "learning_rate": 1.3267909471361574e-06, | |
| "loss": 0.5727, | |
| "step": 13060 | |
| }, | |
| { | |
| "epoch": 0.6663947381838576, | |
| "grad_norm": 5.03774252438953, | |
| "learning_rate": 1.3231469225428726e-06, | |
| "loss": 0.5317, | |
| "step": 13070 | |
| }, | |
| { | |
| "epoch": 0.6669046040891246, | |
| "grad_norm": 5.1407923174582395, | |
| "learning_rate": 1.3195061073079901e-06, | |
| "loss": 0.5798, | |
| "step": 13080 | |
| }, | |
| { | |
| "epoch": 0.6674144699943915, | |
| "grad_norm": 9.799807279239122, | |
| "learning_rate": 1.3158685113602636e-06, | |
| "loss": 0.4992, | |
| "step": 13090 | |
| }, | |
| { | |
| "epoch": 0.6679243358996584, | |
| "grad_norm": 12.17057925211001, | |
| "learning_rate": 1.3122341446196636e-06, | |
| "loss": 0.5889, | |
| "step": 13100 | |
| }, | |
| { | |
| "epoch": 0.6684342018049253, | |
| "grad_norm": 4.2258115828431535, | |
| "learning_rate": 1.3086030169973552e-06, | |
| "loss": 0.5236, | |
| "step": 13110 | |
| }, | |
| { | |
| "epoch": 0.6689440677101922, | |
| "grad_norm": 13.144950859146647, | |
| "learning_rate": 1.3049751383956707e-06, | |
| "loss": 0.5002, | |
| "step": 13120 | |
| }, | |
| { | |
| "epoch": 0.6694539336154591, | |
| "grad_norm": 9.859556462555908, | |
| "learning_rate": 1.301350518708081e-06, | |
| "loss": 0.5379, | |
| "step": 13130 | |
| }, | |
| { | |
| "epoch": 0.669963799520726, | |
| "grad_norm": 8.997523392336818, | |
| "learning_rate": 1.2977291678191733e-06, | |
| "loss": 0.5159, | |
| "step": 13140 | |
| }, | |
| { | |
| "epoch": 0.670473665425993, | |
| "grad_norm": 11.962543453038034, | |
| "learning_rate": 1.2941110956046142e-06, | |
| "loss": 0.5201, | |
| "step": 13150 | |
| }, | |
| { | |
| "epoch": 0.6709835313312599, | |
| "grad_norm": 9.211901098231362, | |
| "learning_rate": 1.290496311931135e-06, | |
| "loss": 0.5275, | |
| "step": 13160 | |
| }, | |
| { | |
| "epoch": 0.6714933972365268, | |
| "grad_norm": 5.802536276229398, | |
| "learning_rate": 1.2868848266564964e-06, | |
| "loss": 0.4901, | |
| "step": 13170 | |
| }, | |
| { | |
| "epoch": 0.6720032631417937, | |
| "grad_norm": 9.337922159510477, | |
| "learning_rate": 1.2832766496294647e-06, | |
| "loss": 0.5323, | |
| "step": 13180 | |
| }, | |
| { | |
| "epoch": 0.6725131290470606, | |
| "grad_norm": 7.120905849429728, | |
| "learning_rate": 1.2796717906897831e-06, | |
| "loss": 0.5164, | |
| "step": 13190 | |
| }, | |
| { | |
| "epoch": 0.6730229949523275, | |
| "grad_norm": 9.733236048311111, | |
| "learning_rate": 1.2760702596681478e-06, | |
| "loss": 0.5552, | |
| "step": 13200 | |
| }, | |
| { | |
| "epoch": 0.6735328608575945, | |
| "grad_norm": 3.3631007408034677, | |
| "learning_rate": 1.2724720663861792e-06, | |
| "loss": 0.4785, | |
| "step": 13210 | |
| }, | |
| { | |
| "epoch": 0.6740427267628614, | |
| "grad_norm": 13.547952544012343, | |
| "learning_rate": 1.2688772206563938e-06, | |
| "loss": 0.5631, | |
| "step": 13220 | |
| }, | |
| { | |
| "epoch": 0.6745525926681283, | |
| "grad_norm": 7.191362755646714, | |
| "learning_rate": 1.2652857322821821e-06, | |
| "loss": 0.4977, | |
| "step": 13230 | |
| }, | |
| { | |
| "epoch": 0.6750624585733952, | |
| "grad_norm": 5.156968959766061, | |
| "learning_rate": 1.2616976110577766e-06, | |
| "loss": 0.5453, | |
| "step": 13240 | |
| }, | |
| { | |
| "epoch": 0.6755723244786621, | |
| "grad_norm": 11.097231281107701, | |
| "learning_rate": 1.2581128667682286e-06, | |
| "loss": 0.547, | |
| "step": 13250 | |
| }, | |
| { | |
| "epoch": 0.676082190383929, | |
| "grad_norm": 19.165171233840805, | |
| "learning_rate": 1.2545315091893784e-06, | |
| "loss": 0.5206, | |
| "step": 13260 | |
| }, | |
| { | |
| "epoch": 0.6765920562891959, | |
| "grad_norm": 35.51519567896334, | |
| "learning_rate": 1.2509535480878325e-06, | |
| "loss": 0.4864, | |
| "step": 13270 | |
| }, | |
| { | |
| "epoch": 0.6771019221944629, | |
| "grad_norm": 5.511431626465132, | |
| "learning_rate": 1.2473789932209331e-06, | |
| "loss": 0.4802, | |
| "step": 13280 | |
| }, | |
| { | |
| "epoch": 0.6776117880997298, | |
| "grad_norm": 11.049237655524086, | |
| "learning_rate": 1.2438078543367377e-06, | |
| "loss": 0.6008, | |
| "step": 13290 | |
| }, | |
| { | |
| "epoch": 0.6781216540049967, | |
| "grad_norm": 12.081808269170782, | |
| "learning_rate": 1.2402401411739806e-06, | |
| "loss": 0.5358, | |
| "step": 13300 | |
| }, | |
| { | |
| "epoch": 0.6786315199102636, | |
| "grad_norm": 6.161026900799108, | |
| "learning_rate": 1.2366758634620615e-06, | |
| "loss": 0.5713, | |
| "step": 13310 | |
| }, | |
| { | |
| "epoch": 0.6791413858155305, | |
| "grad_norm": 6.293422338189026, | |
| "learning_rate": 1.2331150309210075e-06, | |
| "loss": 0.556, | |
| "step": 13320 | |
| }, | |
| { | |
| "epoch": 0.6796512517207974, | |
| "grad_norm": 4.283687942029315, | |
| "learning_rate": 1.2295576532614506e-06, | |
| "loss": 0.5029, | |
| "step": 13330 | |
| }, | |
| { | |
| "epoch": 0.6801611176260643, | |
| "grad_norm": 7.352841148937369, | |
| "learning_rate": 1.226003740184602e-06, | |
| "loss": 0.5361, | |
| "step": 13340 | |
| }, | |
| { | |
| "epoch": 0.6806709835313313, | |
| "grad_norm": 175.73598698371708, | |
| "learning_rate": 1.2224533013822237e-06, | |
| "loss": 0.5526, | |
| "step": 13350 | |
| }, | |
| { | |
| "epoch": 0.6811808494365982, | |
| "grad_norm": 12.027309626664836, | |
| "learning_rate": 1.2189063465366064e-06, | |
| "loss": 0.6395, | |
| "step": 13360 | |
| }, | |
| { | |
| "epoch": 0.6816907153418651, | |
| "grad_norm": 12.040883913595133, | |
| "learning_rate": 1.2153628853205336e-06, | |
| "loss": 0.5229, | |
| "step": 13370 | |
| }, | |
| { | |
| "epoch": 0.682200581247132, | |
| "grad_norm": 8.000181612684882, | |
| "learning_rate": 1.2118229273972684e-06, | |
| "loss": 0.5347, | |
| "step": 13380 | |
| }, | |
| { | |
| "epoch": 0.6827104471523989, | |
| "grad_norm": 12.028638552739197, | |
| "learning_rate": 1.2082864824205138e-06, | |
| "loss": 0.5195, | |
| "step": 13390 | |
| }, | |
| { | |
| "epoch": 0.6832203130576658, | |
| "grad_norm": 20.371683560997308, | |
| "learning_rate": 1.2047535600343984e-06, | |
| "loss": 0.5081, | |
| "step": 13400 | |
| }, | |
| { | |
| "epoch": 0.6837301789629328, | |
| "grad_norm": 8.136173607931632, | |
| "learning_rate": 1.2012241698734408e-06, | |
| "loss": 0.5893, | |
| "step": 13410 | |
| }, | |
| { | |
| "epoch": 0.6842400448681997, | |
| "grad_norm": 9.232546953455795, | |
| "learning_rate": 1.1976983215625285e-06, | |
| "loss": 0.5348, | |
| "step": 13420 | |
| }, | |
| { | |
| "epoch": 0.6847499107734666, | |
| "grad_norm": 4.377944461206342, | |
| "learning_rate": 1.1941760247168893e-06, | |
| "loss": 0.5529, | |
| "step": 13430 | |
| }, | |
| { | |
| "epoch": 0.6852597766787335, | |
| "grad_norm": 3.459532633536289, | |
| "learning_rate": 1.1906572889420655e-06, | |
| "loss": 0.4904, | |
| "step": 13440 | |
| }, | |
| { | |
| "epoch": 0.6857696425840004, | |
| "grad_norm": 6.8068993996494775, | |
| "learning_rate": 1.1871421238338917e-06, | |
| "loss": 0.5179, | |
| "step": 13450 | |
| }, | |
| { | |
| "epoch": 0.6862795084892673, | |
| "grad_norm": 9.388231515206153, | |
| "learning_rate": 1.1836305389784588e-06, | |
| "loss": 0.5772, | |
| "step": 13460 | |
| }, | |
| { | |
| "epoch": 0.6867893743945342, | |
| "grad_norm": 3.8333730829756174, | |
| "learning_rate": 1.1801225439521003e-06, | |
| "loss": 0.4779, | |
| "step": 13470 | |
| }, | |
| { | |
| "epoch": 0.6872992402998012, | |
| "grad_norm": 6.9070129435643, | |
| "learning_rate": 1.176618148321356e-06, | |
| "loss": 0.4894, | |
| "step": 13480 | |
| }, | |
| { | |
| "epoch": 0.6878091062050681, | |
| "grad_norm": 7.058960081890223, | |
| "learning_rate": 1.1731173616429514e-06, | |
| "loss": 0.4723, | |
| "step": 13490 | |
| }, | |
| { | |
| "epoch": 0.688318972110335, | |
| "grad_norm": 7.419454694113433, | |
| "learning_rate": 1.16962019346377e-06, | |
| "loss": 0.4849, | |
| "step": 13500 | |
| }, | |
| { | |
| "epoch": 0.6888288380156019, | |
| "grad_norm": 6.911766062790672, | |
| "learning_rate": 1.1661266533208274e-06, | |
| "loss": 0.5458, | |
| "step": 13510 | |
| }, | |
| { | |
| "epoch": 0.6893387039208688, | |
| "grad_norm": 25.73500382065693, | |
| "learning_rate": 1.1626367507412443e-06, | |
| "loss": 0.5025, | |
| "step": 13520 | |
| }, | |
| { | |
| "epoch": 0.6898485698261357, | |
| "grad_norm": 9.057037998210602, | |
| "learning_rate": 1.1591504952422243e-06, | |
| "loss": 0.5203, | |
| "step": 13530 | |
| }, | |
| { | |
| "epoch": 0.6903584357314027, | |
| "grad_norm": 16.41052757265655, | |
| "learning_rate": 1.1556678963310222e-06, | |
| "loss": 0.5844, | |
| "step": 13540 | |
| }, | |
| { | |
| "epoch": 0.6908683016366696, | |
| "grad_norm": 5.732859564218691, | |
| "learning_rate": 1.152188963504922e-06, | |
| "loss": 0.5078, | |
| "step": 13550 | |
| }, | |
| { | |
| "epoch": 0.6913781675419365, | |
| "grad_norm": 5.929076421646337, | |
| "learning_rate": 1.148713706251211e-06, | |
| "loss": 0.5164, | |
| "step": 13560 | |
| }, | |
| { | |
| "epoch": 0.6918880334472034, | |
| "grad_norm": 9.301935237585717, | |
| "learning_rate": 1.1452421340471514e-06, | |
| "loss": 0.55, | |
| "step": 13570 | |
| }, | |
| { | |
| "epoch": 0.6923978993524703, | |
| "grad_norm": 8.655543019346618, | |
| "learning_rate": 1.1417742563599568e-06, | |
| "loss": 0.5696, | |
| "step": 13580 | |
| }, | |
| { | |
| "epoch": 0.6929077652577372, | |
| "grad_norm": 10.889091625170066, | |
| "learning_rate": 1.1383100826467653e-06, | |
| "loss": 0.5868, | |
| "step": 13590 | |
| }, | |
| { | |
| "epoch": 0.6934176311630041, | |
| "grad_norm": 5.809789419158467, | |
| "learning_rate": 1.1348496223546162e-06, | |
| "loss": 0.5087, | |
| "step": 13600 | |
| }, | |
| { | |
| "epoch": 0.6939274970682711, | |
| "grad_norm": 21.7701581078474, | |
| "learning_rate": 1.131392884920417e-06, | |
| "loss": 0.4572, | |
| "step": 13610 | |
| }, | |
| { | |
| "epoch": 0.694437362973538, | |
| "grad_norm": 36.72901867309476, | |
| "learning_rate": 1.1279398797709293e-06, | |
| "loss": 0.5561, | |
| "step": 13620 | |
| }, | |
| { | |
| "epoch": 0.6949472288788049, | |
| "grad_norm": 7.015828755810641, | |
| "learning_rate": 1.1244906163227295e-06, | |
| "loss": 0.5569, | |
| "step": 13630 | |
| }, | |
| { | |
| "epoch": 0.6954570947840718, | |
| "grad_norm": 5.717141506625831, | |
| "learning_rate": 1.1210451039821965e-06, | |
| "loss": 0.5606, | |
| "step": 13640 | |
| }, | |
| { | |
| "epoch": 0.6959669606893387, | |
| "grad_norm": 200.18780369162442, | |
| "learning_rate": 1.1176033521454758e-06, | |
| "loss": 0.5456, | |
| "step": 13650 | |
| }, | |
| { | |
| "epoch": 0.6964768265946056, | |
| "grad_norm": 8.019380290741653, | |
| "learning_rate": 1.114165370198458e-06, | |
| "loss": 0.5384, | |
| "step": 13660 | |
| }, | |
| { | |
| "epoch": 0.6969866924998726, | |
| "grad_norm": 15.058488690780067, | |
| "learning_rate": 1.1107311675167558e-06, | |
| "loss": 0.5397, | |
| "step": 13670 | |
| }, | |
| { | |
| "epoch": 0.6974965584051395, | |
| "grad_norm": 8.87322964539086, | |
| "learning_rate": 1.1073007534656712e-06, | |
| "loss": 0.5328, | |
| "step": 13680 | |
| }, | |
| { | |
| "epoch": 0.6980064243104064, | |
| "grad_norm": 7.857307785773062, | |
| "learning_rate": 1.1038741374001793e-06, | |
| "loss": 0.5493, | |
| "step": 13690 | |
| }, | |
| { | |
| "epoch": 0.6985162902156733, | |
| "grad_norm": 48.793916370591866, | |
| "learning_rate": 1.1004513286648922e-06, | |
| "loss": 0.5117, | |
| "step": 13700 | |
| }, | |
| { | |
| "epoch": 0.6990261561209402, | |
| "grad_norm": 3.4690544397426195, | |
| "learning_rate": 1.0970323365940443e-06, | |
| "loss": 0.5075, | |
| "step": 13710 | |
| }, | |
| { | |
| "epoch": 0.6995360220262071, | |
| "grad_norm": 5.720335109430535, | |
| "learning_rate": 1.093617170511459e-06, | |
| "loss": 0.5554, | |
| "step": 13720 | |
| }, | |
| { | |
| "epoch": 0.700045887931474, | |
| "grad_norm": 3.592681308182018, | |
| "learning_rate": 1.0902058397305268e-06, | |
| "loss": 0.5351, | |
| "step": 13730 | |
| }, | |
| { | |
| "epoch": 0.700555753836741, | |
| "grad_norm": 9.773333278685, | |
| "learning_rate": 1.0867983535541785e-06, | |
| "loss": 0.5818, | |
| "step": 13740 | |
| }, | |
| { | |
| "epoch": 0.7010656197420079, | |
| "grad_norm": 10.27186569790017, | |
| "learning_rate": 1.0833947212748597e-06, | |
| "loss": 0.5552, | |
| "step": 13750 | |
| }, | |
| { | |
| "epoch": 0.7015754856472748, | |
| "grad_norm": 13.36103278733716, | |
| "learning_rate": 1.07999495217451e-06, | |
| "loss": 0.616, | |
| "step": 13760 | |
| }, | |
| { | |
| "epoch": 0.7020853515525417, | |
| "grad_norm": 5.392163411689741, | |
| "learning_rate": 1.0765990555245275e-06, | |
| "loss": 0.5264, | |
| "step": 13770 | |
| }, | |
| { | |
| "epoch": 0.7025952174578086, | |
| "grad_norm": 10.72345270938434, | |
| "learning_rate": 1.0732070405857562e-06, | |
| "loss": 0.5634, | |
| "step": 13780 | |
| }, | |
| { | |
| "epoch": 0.7031050833630755, | |
| "grad_norm": 15.80049388002083, | |
| "learning_rate": 1.0698189166084501e-06, | |
| "loss": 0.466, | |
| "step": 13790 | |
| }, | |
| { | |
| "epoch": 0.7036149492683424, | |
| "grad_norm": 4.82132484505896, | |
| "learning_rate": 1.0664346928322547e-06, | |
| "loss": 0.5186, | |
| "step": 13800 | |
| }, | |
| { | |
| "epoch": 0.7041248151736094, | |
| "grad_norm": 6.069159826575058, | |
| "learning_rate": 1.063054378486178e-06, | |
| "loss": 0.634, | |
| "step": 13810 | |
| }, | |
| { | |
| "epoch": 0.7046346810788763, | |
| "grad_norm": 11.614673101846604, | |
| "learning_rate": 1.059677982788567e-06, | |
| "loss": 0.4856, | |
| "step": 13820 | |
| }, | |
| { | |
| "epoch": 0.7051445469841432, | |
| "grad_norm": 14.11494362911992, | |
| "learning_rate": 1.056305514947082e-06, | |
| "loss": 0.4981, | |
| "step": 13830 | |
| }, | |
| { | |
| "epoch": 0.7056544128894101, | |
| "grad_norm": 8.007261173840975, | |
| "learning_rate": 1.0529369841586743e-06, | |
| "loss": 0.4831, | |
| "step": 13840 | |
| }, | |
| { | |
| "epoch": 0.706164278794677, | |
| "grad_norm": 17.304524008597603, | |
| "learning_rate": 1.0495723996095533e-06, | |
| "loss": 0.4787, | |
| "step": 13850 | |
| }, | |
| { | |
| "epoch": 0.7066741446999439, | |
| "grad_norm": 10.05312890548322, | |
| "learning_rate": 1.046211770475173e-06, | |
| "loss": 0.4995, | |
| "step": 13860 | |
| }, | |
| { | |
| "epoch": 0.7071840106052109, | |
| "grad_norm": 7.738138897636104, | |
| "learning_rate": 1.0428551059201964e-06, | |
| "loss": 0.4952, | |
| "step": 13870 | |
| }, | |
| { | |
| "epoch": 0.7076938765104778, | |
| "grad_norm": 3.979757475584907, | |
| "learning_rate": 1.039502415098476e-06, | |
| "loss": 0.5682, | |
| "step": 13880 | |
| }, | |
| { | |
| "epoch": 0.7082037424157447, | |
| "grad_norm": 16.9521365709318, | |
| "learning_rate": 1.0361537071530277e-06, | |
| "loss": 0.5504, | |
| "step": 13890 | |
| }, | |
| { | |
| "epoch": 0.7087136083210116, | |
| "grad_norm": 13.478011647892208, | |
| "learning_rate": 1.0328089912160055e-06, | |
| "loss": 0.5764, | |
| "step": 13900 | |
| }, | |
| { | |
| "epoch": 0.7092234742262785, | |
| "grad_norm": 14.12969618493761, | |
| "learning_rate": 1.0294682764086794e-06, | |
| "loss": 0.5445, | |
| "step": 13910 | |
| }, | |
| { | |
| "epoch": 0.7097333401315454, | |
| "grad_norm": 5.795649950376214, | |
| "learning_rate": 1.0261315718414028e-06, | |
| "loss": 0.5645, | |
| "step": 13920 | |
| }, | |
| { | |
| "epoch": 0.7102432060368123, | |
| "grad_norm": 11.710592013823014, | |
| "learning_rate": 1.0227988866135995e-06, | |
| "loss": 0.5083, | |
| "step": 13930 | |
| }, | |
| { | |
| "epoch": 0.7107530719420793, | |
| "grad_norm": 12.43499461710959, | |
| "learning_rate": 1.0194702298137251e-06, | |
| "loss": 0.4863, | |
| "step": 13940 | |
| }, | |
| { | |
| "epoch": 0.7112629378473462, | |
| "grad_norm": 8.863719068408987, | |
| "learning_rate": 1.016145610519256e-06, | |
| "loss": 0.5543, | |
| "step": 13950 | |
| }, | |
| { | |
| "epoch": 0.7117728037526131, | |
| "grad_norm": 5.42342484070064, | |
| "learning_rate": 1.0128250377966545e-06, | |
| "loss": 0.559, | |
| "step": 13960 | |
| }, | |
| { | |
| "epoch": 0.71228266965788, | |
| "grad_norm": 5.137324371851054, | |
| "learning_rate": 1.009508520701347e-06, | |
| "loss": 0.5158, | |
| "step": 13970 | |
| }, | |
| { | |
| "epoch": 0.7127925355631469, | |
| "grad_norm": 6.755524106797482, | |
| "learning_rate": 1.006196068277704e-06, | |
| "loss": 0.4776, | |
| "step": 13980 | |
| }, | |
| { | |
| "epoch": 0.7133024014684138, | |
| "grad_norm": 5.199631700415942, | |
| "learning_rate": 1.002887689559005e-06, | |
| "loss": 0.5302, | |
| "step": 13990 | |
| }, | |
| { | |
| "epoch": 0.7138122673736808, | |
| "grad_norm": 5.152070108304142, | |
| "learning_rate": 9.99583393567428e-07, | |
| "loss": 0.4551, | |
| "step": 14000 | |
| } | |
| ], | |
| "logging_steps": 10, | |
| "max_steps": 19613, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 1, | |
| "save_steps": 1000, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": false | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 9501248720994304.0, | |
| "train_batch_size": 1, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
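
This closes a Hugging Face `Trainer` checkpoint state file: the `log_history` entries above run from step 12570 through the checkpoint at step 14000 of a 19613-step, single-epoch run, followed by the trainer's bookkeeping fields (`logging_steps`, `save_steps`, `TrainerControl`, and so on). Below is a minimal sketch for inspecting the logged curves, assuming the file is saved under the default name `trainer_state.json` written into each checkpoint directory; the filename and output path are illustrative, not taken from this log. Plotting both loss and gradient norm makes the occasional spikes recorded above (e.g., grad_norm 175.7 at step 13350 and 200.2 at step 13650) easy to spot against the otherwise steady ~0.5 loss plateau.

```python
# Minimal sketch for consuming this log. Assumes the JSON above is saved as
# "trainer_state.json" (the default name Trainer writes per checkpoint);
# adjust the path to your checkpoint directory.
import json

import matplotlib.pyplot as plt

with open("trainer_state.json") as f:
    state = json.load(f)

# Each log_history entry recorded here carries step, epoch, loss, grad_norm,
# and learning_rate; keep only entries that logged a training loss.
history = [entry for entry in state["log_history"] if "loss" in entry]
steps = [entry["step"] for entry in history]
losses = [entry["loss"] for entry in history]
grad_norms = [entry["grad_norm"] for entry in history]

# Two stacked panels sharing the step axis: loss on top, grad norm below.
fig, (ax_loss, ax_norm) = plt.subplots(2, 1, sharex=True)
ax_loss.plot(steps, losses)
ax_loss.set_ylabel("training loss")
ax_norm.plot(steps, grad_norms)
ax_norm.set_ylabel("grad norm")
ax_norm.set_xlabel(f"step (of {state['max_steps']} max)")
fig.savefig("training_curves.png")  # illustrative output path
```

With `logging_steps` set to 10 and `save_steps` to 1000, a full run of this configuration would yield roughly 1960 log entries and a checkpoint (with its own `trainer_state.json` snapshot like this one) every 1000 steps.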