{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.8,
  "eval_steps": 500,
  "global_step": 5000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0016,
      "grad_norm": 1.146405816078186,
      "learning_rate": 1.4285714285714285e-05,
      "loss": 2.7179,
      "step": 10
    },
    {
      "epoch": 0.0032,
      "grad_norm": 0.8417394161224365,
      "learning_rate": 2.857142857142857e-05,
      "loss": 2.5949,
      "step": 20
    },
    {
      "epoch": 0.0048,
      "grad_norm": 0.8995881080627441,
      "learning_rate": 4.2857142857142856e-05,
      "loss": 2.332,
      "step": 30
    },
    {
      "epoch": 0.0064,
      "grad_norm": 0.7769910097122192,
      "learning_rate": 5.714285714285714e-05,
      "loss": 2.203,
      "step": 40
    },
    {
      "epoch": 0.008,
      "grad_norm": 0.612125039100647,
      "learning_rate": 7.142857142857143e-05,
      "loss": 1.9872,
      "step": 50
    },
    {
      "epoch": 0.0096,
      "grad_norm": 0.6728251576423645,
      "learning_rate": 8.571428571428571e-05,
      "loss": 2.0172,
      "step": 60
    },
    {
      "epoch": 0.0112,
      "grad_norm": 0.6434890627861023,
      "learning_rate": 0.0001,
      "loss": 2.0561,
      "step": 70
    },
    {
      "epoch": 0.0128,
      "grad_norm": 0.7109495997428894,
      "learning_rate": 9.979716024340772e-05,
      "loss": 1.9218,
      "step": 80
    },
    {
      "epoch": 0.0144,
      "grad_norm": 0.6287526488304138,
      "learning_rate": 9.959432048681541e-05,
      "loss": 1.9663,
      "step": 90
    },
    {
      "epoch": 0.016,
      "grad_norm": 0.761013388633728,
      "learning_rate": 9.939148073022312e-05,
      "loss": 1.8372,
      "step": 100
    },
    {
      "epoch": 0.0176,
      "grad_norm": 0.771938681602478,
      "learning_rate": 9.918864097363083e-05,
      "loss": 1.8148,
      "step": 110
    },
    {
      "epoch": 0.0192,
      "grad_norm": 0.7518746256828308,
      "learning_rate": 9.898580121703854e-05,
      "loss": 1.8446,
      "step": 120
    },
    {
      "epoch": 0.0208,
      "grad_norm": 0.7022605538368225,
      "learning_rate": 9.878296146044626e-05,
      "loss": 1.789,
      "step": 130
    },
    {
      "epoch": 0.0224,
      "grad_norm": 0.9093043208122253,
      "learning_rate": 9.858012170385395e-05,
      "loss": 1.8224,
      "step": 140
    },
    {
      "epoch": 0.024,
      "grad_norm": 0.8738862872123718,
      "learning_rate": 9.837728194726166e-05,
      "loss": 1.7464,
      "step": 150
    },
    {
      "epoch": 0.0256,
      "grad_norm": 0.941779613494873,
      "learning_rate": 9.817444219066937e-05,
      "loss": 1.7306,
      "step": 160
    },
    {
      "epoch": 0.0272,
      "grad_norm": 0.9313611388206482,
      "learning_rate": 9.797160243407709e-05,
      "loss": 1.7978,
      "step": 170
    },
    {
      "epoch": 0.0288,
      "grad_norm": 1.0731656551361084,
      "learning_rate": 9.77687626774848e-05,
      "loss": 1.731,
      "step": 180
    },
    {
      "epoch": 0.0304,
      "grad_norm": 0.8247014880180359,
      "learning_rate": 9.756592292089249e-05,
      "loss": 1.8481,
      "step": 190
    },
    {
      "epoch": 0.032,
      "grad_norm": 0.9673528671264648,
      "learning_rate": 9.73630831643002e-05,
      "loss": 1.7895,
      "step": 200
    },
    {
      "epoch": 0.0336,
      "grad_norm": 0.9702100157737732,
      "learning_rate": 9.716024340770791e-05,
      "loss": 1.7249,
      "step": 210
    },
    {
      "epoch": 0.0352,
      "grad_norm": 0.7754144668579102,
      "learning_rate": 9.695740365111563e-05,
      "loss": 1.7188,
      "step": 220
    },
    {
      "epoch": 0.0368,
      "grad_norm": 0.9529830813407898,
      "learning_rate": 9.675456389452334e-05,
      "loss": 1.6541,
      "step": 230
    },
    {
      "epoch": 0.0384,
      "grad_norm": 0.926566481590271,
      "learning_rate": 9.655172413793105e-05,
      "loss": 1.6983,
      "step": 240
    },
    {
      "epoch": 0.04,
      "grad_norm": 0.8942977786064148,
      "learning_rate": 9.634888438133874e-05,
      "loss": 1.6702,
      "step": 250
    },
    {
      "epoch": 0.0416,
      "grad_norm": 0.8158455491065979,
      "learning_rate": 9.614604462474645e-05,
      "loss": 1.6604,
      "step": 260
    },
    {
      "epoch": 0.0432,
      "grad_norm": 0.7894754409790039,
      "learning_rate": 9.594320486815415e-05,
      "loss": 1.6727,
      "step": 270
    },
    {
      "epoch": 0.0448,
      "grad_norm": 0.6904724836349487,
      "learning_rate": 9.574036511156188e-05,
      "loss": 1.6826,
      "step": 280
    },
    {
      "epoch": 0.0464,
      "grad_norm": 0.8520828485488892,
      "learning_rate": 9.553752535496959e-05,
      "loss": 1.6916,
      "step": 290
    },
    {
      "epoch": 0.048,
      "grad_norm": 0.9699738025665283,
      "learning_rate": 9.533468559837728e-05,
      "loss": 1.7246,
      "step": 300
    },
    {
      "epoch": 0.0496,
      "grad_norm": 0.8913508057594299,
      "learning_rate": 9.5131845841785e-05,
      "loss": 1.7523,
      "step": 310
    },
    {
      "epoch": 0.0512,
      "grad_norm": 0.8721765875816345,
      "learning_rate": 9.49290060851927e-05,
      "loss": 1.7196,
      "step": 320
    },
    {
      "epoch": 0.0528,
      "grad_norm": 0.7151852250099182,
      "learning_rate": 9.47261663286004e-05,
      "loss": 1.7053,
      "step": 330
    },
    {
      "epoch": 0.0544,
      "grad_norm": 0.6914452314376831,
      "learning_rate": 9.452332657200811e-05,
      "loss": 1.6875,
      "step": 340
    },
    {
      "epoch": 0.056,
      "grad_norm": 1.2169272899627686,
      "learning_rate": 9.432048681541582e-05,
      "loss": 1.6677,
      "step": 350
    },
    {
      "epoch": 0.0576,
      "grad_norm": 0.8138052821159363,
      "learning_rate": 9.411764705882353e-05,
      "loss": 1.6198,
      "step": 360
    },
    {
      "epoch": 0.0592,
      "grad_norm": 0.8007165789604187,
      "learning_rate": 9.391480730223125e-05,
      "loss": 1.6212,
      "step": 370
    },
    {
      "epoch": 0.0608,
      "grad_norm": 0.8502349853515625,
      "learning_rate": 9.371196754563894e-05,
      "loss": 1.6224,
      "step": 380
    },
    {
      "epoch": 0.0624,
      "grad_norm": 0.8362089395523071,
      "learning_rate": 9.350912778904665e-05,
      "loss": 1.6112,
      "step": 390
    },
    {
      "epoch": 0.064,
      "grad_norm": 0.9215638041496277,
      "learning_rate": 9.330628803245436e-05,
      "loss": 1.5754,
      "step": 400
    },
    {
      "epoch": 0.0656,
      "grad_norm": 1.6188653707504272,
      "learning_rate": 9.310344827586207e-05,
      "loss": 1.6563,
      "step": 410
    },
    {
      "epoch": 0.0672,
      "grad_norm": 0.8163923621177673,
      "learning_rate": 9.290060851926979e-05,
      "loss": 1.661,
      "step": 420
    },
    {
      "epoch": 0.0688,
      "grad_norm": 0.9351512789726257,
      "learning_rate": 9.269776876267748e-05,
      "loss": 1.6473,
      "step": 430
    },
    {
      "epoch": 0.0704,
      "grad_norm": 0.8003765940666199,
      "learning_rate": 9.24949290060852e-05,
      "loss": 1.6461,
      "step": 440
    },
    {
      "epoch": 0.072,
      "grad_norm": 0.8877068161964417,
      "learning_rate": 9.22920892494929e-05,
      "loss": 1.6587,
      "step": 450
    },
    {
      "epoch": 0.0736,
      "grad_norm": 0.8028745651245117,
      "learning_rate": 9.208924949290061e-05,
      "loss": 1.6517,
      "step": 460
    },
    {
      "epoch": 0.0752,
      "grad_norm": 0.9326333403587341,
      "learning_rate": 9.188640973630833e-05,
      "loss": 1.5998,
      "step": 470
    },
    {
      "epoch": 0.0768,
      "grad_norm": 1.0918183326721191,
      "learning_rate": 9.168356997971604e-05,
      "loss": 1.5878,
      "step": 480
    },
    {
      "epoch": 0.0784,
      "grad_norm": 0.8588346838951111,
      "learning_rate": 9.148073022312373e-05,
      "loss": 1.7285,
      "step": 490
    },
    {
      "epoch": 0.08,
      "grad_norm": 0.67298823595047,
      "learning_rate": 9.127789046653144e-05,
      "loss": 1.6274,
      "step": 500
    },
    {
      "epoch": 0.0816,
      "grad_norm": 0.7446144819259644,
      "learning_rate": 9.107505070993914e-05,
      "loss": 1.6256,
      "step": 510
    },
    {
      "epoch": 0.0832,
      "grad_norm": 0.8986618518829346,
      "learning_rate": 9.087221095334687e-05,
      "loss": 1.5674,
      "step": 520
    },
    {
      "epoch": 0.0848,
      "grad_norm": 0.6667838096618652,
      "learning_rate": 9.066937119675458e-05,
      "loss": 1.6515,
      "step": 530
    },
    {
      "epoch": 0.0864,
      "grad_norm": 1.1137733459472656,
      "learning_rate": 9.046653144016227e-05,
      "loss": 1.6212,
      "step": 540
    },
    {
      "epoch": 0.088,
      "grad_norm": 0.7851391434669495,
      "learning_rate": 9.026369168356998e-05,
      "loss": 1.6381,
      "step": 550
    },
    {
      "epoch": 0.0896,
      "grad_norm": 0.7694608569145203,
      "learning_rate": 9.00608519269777e-05,
      "loss": 1.4762,
      "step": 560
    },
    {
      "epoch": 0.0912,
      "grad_norm": 0.7087869048118591,
      "learning_rate": 8.985801217038539e-05,
      "loss": 1.5247,
      "step": 570
    },
    {
      "epoch": 0.0928,
      "grad_norm": 0.9042320251464844,
      "learning_rate": 8.96551724137931e-05,
      "loss": 1.6496,
      "step": 580
    },
    {
      "epoch": 0.0944,
      "grad_norm": 0.6515716314315796,
      "learning_rate": 8.945233265720081e-05,
      "loss": 1.621,
      "step": 590
    },
    {
      "epoch": 0.096,
      "grad_norm": 0.78260737657547,
      "learning_rate": 8.924949290060852e-05,
      "loss": 1.5592,
      "step": 600
    },
    {
      "epoch": 0.0976,
      "grad_norm": 0.7633717060089111,
      "learning_rate": 8.904665314401624e-05,
      "loss": 1.5134,
      "step": 610
    },
    {
      "epoch": 0.0992,
      "grad_norm": 0.731940746307373,
      "learning_rate": 8.884381338742393e-05,
      "loss": 1.6229,
      "step": 620
    },
    {
      "epoch": 0.1008,
      "grad_norm": 0.710590660572052,
      "learning_rate": 8.864097363083164e-05,
      "loss": 1.589,
      "step": 630
    },
    {
      "epoch": 0.1024,
      "grad_norm": 0.8609699606895447,
      "learning_rate": 8.843813387423935e-05,
      "loss": 1.5154,
      "step": 640
    },
    {
      "epoch": 0.104,
      "grad_norm": 0.8874902129173279,
      "learning_rate": 8.823529411764706e-05,
      "loss": 1.5012,
      "step": 650
    },
    {
      "epoch": 0.1056,
      "grad_norm": 1.0511506795883179,
      "learning_rate": 8.803245436105478e-05,
      "loss": 1.5922,
      "step": 660
    },
    {
      "epoch": 0.1072,
      "grad_norm": 1.154720425605774,
      "learning_rate": 8.782961460446247e-05,
      "loss": 1.5793,
      "step": 670
    },
    {
      "epoch": 0.1088,
      "grad_norm": 0.8440026640892029,
      "learning_rate": 8.762677484787018e-05,
      "loss": 1.5934,
      "step": 680
    },
    {
      "epoch": 0.1104,
      "grad_norm": 0.7727090716362,
      "learning_rate": 8.74239350912779e-05,
      "loss": 1.5149,
      "step": 690
    },
    {
      "epoch": 0.112,
      "grad_norm": 0.8490658402442932,
      "learning_rate": 8.72210953346856e-05,
      "loss": 1.6429,
      "step": 700
    },
    {
      "epoch": 0.1136,
      "grad_norm": 1.0228997468948364,
      "learning_rate": 8.701825557809332e-05,
      "loss": 1.5675,
      "step": 710
    },
    {
      "epoch": 0.1152,
      "grad_norm": 1.192125678062439,
      "learning_rate": 8.681541582150103e-05,
      "loss": 1.4715,
      "step": 720
    },
    {
      "epoch": 0.1168,
      "grad_norm": 0.8283504843711853,
      "learning_rate": 8.661257606490872e-05,
      "loss": 1.5983,
      "step": 730
    },
    {
      "epoch": 0.1184,
      "grad_norm": 1.1739537715911865,
      "learning_rate": 8.640973630831643e-05,
      "loss": 1.4758,
      "step": 740
    },
    {
      "epoch": 0.12,
      "grad_norm": 0.6867349743843079,
      "learning_rate": 8.620689655172413e-05,
      "loss": 1.5578,
      "step": 750
    },
    {
      "epoch": 0.1216,
      "grad_norm": 0.8097574710845947,
      "learning_rate": 8.600405679513184e-05,
      "loss": 1.6312,
      "step": 760
    },
    {
      "epoch": 0.1232,
      "grad_norm": 0.8128074407577515,
      "learning_rate": 8.580121703853957e-05,
      "loss": 1.5523,
      "step": 770
    },
    {
      "epoch": 0.1248,
      "grad_norm": 0.9583007097244263,
      "learning_rate": 8.559837728194726e-05,
      "loss": 1.5231,
      "step": 780
    },
    {
      "epoch": 0.1264,
      "grad_norm": 0.782313883304596,
      "learning_rate": 8.539553752535497e-05,
      "loss": 1.4811,
      "step": 790
    },
    {
      "epoch": 0.128,
      "grad_norm": 0.6528032422065735,
      "learning_rate": 8.519269776876268e-05,
      "loss": 1.4877,
      "step": 800
    },
    {
      "epoch": 0.1296,
      "grad_norm": 0.8802277445793152,
      "learning_rate": 8.498985801217038e-05,
      "loss": 1.6482,
      "step": 810
    },
    {
      "epoch": 0.1312,
      "grad_norm": 0.8377550840377808,
      "learning_rate": 8.478701825557809e-05,
      "loss": 1.4813,
      "step": 820
    },
    {
      "epoch": 0.1328,
      "grad_norm": 0.7418074011802673,
      "learning_rate": 8.45841784989858e-05,
      "loss": 1.5555,
      "step": 830
    },
    {
      "epoch": 0.1344,
      "grad_norm": 1.0339664220809937,
      "learning_rate": 8.438133874239351e-05,
      "loss": 1.4302,
      "step": 840
    },
    {
      "epoch": 0.136,
      "grad_norm": 0.8376950025558472,
      "learning_rate": 8.417849898580123e-05,
      "loss": 1.5217,
      "step": 850
    },
    {
      "epoch": 0.1376,
      "grad_norm": 0.7994508147239685,
      "learning_rate": 8.397565922920892e-05,
      "loss": 1.5267,
      "step": 860
    },
    {
      "epoch": 0.1392,
      "grad_norm": 0.8002382516860962,
      "learning_rate": 8.377281947261663e-05,
      "loss": 1.5817,
      "step": 870
    },
    {
      "epoch": 0.1408,
      "grad_norm": 0.7412007451057434,
      "learning_rate": 8.356997971602434e-05,
      "loss": 1.6293,
      "step": 880
    },
    {
      "epoch": 0.1424,
      "grad_norm": 1.1818407773971558,
      "learning_rate": 8.336713995943205e-05,
      "loss": 1.4277,
      "step": 890
    },
    {
      "epoch": 0.144,
      "grad_norm": 0.8380255699157715,
      "learning_rate": 8.316430020283977e-05,
      "loss": 1.4584,
      "step": 900
    },
    {
      "epoch": 0.1456,
      "grad_norm": 0.7898982167243958,
      "learning_rate": 8.296146044624746e-05,
      "loss": 1.6421,
      "step": 910
    },
    {
      "epoch": 0.1472,
      "grad_norm": 0.7636155486106873,
      "learning_rate": 8.275862068965517e-05,
      "loss": 1.5505,
      "step": 920
    },
    {
      "epoch": 0.1488,
      "grad_norm": 0.6694974303245544,
      "learning_rate": 8.255578093306288e-05,
      "loss": 1.5199,
      "step": 930
    },
    {
      "epoch": 0.1504,
      "grad_norm": 0.8093082904815674,
      "learning_rate": 8.23529411764706e-05,
      "loss": 1.543,
      "step": 940
    },
    {
      "epoch": 0.152,
      "grad_norm": 0.7055695652961731,
      "learning_rate": 8.21501014198783e-05,
      "loss": 1.5538,
      "step": 950
    },
    {
      "epoch": 0.1536,
      "grad_norm": 0.7818977236747742,
      "learning_rate": 8.194726166328602e-05,
      "loss": 1.5042,
      "step": 960
    },
    {
      "epoch": 0.1552,
      "grad_norm": 0.9525614380836487,
      "learning_rate": 8.174442190669371e-05,
      "loss": 1.5782,
      "step": 970
    },
    {
      "epoch": 0.1568,
      "grad_norm": 0.8219505548477173,
      "learning_rate": 8.154158215010142e-05,
      "loss": 1.6347,
      "step": 980
    },
    {
      "epoch": 0.1584,
      "grad_norm": 0.8257455825805664,
      "learning_rate": 8.133874239350912e-05,
      "loss": 1.4861,
      "step": 990
    },
    {
      "epoch": 0.16,
      "grad_norm": 0.7356287240982056,
      "learning_rate": 8.113590263691683e-05,
      "loss": 1.5001,
      "step": 1000
    },
    {
      "epoch": 0.1616,
      "grad_norm": 0.800564706325531,
      "learning_rate": 8.093306288032456e-05,
      "loss": 1.5157,
      "step": 1010
    },
    {
      "epoch": 0.1632,
      "grad_norm": 0.7705591917037964,
      "learning_rate": 8.073022312373225e-05,
      "loss": 1.4415,
      "step": 1020
    },
    {
      "epoch": 0.1648,
      "grad_norm": 0.854865312576294,
      "learning_rate": 8.052738336713996e-05,
      "loss": 1.6235,
      "step": 1030
    },
    {
      "epoch": 0.1664,
      "grad_norm": 0.6734772324562073,
      "learning_rate": 8.032454361054767e-05,
      "loss": 1.541,
      "step": 1040
    },
    {
      "epoch": 0.168,
      "grad_norm": 0.8943230509757996,
      "learning_rate": 8.012170385395537e-05,
      "loss": 1.5294,
      "step": 1050
    },
    {
      "epoch": 0.1696,
      "grad_norm": 1.0136455297470093,
      "learning_rate": 7.991886409736308e-05,
      "loss": 1.5255,
      "step": 1060
    },
    {
      "epoch": 0.1712,
      "grad_norm": 0.6639547944068909,
      "learning_rate": 7.97160243407708e-05,
      "loss": 1.5911,
      "step": 1070
    },
    {
      "epoch": 0.1728,
      "grad_norm": 0.8482313752174377,
      "learning_rate": 7.95131845841785e-05,
      "loss": 1.4628,
      "step": 1080
    },
    {
      "epoch": 0.1744,
      "grad_norm": 0.941949725151062,
      "learning_rate": 7.931034482758621e-05,
      "loss": 1.538,
      "step": 1090
    },
    {
      "epoch": 0.176,
      "grad_norm": 0.8564821481704712,
      "learning_rate": 7.910750507099391e-05,
      "loss": 1.5709,
      "step": 1100
    },
    {
      "epoch": 0.1776,
      "grad_norm": 0.7663846611976624,
      "learning_rate": 7.890466531440162e-05,
      "loss": 1.4841,
      "step": 1110
    },
    {
      "epoch": 0.1792,
      "grad_norm": 0.8091522455215454,
      "learning_rate": 7.870182555780933e-05,
      "loss": 1.5359,
      "step": 1120
    },
    {
      "epoch": 0.1808,
      "grad_norm": 0.8865818381309509,
      "learning_rate": 7.849898580121704e-05,
      "loss": 1.5385,
      "step": 1130
    },
    {
      "epoch": 0.1824,
      "grad_norm": 0.6862669587135315,
      "learning_rate": 7.829614604462476e-05,
      "loss": 1.3872,
      "step": 1140
    },
    {
      "epoch": 0.184,
      "grad_norm": 0.84327232837677,
      "learning_rate": 7.809330628803245e-05,
      "loss": 1.3533,
      "step": 1150
    },
    {
      "epoch": 0.1856,
      "grad_norm": 1.1154327392578125,
      "learning_rate": 7.789046653144016e-05,
      "loss": 1.4714,
      "step": 1160
    },
    {
      "epoch": 0.1872,
      "grad_norm": 0.6653951406478882,
      "learning_rate": 7.768762677484787e-05,
      "loss": 1.5765,
      "step": 1170
    },
    {
      "epoch": 0.1888,
      "grad_norm": 0.8485225439071655,
      "learning_rate": 7.748478701825558e-05,
      "loss": 1.5679,
      "step": 1180
    },
    {
      "epoch": 0.1904,
      "grad_norm": 0.8555701375007629,
      "learning_rate": 7.72819472616633e-05,
      "loss": 1.5695,
      "step": 1190
    },
    {
      "epoch": 0.192,
      "grad_norm": 0.6411374807357788,
      "learning_rate": 7.7079107505071e-05,
      "loss": 1.4445,
      "step": 1200
    },
    {
      "epoch": 0.1936,
      "grad_norm": 0.7546342611312866,
      "learning_rate": 7.68762677484787e-05,
      "loss": 1.513,
      "step": 1210
    },
    {
      "epoch": 0.1952,
      "grad_norm": 0.7529492974281311,
      "learning_rate": 7.667342799188641e-05,
      "loss": 1.5546,
      "step": 1220
    },
    {
      "epoch": 0.1968,
      "grad_norm": 0.8925682902336121,
      "learning_rate": 7.647058823529411e-05,
      "loss": 1.5897,
      "step": 1230
    },
    {
      "epoch": 0.1984,
      "grad_norm": 0.8105003833770752,
      "learning_rate": 7.626774847870182e-05,
      "loss": 1.4026,
      "step": 1240
    },
    {
      "epoch": 0.2,
      "grad_norm": 0.8478330373764038,
      "learning_rate": 7.606490872210955e-05,
      "loss": 1.5367,
      "step": 1250
    },
    {
      "epoch": 0.2016,
      "grad_norm": 0.7117589712142944,
      "learning_rate": 7.586206896551724e-05,
      "loss": 1.5096,
      "step": 1260
    },
    {
      "epoch": 0.2032,
      "grad_norm": 0.6756378412246704,
      "learning_rate": 7.565922920892495e-05,
      "loss": 1.4363,
      "step": 1270
    },
    {
      "epoch": 0.2048,
      "grad_norm": 0.8317671418190002,
      "learning_rate": 7.545638945233266e-05,
      "loss": 1.4908,
      "step": 1280
    },
    {
      "epoch": 0.2064,
      "grad_norm": 0.8347654342651367,
      "learning_rate": 7.525354969574036e-05,
      "loss": 1.4809,
      "step": 1290
    },
    {
      "epoch": 0.208,
      "grad_norm": 0.8407132029533386,
      "learning_rate": 7.505070993914807e-05,
      "loss": 1.3965,
      "step": 1300
    },
    {
      "epoch": 0.2096,
      "grad_norm": 0.9293540120124817,
      "learning_rate": 7.484787018255578e-05,
      "loss": 1.5601,
      "step": 1310
    },
    {
      "epoch": 0.2112,
      "grad_norm": 0.888976514339447,
      "learning_rate": 7.46450304259635e-05,
      "loss": 1.3249,
      "step": 1320
    },
    {
      "epoch": 0.2128,
      "grad_norm": 1.1678154468536377,
      "learning_rate": 7.44421906693712e-05,
      "loss": 1.5059,
      "step": 1330
    },
    {
      "epoch": 0.2144,
      "grad_norm": 0.828194260597229,
      "learning_rate": 7.42393509127789e-05,
      "loss": 1.4796,
      "step": 1340
    },
    {
      "epoch": 0.216,
      "grad_norm": 0.6538078784942627,
      "learning_rate": 7.403651115618661e-05,
      "loss": 1.5022,
      "step": 1350
    },
    {
      "epoch": 0.2176,
      "grad_norm": 0.6212038397789001,
      "learning_rate": 7.383367139959432e-05,
      "loss": 1.5556,
      "step": 1360
    },
    {
      "epoch": 0.2192,
      "grad_norm": 0.8193343877792358,
      "learning_rate": 7.363083164300203e-05,
      "loss": 1.4565,
      "step": 1370
    },
    {
      "epoch": 0.2208,
      "grad_norm": 0.6584758162498474,
      "learning_rate": 7.342799188640974e-05,
      "loss": 1.549,
      "step": 1380
    },
    {
      "epoch": 0.2224,
      "grad_norm": 0.7993935346603394,
      "learning_rate": 7.322515212981744e-05,
      "loss": 1.6376,
      "step": 1390
    },
    {
      "epoch": 0.224,
      "grad_norm": 0.6824424266815186,
      "learning_rate": 7.302231237322515e-05,
      "loss": 1.5205,
      "step": 1400
    },
    {
      "epoch": 0.2256,
      "grad_norm": 1.070913553237915,
      "learning_rate": 7.281947261663286e-05,
      "loss": 1.4221,
      "step": 1410
    },
    {
      "epoch": 0.2272,
      "grad_norm": 1.1080129146575928,
      "learning_rate": 7.261663286004057e-05,
      "loss": 1.5592,
      "step": 1420
    },
    {
      "epoch": 0.2288,
      "grad_norm": 0.8505323529243469,
      "learning_rate": 7.241379310344828e-05,
      "loss": 1.4511,
      "step": 1430
    },
    {
      "epoch": 0.2304,
      "grad_norm": 0.9794740676879883,
      "learning_rate": 7.2210953346856e-05,
      "loss": 1.5121,
      "step": 1440
    },
    {
      "epoch": 0.232,
      "grad_norm": 0.6628521084785461,
      "learning_rate": 7.200811359026369e-05,
      "loss": 1.5905,
      "step": 1450
    },
    {
      "epoch": 0.2336,
      "grad_norm": 0.8294353485107422,
      "learning_rate": 7.18052738336714e-05,
      "loss": 1.6115,
      "step": 1460
    },
    {
      "epoch": 0.2352,
      "grad_norm": 1.0685625076293945,
      "learning_rate": 7.16024340770791e-05,
      "loss": 1.4263,
      "step": 1470
    },
    {
      "epoch": 0.2368,
      "grad_norm": 0.8424131870269775,
      "learning_rate": 7.139959432048681e-05,
      "loss": 1.5122,
      "step": 1480
    },
    {
      "epoch": 0.2384,
      "grad_norm": 0.9048033356666565,
      "learning_rate": 7.119675456389454e-05,
      "loss": 1.4798,
      "step": 1490
    },
    {
      "epoch": 0.24,
      "grad_norm": 0.7316602468490601,
      "learning_rate": 7.099391480730223e-05,
      "loss": 1.4046,
      "step": 1500
    },
    {
      "epoch": 0.2416,
      "grad_norm": 0.650810956954956,
      "learning_rate": 7.079107505070994e-05,
      "loss": 1.6366,
      "step": 1510
    },
    {
      "epoch": 0.2432,
      "grad_norm": 0.6859937310218811,
      "learning_rate": 7.058823529411765e-05,
      "loss": 1.5001,
      "step": 1520
    },
    {
      "epoch": 0.2448,
      "grad_norm": 0.7956719398498535,
      "learning_rate": 7.038539553752535e-05,
      "loss": 1.4611,
      "step": 1530
    },
    {
      "epoch": 0.2464,
      "grad_norm": 0.8426589965820312,
      "learning_rate": 7.018255578093306e-05,
      "loss": 1.3004,
      "step": 1540
    },
    {
      "epoch": 0.248,
      "grad_norm": 0.8136724829673767,
      "learning_rate": 6.997971602434077e-05,
      "loss": 1.4873,
      "step": 1550
    },
    {
      "epoch": 0.2496,
      "grad_norm": 0.8124368786811829,
      "learning_rate": 6.977687626774848e-05,
      "loss": 1.5005,
      "step": 1560
    },
    {
      "epoch": 0.2512,
      "grad_norm": 0.8657099604606628,
      "learning_rate": 6.95740365111562e-05,
      "loss": 1.5456,
      "step": 1570
    },
    {
      "epoch": 0.2528,
      "grad_norm": 0.8967484831809998,
      "learning_rate": 6.937119675456389e-05,
      "loss": 1.4589,
      "step": 1580
    },
    {
      "epoch": 0.2544,
      "grad_norm": 0.7720628976821899,
      "learning_rate": 6.91683569979716e-05,
      "loss": 1.4424,
      "step": 1590
    },
    {
      "epoch": 0.256,
      "grad_norm": 0.8142846822738647,
      "learning_rate": 6.896551724137931e-05,
      "loss": 1.4176,
      "step": 1600
    },
    {
      "epoch": 0.2576,
      "grad_norm": 0.723961353302002,
      "learning_rate": 6.876267748478702e-05,
      "loss": 1.4433,
      "step": 1610
    },
    {
      "epoch": 0.2592,
      "grad_norm": 0.8454954028129578,
      "learning_rate": 6.855983772819473e-05,
      "loss": 1.579,
      "step": 1620
    },
    {
      "epoch": 0.2608,
      "grad_norm": 0.8000863194465637,
      "learning_rate": 6.835699797160243e-05,
      "loss": 1.3919,
      "step": 1630
    },
    {
      "epoch": 0.2624,
      "grad_norm": 0.8179385662078857,
      "learning_rate": 6.815415821501014e-05,
      "loss": 1.4501,
      "step": 1640
    },
    {
      "epoch": 0.264,
      "grad_norm": 0.9470431208610535,
      "learning_rate": 6.795131845841785e-05,
      "loss": 1.3945,
      "step": 1650
    },
    {
      "epoch": 0.2656,
      "grad_norm": 0.7068872451782227,
      "learning_rate": 6.774847870182556e-05,
      "loss": 1.4742,
      "step": 1660
    },
    {
      "epoch": 0.2672,
      "grad_norm": 0.7054203748703003,
      "learning_rate": 6.754563894523327e-05,
      "loss": 1.4438,
      "step": 1670
    },
    {
      "epoch": 0.2688,
      "grad_norm": 0.7306079864501953,
      "learning_rate": 6.734279918864099e-05,
      "loss": 1.4841,
      "step": 1680
    },
    {
      "epoch": 0.2704,
      "grad_norm": 0.9410521388053894,
      "learning_rate": 6.713995943204868e-05,
      "loss": 1.4806,
      "step": 1690
    },
    {
      "epoch": 0.272,
      "grad_norm": 0.8054413199424744,
      "learning_rate": 6.69371196754564e-05,
      "loss": 1.5897,
      "step": 1700
    },
    {
      "epoch": 0.2736,
      "grad_norm": 0.8597100973129272,
      "learning_rate": 6.673427991886409e-05,
      "loss": 1.5797,
      "step": 1710
    },
    {
      "epoch": 0.2752,
      "grad_norm": 1.0193672180175781,
      "learning_rate": 6.65314401622718e-05,
      "loss": 1.4948,
      "step": 1720
    },
    {
      "epoch": 0.2768,
      "grad_norm": 0.6713398694992065,
      "learning_rate": 6.632860040567953e-05,
      "loss": 1.4876,
      "step": 1730
    },
    {
      "epoch": 0.2784,
      "grad_norm": 0.7806057333946228,
      "learning_rate": 6.612576064908722e-05,
      "loss": 1.3604,
      "step": 1740
    },
    {
      "epoch": 0.28,
      "grad_norm": 0.7411827445030212,
      "learning_rate": 6.592292089249493e-05,
      "loss": 1.4223,
      "step": 1750
    },
    {
      "epoch": 0.2816,
      "grad_norm": 0.747585654258728,
      "learning_rate": 6.572008113590264e-05,
      "loss": 1.4759,
      "step": 1760
    },
    {
      "epoch": 0.2832,
      "grad_norm": 0.655139148235321,
      "learning_rate": 6.551724137931034e-05,
      "loss": 1.4646,
      "step": 1770
    },
    {
      "epoch": 0.2848,
      "grad_norm": 0.851161003112793,
      "learning_rate": 6.531440162271805e-05,
      "loss": 1.524,
      "step": 1780
    },
    {
      "epoch": 0.2864,
      "grad_norm": 0.8720593452453613,
      "learning_rate": 6.511156186612576e-05,
      "loss": 1.47,
      "step": 1790
    },
    {
      "epoch": 0.288,
      "grad_norm": 0.7257354855537415,
      "learning_rate": 6.490872210953347e-05,
      "loss": 1.4698,
      "step": 1800
    },
    {
      "epoch": 0.2896,
      "grad_norm": 0.8432925939559937,
      "learning_rate": 6.470588235294118e-05,
      "loss": 1.5128,
      "step": 1810
    },
    {
      "epoch": 0.2912,
      "grad_norm": 0.696212649345398,
      "learning_rate": 6.450304259634888e-05,
      "loss": 1.4325,
      "step": 1820
    },
    {
      "epoch": 0.2928,
      "grad_norm": 0.709725558757782,
      "learning_rate": 6.430020283975659e-05,
      "loss": 1.4612,
      "step": 1830
    },
    {
      "epoch": 0.2944,
      "grad_norm": 0.8701031804084778,
      "learning_rate": 6.40973630831643e-05,
      "loss": 1.5741,
      "step": 1840
    },
    {
      "epoch": 0.296,
      "grad_norm": 0.7509334087371826,
      "learning_rate": 6.389452332657201e-05,
      "loss": 1.3882,
      "step": 1850
    },
    {
      "epoch": 0.2976,
      "grad_norm": 0.8448138236999512,
      "learning_rate": 6.369168356997972e-05,
      "loss": 1.4554,
      "step": 1860
    },
    {
      "epoch": 0.2992,
      "grad_norm": 0.829753041267395,
      "learning_rate": 6.348884381338742e-05,
      "loss": 1.517,
      "step": 1870
    },
    {
      "epoch": 0.3008,
      "grad_norm": 0.8275175094604492,
      "learning_rate": 6.328600405679513e-05,
      "loss": 1.5145,
      "step": 1880
    },
    {
      "epoch": 0.3024,
      "grad_norm": 0.7375383377075195,
      "learning_rate": 6.308316430020284e-05,
      "loss": 1.4347,
      "step": 1890
    },
    {
      "epoch": 0.304,
      "grad_norm": 0.7672603130340576,
      "learning_rate": 6.288032454361054e-05,
      "loss": 1.3533,
      "step": 1900
    },
    {
      "epoch": 0.3056,
      "grad_norm": 0.9198083281517029,
      "learning_rate": 6.267748478701826e-05,
      "loss": 1.5153,
      "step": 1910
    },
    {
      "epoch": 0.3072,
      "grad_norm": 0.9903436303138733,
      "learning_rate": 6.247464503042598e-05,
      "loss": 1.4345,
      "step": 1920
    },
    {
      "epoch": 0.3088,
      "grad_norm": 1.1620358228683472,
      "learning_rate": 6.227180527383367e-05,
      "loss": 1.5443,
      "step": 1930
    },
    {
      "epoch": 0.3104,
      "grad_norm": 0.8703617453575134,
      "learning_rate": 6.206896551724138e-05,
      "loss": 1.3112,
      "step": 1940
    },
    {
      "epoch": 0.312,
      "grad_norm": 0.771286129951477,
      "learning_rate": 6.186612576064908e-05,
      "loss": 1.4153,
      "step": 1950
    },
    {
      "epoch": 0.3136,
      "grad_norm": 0.8943174481391907,
      "learning_rate": 6.166328600405679e-05,
      "loss": 1.5657,
      "step": 1960
    },
    {
      "epoch": 0.3152,
      "grad_norm": 0.7589307427406311,
      "learning_rate": 6.146044624746452e-05,
      "loss": 1.4126,
      "step": 1970
    },
    {
      "epoch": 0.3168,
      "grad_norm": 0.6583797335624695,
      "learning_rate": 6.125760649087221e-05,
      "loss": 1.4217,
      "step": 1980
    },
    {
      "epoch": 0.3184,
      "grad_norm": 0.8261314630508423,
      "learning_rate": 6.105476673427992e-05,
      "loss": 1.5038,
      "step": 1990
    },
    {
      "epoch": 0.32,
      "grad_norm": 0.768459677696228,
      "learning_rate": 6.0851926977687634e-05,
      "loss": 1.5839,
      "step": 2000
    },
    {
      "epoch": 0.3216,
      "grad_norm": 0.7238166332244873,
      "learning_rate": 6.064908722109534e-05,
      "loss": 1.5098,
      "step": 2010
    },
    {
      "epoch": 0.3232,
      "grad_norm": 0.847366988658905,
      "learning_rate": 6.044624746450305e-05,
      "loss": 1.3801,
      "step": 2020
    },
    {
      "epoch": 0.3248,
      "grad_norm": 0.8706203103065491,
      "learning_rate": 6.0243407707910746e-05,
      "loss": 1.4568,
      "step": 2030
    },
    {
      "epoch": 0.3264,
      "grad_norm": 0.6880550980567932,
      "learning_rate": 6.0040567951318463e-05,
      "loss": 1.4095,
      "step": 2040
    },
    {
      "epoch": 0.328,
      "grad_norm": 0.7416784763336182,
      "learning_rate": 5.9837728194726174e-05,
      "loss": 1.4224,
      "step": 2050
    },
    {
      "epoch": 0.3296,
      "grad_norm": 0.778136134147644,
      "learning_rate": 5.963488843813387e-05,
      "loss": 1.3978,
      "step": 2060
    },
    {
      "epoch": 0.3312,
      "grad_norm": 1.0044938325881958,
      "learning_rate": 5.943204868154159e-05,
      "loss": 1.2894,
      "step": 2070
    },
    {
      "epoch": 0.3328,
      "grad_norm": 0.9396066069602966,
      "learning_rate": 5.92292089249493e-05,
      "loss": 1.4427,
      "step": 2080
    },
    {
      "epoch": 0.3344,
      "grad_norm": 0.7602688670158386,
      "learning_rate": 5.9026369168357e-05,
      "loss": 1.5022,
      "step": 2090
    },
    {
      "epoch": 0.336,
      "grad_norm": 0.7073438763618469,
      "learning_rate": 5.882352941176471e-05,
      "loss": 1.4401,
      "step": 2100
    },
    {
      "epoch": 0.3376,
      "grad_norm": 0.8761054873466492,
      "learning_rate": 5.862068965517241e-05,
      "loss": 1.5012,
      "step": 2110
    },
    {
      "epoch": 0.3392,
      "grad_norm": 0.7597482800483704,
      "learning_rate": 5.841784989858012e-05,
      "loss": 1.5398,
      "step": 2120
    },
    {
      "epoch": 0.3408,
      "grad_norm": 0.7038214206695557,
      "learning_rate": 5.821501014198783e-05,
      "loss": 1.5085,
      "step": 2130
    },
    {
      "epoch": 0.3424,
      "grad_norm": 0.7399268746376038,
      "learning_rate": 5.801217038539554e-05,
      "loss": 1.5109,
      "step": 2140
    },
    {
      "epoch": 0.344,
      "grad_norm": 0.6264271140098572,
      "learning_rate": 5.780933062880325e-05,
      "loss": 1.4252,
      "step": 2150
    },
    {
      "epoch": 0.3456,
      "grad_norm": 0.6571562886238098,
      "learning_rate": 5.760649087221096e-05,
      "loss": 1.4404,
      "step": 2160
    },
    {
      "epoch": 0.3472,
      "grad_norm": 0.7995081543922424,
      "learning_rate": 5.740365111561866e-05,
      "loss": 1.3708,
      "step": 2170
    },
    {
      "epoch": 0.3488,
      "grad_norm": 0.7455433011054993,
      "learning_rate": 5.720081135902637e-05,
      "loss": 1.3199,
      "step": 2180
    },
    {
      "epoch": 0.3504,
      "grad_norm": 0.8443031907081604,
      "learning_rate": 5.699797160243408e-05,
      "loss": 1.3461,
      "step": 2190
    },
    {
      "epoch": 0.352,
      "grad_norm": 0.7456616759300232,
      "learning_rate": 5.679513184584179e-05,
      "loss": 1.3873,
      "step": 2200
    },
    {
      "epoch": 0.3536,
      "grad_norm": 0.8237265348434448,
      "learning_rate": 5.65922920892495e-05,
      "loss": 1.4215,
      "step": 2210
    },
    {
      "epoch": 0.3552,
      "grad_norm": 0.6858148574829102,
      "learning_rate": 5.63894523326572e-05,
      "loss": 1.453,
      "step": 2220
    },
    {
      "epoch": 0.3568,
      "grad_norm": 0.8578169941902161,
      "learning_rate": 5.618661257606491e-05,
      "loss": 1.4299,
      "step": 2230
    },
    {
      "epoch": 0.3584,
      "grad_norm": 0.7621243000030518,
      "learning_rate": 5.5983772819472624e-05,
      "loss": 1.5106,
      "step": 2240
    },
    {
      "epoch": 0.36,
      "grad_norm": 0.9632871150970459,
      "learning_rate": 5.578093306288033e-05,
      "loss": 1.399,
      "step": 2250
    },
    {
      "epoch": 0.3616,
      "grad_norm": 0.8085991144180298,
      "learning_rate": 5.557809330628804e-05,
      "loss": 1.3633,
      "step": 2260
    },
    {
      "epoch": 0.3632,
      "grad_norm": 0.6866801977157593,
      "learning_rate": 5.5375253549695736e-05,
      "loss": 1.3779,
      "step": 2270
    },
    {
      "epoch": 0.3648,
      "grad_norm": 0.8746480345726013,
      "learning_rate": 5.517241379310345e-05,
      "loss": 1.4511,
      "step": 2280
    },
    {
      "epoch": 0.3664,
      "grad_norm": 0.7714369893074036,
      "learning_rate": 5.4969574036511164e-05,
      "loss": 1.4124,
      "step": 2290
    },
    {
      "epoch": 0.368,
      "grad_norm": 0.9127957224845886,
      "learning_rate": 5.476673427991886e-05,
      "loss": 1.3662,
      "step": 2300
    },
    {
      "epoch": 0.3696,
      "grad_norm": 0.8726272583007812,
      "learning_rate": 5.456389452332657e-05,
      "loss": 1.4015,
      "step": 2310
    },
    {
      "epoch": 0.3712,
      "grad_norm": 0.762836217880249,
      "learning_rate": 5.436105476673429e-05,
      "loss": 1.4259,
      "step": 2320
    },
    {
      "epoch": 0.3728,
      "grad_norm": 0.8870751261711121,
      "learning_rate": 5.4158215010141987e-05,
      "loss": 1.3831,
      "step": 2330
    },
    {
      "epoch": 0.3744,
      "grad_norm": 0.8370562195777893,
      "learning_rate": 5.39553752535497e-05,
      "loss": 1.4319,
      "step": 2340
    },
    {
      "epoch": 0.376,
      "grad_norm": 1.0303740501403809,
      "learning_rate": 5.37525354969574e-05,
      "loss": 1.4275,
      "step": 2350
    },
    {
      "epoch": 0.3776,
      "grad_norm": 0.5940431952476501,
      "learning_rate": 5.354969574036511e-05,
      "loss": 1.3863,
      "step": 2360
    },
    {
      "epoch": 0.3792,
      "grad_norm": 0.7848506569862366,
      "learning_rate": 5.334685598377282e-05,
      "loss": 1.5844,
      "step": 2370
    },
    {
      "epoch": 0.3808,
      "grad_norm": 0.6541472673416138,
      "learning_rate": 5.314401622718053e-05,
      "loss": 1.3225,
      "step": 2380
    },
    {
      "epoch": 0.3824,
      "grad_norm": 0.8245867490768433,
      "learning_rate": 5.294117647058824e-05,
      "loss": 1.4911,
      "step": 2390
    },
    {
      "epoch": 0.384,
      "grad_norm": 0.6516074538230896,
      "learning_rate": 5.273833671399595e-05,
      "loss": 1.4401,
      "step": 2400
    },
    {
      "epoch": 0.3856,
      "grad_norm": 0.9805840849876404,
      "learning_rate": 5.253549695740365e-05,
      "loss": 1.4857,
      "step": 2410
    },
    {
      "epoch": 0.3872,
      "grad_norm": 0.7679899334907532,
      "learning_rate": 5.233265720081136e-05,
      "loss": 1.3695,
      "step": 2420
    },
    {
      "epoch": 0.3888,
      "grad_norm": 0.8954936861991882,
      "learning_rate": 5.212981744421907e-05,
      "loss": 1.3894,
      "step": 2430
    },
    {
      "epoch": 0.3904,
      "grad_norm": 0.7677492499351501,
      "learning_rate": 5.192697768762678e-05,
      "loss": 1.4339,
      "step": 2440
    },
    {
      "epoch": 0.392,
      "grad_norm": 0.7712604403495789,
      "learning_rate": 5.172413793103449e-05,
      "loss": 1.3207,
      "step": 2450
    },
    {
      "epoch": 0.3936,
      "grad_norm": 0.7585605382919312,
      "learning_rate": 5.152129817444219e-05,
      "loss": 1.3693,
      "step": 2460
    },
    {
      "epoch": 0.3952,
      "grad_norm": 1.1001477241516113,
      "learning_rate": 5.13184584178499e-05,
      "loss": 1.3812,
      "step": 2470
    },
    {
      "epoch": 0.3968,
      "grad_norm": 0.8667353987693787,
      "learning_rate": 5.1115618661257614e-05,
      "loss": 1.4472,
      "step": 2480
    },
    {
      "epoch": 0.3984,
      "grad_norm": 0.7773622870445251,
      "learning_rate": 5.091277890466532e-05,
      "loss": 1.3703,
      "step": 2490
    },
    {
      "epoch": 0.4,
      "grad_norm": 0.9364688992500305,
      "learning_rate": 5.070993914807303e-05,
      "loss": 1.3569,
      "step": 2500
    },
    {
      "epoch": 0.4016,
      "grad_norm": 0.6922894716262817,
      "learning_rate": 5.0507099391480726e-05,
      "loss": 1.3934,
      "step": 2510
    },
    {
      "epoch": 0.4032,
      "grad_norm": 0.6994293332099915,
      "learning_rate": 5.030425963488844e-05,
      "loss": 1.3941,
      "step": 2520
    },
    {
      "epoch": 0.4048,
      "grad_norm": 0.703568160533905,
      "learning_rate": 5.0101419878296154e-05,
      "loss": 1.3753,
      "step": 2530
    },
    {
      "epoch": 0.4064,
      "grad_norm": 0.7280287146568298,
      "learning_rate": 4.989858012170386e-05,
      "loss": 1.4547,
      "step": 2540
    },
    {
      "epoch": 0.408,
      "grad_norm": 0.769101083278656,
      "learning_rate": 4.969574036511156e-05,
      "loss": 1.4568,
      "step": 2550
    },
    {
      "epoch": 0.4096,
      "grad_norm": 0.8144255876541138,
      "learning_rate": 4.949290060851927e-05,
      "loss": 1.4932,
      "step": 2560
    },
    {
      "epoch": 0.4112,
      "grad_norm": 0.7549088001251221,
      "learning_rate": 4.9290060851926976e-05,
      "loss": 1.4626,
      "step": 2570
    },
    {
      "epoch": 0.4128,
      "grad_norm": 0.8042471408843994,
      "learning_rate": 4.908722109533469e-05,
      "loss": 1.3664,
      "step": 2580
    },
    {
      "epoch": 0.4144,
      "grad_norm": 0.8683868646621704,
      "learning_rate": 4.88843813387424e-05,
      "loss": 1.3343,
      "step": 2590
    },
    {
      "epoch": 0.416,
      "grad_norm": 0.8438317179679871,
      "learning_rate": 4.86815415821501e-05,
      "loss": 1.4551,
      "step": 2600
    },
    {
      "epoch": 0.4176,
      "grad_norm": 0.913829505443573,
      "learning_rate": 4.847870182555781e-05,
      "loss": 1.4605,
      "step": 2610
    },
    {
      "epoch": 0.4192,
      "grad_norm": 0.7295091152191162,
      "learning_rate": 4.827586206896552e-05,
      "loss": 1.4109,
      "step": 2620
    },
    {
      "epoch": 0.4208,
      "grad_norm": 0.8623864650726318,
      "learning_rate": 4.807302231237323e-05,
      "loss": 1.295,
      "step": 2630
    },
    {
      "epoch": 0.4224,
      "grad_norm": 0.6889348030090332,
      "learning_rate": 4.787018255578094e-05,
      "loss": 1.3165,
      "step": 2640
    },
    {
      "epoch": 0.424,
      "grad_norm": 0.9166956543922424,
      "learning_rate": 4.766734279918864e-05,
      "loss": 1.4605,
      "step": 2650
    },
    {
      "epoch": 0.4256,
      "grad_norm": 0.7856289744377136,
      "learning_rate": 4.746450304259635e-05,
      "loss": 1.4245,
      "step": 2660
    },
    {
      "epoch": 0.4272,
      "grad_norm": 0.8294436931610107,
      "learning_rate": 4.726166328600406e-05,
      "loss": 1.4116,
      "step": 2670
    },
    {
      "epoch": 0.4288,
      "grad_norm": 0.8435599207878113,
      "learning_rate": 4.705882352941177e-05,
      "loss": 1.4129,
      "step": 2680
    },
    {
      "epoch": 0.4304,
      "grad_norm": 0.6988587975502014,
      "learning_rate": 4.685598377281947e-05,
      "loss": 1.4092,
      "step": 2690
    },
    {
      "epoch": 0.432,
      "grad_norm": 0.7235763669013977,
      "learning_rate": 4.665314401622718e-05,
      "loss": 1.3857,
      "step": 2700
    },
    {
      "epoch": 0.4336,
      "grad_norm": 0.8404995799064636,
      "learning_rate": 4.645030425963489e-05,
      "loss": 1.3666,
      "step": 2710
    },
    {
      "epoch": 0.4352,
      "grad_norm": 0.8587724566459656,
      "learning_rate": 4.62474645030426e-05,
      "loss": 1.3673,
      "step": 2720
    },
    {
      "epoch": 0.4368,
      "grad_norm": 0.7824047803878784,
      "learning_rate": 4.604462474645031e-05,
      "loss": 1.4784,
      "step": 2730
    },
    {
      "epoch": 0.4384,
      "grad_norm": 0.9805149435997009,
      "learning_rate": 4.584178498985802e-05,
      "loss": 1.3985,
      "step": 2740
    },
    {
      "epoch": 0.44,
      "grad_norm": 0.7506240606307983,
      "learning_rate": 4.563894523326572e-05,
      "loss": 1.399,
      "step": 2750
    },
    {
      "epoch": 0.4416,
      "grad_norm": 1.1338064670562744,
      "learning_rate": 4.543610547667343e-05,
      "loss": 1.3585,
      "step": 2760
    },
    {
      "epoch": 0.4432,
      "grad_norm": 0.8251412510871887,
      "learning_rate": 4.523326572008114e-05,
      "loss": 1.5158,
      "step": 2770
    },
    {
      "epoch": 0.4448,
      "grad_norm": 0.8259724974632263,
      "learning_rate": 4.503042596348885e-05,
      "loss": 1.4496,
      "step": 2780
    },
    {
      "epoch": 0.4464,
      "grad_norm": 0.9131335020065308,
      "learning_rate": 4.482758620689655e-05,
      "loss": 1.2838,
      "step": 2790
    },
    {
      "epoch": 0.448,
      "grad_norm": 0.8390583992004395,
      "learning_rate": 4.462474645030426e-05,
      "loss": 1.3406,
      "step": 2800
    },
    {
      "epoch": 0.4496,
      "grad_norm": 0.9235684275627136,
      "learning_rate": 4.4421906693711966e-05,
      "loss": 1.359,
      "step": 2810
    },
    {
      "epoch": 0.4512,
      "grad_norm": 0.8435768485069275,
      "learning_rate": 4.421906693711968e-05,
      "loss": 1.3435,
      "step": 2820
    },
    {
      "epoch": 0.4528,
      "grad_norm": 0.6606183648109436,
      "learning_rate": 4.401622718052739e-05,
      "loss": 1.3997,
      "step": 2830
    },
    {
      "epoch": 0.4544,
      "grad_norm": 0.7467994093894958,
      "learning_rate": 4.381338742393509e-05,
      "loss": 1.4417,
      "step": 2840
    },
    {
      "epoch": 0.456,
      "grad_norm": 1.224295973777771,
      "learning_rate": 4.36105476673428e-05,
      "loss": 1.5001,
      "step": 2850
    },
    {
      "epoch": 0.4576,
      "grad_norm": 0.8111449480056763,
      "learning_rate": 4.340770791075051e-05,
      "loss": 1.3596,
      "step": 2860
    },
    {
      "epoch": 0.4592,
      "grad_norm": 0.8036226630210876,
      "learning_rate": 4.320486815415822e-05,
      "loss": 1.4546,
      "step": 2870
    },
    {
      "epoch": 0.4608,
      "grad_norm": 0.816628634929657,
      "learning_rate": 4.300202839756592e-05,
      "loss": 1.398,
      "step": 2880
    },
    {
      "epoch": 0.4624,
      "grad_norm": 0.939947247505188,
      "learning_rate": 4.279918864097363e-05,
      "loss": 1.4399,
      "step": 2890
    },
    {
      "epoch": 0.464,
      "grad_norm": 0.8091351985931396,
      "learning_rate": 4.259634888438134e-05,
      "loss": 1.4296,
      "step": 2900
    },
    {
      "epoch": 0.4656,
      "grad_norm": 0.9602811336517334,
      "learning_rate": 4.2393509127789046e-05,
      "loss": 1.4441,
      "step": 2910
    },
    {
      "epoch": 0.4672,
      "grad_norm": 0.7727035284042358,
      "learning_rate": 4.219066937119676e-05,
      "loss": 1.4771,
      "step": 2920
    },
    {
      "epoch": 0.4688,
      "grad_norm": 0.7776862978935242,
      "learning_rate": 4.198782961460446e-05,
      "loss": 1.4367,
      "step": 2930
    },
    {
      "epoch": 0.4704,
      "grad_norm": 0.9107199907302856,
      "learning_rate": 4.178498985801217e-05,
      "loss": 1.285,
      "step": 2940
    },
    {
      "epoch": 0.472,
      "grad_norm": 0.7945104837417603,
      "learning_rate": 4.158215010141988e-05,
      "loss": 1.435,
      "step": 2950
    },
    {
      "epoch": 0.4736,
      "grad_norm": 1.391619324684143,
      "learning_rate": 4.1379310344827587e-05,
      "loss": 1.524,
      "step": 2960
    },
    {
      "epoch": 0.4752,
      "grad_norm": 0.8388264179229736,
      "learning_rate": 4.11764705882353e-05,
      "loss": 1.4508,
      "step": 2970
    },
    {
      "epoch": 0.4768,
      "grad_norm": 1.0040127038955688,
      "learning_rate": 4.097363083164301e-05,
      "loss": 1.3741,
      "step": 2980
    },
    {
      "epoch": 0.4784,
      "grad_norm": 0.8224083185195923,
      "learning_rate": 4.077079107505071e-05,
      "loss": 1.5105,
      "step": 2990
    },
    {
      "epoch": 0.48,
      "grad_norm": 0.8134840726852417,
      "learning_rate": 4.0567951318458416e-05,
      "loss": 1.3409,
      "step": 3000
    },
    {
      "epoch": 0.4816,
      "grad_norm": 0.7531696557998657,
      "learning_rate": 4.036511156186613e-05,
      "loss": 1.4369,
      "step": 3010
    },
    {
      "epoch": 0.4832,
      "grad_norm": 0.8255640268325806,
      "learning_rate": 4.016227180527384e-05,
      "loss": 1.4782,
      "step": 3020
    },
    {
      "epoch": 0.4848,
      "grad_norm": 0.8592851161956787,
      "learning_rate": 3.995943204868154e-05,
      "loss": 1.4689,
      "step": 3030
    },
    {
      "epoch": 0.4864,
      "grad_norm": 0.839590847492218,
      "learning_rate": 3.975659229208925e-05,
      "loss": 1.4,
      "step": 3040
    },
    {
      "epoch": 0.488,
      "grad_norm": 0.8546445965766907,
      "learning_rate": 3.9553752535496956e-05,
      "loss": 1.4078,
      "step": 3050
    },
    {
      "epoch": 0.4896,
      "grad_norm": 0.8112866878509521,
      "learning_rate": 3.935091277890467e-05,
      "loss": 1.4288,
      "step": 3060
    },
    {
      "epoch": 0.4912,
      "grad_norm": 1.23469877243042,
      "learning_rate": 3.914807302231238e-05,
      "loss": 1.4295,
      "step": 3070
    },
    {
      "epoch": 0.4928,
      "grad_norm": 0.7116204500198364,
      "learning_rate": 3.894523326572008e-05,
      "loss": 1.3944,
      "step": 3080
    },
    {
      "epoch": 0.4944,
      "grad_norm": 0.8031020760536194,
      "learning_rate": 3.874239350912779e-05,
      "loss": 1.3761,
      "step": 3090
    },
    {
      "epoch": 0.496,
      "grad_norm": 0.7796714901924133,
      "learning_rate": 3.85395537525355e-05,
      "loss": 1.4383,
      "step": 3100
    },
    {
      "epoch": 0.4976,
      "grad_norm": 0.6973742842674255,
      "learning_rate": 3.833671399594321e-05,
      "loss": 1.3507,
      "step": 3110
    },
    {
      "epoch": 0.4992,
      "grad_norm": 0.9106045961380005,
      "learning_rate": 3.813387423935091e-05,
      "loss": 1.3073,
      "step": 3120
    },
    {
      "epoch": 0.5008,
      "grad_norm": 0.8738226294517517,
      "learning_rate": 3.793103448275862e-05,
      "loss": 1.435,
      "step": 3130
    },
    {
      "epoch": 0.5024,
      "grad_norm": 0.8685261607170105,
      "learning_rate": 3.772819472616633e-05,
      "loss": 1.3957,
      "step": 3140
    },
    {
      "epoch": 0.504,
      "grad_norm": 0.7489401698112488,
      "learning_rate": 3.7525354969574036e-05,
      "loss": 1.3978,
      "step": 3150
    },
    {
      "epoch": 0.5056,
      "grad_norm": 0.931246280670166,
      "learning_rate": 3.732251521298175e-05,
      "loss": 1.4185,
      "step": 3160
    },
    {
      "epoch": 0.5072,
      "grad_norm": 0.6278898119926453,
      "learning_rate": 3.711967545638945e-05,
      "loss": 1.3093,
      "step": 3170
    },
    {
      "epoch": 0.5088,
      "grad_norm": 0.7450694441795349,
      "learning_rate": 3.691683569979716e-05,
      "loss": 1.2888,
      "step": 3180
    },
    {
      "epoch": 0.5104,
      "grad_norm": 0.9010635614395142,
      "learning_rate": 3.671399594320487e-05,
      "loss": 1.4189,
      "step": 3190
    },
    {
      "epoch": 0.512,
      "grad_norm": 0.7903779745101929,
      "learning_rate": 3.6511156186612576e-05,
      "loss": 1.3984,
      "step": 3200
    },
    {
      "epoch": 0.5136,
      "grad_norm": 0.8521105647087097,
      "learning_rate": 3.630831643002029e-05,
      "loss": 1.372,
      "step": 3210
    },
    {
      "epoch": 0.5152,
      "grad_norm": 0.832203209400177,
      "learning_rate": 3.6105476673428e-05,
      "loss": 1.3513,
      "step": 3220
    },
    {
      "epoch": 0.5168,
      "grad_norm": 0.6941149830818176,
      "learning_rate": 3.59026369168357e-05,
      "loss": 1.4557,
      "step": 3230
    },
    {
      "epoch": 0.5184,
      "grad_norm": 0.7686852812767029,
      "learning_rate": 3.5699797160243406e-05,
      "loss": 1.3021,
      "step": 3240
    },
    {
      "epoch": 0.52,
      "grad_norm": 0.823742687702179,
      "learning_rate": 3.5496957403651116e-05,
      "loss": 1.4165,
      "step": 3250
    },
    {
      "epoch": 0.5216,
      "grad_norm": 0.7812134623527527,
      "learning_rate": 3.529411764705883e-05,
      "loss": 1.452,
      "step": 3260
    },
    {
      "epoch": 0.5232,
      "grad_norm": 0.9503807425498962,
      "learning_rate": 3.509127789046653e-05,
      "loss": 1.3479,
      "step": 3270
    },
    {
      "epoch": 0.5248,
      "grad_norm": 0.7697770595550537,
      "learning_rate": 3.488843813387424e-05,
      "loss": 1.3111,
      "step": 3280
    },
    {
      "epoch": 0.5264,
      "grad_norm": 1.1085420846939087,
      "learning_rate": 3.4685598377281946e-05,
      "loss": 1.2394,
      "step": 3290
    },
    {
      "epoch": 0.528,
      "grad_norm": 0.9299151301383972,
      "learning_rate": 3.4482758620689657e-05,
      "loss": 1.2936,
      "step": 3300
    },
    {
      "epoch": 0.5296,
      "grad_norm": 0.7949966192245483,
      "learning_rate": 3.427991886409737e-05,
      "loss": 1.4256,
      "step": 3310
    },
    {
      "epoch": 0.5312,
      "grad_norm": 0.8013599514961243,
      "learning_rate": 3.407707910750507e-05,
      "loss": 1.4491,
      "step": 3320
    },
    {
      "epoch": 0.5328,
      "grad_norm": 1.1205172538757324,
      "learning_rate": 3.387423935091278e-05,
      "loss": 1.2401,
      "step": 3330
    },
    {
      "epoch": 0.5344,
      "grad_norm": 0.7110530734062195,
      "learning_rate": 3.367139959432049e-05,
      "loss": 1.4416,
      "step": 3340
    },
    {
      "epoch": 0.536,
      "grad_norm": 0.8773007988929749,
      "learning_rate": 3.34685598377282e-05,
      "loss": 1.3766,
      "step": 3350
    },
    {
      "epoch": 0.5376,
      "grad_norm": 0.9566447734832764,
      "learning_rate": 3.32657200811359e-05,
      "loss": 1.3417,
      "step": 3360
    },
    {
      "epoch": 0.5392,
      "grad_norm": 0.8582170605659485,
      "learning_rate": 3.306288032454361e-05,
      "loss": 1.3405,
      "step": 3370
    },
    {
      "epoch": 0.5408,
      "grad_norm": 1.08076810836792,
      "learning_rate": 3.286004056795132e-05,
      "loss": 1.3765,
      "step": 3380
    },
    {
      "epoch": 0.5424,
      "grad_norm": 0.8801867961883545,
      "learning_rate": 3.2657200811359026e-05,
      "loss": 1.4233,
      "step": 3390
    },
    {
      "epoch": 0.544,
      "grad_norm": 0.9401204586029053,
      "learning_rate": 3.245436105476674e-05,
      "loss": 1.2817,
      "step": 3400
    },
    {
      "epoch": 0.5456,
      "grad_norm": 0.7613691091537476,
      "learning_rate": 3.225152129817444e-05,
      "loss": 1.3568,
      "step": 3410
    },
    {
      "epoch": 0.5472,
      "grad_norm": 0.7864488959312439,
      "learning_rate": 3.204868154158215e-05,
      "loss": 1.3435,
      "step": 3420
    },
    {
      "epoch": 0.5488,
      "grad_norm": 0.8781312108039856,
      "learning_rate": 3.184584178498986e-05,
      "loss": 1.4176,
      "step": 3430
    },
    {
      "epoch": 0.5504,
      "grad_norm": 0.9100823998451233,
      "learning_rate": 3.1643002028397566e-05,
      "loss": 1.3533,
      "step": 3440
    },
    {
      "epoch": 0.552,
      "grad_norm": 0.832099437713623,
      "learning_rate": 3.144016227180527e-05,
      "loss": 1.4135,
      "step": 3450
    },
    {
      "epoch": 0.5536,
      "grad_norm": 0.7773131728172302,
      "learning_rate": 3.123732251521299e-05,
      "loss": 1.3946,
      "step": 3460
    },
    {
      "epoch": 0.5552,
      "grad_norm": 0.9809824228286743,
      "learning_rate": 3.103448275862069e-05,
      "loss": 1.2571,
      "step": 3470
    },
    {
      "epoch": 0.5568,
      "grad_norm": 0.8351847529411316,
      "learning_rate": 3.0831643002028396e-05,
      "loss": 1.5343,
      "step": 3480
    },
    {
      "epoch": 0.5584,
      "grad_norm": 0.8704239130020142,
      "learning_rate": 3.0628803245436106e-05,
      "loss": 1.4628,
      "step": 3490
    },
    {
      "epoch": 0.56,
      "grad_norm": 0.747197151184082,
      "learning_rate": 3.0425963488843817e-05,
      "loss": 1.341,
      "step": 3500
    },
    {
      "epoch": 0.5616,
      "grad_norm": 0.8803126811981201,
      "learning_rate": 3.0223123732251524e-05,
      "loss": 1.3711,
      "step": 3510
    },
    {
      "epoch": 0.5632,
      "grad_norm": 0.9751145839691162,
      "learning_rate": 3.0020283975659232e-05,
      "loss": 1.2958,
      "step": 3520
    },
    {
      "epoch": 0.5648,
      "grad_norm": 0.7865713238716125,
      "learning_rate": 2.9817444219066936e-05,
      "loss": 1.3419,
      "step": 3530
    },
    {
      "epoch": 0.5664,
      "grad_norm": 0.8873676657676697,
      "learning_rate": 2.961460446247465e-05,
      "loss": 1.3827,
      "step": 3540
    },
    {
      "epoch": 0.568,
      "grad_norm": 0.8832893371582031,
      "learning_rate": 2.9411764705882354e-05,
      "loss": 1.3512,
      "step": 3550
    },
    {
      "epoch": 0.5696,
      "grad_norm": 0.6538761854171753,
      "learning_rate": 2.920892494929006e-05,
      "loss": 1.3941,
      "step": 3560
    },
    {
      "epoch": 0.5712,
      "grad_norm": 1.031559705734253,
      "learning_rate": 2.900608519269777e-05,
      "loss": 1.3678,
      "step": 3570
    },
    {
      "epoch": 0.5728,
      "grad_norm": 0.9533334970474243,
      "learning_rate": 2.880324543610548e-05,
      "loss": 1.3249,
      "step": 3580
    },
    {
      "epoch": 0.5744,
      "grad_norm": 1.0496972799301147,
      "learning_rate": 2.8600405679513187e-05,
      "loss": 1.343,
      "step": 3590
    },
    {
      "epoch": 0.576,
      "grad_norm": 1.0601290464401245,
      "learning_rate": 2.8397565922920894e-05,
      "loss": 1.3382,
      "step": 3600
    },
    {
      "epoch": 0.5776,
      "grad_norm": 0.8638586401939392,
      "learning_rate": 2.81947261663286e-05,
      "loss": 1.3433,
      "step": 3610
    },
    {
      "epoch": 0.5792,
      "grad_norm": 0.7777538299560547,
      "learning_rate": 2.7991886409736312e-05,
      "loss": 1.3665,
      "step": 3620
    },
    {
      "epoch": 0.5808,
      "grad_norm": 0.6789288520812988,
      "learning_rate": 2.778904665314402e-05,
      "loss": 1.3623,
      "step": 3630
    },
    {
      "epoch": 0.5824,
      "grad_norm": 1.0239415168762207,
      "learning_rate": 2.7586206896551727e-05,
      "loss": 1.4301,
      "step": 3640
    },
    {
      "epoch": 0.584,
      "grad_norm": 0.9509468078613281,
| "learning_rate": 2.738336713995943e-05, | |
| "loss": 1.4437, | |
| "step": 3650 | |
| }, | |
| { | |
| "epoch": 0.5856, | |
| "grad_norm": 0.9083086252212524, | |
| "learning_rate": 2.7180527383367145e-05, | |
| "loss": 1.3129, | |
| "step": 3660 | |
| }, | |
| { | |
| "epoch": 0.5872, | |
| "grad_norm": 0.7706483006477356, | |
| "learning_rate": 2.697768762677485e-05, | |
| "loss": 1.4301, | |
| "step": 3670 | |
| }, | |
| { | |
| "epoch": 0.5888, | |
| "grad_norm": 0.8252948522567749, | |
| "learning_rate": 2.6774847870182556e-05, | |
| "loss": 1.4042, | |
| "step": 3680 | |
| }, | |
| { | |
| "epoch": 0.5904, | |
| "grad_norm": 0.8689036965370178, | |
| "learning_rate": 2.6572008113590263e-05, | |
| "loss": 1.1999, | |
| "step": 3690 | |
| }, | |
| { | |
| "epoch": 0.592, | |
| "grad_norm": 0.7418636679649353, | |
| "learning_rate": 2.6369168356997974e-05, | |
| "loss": 1.4241, | |
| "step": 3700 | |
| }, | |
| { | |
| "epoch": 0.5936, | |
| "grad_norm": 0.832951545715332, | |
| "learning_rate": 2.616632860040568e-05, | |
| "loss": 1.3442, | |
| "step": 3710 | |
| }, | |
| { | |
| "epoch": 0.5952, | |
| "grad_norm": 0.996060311794281, | |
| "learning_rate": 2.596348884381339e-05, | |
| "loss": 1.3034, | |
| "step": 3720 | |
| }, | |
| { | |
| "epoch": 0.5968, | |
| "grad_norm": 0.705885112285614, | |
| "learning_rate": 2.5760649087221096e-05, | |
| "loss": 1.4364, | |
| "step": 3730 | |
| }, | |
| { | |
| "epoch": 0.5984, | |
| "grad_norm": 0.7900152206420898, | |
| "learning_rate": 2.5557809330628807e-05, | |
| "loss": 1.3484, | |
| "step": 3740 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "grad_norm": 0.8224239349365234, | |
| "learning_rate": 2.5354969574036514e-05, | |
| "loss": 1.4556, | |
| "step": 3750 | |
| }, | |
| { | |
| "epoch": 0.6016, | |
| "grad_norm": 0.8629553914070129, | |
| "learning_rate": 2.515212981744422e-05, | |
| "loss": 1.3952, | |
| "step": 3760 | |
| }, | |
| { | |
| "epoch": 0.6032, | |
| "grad_norm": 0.9885224103927612, | |
| "learning_rate": 2.494929006085193e-05, | |
| "loss": 1.3182, | |
| "step": 3770 | |
| }, | |
| { | |
| "epoch": 0.6048, | |
| "grad_norm": 0.8529279232025146, | |
| "learning_rate": 2.4746450304259636e-05, | |
| "loss": 1.3986, | |
| "step": 3780 | |
| }, | |
| { | |
| "epoch": 0.6064, | |
| "grad_norm": 0.7550370693206787, | |
| "learning_rate": 2.4543610547667344e-05, | |
| "loss": 1.3796, | |
| "step": 3790 | |
| }, | |
| { | |
| "epoch": 0.608, | |
| "grad_norm": 0.8512799739837646, | |
| "learning_rate": 2.434077079107505e-05, | |
| "loss": 1.3888, | |
| "step": 3800 | |
| }, | |
| { | |
| "epoch": 0.6096, | |
| "grad_norm": 0.706964910030365, | |
| "learning_rate": 2.413793103448276e-05, | |
| "loss": 1.3735, | |
| "step": 3810 | |
| }, | |
| { | |
| "epoch": 0.6112, | |
| "grad_norm": 0.7493021488189697, | |
| "learning_rate": 2.393509127789047e-05, | |
| "loss": 1.4348, | |
| "step": 3820 | |
| }, | |
| { | |
| "epoch": 0.6128, | |
| "grad_norm": 0.9412333369255066, | |
| "learning_rate": 2.3732251521298176e-05, | |
| "loss": 1.4326, | |
| "step": 3830 | |
| }, | |
| { | |
| "epoch": 0.6144, | |
| "grad_norm": 0.6976621747016907, | |
| "learning_rate": 2.3529411764705884e-05, | |
| "loss": 1.2963, | |
| "step": 3840 | |
| }, | |
| { | |
| "epoch": 0.616, | |
| "grad_norm": 0.8718717098236084, | |
| "learning_rate": 2.332657200811359e-05, | |
| "loss": 1.403, | |
| "step": 3850 | |
| }, | |
| { | |
| "epoch": 0.6176, | |
| "grad_norm": 0.7933664321899414, | |
| "learning_rate": 2.31237322515213e-05, | |
| "loss": 1.3301, | |
| "step": 3860 | |
| }, | |
| { | |
| "epoch": 0.6192, | |
| "grad_norm": 0.9271290302276611, | |
| "learning_rate": 2.292089249492901e-05, | |
| "loss": 1.252, | |
| "step": 3870 | |
| }, | |
| { | |
| "epoch": 0.6208, | |
| "grad_norm": 0.7640563249588013, | |
| "learning_rate": 2.2718052738336716e-05, | |
| "loss": 1.3344, | |
| "step": 3880 | |
| }, | |
| { | |
| "epoch": 0.6224, | |
| "grad_norm": 0.735242486000061, | |
| "learning_rate": 2.2515212981744424e-05, | |
| "loss": 1.2704, | |
| "step": 3890 | |
| }, | |
| { | |
| "epoch": 0.624, | |
| "grad_norm": 0.8058167695999146, | |
| "learning_rate": 2.231237322515213e-05, | |
| "loss": 1.3281, | |
| "step": 3900 | |
| }, | |
| { | |
| "epoch": 0.6256, | |
| "grad_norm": 0.9089419841766357, | |
| "learning_rate": 2.210953346855984e-05, | |
| "loss": 1.3272, | |
| "step": 3910 | |
| }, | |
| { | |
| "epoch": 0.6272, | |
| "grad_norm": 0.8395382761955261, | |
| "learning_rate": 2.1906693711967546e-05, | |
| "loss": 1.4408, | |
| "step": 3920 | |
| }, | |
| { | |
| "epoch": 0.6288, | |
| "grad_norm": 0.9245477914810181, | |
| "learning_rate": 2.1703853955375257e-05, | |
| "loss": 1.362, | |
| "step": 3930 | |
| }, | |
| { | |
| "epoch": 0.6304, | |
| "grad_norm": 0.9677096009254456, | |
| "learning_rate": 2.150101419878296e-05, | |
| "loss": 1.3991, | |
| "step": 3940 | |
| }, | |
| { | |
| "epoch": 0.632, | |
| "grad_norm": 0.9897236824035645, | |
| "learning_rate": 2.129817444219067e-05, | |
| "loss": 1.5116, | |
| "step": 3950 | |
| }, | |
| { | |
| "epoch": 0.6336, | |
| "grad_norm": 0.9850999712944031, | |
| "learning_rate": 2.109533468559838e-05, | |
| "loss": 1.3876, | |
| "step": 3960 | |
| }, | |
| { | |
| "epoch": 0.6352, | |
| "grad_norm": 0.7112181782722473, | |
| "learning_rate": 2.0892494929006086e-05, | |
| "loss": 1.4135, | |
| "step": 3970 | |
| }, | |
| { | |
| "epoch": 0.6368, | |
| "grad_norm": 0.6867976784706116, | |
| "learning_rate": 2.0689655172413793e-05, | |
| "loss": 1.3143, | |
| "step": 3980 | |
| }, | |
| { | |
| "epoch": 0.6384, | |
| "grad_norm": 1.0180323123931885, | |
| "learning_rate": 2.0486815415821504e-05, | |
| "loss": 1.3705, | |
| "step": 3990 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "grad_norm": 1.1014171838760376, | |
| "learning_rate": 2.0283975659229208e-05, | |
| "loss": 1.3141, | |
| "step": 4000 | |
| }, | |
| { | |
| "epoch": 0.6416, | |
| "grad_norm": 0.9209902286529541, | |
| "learning_rate": 2.008113590263692e-05, | |
| "loss": 1.2576, | |
| "step": 4010 | |
| }, | |
| { | |
| "epoch": 0.6432, | |
| "grad_norm": 0.8500963449478149, | |
| "learning_rate": 1.9878296146044626e-05, | |
| "loss": 1.4116, | |
| "step": 4020 | |
| }, | |
| { | |
| "epoch": 0.6448, | |
| "grad_norm": 0.9721909165382385, | |
| "learning_rate": 1.9675456389452333e-05, | |
| "loss": 1.3289, | |
| "step": 4030 | |
| }, | |
| { | |
| "epoch": 0.6464, | |
| "grad_norm": 1.317718267440796, | |
| "learning_rate": 1.947261663286004e-05, | |
| "loss": 1.3778, | |
| "step": 4040 | |
| }, | |
| { | |
| "epoch": 0.648, | |
| "grad_norm": 0.9101319313049316, | |
| "learning_rate": 1.926977687626775e-05, | |
| "loss": 1.426, | |
| "step": 4050 | |
| }, | |
| { | |
| "epoch": 0.6496, | |
| "grad_norm": 0.8741719126701355, | |
| "learning_rate": 1.9066937119675455e-05, | |
| "loss": 1.4806, | |
| "step": 4060 | |
| }, | |
| { | |
| "epoch": 0.6512, | |
| "grad_norm": 0.885848343372345, | |
| "learning_rate": 1.8864097363083166e-05, | |
| "loss": 1.3996, | |
| "step": 4070 | |
| }, | |
| { | |
| "epoch": 0.6528, | |
| "grad_norm": 0.7545533180236816, | |
| "learning_rate": 1.8661257606490873e-05, | |
| "loss": 1.4227, | |
| "step": 4080 | |
| }, | |
| { | |
| "epoch": 0.6544, | |
| "grad_norm": 0.9819992184638977, | |
| "learning_rate": 1.845841784989858e-05, | |
| "loss": 1.4271, | |
| "step": 4090 | |
| }, | |
| { | |
| "epoch": 0.656, | |
| "grad_norm": 0.8249715566635132, | |
| "learning_rate": 1.8255578093306288e-05, | |
| "loss": 1.3331, | |
| "step": 4100 | |
| }, | |
| { | |
| "epoch": 0.6576, | |
| "grad_norm": 0.8639522790908813, | |
| "learning_rate": 1.8052738336714e-05, | |
| "loss": 1.3474, | |
| "step": 4110 | |
| }, | |
| { | |
| "epoch": 0.6592, | |
| "grad_norm": 0.7227104306221008, | |
| "learning_rate": 1.7849898580121703e-05, | |
| "loss": 1.3276, | |
| "step": 4120 | |
| }, | |
| { | |
| "epoch": 0.6608, | |
| "grad_norm": 0.9825816750526428, | |
| "learning_rate": 1.7647058823529414e-05, | |
| "loss": 1.3308, | |
| "step": 4130 | |
| }, | |
| { | |
| "epoch": 0.6624, | |
| "grad_norm": 0.7120647430419922, | |
| "learning_rate": 1.744421906693712e-05, | |
| "loss": 1.3679, | |
| "step": 4140 | |
| }, | |
| { | |
| "epoch": 0.664, | |
| "grad_norm": 0.6898847222328186, | |
| "learning_rate": 1.7241379310344828e-05, | |
| "loss": 1.4823, | |
| "step": 4150 | |
| }, | |
| { | |
| "epoch": 0.6656, | |
| "grad_norm": 0.9716871380805969, | |
| "learning_rate": 1.7038539553752536e-05, | |
| "loss": 1.3503, | |
| "step": 4160 | |
| }, | |
| { | |
| "epoch": 0.6672, | |
| "grad_norm": 0.8497072458267212, | |
| "learning_rate": 1.6835699797160246e-05, | |
| "loss": 1.3294, | |
| "step": 4170 | |
| }, | |
| { | |
| "epoch": 0.6688, | |
| "grad_norm": 0.9010389447212219, | |
| "learning_rate": 1.663286004056795e-05, | |
| "loss": 1.3643, | |
| "step": 4180 | |
| }, | |
| { | |
| "epoch": 0.6704, | |
| "grad_norm": 0.7756496071815491, | |
| "learning_rate": 1.643002028397566e-05, | |
| "loss": 1.3912, | |
| "step": 4190 | |
| }, | |
| { | |
| "epoch": 0.672, | |
| "grad_norm": 0.7958990931510925, | |
| "learning_rate": 1.622718052738337e-05, | |
| "loss": 1.2344, | |
| "step": 4200 | |
| }, | |
| { | |
| "epoch": 0.6736, | |
| "grad_norm": 1.5075416564941406, | |
| "learning_rate": 1.6024340770791076e-05, | |
| "loss": 1.3564, | |
| "step": 4210 | |
| }, | |
| { | |
| "epoch": 0.6752, | |
| "grad_norm": 0.9622378945350647, | |
| "learning_rate": 1.5821501014198783e-05, | |
| "loss": 1.4722, | |
| "step": 4220 | |
| }, | |
| { | |
| "epoch": 0.6768, | |
| "grad_norm": 0.6588869690895081, | |
| "learning_rate": 1.5618661257606494e-05, | |
| "loss": 1.2721, | |
| "step": 4230 | |
| }, | |
| { | |
| "epoch": 0.6784, | |
| "grad_norm": 0.9116512537002563, | |
| "learning_rate": 1.5415821501014198e-05, | |
| "loss": 1.3334, | |
| "step": 4240 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "grad_norm": 0.9403899908065796, | |
| "learning_rate": 1.5212981744421909e-05, | |
| "loss": 1.4443, | |
| "step": 4250 | |
| }, | |
| { | |
| "epoch": 0.6816, | |
| "grad_norm": 0.9178708791732788, | |
| "learning_rate": 1.5010141987829616e-05, | |
| "loss": 1.3642, | |
| "step": 4260 | |
| }, | |
| { | |
| "epoch": 0.6832, | |
| "grad_norm": 0.9384081959724426, | |
| "learning_rate": 1.4807302231237325e-05, | |
| "loss": 1.3698, | |
| "step": 4270 | |
| }, | |
| { | |
| "epoch": 0.6848, | |
| "grad_norm": 0.7728821039199829, | |
| "learning_rate": 1.460446247464503e-05, | |
| "loss": 1.4612, | |
| "step": 4280 | |
| }, | |
| { | |
| "epoch": 0.6864, | |
| "grad_norm": 0.8059030175209045, | |
| "learning_rate": 1.440162271805274e-05, | |
| "loss": 1.41, | |
| "step": 4290 | |
| }, | |
| { | |
| "epoch": 0.688, | |
| "grad_norm": 0.7898443937301636, | |
| "learning_rate": 1.4198782961460447e-05, | |
| "loss": 1.3698, | |
| "step": 4300 | |
| }, | |
| { | |
| "epoch": 0.6896, | |
| "grad_norm": 0.8634099960327148, | |
| "learning_rate": 1.3995943204868156e-05, | |
| "loss": 1.3239, | |
| "step": 4310 | |
| }, | |
| { | |
| "epoch": 0.6912, | |
| "grad_norm": 1.0027539730072021, | |
| "learning_rate": 1.3793103448275863e-05, | |
| "loss": 1.3668, | |
| "step": 4320 | |
| }, | |
| { | |
| "epoch": 0.6928, | |
| "grad_norm": 0.8978163003921509, | |
| "learning_rate": 1.3590263691683572e-05, | |
| "loss": 1.3609, | |
| "step": 4330 | |
| }, | |
| { | |
| "epoch": 0.6944, | |
| "grad_norm": 0.9273055791854858, | |
| "learning_rate": 1.3387423935091278e-05, | |
| "loss": 1.3224, | |
| "step": 4340 | |
| }, | |
| { | |
| "epoch": 0.696, | |
| "grad_norm": 0.6951127052307129, | |
| "learning_rate": 1.3184584178498987e-05, | |
| "loss": 1.3958, | |
| "step": 4350 | |
| }, | |
| { | |
| "epoch": 0.6976, | |
| "grad_norm": 0.8663493990898132, | |
| "learning_rate": 1.2981744421906694e-05, | |
| "loss": 1.2714, | |
| "step": 4360 | |
| }, | |
| { | |
| "epoch": 0.6992, | |
| "grad_norm": 0.8381744027137756, | |
| "learning_rate": 1.2778904665314403e-05, | |
| "loss": 1.2578, | |
| "step": 4370 | |
| }, | |
| { | |
| "epoch": 0.7008, | |
| "grad_norm": 1.1157745122909546, | |
| "learning_rate": 1.257606490872211e-05, | |
| "loss": 1.284, | |
| "step": 4380 | |
| }, | |
| { | |
| "epoch": 0.7024, | |
| "grad_norm": 0.8827899098396301, | |
| "learning_rate": 1.2373225152129818e-05, | |
| "loss": 1.4336, | |
| "step": 4390 | |
| }, | |
| { | |
| "epoch": 0.704, | |
| "grad_norm": 0.7563768625259399, | |
| "learning_rate": 1.2170385395537525e-05, | |
| "loss": 1.3222, | |
| "step": 4400 | |
| }, | |
| { | |
| "epoch": 0.7056, | |
| "grad_norm": 0.9869970083236694, | |
| "learning_rate": 1.1967545638945234e-05, | |
| "loss": 1.3535, | |
| "step": 4410 | |
| }, | |
| { | |
| "epoch": 0.7072, | |
| "grad_norm": 0.8248376846313477, | |
| "learning_rate": 1.1764705882352942e-05, | |
| "loss": 1.3953, | |
| "step": 4420 | |
| }, | |
| { | |
| "epoch": 0.7088, | |
| "grad_norm": 0.8114254474639893, | |
| "learning_rate": 1.156186612576065e-05, | |
| "loss": 1.3259, | |
| "step": 4430 | |
| }, | |
| { | |
| "epoch": 0.7104, | |
| "grad_norm": 1.0196243524551392, | |
| "learning_rate": 1.1359026369168358e-05, | |
| "loss": 1.5373, | |
| "step": 4440 | |
| }, | |
| { | |
| "epoch": 0.712, | |
| "grad_norm": 0.8844570517539978, | |
| "learning_rate": 1.1156186612576066e-05, | |
| "loss": 1.4636, | |
| "step": 4450 | |
| }, | |
| { | |
| "epoch": 0.7136, | |
| "grad_norm": 0.9430680871009827, | |
| "learning_rate": 1.0953346855983773e-05, | |
| "loss": 1.3663, | |
| "step": 4460 | |
| }, | |
| { | |
| "epoch": 0.7152, | |
| "grad_norm": 0.9466381072998047, | |
| "learning_rate": 1.075050709939148e-05, | |
| "loss": 1.3144, | |
| "step": 4470 | |
| }, | |
| { | |
| "epoch": 0.7168, | |
| "grad_norm": 0.8997886180877686, | |
| "learning_rate": 1.054766734279919e-05, | |
| "loss": 1.4141, | |
| "step": 4480 | |
| }, | |
| { | |
| "epoch": 0.7184, | |
| "grad_norm": 0.9555508494377136, | |
| "learning_rate": 1.0344827586206897e-05, | |
| "loss": 1.4292, | |
| "step": 4490 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "grad_norm": 0.8645280003547668, | |
| "learning_rate": 1.0141987829614604e-05, | |
| "loss": 1.4092, | |
| "step": 4500 | |
| }, | |
| { | |
| "epoch": 0.7216, | |
| "grad_norm": 0.8445830941200256, | |
| "learning_rate": 9.939148073022313e-06, | |
| "loss": 1.3546, | |
| "step": 4510 | |
| }, | |
| { | |
| "epoch": 0.7232, | |
| "grad_norm": 0.7512080669403076, | |
| "learning_rate": 9.73630831643002e-06, | |
| "loss": 1.4346, | |
| "step": 4520 | |
| }, | |
| { | |
| "epoch": 0.7248, | |
| "grad_norm": 0.8196505308151245, | |
| "learning_rate": 9.533468559837728e-06, | |
| "loss": 1.354, | |
| "step": 4530 | |
| }, | |
| { | |
| "epoch": 0.7264, | |
| "grad_norm": 0.8481506109237671, | |
| "learning_rate": 9.330628803245437e-06, | |
| "loss": 1.3504, | |
| "step": 4540 | |
| }, | |
| { | |
| "epoch": 0.728, | |
| "grad_norm": 0.9044469594955444, | |
| "learning_rate": 9.127789046653144e-06, | |
| "loss": 1.3297, | |
| "step": 4550 | |
| }, | |
| { | |
| "epoch": 0.7296, | |
| "grad_norm": 0.8796373009681702, | |
| "learning_rate": 8.924949290060851e-06, | |
| "loss": 1.3237, | |
| "step": 4560 | |
| }, | |
| { | |
| "epoch": 0.7312, | |
| "grad_norm": 0.7506686449050903, | |
| "learning_rate": 8.72210953346856e-06, | |
| "loss": 1.3449, | |
| "step": 4570 | |
| }, | |
| { | |
| "epoch": 0.7328, | |
| "grad_norm": 0.9704193472862244, | |
| "learning_rate": 8.519269776876268e-06, | |
| "loss": 1.4063, | |
| "step": 4580 | |
| }, | |
| { | |
| "epoch": 0.7344, | |
| "grad_norm": 0.8338668346405029, | |
| "learning_rate": 8.316430020283975e-06, | |
| "loss": 1.3435, | |
| "step": 4590 | |
| }, | |
| { | |
| "epoch": 0.736, | |
| "grad_norm": 0.9024910926818848, | |
| "learning_rate": 8.113590263691684e-06, | |
| "loss": 1.3664, | |
| "step": 4600 | |
| }, | |
| { | |
| "epoch": 0.7376, | |
| "grad_norm": 0.9343035221099854, | |
| "learning_rate": 7.910750507099392e-06, | |
| "loss": 1.3153, | |
| "step": 4610 | |
| }, | |
| { | |
| "epoch": 0.7392, | |
| "grad_norm": 0.76399827003479, | |
| "learning_rate": 7.707910750507099e-06, | |
| "loss": 1.2957, | |
| "step": 4620 | |
| }, | |
| { | |
| "epoch": 0.7408, | |
| "grad_norm": 0.8459974527359009, | |
| "learning_rate": 7.505070993914808e-06, | |
| "loss": 1.3934, | |
| "step": 4630 | |
| }, | |
| { | |
| "epoch": 0.7424, | |
| "grad_norm": 0.7022688984870911, | |
| "learning_rate": 7.302231237322515e-06, | |
| "loss": 1.3167, | |
| "step": 4640 | |
| }, | |
| { | |
| "epoch": 0.744, | |
| "grad_norm": 0.6942615509033203, | |
| "learning_rate": 7.0993914807302235e-06, | |
| "loss": 1.4671, | |
| "step": 4650 | |
| }, | |
| { | |
| "epoch": 0.7456, | |
| "grad_norm": 0.8787364959716797, | |
| "learning_rate": 6.896551724137932e-06, | |
| "loss": 1.4055, | |
| "step": 4660 | |
| }, | |
| { | |
| "epoch": 0.7472, | |
| "grad_norm": 0.8846457600593567, | |
| "learning_rate": 6.693711967545639e-06, | |
| "loss": 1.3936, | |
| "step": 4670 | |
| }, | |
| { | |
| "epoch": 0.7488, | |
| "grad_norm": 0.8653333187103271, | |
| "learning_rate": 6.490872210953347e-06, | |
| "loss": 1.4805, | |
| "step": 4680 | |
| }, | |
| { | |
| "epoch": 0.7504, | |
| "grad_norm": 0.8352001905441284, | |
| "learning_rate": 6.288032454361055e-06, | |
| "loss": 1.3985, | |
| "step": 4690 | |
| }, | |
| { | |
| "epoch": 0.752, | |
| "grad_norm": 0.8867529630661011, | |
| "learning_rate": 6.085192697768763e-06, | |
| "loss": 1.4465, | |
| "step": 4700 | |
| }, | |
| { | |
| "epoch": 0.7536, | |
| "grad_norm": 0.6923931241035461, | |
| "learning_rate": 5.882352941176471e-06, | |
| "loss": 1.333, | |
| "step": 4710 | |
| }, | |
| { | |
| "epoch": 0.7552, | |
| "grad_norm": 0.9098489880561829, | |
| "learning_rate": 5.679513184584179e-06, | |
| "loss": 1.3858, | |
| "step": 4720 | |
| }, | |
| { | |
| "epoch": 0.7568, | |
| "grad_norm": 0.899730920791626, | |
| "learning_rate": 5.4766734279918865e-06, | |
| "loss": 1.3489, | |
| "step": 4730 | |
| }, | |
| { | |
| "epoch": 0.7584, | |
| "grad_norm": 0.924389660358429, | |
| "learning_rate": 5.273833671399595e-06, | |
| "loss": 1.3702, | |
| "step": 4740 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "grad_norm": 0.9712046980857849, | |
| "learning_rate": 5.070993914807302e-06, | |
| "loss": 1.356, | |
| "step": 4750 | |
| }, | |
| { | |
| "epoch": 0.7616, | |
| "grad_norm": 0.9079488515853882, | |
| "learning_rate": 4.86815415821501e-06, | |
| "loss": 1.4066, | |
| "step": 4760 | |
| }, | |
| { | |
| "epoch": 0.7632, | |
| "grad_norm": 0.7201410531997681, | |
| "learning_rate": 4.665314401622718e-06, | |
| "loss": 1.3654, | |
| "step": 4770 | |
| }, | |
| { | |
| "epoch": 0.7648, | |
| "grad_norm": 0.8959711194038391, | |
| "learning_rate": 4.462474645030426e-06, | |
| "loss": 1.3248, | |
| "step": 4780 | |
| }, | |
| { | |
| "epoch": 0.7664, | |
| "grad_norm": 1.0216981172561646, | |
| "learning_rate": 4.259634888438134e-06, | |
| "loss": 1.4433, | |
| "step": 4790 | |
| }, | |
| { | |
| "epoch": 0.768, | |
| "grad_norm": 0.8937973976135254, | |
| "learning_rate": 4.056795131845842e-06, | |
| "loss": 1.4326, | |
| "step": 4800 | |
| }, | |
| { | |
| "epoch": 0.7696, | |
| "grad_norm": 0.8216485381126404, | |
| "learning_rate": 3.8539553752535494e-06, | |
| "loss": 1.4409, | |
| "step": 4810 | |
| }, | |
| { | |
| "epoch": 0.7712, | |
| "grad_norm": 0.9484663605690002, | |
| "learning_rate": 3.6511156186612576e-06, | |
| "loss": 1.3283, | |
| "step": 4820 | |
| }, | |
| { | |
| "epoch": 0.7728, | |
| "grad_norm": 0.7381030917167664, | |
| "learning_rate": 3.448275862068966e-06, | |
| "loss": 1.4376, | |
| "step": 4830 | |
| }, | |
| { | |
| "epoch": 0.7744, | |
| "grad_norm": 0.798279881477356, | |
| "learning_rate": 3.2454361054766736e-06, | |
| "loss": 1.3134, | |
| "step": 4840 | |
| }, | |
| { | |
| "epoch": 0.776, | |
| "grad_norm": 0.9430242776870728, | |
| "learning_rate": 3.0425963488843814e-06, | |
| "loss": 1.3852, | |
| "step": 4850 | |
| }, | |
| { | |
| "epoch": 0.7776, | |
| "grad_norm": 0.7687798142433167, | |
| "learning_rate": 2.8397565922920896e-06, | |
| "loss": 1.283, | |
| "step": 4860 | |
| }, | |
| { | |
| "epoch": 0.7792, | |
| "grad_norm": 0.8667353391647339, | |
| "learning_rate": 2.6369168356997973e-06, | |
| "loss": 1.3975, | |
| "step": 4870 | |
| }, | |
| { | |
| "epoch": 0.7808, | |
| "grad_norm": 0.7604361772537231, | |
| "learning_rate": 2.434077079107505e-06, | |
| "loss": 1.4119, | |
| "step": 4880 | |
| }, | |
| { | |
| "epoch": 0.7824, | |
| "grad_norm": 0.8409517407417297, | |
| "learning_rate": 2.231237322515213e-06, | |
| "loss": 1.4006, | |
| "step": 4890 | |
| }, | |
| { | |
| "epoch": 0.784, | |
| "grad_norm": 0.7949575185775757, | |
| "learning_rate": 2.028397565922921e-06, | |
| "loss": 1.3408, | |
| "step": 4900 | |
| }, | |
| { | |
| "epoch": 0.7856, | |
| "grad_norm": 0.9525842070579529, | |
| "learning_rate": 1.8255578093306288e-06, | |
| "loss": 1.3606, | |
| "step": 4910 | |
| }, | |
| { | |
| "epoch": 0.7872, | |
| "grad_norm": 0.9600664973258972, | |
| "learning_rate": 1.6227180527383368e-06, | |
| "loss": 1.1382, | |
| "step": 4920 | |
| }, | |
| { | |
| "epoch": 0.7888, | |
| "grad_norm": 0.8406458497047424, | |
| "learning_rate": 1.4198782961460448e-06, | |
| "loss": 1.3669, | |
| "step": 4930 | |
| }, | |
| { | |
| "epoch": 0.7904, | |
| "grad_norm": 0.8937062621116638, | |
| "learning_rate": 1.2170385395537525e-06, | |
| "loss": 1.4203, | |
| "step": 4940 | |
| }, | |
| { | |
| "epoch": 0.792, | |
| "grad_norm": 0.8134217262268066, | |
| "learning_rate": 1.0141987829614605e-06, | |
| "loss": 1.2862, | |
| "step": 4950 | |
| }, | |
| { | |
| "epoch": 0.7936, | |
| "grad_norm": 0.9232685565948486, | |
| "learning_rate": 8.113590263691684e-07, | |
| "loss": 1.3666, | |
| "step": 4960 | |
| }, | |
| { | |
| "epoch": 0.7952, | |
| "grad_norm": 0.7853943705558777, | |
| "learning_rate": 6.085192697768763e-07, | |
| "loss": 1.4673, | |
| "step": 4970 | |
| }, | |
| { | |
| "epoch": 0.7968, | |
| "grad_norm": 0.8831775784492493, | |
| "learning_rate": 4.056795131845842e-07, | |
| "loss": 1.3468, | |
| "step": 4980 | |
| }, | |
| { | |
| "epoch": 0.7984, | |
| "grad_norm": 1.4679898023605347, | |
| "learning_rate": 2.028397565922921e-07, | |
| "loss": 1.3509, | |
| "step": 4990 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "grad_norm": 0.8490538597106934, | |
| "learning_rate": 0.0, | |
| "loss": 1.3766, | |
| "step": 5000 | |
| } | |
| ], | |
| "logging_steps": 10, | |
| "max_steps": 5000, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 1, | |
| "save_steps": 1000, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": true | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 4.157765325651548e+17, | |
| "train_batch_size": 2, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
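
The fields above close what appears to be a Hugging Face `transformers` Trainer `trainer_state.json`: a linear learning-rate decay reaching 0.0 at `max_steps` = 5000, entries logged every `logging_steps` = 10 optimizer steps, and a final training loss of 1.3766 at epoch 0.8. As a minimal sketch of how such a state file can be inspected offline, using only the Python standard library and assuming the file is saved under its conventional name `trainer_state.json` (the path is an assumption, not part of the original log):

```python
# Minimal sketch (not part of the original file): load the trainer
# state and summarise the logged loss curve and learning-rate schedule.
import json

# Assumed path; checkpoints typically store this file alongside weights.
with open("trainer_state.json") as f:
    state = json.load(f)

# Each log_history entry carries epoch, grad_norm, learning_rate,
# loss, and step, one entry per `logging_steps` (= 10) optimizer steps.
history = state["log_history"]

first, last = history[0], history[-1]
print(f"entries logged: {len(history)}")
print(f"loss at step {first['step']}: {first['loss']:.4f}")
print(f"loss at step {last['step']}: {last['loss']:.4f}")

# The schedule visible in the log is a short warmup to the peak rate
# followed by linear decay to zero at max_steps; the peak confirms it.
peak_lr = max(entry["learning_rate"] for entry in history)
print(f"peak learning rate: {peak_lr:.2e}")
print(f"final learning rate: {last['learning_rate']:.2e}")
```

Under these assumptions the script touches only keys that actually appear in the log above (`log_history`, `step`, `loss`, `learning_rate`); anything further, such as plotting the curve, would need an extra dependency like matplotlib.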