{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 3.0,
  "eval_steps": 500,
  "global_step": 222,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.01,
      "learning_rate": 2.857142857142857e-05,
      "loss": 0.2358,
      "step": 1
    },
    {
      "epoch": 0.03,
      "learning_rate": 5.714285714285714e-05,
      "loss": 0.21,
      "step": 2
    },
    {
      "epoch": 0.04,
      "learning_rate": 8.571428571428571e-05,
      "loss": 0.1914,
      "step": 3
    },
    {
      "epoch": 0.05,
      "learning_rate": 0.00011428571428571428,
      "loss": 0.2148,
      "step": 4
    },
    {
      "epoch": 0.07,
      "learning_rate": 0.00014285714285714287,
      "loss": 0.1733,
      "step": 5
    },
    {
      "epoch": 0.08,
      "learning_rate": 0.00017142857142857143,
      "loss": 0.1304,
      "step": 6
    },
    {
      "epoch": 0.09,
      "learning_rate": 0.0002,
      "loss": 0.1372,
      "step": 7
    },
    {
      "epoch": 0.11,
      "learning_rate": 0.00019998932457674902,
      "loss": 0.124,
      "step": 8
    },
    {
      "epoch": 0.12,
      "learning_rate": 0.00019995730058628927,
      "loss": 0.1387,
      "step": 9
    },
    {
      "epoch": 0.14,
      "learning_rate": 0.00019990393486601384,
      "loss": 0.125,
      "step": 10
    },
    {
      "epoch": 0.15,
      "learning_rate": 0.00019982923880995572,
      "loss": 0.123,
      "step": 11
    },
    {
      "epoch": 0.16,
      "learning_rate": 0.00019973322836635518,
      "loss": 0.1196,
      "step": 12
    },
    {
      "epoch": 0.18,
      "learning_rate": 0.00019961592403425468,
      "loss": 0.1223,
      "step": 13
    },
    {
      "epoch": 0.19,
      "learning_rate": 0.00019947735085912207,
      "loss": 0.1208,
      "step": 14
    },
    {
      "epoch": 0.2,
      "learning_rate": 0.0001993175384275033,
      "loss": 0.1211,
      "step": 15
    },
    {
      "epoch": 0.22,
      "learning_rate": 0.00019913652086070535,
      "loss": 0.1084,
      "step": 16
    },
    {
      "epoch": 0.23,
      "learning_rate": 0.00019893433680751103,
      "loss": 0.1169,
      "step": 17
    },
    {
      "epoch": 0.24,
      "learning_rate": 0.00019871102943592715,
      "loss": 0.1125,
      "step": 18
    },
    {
      "epoch": 0.26,
      "learning_rate": 0.0001984666464239679,
      "loss": 0.1206,
      "step": 19
    },
    {
      "epoch": 0.27,
      "learning_rate": 0.00019820123994947504,
      "loss": 0.124,
      "step": 20
    },
    {
      "epoch": 0.28,
      "learning_rate": 0.0001979148666789775,
      "loss": 0.116,
      "step": 21
    },
    {
      "epoch": 0.3,
      "learning_rate": 0.00019760758775559274,
      "loss": 0.0994,
      "step": 22
    },
    {
      "epoch": 0.31,
      "learning_rate": 0.00019727946878597195,
      "loss": 0.1128,
      "step": 23
    },
    {
      "epoch": 0.32,
      "learning_rate": 0.00019693057982629277,
      "loss": 0.1267,
      "step": 24
    },
    {
      "epoch": 0.34,
      "learning_rate": 0.00019656099536730136,
      "loss": 0.1096,
      "step": 25
    },
    {
      "epoch": 0.35,
      "learning_rate": 0.0001961707943184083,
      "loss": 0.1226,
      "step": 26
    },
    {
      "epoch": 0.36,
      "learning_rate": 0.0001957600599908406,
      "loss": 0.1084,
      "step": 27
    },
    {
      "epoch": 0.38,
      "learning_rate": 0.00019532888007985406,
      "loss": 0.1018,
      "step": 28
    },
    {
      "epoch": 0.39,
      "learning_rate": 0.00019487734664600957,
      "loss": 0.1099,
      "step": 29
    },
    {
      "epoch": 0.41,
      "learning_rate": 0.0001944055560955176,
      "loss": 0.116,
      "step": 30
    },
    {
      "epoch": 0.42,
      "learning_rate": 0.00019391360915965426,
      "loss": 0.1101,
      "step": 31
    },
    {
      "epoch": 0.43,
      "learning_rate": 0.0001934016108732548,
      "loss": 0.1052,
      "step": 32
    },
    {
      "epoch": 0.45,
      "learning_rate": 0.00019286967055228744,
      "loss": 0.1208,
      "step": 33
    },
    {
      "epoch": 0.46,
      "learning_rate": 0.00019231790177051355,
      "loss": 0.0981,
      "step": 34
    },
    {
      "epoch": 0.47,
      "learning_rate": 0.00019174642233523877,
      "loss": 0.1147,
      "step": 35
    },
    {
      "epoch": 0.49,
      "learning_rate": 0.00019115535426216017,
      "loss": 0.1003,
      "step": 36
    },
    {
      "epoch": 0.5,
      "learning_rate": 0.00019054482374931467,
      "loss": 0.0989,
      "step": 37
    },
    {
      "epoch": 0.51,
      "learning_rate": 0.000189914961150135,
      "loss": 0.1091,
      "step": 38
    },
    {
      "epoch": 0.53,
      "learning_rate": 0.00018926590094561782,
      "loss": 0.104,
      "step": 39
    },
    {
      "epoch": 0.54,
      "learning_rate": 0.00018859778171561114,
      "loss": 0.0955,
      "step": 40
    },
    {
      "epoch": 0.55,
      "learning_rate": 0.00018791074610922622,
      "loss": 0.1384,
      "step": 41
    },
    {
      "epoch": 0.57,
      "learning_rate": 0.00018720494081438078,
      "loss": 0.0906,
      "step": 42
    },
    {
      "epoch": 0.58,
      "learning_rate": 0.0001864805165264799,
      "loss": 0.0945,
      "step": 43
    },
    {
      "epoch": 0.59,
      "learning_rate": 0.00018573762791624133,
      "loss": 0.0991,
      "step": 44
    },
    {
      "epoch": 0.61,
      "learning_rate": 0.0001849764335966719,
      "loss": 0.0876,
      "step": 45
    },
    {
      "epoch": 0.62,
      "learning_rate": 0.00018419709608920242,
      "loss": 0.0918,
      "step": 46
    },
    {
      "epoch": 0.64,
      "learning_rate": 0.0001833997817889878,
      "loss": 0.1074,
      "step": 47
    },
    {
      "epoch": 0.65,
      "learning_rate": 0.00018258466092938044,
      "loss": 0.1099,
      "step": 48
    },
    {
      "epoch": 0.66,
      "learning_rate": 0.00018175190754558384,
      "loss": 0.1045,
      "step": 49
    },
    {
      "epoch": 0.68,
      "learning_rate": 0.00018090169943749476,
      "loss": 0.1025,
      "step": 50
    },
    {
      "epoch": 0.69,
      "learning_rate": 0.00018003421813174127,
      "loss": 0.0847,
      "step": 51
    },
    {
      "epoch": 0.7,
      "learning_rate": 0.00017914964884292544,
      "loss": 0.0842,
      "step": 52
    },
    {
      "epoch": 0.72,
      "learning_rate": 0.00017824818043407826,
      "loss": 0.0991,
      "step": 53
    },
    {
      "epoch": 0.73,
      "learning_rate": 0.00017733000537633605,
      "loss": 0.1055,
      "step": 54
    },
    {
      "epoch": 0.74,
      "learning_rate": 0.00017639531970784593,
      "loss": 0.092,
      "step": 55
    },
    {
      "epoch": 0.76,
      "learning_rate": 0.00017544432299191026,
      "loss": 0.0923,
      "step": 56
    },
    {
      "epoch": 0.77,
      "learning_rate": 0.0001744772182743782,
      "loss": 0.1121,
      "step": 57
    },
    {
      "epoch": 0.78,
      "learning_rate": 0.00017349421204029342,
      "loss": 0.0942,
      "step": 58
    },
    {
      "epoch": 0.8,
      "learning_rate": 0.00017249551416980804,
      "loss": 0.1079,
      "step": 59
    },
    {
      "epoch": 0.81,
      "learning_rate": 0.00017148133789337145,
      "loss": 0.0967,
      "step": 60
    },
    {
      "epoch": 0.82,
      "learning_rate": 0.0001704518997462037,
      "loss": 0.1094,
      "step": 61
    },
    {
      "epoch": 0.84,
      "learning_rate": 0.0001694074195220634,
      "loss": 0.0942,
      "step": 62
    },
    {
      "epoch": 0.85,
      "learning_rate": 0.00016834812022631997,
      "loss": 0.0894,
      "step": 63
    },
    {
      "epoch": 0.86,
      "learning_rate": 0.00016727422802834,
      "loss": 0.0972,
      "step": 64
    },
    {
      "epoch": 0.88,
      "learning_rate": 0.00016618597221319833,
      "loss": 0.0894,
      "step": 65
    },
    {
      "epoch": 0.89,
      "learning_rate": 0.00016508358513272358,
      "loss": 0.0791,
      "step": 66
    },
    {
      "epoch": 0.91,
      "learning_rate": 0.00016396730215588915,
      "loss": 0.1182,
      "step": 67
    },
    {
      "epoch": 0.92,
      "learning_rate": 0.00016283736161855993,
      "loss": 0.1042,
      "step": 68
    },
    {
      "epoch": 0.93,
      "learning_rate": 0.00016169400477260566,
      "loss": 0.1021,
      "step": 69
    },
    {
      "epoch": 0.95,
      "learning_rate": 0.00016053747573439145,
      "loss": 0.0808,
      "step": 70
    },
    {
      "epoch": 0.96,
      "learning_rate": 0.0001593680214326571,
      "loss": 0.0967,
      "step": 71
    },
    {
      "epoch": 0.97,
      "learning_rate": 0.0001581858915557953,
      "loss": 0.0884,
      "step": 72
    },
    {
      "epoch": 0.99,
      "learning_rate": 0.00015699133849854163,
      "loss": 0.0903,
      "step": 73
    },
    {
      "epoch": 1.0,
      "learning_rate": 0.00015578461730808574,
      "loss": 0.0989,
      "step": 74
    },
    {
      "epoch": 1.01,
      "learning_rate": 0.00015456598562961667,
      "loss": 0.0784,
      "step": 75
    },
    {
      "epoch": 1.03,
      "learning_rate": 0.00015333570365131353,
      "loss": 0.0815,
      "step": 76
    },
    {
      "epoch": 1.04,
      "learning_rate": 0.00015209403404879303,
      "loss": 0.0815,
      "step": 77
    },
    {
      "epoch": 1.05,
      "learning_rate": 0.0001508412419290261,
      "loss": 0.0786,
      "step": 78
    },
    {
      "epoch": 1.07,
      "learning_rate": 0.0001495775947737352,
      "loss": 0.0776,
      "step": 79
    },
    {
      "epoch": 1.08,
      "learning_rate": 0.0001483033623822848,
      "loss": 0.0925,
      "step": 80
    },
    {
      "epoch": 1.09,
      "learning_rate": 0.00014701881681407683,
      "loss": 0.0713,
      "step": 81
    },
    {
      "epoch": 1.11,
      "learning_rate": 0.00014572423233046386,
      "loss": 0.0918,
      "step": 82
    },
    {
      "epoch": 1.12,
      "learning_rate": 0.00014441988533619182,
      "loss": 0.0981,
      "step": 83
    },
    {
      "epoch": 1.14,
      "learning_rate": 0.00014310605432038526,
      "loss": 0.0781,
      "step": 84
    },
    {
      "epoch": 1.15,
      "learning_rate": 0.0001417830197970877,
      "loss": 0.0811,
      "step": 85
    },
    {
      "epoch": 1.16,
      "learning_rate": 0.00014045106424536937,
      "loss": 0.0811,
      "step": 86
    },
    {
      "epoch": 1.18,
      "learning_rate": 0.0001391104720490156,
      "loss": 0.1033,
      "step": 87
    },
    {
      "epoch": 1.19,
      "learning_rate": 0.00013776152943580848,
      "loss": 0.0828,
      "step": 88
    },
    {
      "epoch": 1.2,
      "learning_rate": 0.00013640452441641463,
      "loss": 0.0933,
      "step": 89
    },
    {
      "epoch": 1.22,
      "learning_rate": 0.00013503974672289296,
      "loss": 0.0818,
      "step": 90
    },
    {
      "epoch": 1.23,
      "learning_rate": 0.00013366748774683375,
      "loss": 0.0764,
      "step": 91
    },
    {
      "epoch": 1.24,
      "learning_rate": 0.00013228804047714463,
      "loss": 0.0774,
      "step": 92
    },
    {
      "epoch": 1.26,
      "learning_rate": 0.00013090169943749476,
      "loss": 0.1003,
      "step": 93
    },
    {
      "epoch": 1.27,
      "learning_rate": 0.00012950876062343148,
      "loss": 0.0674,
      "step": 94
    },
    {
      "epoch": 1.28,
      "learning_rate": 0.00012810952143918285,
      "loss": 0.0684,
      "step": 95
    },
    {
      "epoch": 1.3,
      "learning_rate": 0.0001267042806341593,
      "loss": 0.0815,
      "step": 96
    },
    {
      "epoch": 1.31,
      "learning_rate": 0.00012529333823916807,
      "loss": 0.0938,
      "step": 97
    },
    {
      "epoch": 1.32,
      "learning_rate": 0.0001238769955023542,
      "loss": 0.0791,
      "step": 98
    },
    {
      "epoch": 1.34,
      "learning_rate": 0.00012245555482488134,
      "loss": 0.0828,
      "step": 99
    },
    {
      "epoch": 1.35,
      "learning_rate": 0.00012102931969636664,
      "loss": 0.0693,
      "step": 100
    },
    {
      "epoch": 1.36,
      "learning_rate": 0.00011959859463008316,
      "loss": 0.0979,
      "step": 101
    },
    {
      "epoch": 1.38,
      "learning_rate": 0.00011816368509794364,
      "loss": 0.0996,
      "step": 102
    },
    {
      "epoch": 1.39,
      "learning_rate": 0.00011672489746527979,
      "loss": 0.0774,
      "step": 103
    },
    {
      "epoch": 1.41,
      "learning_rate": 0.00011528253892543053,
      "loss": 0.0852,
      "step": 104
    },
    {
      "epoch": 1.42,
      "learning_rate": 0.00011383691743415363,
      "loss": 0.0701,
      "step": 105
    },
    {
      "epoch": 1.43,
      "learning_rate": 0.0001123883416438748,
      "loss": 0.0618,
      "step": 106
    },
    {
      "epoch": 1.45,
      "learning_rate": 0.00011093712083778746,
      "loss": 0.0669,
      "step": 107
    },
    {
      "epoch": 1.46,
      "learning_rate": 0.00010948356486381827,
      "loss": 0.103,
      "step": 108
    },
    {
      "epoch": 1.47,
      "learning_rate": 0.00010802798406847212,
      "loss": 0.0793,
      "step": 109
    },
    {
      "epoch": 1.49,
      "learning_rate": 0.0001065706892305703,
      "loss": 0.0837,
      "step": 110
    },
    {
      "epoch": 1.5,
      "learning_rate": 0.00010511199149489673,
      "loss": 0.0659,
      "step": 111
    },
    {
      "epoch": 1.51,
      "learning_rate": 0.0001036522023057659,
      "loss": 0.0945,
      "step": 112
    },
    {
      "epoch": 1.53,
      "learning_rate": 0.00010219163334052682,
      "loss": 0.0901,
      "step": 113
    },
    {
      "epoch": 1.54,
      "learning_rate": 0.0001007305964430173,
      "loss": 0.0867,
      "step": 114
    },
    {
      "epoch": 1.55,
      "learning_rate": 9.92694035569827e-05,
      "loss": 0.0752,
      "step": 115
    },
    {
      "epoch": 1.57,
      "learning_rate": 9.780836665947319e-05,
      "loss": 0.092,
      "step": 116
    },
    {
      "epoch": 1.58,
      "learning_rate": 9.63477976942341e-05,
      "loss": 0.084,
      "step": 117
    },
    {
      "epoch": 1.59,
      "learning_rate": 9.48880085051033e-05,
      "loss": 0.0732,
      "step": 118
    },
    {
      "epoch": 1.61,
      "learning_rate": 9.342931076942972e-05,
      "loss": 0.1045,
      "step": 119
    },
    {
      "epoch": 1.62,
      "learning_rate": 9.19720159315279e-05,
      "loss": 0.0955,
      "step": 120
    },
    {
      "epoch": 1.64,
      "learning_rate": 9.051643513618175e-05,
      "loss": 0.1003,
      "step": 121
    },
    {
      "epoch": 1.65,
      "learning_rate": 8.906287916221259e-05,
      "loss": 0.0901,
      "step": 122
    },
    {
      "epoch": 1.66,
      "learning_rate": 8.76116583561252e-05,
      "loss": 0.0732,
      "step": 123
    },
    {
      "epoch": 1.68,
      "learning_rate": 8.616308256584637e-05,
      "loss": 0.0852,
      "step": 124
    },
    {
      "epoch": 1.69,
      "learning_rate": 8.47174610745695e-05,
      "loss": 0.0989,
      "step": 125
    },
    {
      "epoch": 1.7,
      "learning_rate": 8.327510253472023e-05,
      "loss": 0.0701,
      "step": 126
    },
    {
      "epoch": 1.72,
      "learning_rate": 8.183631490205637e-05,
      "loss": 0.0898,
      "step": 127
    },
    {
      "epoch": 1.73,
      "learning_rate": 8.040140536991687e-05,
      "loss": 0.0867,
      "step": 128
    },
    {
      "epoch": 1.74,
      "learning_rate": 7.89706803036334e-05,
      "loss": 0.0901,
      "step": 129
    },
    {
      "epoch": 1.76,
      "learning_rate": 7.75444451751187e-05,
      "loss": 0.0697,
      "step": 130
    },
    {
      "epoch": 1.77,
      "learning_rate": 7.61230044976458e-05,
      "loss": 0.0854,
      "step": 131
    },
    {
      "epoch": 1.78,
      "learning_rate": 7.470666176083192e-05,
      "loss": 0.0779,
      "step": 132
    },
    {
      "epoch": 1.8,
      "learning_rate": 7.329571936584072e-05,
      "loss": 0.0852,
      "step": 133
    },
    {
      "epoch": 1.81,
      "learning_rate": 7.189047856081719e-05,
      "loss": 0.0747,
      "step": 134
    },
    {
      "epoch": 1.82,
      "learning_rate": 7.049123937656855e-05,
      "loss": 0.0928,
      "step": 135
    },
    {
      "epoch": 1.84,
      "learning_rate": 6.909830056250527e-05,
      "loss": 0.0786,
      "step": 136
    },
    {
      "epoch": 1.85,
      "learning_rate": 6.77119595228554e-05,
      "loss": 0.0933,
      "step": 137
    },
    {
      "epoch": 1.86,
      "learning_rate": 6.63325122531663e-05,
      "loss": 0.0662,
      "step": 138
    },
    {
      "epoch": 1.88,
      "learning_rate": 6.496025327710707e-05,
      "loss": 0.0847,
      "step": 139
    },
    {
      "epoch": 1.89,
      "learning_rate": 6.359547558358533e-05,
      "loss": 0.0808,
      "step": 140
    },
    {
      "epoch": 1.91,
      "learning_rate": 6.223847056419154e-05,
      "loss": 0.0776,
      "step": 141
    },
    {
      "epoch": 1.92,
      "learning_rate": 6.0889527950984416e-05,
      "loss": 0.1028,
      "step": 142
    },
    {
      "epoch": 1.93,
      "learning_rate": 5.954893575463064e-05,
      "loss": 0.0725,
      "step": 143
    },
    {
      "epoch": 1.95,
      "learning_rate": 5.821698020291234e-05,
      "loss": 0.072,
      "step": 144
    },
    {
      "epoch": 1.96,
      "learning_rate": 5.689394567961477e-05,
      "loss": 0.0823,
      "step": 145
    },
    {
      "epoch": 1.97,
      "learning_rate": 5.558011466380823e-05,
      "loss": 0.083,
      "step": 146
    },
    {
      "epoch": 1.99,
      "learning_rate": 5.4275767669536146e-05,
      "loss": 0.0752,
      "step": 147
    },
    {
      "epoch": 2.0,
      "learning_rate": 5.2981183185923156e-05,
      "loss": 0.0928,
      "step": 148
    },
    {
      "epoch": 2.01,
      "learning_rate": 5.169663761771521e-05,
      "loss": 0.0654,
      "step": 149
    },
    {
      "epoch": 2.03,
      "learning_rate": 5.042240522626482e-05,
      "loss": 0.0669,
      "step": 150
    },
    {
      "epoch": 2.04,
      "learning_rate": 4.91587580709739e-05,
      "loss": 0.079,
      "step": 151
    },
    {
      "epoch": 2.05,
      "learning_rate": 4.790596595120699e-05,
      "loss": 0.0623,
      "step": 152
    },
    {
      "epoch": 2.07,
      "learning_rate": 4.666429634868651e-05,
      "loss": 0.0542,
      "step": 153
    },
    {
      "epoch": 2.08,
      "learning_rate": 4.543401437038335e-05,
      "loss": 0.0703,
      "step": 154
    },
    {
      "epoch": 2.09,
      "learning_rate": 4.4215382691914266e-05,
      "loss": 0.0664,
      "step": 155
    },
    {
      "epoch": 2.11,
      "learning_rate": 4.300866150145837e-05,
      "loss": 0.0723,
      "step": 156
    },
    {
      "epoch": 2.12,
      "learning_rate": 4.181410844420474e-05,
      "loss": 0.0591,
      "step": 157
    },
    {
      "epoch": 2.14,
      "learning_rate": 4.0631978567342946e-05,
      "loss": 0.0537,
      "step": 158
    },
    {
      "epoch": 2.15,
      "learning_rate": 3.946252426560855e-05,
      "loss": 0.0554,
      "step": 159
    },
    {
      "epoch": 2.16,
      "learning_rate": 3.8305995227394365e-05,
      "loss": 0.0703,
      "step": 160
    },
    {
      "epoch": 2.18,
      "learning_rate": 3.716263838144007e-05,
      "loss": 0.0525,
      "step": 161
    },
    {
      "epoch": 2.19,
      "learning_rate": 3.60326978441109e-05,
      "loss": 0.0798,
      "step": 162
    },
    {
      "epoch": 2.2,
      "learning_rate": 3.491641486727645e-05,
      "loss": 0.054,
      "step": 163
    },
    {
      "epoch": 2.22,
      "learning_rate": 3.3814027786801673e-05,
      "loss": 0.0657,
      "step": 164
    },
    {
      "epoch": 2.23,
      "learning_rate": 3.272577197166e-05,
      "loss": 0.0546,
      "step": 165
    },
    {
      "epoch": 2.24,
      "learning_rate": 3.1651879773680074e-05,
      "loss": 0.0648,
      "step": 166
    },
    {
      "epoch": 2.26,
      "learning_rate": 3.059258047793661e-05,
      "loss": 0.0575,
      "step": 167
    },
    {
      "epoch": 2.27,
      "learning_rate": 2.954810025379633e-05,
      "loss": 0.0476,
      "step": 168
    },
    {
      "epoch": 2.28,
      "learning_rate": 2.851866210662858e-05,
      "loss": 0.0433,
      "step": 169
    },
    {
      "epoch": 2.3,
      "learning_rate": 2.7504485830191984e-05,
      "loss": 0.0784,
      "step": 170
    },
    {
      "epoch": 2.31,
      "learning_rate": 2.650578795970661e-05,
      "loss": 0.0466,
      "step": 171
    },
    {
      "epoch": 2.32,
      "learning_rate": 2.5522781725621813e-05,
      "loss": 0.0612,
      "step": 172
    },
    {
      "epoch": 2.34,
      "learning_rate": 2.455567700808974e-05,
      "loss": 0.0498,
      "step": 173
    },
    {
      "epoch": 2.35,
      "learning_rate": 2.360468029215409e-05,
      "loss": 0.0426,
      "step": 174
    },
    {
      "epoch": 2.36,
      "learning_rate": 2.2669994623664005e-05,
      "loss": 0.049,
      "step": 175
    },
    {
      "epoch": 2.38,
      "learning_rate": 2.1751819565921773e-05,
      "loss": 0.047,
      "step": 176
    },
    {
      "epoch": 2.39,
      "learning_rate": 2.0850351157074598e-05,
      "loss": 0.0652,
      "step": 177
    },
    {
      "epoch": 2.41,
      "learning_rate": 1.996578186825876e-05,
      "loss": 0.0532,
      "step": 178
    },
    {
      "epoch": 2.42,
      "learning_rate": 1.9098300562505266e-05,
      "loss": 0.0613,
      "step": 179
    },
    {
      "epoch": 2.43,
      "learning_rate": 1.8248092454416165e-05,
      "loss": 0.0614,
      "step": 180
    },
    {
      "epoch": 2.45,
      "learning_rate": 1.7415339070619586e-05,
      "loss": 0.048,
      "step": 181
    },
    {
      "epoch": 2.46,
      "learning_rate": 1.660021821101222e-05,
      "loss": 0.0685,
      "step": 182
    },
    {
      "epoch": 2.47,
      "learning_rate": 1.5802903910797583e-05,
      "loss": 0.0409,
      "step": 183
    },
    {
      "epoch": 2.49,
      "learning_rate": 1.5023566403328104e-05,
      "loss": 0.0531,
      "step": 184
    },
    {
      "epoch": 2.5,
      "learning_rate": 1.4262372083758713e-05,
      "loss": 0.0634,
      "step": 185
    },
    {
      "epoch": 2.51,
      "learning_rate": 1.3519483473520122e-05,
      "loss": 0.0593,
      "step": 186
    },
    {
      "epoch": 2.53,
      "learning_rate": 1.2795059185619229e-05,
      "loss": 0.0687,
      "step": 187
    },
    {
      "epoch": 2.54,
      "learning_rate": 1.2089253890773789e-05,
      "loss": 0.0696,
      "step": 188
    },
    {
      "epoch": 2.55,
      "learning_rate": 1.1402218284388844e-05,
      "loss": 0.0641,
      "step": 189
    },
    {
      "epoch": 2.57,
      "learning_rate": 1.0734099054382185e-05,
      "loss": 0.059,
      "step": 190
    },
    {
      "epoch": 2.58,
      "learning_rate": 1.0085038849865025e-05,
      "loss": 0.0601,
      "step": 191
    },
    {
      "epoch": 2.59,
      "learning_rate": 9.455176250685338e-06,
      "loss": 0.0424,
      "step": 192
    },
    {
      "epoch": 2.61,
      "learning_rate": 8.844645737839873e-06,
      "loss": 0.0754,
      "step": 193
    },
    {
      "epoch": 2.62,
      "learning_rate": 8.253577664761258e-06,
      "loss": 0.0491,
      "step": 194
    },
    {
      "epoch": 2.64,
      "learning_rate": 7.682098229486479e-06,
      "loss": 0.0425,
      "step": 195
    },
    {
      "epoch": 2.65,
      "learning_rate": 7.130329447712581e-06,
      "loss": 0.0505,
      "step": 196
    },
    {
      "epoch": 2.66,
      "learning_rate": 6.598389126745208e-06,
      "loss": 0.0499,
      "step": 197
    },
    {
      "epoch": 2.68,
      "learning_rate": 6.086390840345757e-06,
      "loss": 0.0457,
      "step": 198
    },
    {
      "epoch": 2.69,
      "learning_rate": 5.594443904482438e-06,
      "loss": 0.0533,
      "step": 199
    },
    {
      "epoch": 2.7,
      "learning_rate": 5.122653353990436e-06,
      "loss": 0.0522,
      "step": 200
    },
    {
      "epoch": 2.72,
      "learning_rate": 4.671119920145983e-06,
      "loss": 0.0596,
      "step": 201
    },
    {
      "epoch": 2.73,
      "learning_rate": 4.2399400091594154e-06,
      "loss": 0.0486,
      "step": 202
    },
    {
      "epoch": 2.74,
      "learning_rate": 3.829205681591697e-06,
      "loss": 0.0425,
      "step": 203
    },
    {
      "epoch": 2.76,
      "learning_rate": 3.4390046326986504e-06,
      "loss": 0.0455,
      "step": 204
    },
    {
      "epoch": 2.77,
      "learning_rate": 3.0694201737072492e-06,
      "loss": 0.0505,
      "step": 205
    },
    {
      "epoch": 2.78,
      "learning_rate": 2.720531214028055e-06,
      "loss": 0.0503,
      "step": 206
    },
    {
      "epoch": 2.8,
      "learning_rate": 2.392412244407294e-06,
      "loss": 0.0674,
      "step": 207
    },
    {
      "epoch": 2.81,
      "learning_rate": 2.0851333210225033e-06,
      "loss": 0.0629,
      "step": 208
    },
    {
      "epoch": 2.82,
      "learning_rate": 1.7987600505249724e-06,
      "loss": 0.0491,
      "step": 209
    },
    {
      "epoch": 2.84,
      "learning_rate": 1.5333535760320928e-06,
      "loss": 0.0413,
      "step": 210
    },
    {
      "epoch": 2.85,
      "learning_rate": 1.2889705640728444e-06,
      "loss": 0.0515,
      "step": 211
    },
    {
      "epoch": 2.86,
      "learning_rate": 1.0656631924889749e-06,
      "loss": 0.0476,
      "step": 212
    },
    {
      "epoch": 2.88,
      "learning_rate": 8.634791392946428e-07,
      "loss": 0.0555,
      "step": 213
    },
    {
      "epoch": 2.89,
      "learning_rate": 6.824615724966843e-07,
      "loss": 0.0582,
      "step": 214
    },
    {
      "epoch": 2.91,
      "learning_rate": 5.226491408779288e-07,
      "loss": 0.0586,
      "step": 215
    },
    {
      "epoch": 2.92,
      "learning_rate": 3.840759657453452e-07,
      "loss": 0.0559,
      "step": 216
    },
    {
      "epoch": 2.93,
      "learning_rate": 2.667716336448356e-07,
      "loss": 0.0583,
      "step": 217
    },
    {
      "epoch": 2.95,
      "learning_rate": 1.707611900442996e-07,
      "loss": 0.0441,
      "step": 218
    },
    {
      "epoch": 2.96,
      "learning_rate": 9.606513398617845e-08,
      "loss": 0.0754,
      "step": 219
    },
    {
      "epoch": 2.97,
      "learning_rate": 4.269941371073394e-08,
      "loss": 0.0579,
      "step": 220
    },
    {
      "epoch": 2.99,
      "learning_rate": 1.0675423250994243e-08,
      "loss": 0.0377,
      "step": 221
    },
    {
      "epoch": 3.0,
      "learning_rate": 0.0,
      "loss": 0.0557,
      "step": 222
    },
    {
      "epoch": 3.0,
      "step": 222,
      "total_flos": 5266094948352.0,
      "train_loss": 0.08446495812218469,
      "train_runtime": 2892.0758,
      "train_samples_per_second": 4.87,
      "train_steps_per_second": 0.077
    }
  ],
  "logging_steps": 1.0,
  "max_steps": 222,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 800,
  "total_flos": 5266094948352.0,
  "train_batch_size": 32,
  "trial_name": null,
  "trial_params": null
}