{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 3.0,
  "eval_steps": 500,
  "global_step": 330,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.01,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 1.1157,
      "step": 1
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.000000000000001e-06,
      "loss": 1.082,
      "step": 2
    },
    {
      "epoch": 0.03,
      "learning_rate": 6e-06,
      "loss": 1.0156,
      "step": 3
    },
    {
      "epoch": 0.04,
      "learning_rate": 8.000000000000001e-06,
      "loss": 0.7246,
      "step": 4
    },
    {
      "epoch": 0.05,
      "learning_rate": 1e-05,
      "loss": 0.3215,
      "step": 5
    },
    {
      "epoch": 0.05,
      "learning_rate": 1.2e-05,
      "loss": 0.2169,
      "step": 6
    },
    {
      "epoch": 0.06,
      "learning_rate": 1.4e-05,
      "loss": 0.1764,
      "step": 7
    },
    {
      "epoch": 0.07,
      "learning_rate": 1.6000000000000003e-05,
      "loss": 0.3184,
      "step": 8
    },
    {
      "epoch": 0.08,
      "learning_rate": 1.8e-05,
      "loss": 0.1483,
      "step": 9
    },
    {
      "epoch": 0.09,
      "learning_rate": 2e-05,
      "loss": 0.1492,
      "step": 10
    },
    {
      "epoch": 0.1,
      "learning_rate": 1.999951808959328e-05,
      "loss": 0.3608,
      "step": 11
    },
    {
      "epoch": 0.11,
      "learning_rate": 1.9998072404820648e-05,
      "loss": 0.1768,
      "step": 12
    },
    {
      "epoch": 0.12,
      "learning_rate": 1.9995663085020215e-05,
      "loss": 0.1752,
      "step": 13
    },
    {
      "epoch": 0.13,
      "learning_rate": 1.9992290362407232e-05,
      "loss": 0.1899,
      "step": 14
    },
    {
      "epoch": 0.14,
      "learning_rate": 1.9987954562051724e-05,
      "loss": 0.1331,
      "step": 15
    },
    {
      "epoch": 0.15,
      "learning_rate": 1.998265610184716e-05,
      "loss": 0.1675,
      "step": 16
    },
    {
      "epoch": 0.15,
      "learning_rate": 1.997639549247016e-05,
      "loss": 0.1531,
      "step": 17
    },
    {
      "epoch": 0.16,
      "learning_rate": 1.9969173337331283e-05,
      "loss": 0.1414,
      "step": 18
    },
    {
      "epoch": 0.17,
      "learning_rate": 1.9960990332516875e-05,
      "loss": 0.1227,
      "step": 19
    },
    {
      "epoch": 0.18,
      "learning_rate": 1.995184726672197e-05,
      "loss": 0.1495,
      "step": 20
    },
    {
      "epoch": 0.19,
      "learning_rate": 1.9941745021174284e-05,
      "loss": 0.1515,
      "step": 21
    },
    {
      "epoch": 0.2,
      "learning_rate": 1.9930684569549265e-05,
      "loss": 0.1419,
      "step": 22
    },
    {
      "epoch": 0.21,
      "learning_rate": 1.991866697787626e-05,
      "loss": 0.1406,
      "step": 23
    },
    {
      "epoch": 0.22,
      "learning_rate": 1.990569340443577e-05,
      "loss": 0.1284,
      "step": 24
    },
    {
      "epoch": 0.23,
      "learning_rate": 1.989176509964781e-05,
      "loss": 0.1436,
      "step": 25
    },
    {
      "epoch": 0.24,
      "learning_rate": 1.9876883405951378e-05,
      "loss": 0.1414,
      "step": 26
    },
    {
      "epoch": 0.25,
      "learning_rate": 1.9861049757675087e-05,
      "loss": 0.1323,
      "step": 27
    },
    {
      "epoch": 0.25,
      "learning_rate": 1.9844265680898917e-05,
      "loss": 0.1447,
      "step": 28
    },
    {
      "epoch": 0.26,
      "learning_rate": 1.982653279330712e-05,
      "loss": 0.1517,
      "step": 29
    },
    {
      "epoch": 0.27,
      "learning_rate": 1.9807852804032306e-05,
      "loss": 0.1463,
      "step": 30
    },
    {
      "epoch": 0.28,
      "learning_rate": 1.9788227513490724e-05,
      "loss": 0.1349,
      "step": 31
    },
    {
      "epoch": 0.29,
      "learning_rate": 1.9767658813208725e-05,
      "loss": 0.1534,
      "step": 32
    },
    {
      "epoch": 0.3,
      "learning_rate": 1.974614868564045e-05,
      "loss": 0.1437,
      "step": 33
    },
    {
      "epoch": 0.31,
      "learning_rate": 1.9723699203976768e-05,
      "loss": 0.1225,
      "step": 34
    },
    {
      "epoch": 0.32,
      "learning_rate": 1.9700312531945444e-05,
      "loss": 0.1322,
      "step": 35
    },
    {
      "epoch": 0.33,
      "learning_rate": 1.96759909236026e-05,
      "loss": 0.1331,
      "step": 36
    },
    {
      "epoch": 0.34,
      "learning_rate": 1.9650736723115476e-05,
      "loss": 0.1261,
      "step": 37
    },
    {
      "epoch": 0.35,
      "learning_rate": 1.9624552364536472e-05,
      "loss": 0.1213,
      "step": 38
    },
    {
      "epoch": 0.35,
      "learning_rate": 1.9597440371568576e-05,
      "loss": 0.1149,
      "step": 39
    },
    {
      "epoch": 0.36,
      "learning_rate": 1.956940335732209e-05,
      "loss": 0.1254,
      "step": 40
    },
    {
      "epoch": 0.37,
      "learning_rate": 1.9540444024062807e-05,
      "loss": 0.121,
      "step": 41
    },
    {
      "epoch": 0.38,
      "learning_rate": 1.9510565162951538e-05,
      "loss": 0.1371,
      "step": 42
    },
    {
      "epoch": 0.39,
      "learning_rate": 1.9479769653775107e-05,
      "loss": 0.1579,
      "step": 43
    },
    {
      "epoch": 0.4,
      "learning_rate": 1.944806046466878e-05,
      "loss": 0.1309,
      "step": 44
    },
    {
      "epoch": 0.41,
      "learning_rate": 1.941544065183021e-05,
      "loss": 0.1232,
      "step": 45
    },
    {
      "epoch": 0.42,
      "learning_rate": 1.9381913359224844e-05,
      "loss": 0.1309,
      "step": 46
    },
    {
      "epoch": 0.43,
      "learning_rate": 1.9347481818282927e-05,
      "loss": 0.1241,
      "step": 47
    },
    {
      "epoch": 0.44,
      "learning_rate": 1.9312149347588035e-05,
      "loss": 0.1333,
      "step": 48
    },
    {
      "epoch": 0.45,
      "learning_rate": 1.9275919352557242e-05,
      "loss": 0.1309,
      "step": 49
    },
    {
      "epoch": 0.45,
      "learning_rate": 1.9238795325112867e-05,
      "loss": 0.1167,
      "step": 50
    },
    {
      "epoch": 0.46,
      "learning_rate": 1.920078084334595e-05,
      "loss": 0.1165,
      "step": 51
    },
    {
      "epoch": 0.47,
      "learning_rate": 1.916187957117136e-05,
      "loss": 0.1158,
      "step": 52
    },
    {
      "epoch": 0.48,
      "learning_rate": 1.9122095257974676e-05,
      "loss": 0.1201,
      "step": 53
    },
    {
      "epoch": 0.49,
      "learning_rate": 1.9081431738250815e-05,
      "loss": 0.1453,
      "step": 54
    },
    {
      "epoch": 0.5,
      "learning_rate": 1.9039892931234434e-05,
      "loss": 0.1194,
      "step": 55
    },
    {
      "epoch": 0.51,
      "learning_rate": 1.8997482840522218e-05,
      "loss": 0.1323,
      "step": 56
    },
    {
      "epoch": 0.52,
      "learning_rate": 1.895420555368697e-05,
      "loss": 0.1325,
      "step": 57
    },
    {
      "epoch": 0.53,
      "learning_rate": 1.891006524188368e-05,
      "loss": 0.1209,
      "step": 58
    },
    {
      "epoch": 0.54,
      "learning_rate": 1.8865066159447468e-05,
      "loss": 0.1304,
      "step": 59
    },
    {
      "epoch": 0.55,
      "learning_rate": 1.881921264348355e-05,
      "loss": 0.1283,
      "step": 60
    },
    {
      "epoch": 0.55,
      "learning_rate": 1.8772509113449243e-05,
      "loss": 0.1204,
      "step": 61
    },
    {
      "epoch": 0.56,
      "learning_rate": 1.8724960070727974e-05,
      "loss": 0.1387,
      "step": 62
    },
    {
      "epoch": 0.57,
      "learning_rate": 1.8676570098195443e-05,
      "loss": 0.126,
      "step": 63
    },
    {
      "epoch": 0.58,
      "learning_rate": 1.862734385977792e-05,
      "loss": 0.1206,
      "step": 64
    },
    {
      "epoch": 0.59,
      "learning_rate": 1.8577286100002723e-05,
      "loss": 0.1086,
      "step": 65
    },
    {
      "epoch": 0.6,
      "learning_rate": 1.8526401643540924e-05,
      "loss": 0.1212,
      "step": 66
    },
    {
      "epoch": 0.61,
      "learning_rate": 1.8474695394742345e-05,
      "loss": 0.1266,
      "step": 67
    },
    {
      "epoch": 0.62,
      "learning_rate": 1.8422172337162865e-05,
      "loss": 0.1277,
      "step": 68
    },
    {
      "epoch": 0.63,
      "learning_rate": 1.8368837533084092e-05,
      "loss": 0.1342,
      "step": 69
    },
    {
      "epoch": 0.64,
      "learning_rate": 1.8314696123025456e-05,
      "loss": 0.1227,
      "step": 70
    },
    {
      "epoch": 0.65,
      "learning_rate": 1.825975332524873e-05,
      "loss": 0.1191,
      "step": 71
    },
    {
      "epoch": 0.65,
      "learning_rate": 1.8204014435255136e-05,
      "loss": 0.11,
      "step": 72
    },
    {
      "epoch": 0.66,
      "learning_rate": 1.8147484825274895e-05,
      "loss": 0.1202,
      "step": 73
    },
    {
      "epoch": 0.67,
      "learning_rate": 1.8090169943749477e-05,
      "loss": 0.1083,
      "step": 74
    },
    {
      "epoch": 0.68,
      "learning_rate": 1.803207531480645e-05,
      "loss": 0.0953,
      "step": 75
    },
    {
      "epoch": 0.69,
      "learning_rate": 1.797320653772707e-05,
      "loss": 0.1032,
      "step": 76
    },
    {
      "epoch": 0.7,
      "learning_rate": 1.7913569286406606e-05,
      "loss": 0.1109,
      "step": 77
    },
    {
      "epoch": 0.71,
      "learning_rate": 1.785316930880745e-05,
      "loss": 0.127,
      "step": 78
    },
    {
      "epoch": 0.72,
      "learning_rate": 1.779201242640517e-05,
      "loss": 0.143,
      "step": 79
    },
    {
      "epoch": 0.73,
      "learning_rate": 1.773010453362737e-05,
      "loss": 0.124,
      "step": 80
    },
    {
      "epoch": 0.74,
      "learning_rate": 1.7667451597285617e-05,
      "loss": 0.1215,
      "step": 81
    },
    {
      "epoch": 0.75,
      "learning_rate": 1.7604059656000313e-05,
      "loss": 0.1086,
      "step": 82
    },
    {
      "epoch": 0.75,
      "learning_rate": 1.7539934819618696e-05,
      "loss": 0.1035,
      "step": 83
    },
    {
      "epoch": 0.76,
      "learning_rate": 1.747508326862597e-05,
      "loss": 0.0967,
      "step": 84
    },
    {
      "epoch": 0.77,
      "learning_rate": 1.7409511253549592e-05,
      "loss": 0.1171,
      "step": 85
    },
    {
      "epoch": 0.78,
      "learning_rate": 1.7343225094356857e-05,
      "loss": 0.1216,
      "step": 86
    },
    {
      "epoch": 0.79,
      "learning_rate": 1.727623117984575e-05,
      "loss": 0.1075,
      "step": 87
    },
    {
      "epoch": 0.8,
      "learning_rate": 1.720853596702919e-05,
      "loss": 0.1187,
      "step": 88
    },
    {
      "epoch": 0.81,
      "learning_rate": 1.7140145980512684e-05,
      "loss": 0.1092,
      "step": 89
    },
    {
      "epoch": 0.82,
      "learning_rate": 1.7071067811865477e-05,
      "loss": 0.134,
      "step": 90
    },
    {
      "epoch": 0.83,
      "learning_rate": 1.7001308118985237e-05,
      "loss": 0.1008,
      "step": 91
    },
    {
      "epoch": 0.84,
      "learning_rate": 1.6930873625456362e-05,
      "loss": 0.1127,
      "step": 92
    },
    {
      "epoch": 0.85,
      "learning_rate": 1.685977111990193e-05,
      "loss": 0.1024,
      "step": 93
    },
    {
      "epoch": 0.85,
      "learning_rate": 1.678800745532942e-05,
      "loss": 0.1041,
      "step": 94
    },
    {
      "epoch": 0.86,
      "learning_rate": 1.6715589548470187e-05,
      "loss": 0.1287,
      "step": 95
    },
    {
      "epoch": 0.87,
      "learning_rate": 1.664252437911282e-05,
      "loss": 0.1157,
      "step": 96
    },
    {
      "epoch": 0.88,
      "learning_rate": 1.6568818989430416e-05,
      "loss": 0.1132,
      "step": 97
    },
    {
      "epoch": 0.89,
      "learning_rate": 1.6494480483301836e-05,
      "loss": 0.1151,
      "step": 98
    },
    {
      "epoch": 0.9,
      "learning_rate": 1.641951602562703e-05,
      "loss": 0.1016,
      "step": 99
    },
    {
      "epoch": 0.91,
      "learning_rate": 1.6343932841636455e-05,
      "loss": 0.1094,
      "step": 100
    },
    {
      "epoch": 0.92,
      "learning_rate": 1.6267738216194698e-05,
      "loss": 0.1066,
      "step": 101
    },
    {
      "epoch": 0.93,
      "learning_rate": 1.6190939493098344e-05,
      "loss": 0.1069,
      "step": 102
    },
    {
      "epoch": 0.94,
      "learning_rate": 1.6113544074368166e-05,
      "loss": 0.1185,
      "step": 103
    },
    {
      "epoch": 0.95,
      "learning_rate": 1.6035559419535714e-05,
      "loss": 0.1226,
      "step": 104
    },
    {
      "epoch": 0.95,
      "learning_rate": 1.5956993044924334e-05,
      "loss": 0.1036,
      "step": 105
    },
    {
      "epoch": 0.96,
      "learning_rate": 1.5877852522924733e-05,
      "loss": 0.1055,
      "step": 106
    },
    {
      "epoch": 0.97,
      "learning_rate": 1.579814548126514e-05,
      "loss": 0.1236,
      "step": 107
    },
    {
      "epoch": 0.98,
      "learning_rate": 1.5717879602276123e-05,
      "loss": 0.1119,
      "step": 108
    },
    {
      "epoch": 0.99,
      "learning_rate": 1.5637062622150168e-05,
      "loss": 0.1162,
      "step": 109
    },
    {
      "epoch": 1.0,
      "learning_rate": 1.5555702330196024e-05,
      "loss": 0.1254,
      "step": 110
    },
    {
      "epoch": 1.01,
      "learning_rate": 1.547380656808797e-05,
      "loss": 0.1168,
      "step": 111
    },
    {
      "epoch": 1.02,
      "learning_rate": 1.5391383229110005e-05,
      "loss": 0.102,
      "step": 112
    },
    {
      "epoch": 1.03,
      "learning_rate": 1.5308440257395095e-05,
      "loss": 0.1043,
      "step": 113
    },
    {
      "epoch": 1.04,
      "learning_rate": 1.5224985647159489e-05,
      "loss": 0.0925,
      "step": 114
    },
    {
      "epoch": 1.05,
      "learning_rate": 1.5141027441932217e-05,
      "loss": 0.1136,
      "step": 115
    },
    {
      "epoch": 1.05,
      "learning_rate": 1.5056573733779848e-05,
      "loss": 0.1081,
      "step": 116
    },
    {
      "epoch": 1.06,
      "learning_rate": 1.4971632662526545e-05,
      "loss": 0.1047,
      "step": 117
    },
    {
      "epoch": 1.07,
      "learning_rate": 1.4886212414969551e-05,
      "loss": 0.1124,
      "step": 118
    },
    {
      "epoch": 1.08,
      "learning_rate": 1.4800321224090114e-05,
      "loss": 0.0949,
      "step": 119
    },
    {
      "epoch": 1.09,
      "learning_rate": 1.4713967368259981e-05,
      "loss": 0.1113,
      "step": 120
    },
    {
      "epoch": 1.1,
      "learning_rate": 1.4627159170443504e-05,
      "loss": 0.0907,
      "step": 121
    },
    {
      "epoch": 1.11,
      "learning_rate": 1.4539904997395468e-05,
      "loss": 0.1129,
      "step": 122
    },
    {
      "epoch": 1.12,
      "learning_rate": 1.4452213258854684e-05,
      "loss": 0.0899,
      "step": 123
    },
    {
      "epoch": 1.13,
      "learning_rate": 1.436409240673342e-05,
      "loss": 0.0906,
      "step": 124
    },
    {
      "epoch": 1.14,
      "learning_rate": 1.4275550934302822e-05,
      "loss": 0.0928,
      "step": 125
    },
    {
      "epoch": 1.15,
      "learning_rate": 1.4186597375374283e-05,
      "loss": 0.118,
      "step": 126
    },
    {
      "epoch": 1.15,
      "learning_rate": 1.4097240303476955e-05,
      "loss": 0.0967,
      "step": 127
    },
    {
      "epoch": 1.16,
      "learning_rate": 1.4007488331031409e-05,
      "loss": 0.0887,
      "step": 128
    },
    {
      "epoch": 1.17,
      "learning_rate": 1.391735010851956e-05,
      "loss": 0.0891,
      "step": 129
    },
    {
      "epoch": 1.18,
      "learning_rate": 1.3826834323650899e-05,
      "loss": 0.0977,
      "step": 130
    },
    {
      "epoch": 1.19,
      "learning_rate": 1.3735949700525164e-05,
      "loss": 0.1004,
      "step": 131
    },
    {
      "epoch": 1.2,
      "learning_rate": 1.3644704998791501e-05,
      "loss": 0.0825,
      "step": 132
    },
    {
      "epoch": 1.21,
      "learning_rate": 1.3553109012804162e-05,
      "loss": 0.0958,
      "step": 133
    },
    {
      "epoch": 1.22,
      "learning_rate": 1.346117057077493e-05,
      "loss": 0.1005,
      "step": 134
    },
    {
      "epoch": 1.23,
      "learning_rate": 1.3368898533922202e-05,
      "loss": 0.0959,
      "step": 135
    },
    {
      "epoch": 1.24,
      "learning_rate": 1.3276301795616937e-05,
      "loss": 0.0959,
      "step": 136
    },
    {
      "epoch": 1.25,
      "learning_rate": 1.3183389280525497e-05,
      "loss": 0.0836,
      "step": 137
    },
    {
      "epoch": 1.25,
      "learning_rate": 1.3090169943749475e-05,
      "loss": 0.0912,
      "step": 138
    },
    {
      "epoch": 1.26,
      "learning_rate": 1.2996652769962567e-05,
      "loss": 0.0966,
      "step": 139
    },
    {
      "epoch": 1.27,
      "learning_rate": 1.2902846772544625e-05,
      "loss": 0.0916,
      "step": 140
    },
    {
      "epoch": 1.28,
      "learning_rate": 1.2808760992712923e-05,
      "loss": 0.0861,
      "step": 141
    },
    {
      "epoch": 1.29,
      "learning_rate": 1.2714404498650743e-05,
      "loss": 0.0884,
      "step": 142
    },
    {
      "epoch": 1.3,
      "learning_rate": 1.2619786384633374e-05,
      "loss": 0.0791,
      "step": 143
    },
    {
      "epoch": 1.31,
      "learning_rate": 1.252491577015158e-05,
      "loss": 0.0821,
      "step": 144
    },
    {
      "epoch": 1.32,
      "learning_rate": 1.242980179903264e-05,
      "loss": 0.1128,
      "step": 145
    },
    {
      "epoch": 1.33,
      "learning_rate": 1.2334453638559057e-05,
      "loss": 0.0871,
      "step": 146
    },
    {
      "epoch": 1.34,
      "learning_rate": 1.2238880478584987e-05,
      "loss": 0.0884,
      "step": 147
    },
    {
      "epoch": 1.35,
      "learning_rate": 1.2143091530650508e-05,
      "loss": 0.1058,
      "step": 148
    },
    {
      "epoch": 1.35,
      "learning_rate": 1.2047096027093798e-05,
      "loss": 0.092,
      "step": 149
    },
    {
      "epoch": 1.36,
      "learning_rate": 1.1950903220161286e-05,
      "loss": 0.1069,
      "step": 150
    },
    {
      "epoch": 1.37,
      "learning_rate": 1.185452238111591e-05,
      "loss": 0.1006,
      "step": 151
    },
    {
      "epoch": 1.38,
      "learning_rate": 1.1757962799343548e-05,
      "loss": 0.087,
      "step": 152
    },
    {
      "epoch": 1.39,
      "learning_rate": 1.1661233781457655e-05,
      "loss": 0.0894,
      "step": 153
    },
    {
      "epoch": 1.4,
      "learning_rate": 1.156434465040231e-05,
      "loss": 0.1061,
      "step": 154
    },
    {
      "epoch": 1.41,
      "learning_rate": 1.1467304744553618e-05,
      "loss": 0.1179,
      "step": 155
    },
    {
      "epoch": 1.42,
      "learning_rate": 1.1370123416819683e-05,
      "loss": 0.0969,
      "step": 156
    },
    {
      "epoch": 1.43,
      "learning_rate": 1.1272810033739134e-05,
      "loss": 0.1108,
      "step": 157
    },
    {
      "epoch": 1.44,
      "learning_rate": 1.1175373974578378e-05,
      "loss": 0.0968,
      "step": 158
    },
    {
      "epoch": 1.45,
      "learning_rate": 1.1077824630427593e-05,
      "loss": 0.1031,
      "step": 159
    },
    {
      "epoch": 1.45,
      "learning_rate": 1.098017140329561e-05,
      "loss": 0.094,
      "step": 160
    },
    {
      "epoch": 1.46,
      "learning_rate": 1.0882423705203698e-05,
      "loss": 0.0778,
      "step": 161
    },
    {
      "epoch": 1.47,
      "learning_rate": 1.0784590957278452e-05,
      "loss": 0.0812,
      "step": 162
    },
    {
      "epoch": 1.48,
      "learning_rate": 1.0686682588843737e-05,
      "loss": 0.1117,
      "step": 163
    },
    {
      "epoch": 1.49,
      "learning_rate": 1.058870803651189e-05,
      "loss": 0.1008,
      "step": 164
    },
    {
      "epoch": 1.5,
      "learning_rate": 1.0490676743274181e-05,
      "loss": 0.092,
      "step": 165
    },
    {
      "epoch": 1.51,
      "learning_rate": 1.0392598157590687e-05,
      "loss": 0.0959,
      "step": 166
    },
    {
      "epoch": 1.52,
      "learning_rate": 1.0294481732479635e-05,
      "loss": 0.1012,
      "step": 167
    },
    {
      "epoch": 1.53,
      "learning_rate": 1.0196336924606282e-05,
      "loss": 0.0991,
      "step": 168
    },
    {
      "epoch": 1.54,
      "learning_rate": 1.0098173193371498e-05,
      "loss": 0.1132,
      "step": 169
    },
    {
      "epoch": 1.55,
      "learning_rate": 1e-05,
      "loss": 0.0865,
      "step": 170
    },
    {
      "epoch": 1.55,
      "learning_rate": 9.901826806628505e-06,
      "loss": 0.0828,
      "step": 171
    },
    {
      "epoch": 1.56,
      "learning_rate": 9.80366307539372e-06,
      "loss": 0.087,
      "step": 172
    },
    {
      "epoch": 1.57,
      "learning_rate": 9.705518267520369e-06,
      "loss": 0.0967,
      "step": 173
    },
    {
      "epoch": 1.58,
      "learning_rate": 9.607401842409318e-06,
      "loss": 0.0854,
      "step": 174
    },
    {
      "epoch": 1.59,
      "learning_rate": 9.50932325672582e-06,
      "loss": 0.0903,
      "step": 175
    },
    {
      "epoch": 1.6,
      "learning_rate": 9.41129196348811e-06,
      "loss": 0.0935,
      "step": 176
    },
    {
      "epoch": 1.61,
      "learning_rate": 9.313317411156265e-06,
      "loss": 0.0974,
      "step": 177
    },
    {
      "epoch": 1.62,
      "learning_rate": 9.215409042721553e-06,
      "loss": 0.0976,
      "step": 178
    },
    {
      "epoch": 1.63,
      "learning_rate": 9.117576294796307e-06,
      "loss": 0.0968,
      "step": 179
    },
    {
      "epoch": 1.64,
      "learning_rate": 9.019828596704394e-06,
      "loss": 0.0985,
      "step": 180
    },
    {
      "epoch": 1.65,
      "learning_rate": 8.922175369572407e-06,
      "loss": 0.0834,
      "step": 181
    },
    {
      "epoch": 1.65,
      "learning_rate": 8.824626025421625e-06,
      "loss": 0.1207,
      "step": 182
    },
    {
      "epoch": 1.66,
      "learning_rate": 8.72718996626087e-06,
      "loss": 0.088,
      "step": 183
    },
    {
      "epoch": 1.67,
      "learning_rate": 8.629876583180322e-06,
      "loss": 0.0934,
      "step": 184
    },
    {
      "epoch": 1.68,
      "learning_rate": 8.532695255446384e-06,
      "loss": 0.0891,
      "step": 185
    },
    {
      "epoch": 1.69,
      "learning_rate": 8.43565534959769e-06,
      "loss": 0.1017,
      "step": 186
    },
    {
      "epoch": 1.7,
      "learning_rate": 8.338766218542348e-06,
      "loss": 0.0998,
      "step": 187
    },
    {
      "epoch": 1.71,
      "learning_rate": 8.242037200656455e-06,
      "loss": 0.0726,
      "step": 188
    },
    {
      "epoch": 1.72,
      "learning_rate": 8.145477618884092e-06,
      "loss": 0.091,
      "step": 189
    },
    {
      "epoch": 1.73,
      "learning_rate": 8.04909677983872e-06,
      "loss": 0.1099,
      "step": 190
    },
    {
      "epoch": 1.74,
      "learning_rate": 7.952903972906205e-06,
      "loss": 0.1021,
      "step": 191
    },
    {
      "epoch": 1.75,
      "learning_rate": 7.856908469349495e-06,
      "loss": 0.0925,
      "step": 192
    },
    {
      "epoch": 1.75,
      "learning_rate": 7.761119521415017e-06,
      "loss": 0.1089,
      "step": 193
    },
    {
      "epoch": 1.76,
      "learning_rate": 7.66554636144095e-06,
      "loss": 0.1067,
      "step": 194
    },
    {
      "epoch": 1.77,
      "learning_rate": 7.570198200967363e-06,
      "loss": 0.0988,
      "step": 195
    },
    {
      "epoch": 1.78,
      "learning_rate": 7.4750842298484205e-06,
      "loss": 0.0957,
      "step": 196
    },
    {
      "epoch": 1.79,
      "learning_rate": 7.380213615366627e-06,
      "loss": 0.1194,
      "step": 197
    },
    {
      "epoch": 1.8,
      "learning_rate": 7.285595501349259e-06,
      "loss": 0.1039,
      "step": 198
    },
    {
      "epoch": 1.81,
      "learning_rate": 7.191239007287082e-06,
      "loss": 0.0911,
      "step": 199
    },
    {
      "epoch": 1.82,
      "learning_rate": 7.097153227455379e-06,
      "loss": 0.1081,
      "step": 200
    },
    {
      "epoch": 1.83,
      "learning_rate": 7.003347230037434e-06,
      "loss": 0.1016,
      "step": 201
    },
    {
      "epoch": 1.84,
      "learning_rate": 6.909830056250527e-06,
      "loss": 0.0986,
      "step": 202
    },
    {
      "epoch": 1.85,
      "learning_rate": 6.816610719474503e-06,
      "loss": 0.1006,
      "step": 203
    },
    {
      "epoch": 1.85,
      "learning_rate": 6.723698204383067e-06,
      "loss": 0.0978,
      "step": 204
    },
    {
      "epoch": 1.86,
      "learning_rate": 6.631101466077801e-06,
      "loss": 0.1046,
      "step": 205
    },
    {
      "epoch": 1.87,
      "learning_rate": 6.538829429225068e-06,
      "loss": 0.0872,
      "step": 206
    },
    {
      "epoch": 1.88,
      "learning_rate": 6.446890987195842e-06,
      "loss": 0.0891,
      "step": 207
    },
    {
      "epoch": 1.89,
      "learning_rate": 6.355295001208504e-06,
      "loss": 0.0919,
      "step": 208
    },
    {
      "epoch": 1.9,
      "learning_rate": 6.2640502994748375e-06,
      "loss": 0.1004,
      "step": 209
    },
    {
      "epoch": 1.91,
      "learning_rate": 6.173165676349103e-06,
      "loss": 0.0837,
      "step": 210
    },
    {
      "epoch": 1.92,
      "learning_rate": 6.082649891480441e-06,
      "loss": 0.0828,
      "step": 211
    },
    {
      "epoch": 1.93,
      "learning_rate": 5.9925116689685925e-06,
      "loss": 0.0937,
      "step": 212
    },
    {
      "epoch": 1.94,
      "learning_rate": 5.902759696523046e-06,
      "loss": 0.0836,
      "step": 213
    },
    {
      "epoch": 1.95,
      "learning_rate": 5.813402624625722e-06,
      "loss": 0.1102,
      "step": 214
    },
    {
      "epoch": 1.95,
      "learning_rate": 5.724449065697182e-06,
      "loss": 0.097,
      "step": 215
    },
    {
      "epoch": 1.96,
      "learning_rate": 5.635907593266578e-06,
      "loss": 0.0809,
      "step": 216
    },
    {
      "epoch": 1.97,
      "learning_rate": 5.54778674114532e-06,
      "loss": 0.0947,
      "step": 217
    },
    {
      "epoch": 1.98,
      "learning_rate": 5.460095002604533e-06,
      "loss": 0.0814,
      "step": 218
    },
    {
      "epoch": 1.99,
      "learning_rate": 5.3728408295565e-06,
      "loss": 0.0982,
      "step": 219
    },
    {
      "epoch": 2.0,
      "learning_rate": 5.286032631740023e-06,
      "loss": 0.0726,
      "step": 220
    },
    {
      "epoch": 2.01,
      "learning_rate": 5.199678775909889e-06,
      "loss": 0.0674,
      "step": 221
    },
    {
      "epoch": 2.02,
      "learning_rate": 5.1137875850304545e-06,
      "loss": 0.079,
      "step": 222
    },
    {
      "epoch": 2.03,
      "learning_rate": 5.0283673374734546e-06,
      "loss": 0.0707,
      "step": 223
    },
    {
      "epoch": 2.04,
      "learning_rate": 4.943426266220156e-06,
      "loss": 0.0679,
      "step": 224
    },
    {
      "epoch": 2.05,
      "learning_rate": 4.858972558067784e-06,
      "loss": 0.0661,
      "step": 225
    },
    {
      "epoch": 2.05,
      "learning_rate": 4.775014352840512e-06,
      "loss": 0.0699,
      "step": 226
    },
    {
      "epoch": 2.06,
      "learning_rate": 4.691559742604906e-06,
      "loss": 0.0608,
      "step": 227
    },
    {
      "epoch": 2.07,
      "learning_rate": 4.608616770889998e-06,
      "loss": 0.0905,
      "step": 228
    },
    {
      "epoch": 2.08,
      "learning_rate": 4.526193431912038e-06,
      "loss": 0.0687,
      "step": 229
    },
    {
      "epoch": 2.09,
      "learning_rate": 4.444297669803981e-06,
      "loss": 0.0648,
      "step": 230
    },
    {
      "epoch": 2.1,
      "learning_rate": 4.362937377849832e-06,
      "loss": 0.0662,
      "step": 231
    },
    {
      "epoch": 2.11,
      "learning_rate": 4.282120397723879e-06,
      "loss": 0.0502,
      "step": 232
    },
    {
      "epoch": 2.12,
      "learning_rate": 4.2018545187348645e-06,
      "loss": 0.0625,
      "step": 233
    },
    {
      "epoch": 2.13,
      "learning_rate": 4.12214747707527e-06,
      "loss": 0.0853,
      "step": 234
    },
    {
      "epoch": 2.14,
      "learning_rate": 4.043006955075667e-06,
      "loss": 0.0774,
      "step": 235
    },
    {
      "epoch": 2.15,
      "learning_rate": 3.964440580464286e-06,
      "loss": 0.0585,
      "step": 236
    },
    {
      "epoch": 2.15,
      "learning_rate": 3.8864559256318375e-06,
      "loss": 0.0697,
      "step": 237
    },
    {
      "epoch": 2.16,
      "learning_rate": 3.8090605069016596e-06,
      "loss": 0.0663,
      "step": 238
    },
    {
      "epoch": 2.17,
      "learning_rate": 3.7322617838053066e-06,
      "loss": 0.0665,
      "step": 239
    },
    {
      "epoch": 2.18,
      "learning_rate": 3.6560671583635467e-06,
      "loss": 0.0691,
      "step": 240
    },
    {
      "epoch": 2.19,
      "learning_rate": 3.58048397437297e-06,
      "loss": 0.0827,
      "step": 241
    },
    {
      "epoch": 2.2,
      "learning_rate": 3.505519516698165e-06,
      "loss": 0.0594,
      "step": 242
    },
    {
      "epoch": 2.21,
      "learning_rate": 3.4311810105695875e-06,
      "loss": 0.0781,
      "step": 243
    },
    {
      "epoch": 2.22,
      "learning_rate": 3.3574756208871862e-06,
      "loss": 0.0573,
      "step": 244
    },
    {
      "epoch": 2.23,
      "learning_rate": 3.284410451529816e-06,
      "loss": 0.0809,
      "step": 245
    },
    {
      "epoch": 2.24,
      "learning_rate": 3.2119925446705824e-06,
      "loss": 0.0698,
      "step": 246
    },
    {
      "epoch": 2.25,
      "learning_rate": 3.140228880098074e-06,
      "loss": 0.0694,
      "step": 247
    },
    {
      "epoch": 2.25,
      "learning_rate": 3.069126374543643e-06,
      "loss": 0.0482,
      "step": 248
    },
    {
      "epoch": 2.26,
      "learning_rate": 2.998691881014765e-06,
      "loss": 0.0688,
      "step": 249
    },
    {
      "epoch": 2.27,
      "learning_rate": 2.9289321881345257e-06,
      "loss": 0.0829,
      "step": 250
    },
    {
      "epoch": 2.28,
      "learning_rate": 2.859854019487318e-06,
      "loss": 0.0689,
      "step": 251
    },
    {
      "epoch": 2.29,
      "learning_rate": 2.791464032970812e-06,
      "loss": 0.0615,
      "step": 252
    },
    {
      "epoch": 2.3,
      "learning_rate": 2.723768820154251e-06,
      "loss": 0.0619,
      "step": 253
    },
    {
      "epoch": 2.31,
      "learning_rate": 2.656774905643147e-06,
      "loss": 0.0605,
      "step": 254
    },
    {
      "epoch": 2.32,
      "learning_rate": 2.5904887464504115e-06,
      "loss": 0.0607,
      "step": 255
    },
    {
      "epoch": 2.33,
      "learning_rate": 2.5249167313740307e-06,
      "loss": 0.0531,
      "step": 256
    },
    {
      "epoch": 2.34,
      "learning_rate": 2.4600651803813057e-06,
      "loss": 0.0531,
      "step": 257
    },
    {
      "epoch": 2.35,
      "learning_rate": 2.395940343999691e-06,
      "loss": 0.0504,
      "step": 258
    },
    {
      "epoch": 2.35,
      "learning_rate": 2.332548402714385e-06,
      "loss": 0.0983,
      "step": 259
    },
    {
      "epoch": 2.36,
      "learning_rate": 2.26989546637263e-06,
      "loss": 0.0811,
      "step": 260
    },
    {
      "epoch": 2.37,
      "learning_rate": 2.207987573594833e-06,
      "loss": 0.0576,
      "step": 261
    },
    {
      "epoch": 2.38,
      "learning_rate": 2.146830691192553e-06,
      "loss": 0.0946,
      "step": 262
    },
    {
      "epoch": 2.39,
      "learning_rate": 2.086430713593397e-06,
      "loss": 0.0441,
      "step": 263
    },
    {
      "epoch": 2.4,
      "learning_rate": 2.02679346227293e-06,
      "loss": 0.0585,
      "step": 264
    },
    {
      "epoch": 2.41,
      "learning_rate": 1.967924685193552e-06,
      "loss": 0.0527,
      "step": 265
    },
    {
      "epoch": 2.42,
      "learning_rate": 1.9098300562505266e-06,
      "loss": 0.0662,
      "step": 266
    },
    {
      "epoch": 2.43,
      "learning_rate": 1.8525151747251058e-06,
      "loss": 0.0449,
      "step": 267
    },
    {
      "epoch": 2.44,
      "learning_rate": 1.7959855647448642e-06,
      "loss": 0.0384,
      "step": 268
    },
    {
      "epoch": 2.45,
      "learning_rate": 1.7402466747512704e-06,
      "loss": 0.0834,
      "step": 269
    },
    {
      "epoch": 2.45,
      "learning_rate": 1.6853038769745466e-06,
      "loss": 0.0517,
      "step": 270
    },
    {
      "epoch": 2.46,
      "learning_rate": 1.6311624669159064e-06,
      "loss": 0.0505,
      "step": 271
    },
    {
      "epoch": 2.47,
      "learning_rate": 1.577827662837136e-06,
      "loss": 0.066,
      "step": 272
    },
    {
      "epoch": 2.48,
      "learning_rate": 1.5253046052576559e-06,
      "loss": 0.0609,
      "step": 273
    },
    {
      "epoch": 2.49,
      "learning_rate": 1.4735983564590784e-06,
      "loss": 0.0614,
      "step": 274
    },
    {
      "epoch": 2.5,
      "learning_rate": 1.4227138999972801e-06,
      "loss": 0.0618,
      "step": 275
    },
    {
      "epoch": 2.51,
      "learning_rate": 1.3726561402220818e-06,
      "loss": 0.0662,
      "step": 276
    },
    {
      "epoch": 2.52,
      "learning_rate": 1.3234299018045615e-06,
      "loss": 0.0628,
      "step": 277
    },
    {
      "epoch": 2.53,
      "learning_rate": 1.2750399292720284e-06,
      "loss": 0.072,
      "step": 278
    },
    {
      "epoch": 2.54,
      "learning_rate": 1.2274908865507595e-06,
      "loss": 0.0593,
      "step": 279
    },
    {
      "epoch": 2.55,
      "learning_rate": 1.1807873565164507e-06,
      "loss": 0.0758,
      "step": 280
    },
    {
      "epoch": 2.55,
      "learning_rate": 1.1349338405525368e-06,
      "loss": 0.0627,
      "step": 281
    },
    {
      "epoch": 2.56,
      "learning_rate": 1.0899347581163222e-06,
      "loss": 0.059,
      "step": 282
    },
    {
      "epoch": 2.57,
      "learning_rate": 1.045794446313031e-06,
      "loss": 0.0748,
      "step": 283
    },
    {
      "epoch": 2.58,
      "learning_rate": 1.0025171594777872e-06,
      "loss": 0.0499,
      "step": 284
    },
    {
      "epoch": 2.59,
      "learning_rate": 9.601070687655667e-07,
      "loss": 0.0541,
      "step": 285
    },
    {
      "epoch": 2.6,
      "learning_rate": 9.185682617491865e-07,
      "loss": 0.0505,
      "step": 286
    },
    {
      "epoch": 2.61,
      "learning_rate": 8.779047420253239e-07,
      "loss": 0.0478,
      "step": 287
    },
    {
      "epoch": 2.62,
      "learning_rate": 8.381204288286415e-07,
      "loss": 0.0704,
      "step": 288
    },
    {
      "epoch": 2.63,
      "learning_rate": 7.992191566540519e-07,
      "loss": 0.0565,
      "step": 289
    },
    {
      "epoch": 2.64,
      "learning_rate": 7.612046748871327e-07,
      "loss": 0.0654,
      "step": 290
    },
    {
      "epoch": 2.65,
      "learning_rate": 7.240806474427598e-07,
      "loss": 0.0633,
      "step": 291
    },
    {
      "epoch": 2.65,
      "learning_rate": 6.878506524119644e-07,
      "loss": 0.0647,
      "step": 292
    },
    {
      "epoch": 2.66,
      "learning_rate": 6.525181817170756e-07,
      "loss": 0.077,
      "step": 293
    },
    {
      "epoch": 2.67,
      "learning_rate": 6.180866407751595e-07,
      "loss": 0.0616,
      "step": 294
    },
    {
      "epoch": 2.68,
      "learning_rate": 5.845593481697931e-07,
      "loss": 0.0638,
      "step": 295
    },
    {
      "epoch": 2.69,
      "learning_rate": 5.519395353312195e-07,
      "loss": 0.0668,
      "step": 296
    },
    {
      "epoch": 2.7,
      "learning_rate": 5.20230346224897e-07,
      "loss": 0.059,
      "step": 297
    },
    {
      "epoch": 2.71,
      "learning_rate": 4.894348370484648e-07,
      "loss": 0.0596,
      "step": 298
    },
    {
      "epoch": 2.72,
      "learning_rate": 4.5955597593719593e-07,
      "loss": 0.0668,
      "step": 299
    },
    {
      "epoch": 2.73,
      "learning_rate": 4.305966426779118e-07,
      "loss": 0.0503,
      "step": 300
    },
    {
      "epoch": 2.74,
      "learning_rate": 4.025596284314259e-07,
      "loss": 0.054,
      "step": 301
    },
    {
      "epoch": 2.75,
      "learning_rate": 3.7544763546352834e-07,
      "loss": 0.0558,
      "step": 302
    },
    {
      "epoch": 2.75,
      "learning_rate": 3.492632768845261e-07,
      "loss": 0.0583,
      "step": 303
    },
    {
      "epoch": 2.76,
      "learning_rate": 3.2400907639740243e-07,
      "loss": 0.0621,
      "step": 304
    },
    {
      "epoch": 2.77,
      "learning_rate": 2.996874680545603e-07,
      "loss": 0.0617,
      "step": 305
    },
    {
      "epoch": 2.78,
      "learning_rate": 2.7630079602323447e-07,
      "loss": 0.0547,
      "step": 306
    },
    {
      "epoch": 2.79,
      "learning_rate": 2.5385131435955e-07,
      "loss": 0.0583,
      "step": 307
    },
    {
      "epoch": 2.8,
      "learning_rate": 2.3234118679127615e-07,
      "loss": 0.0546,
      "step": 308
    },
    {
      "epoch": 2.81,
      "learning_rate": 2.117724865092774e-07,
      "loss": 0.0476,
      "step": 309
    },
    {
      "epoch": 2.82,
      "learning_rate": 1.921471959676957e-07,
      "loss": 0.0627,
      "step": 310
    },
    {
      "epoch": 2.83,
      "learning_rate": 1.734672066928822e-07,
      "loss": 0.0578,
      "step": 311
    },
    {
      "epoch": 2.84,
      "learning_rate": 1.5573431910108404e-07,
      "loss": 0.0457,
      "step": 312
    },
    {
      "epoch": 2.85,
      "learning_rate": 1.3895024232491338e-07,
      "loss": 0.0713,
      "step": 313
    },
    {
      "epoch": 2.85,
      "learning_rate": 1.231165940486234e-07,
      "loss": 0.0668,
      "step": 314
    },
    {
      "epoch": 2.86,
      "learning_rate": 1.0823490035218986e-07,
      "loss": 0.0524,
      "step": 315
    },
    {
      "epoch": 2.87,
      "learning_rate": 9.43065955642275e-08,
      "loss": 0.0484,
      "step": 316
    },
    {
      "epoch": 2.88,
      "learning_rate": 8.133302212373961e-08,
      "loss": 0.0517,
      "step": 317
    },
    {
      "epoch": 2.89,
      "learning_rate": 6.931543045073708e-08,
      "loss": 0.0462,
      "step": 318
    },
    {
      "epoch": 2.9,
      "learning_rate": 5.8254978825718065e-08,
      "loss": 0.0568,
      "step": 319
    },
    {
      "epoch": 2.91,
      "learning_rate": 4.815273327803183e-08,
      "loss": 0.0475,
      "step": 320
    },
    {
      "epoch": 2.92,
      "learning_rate": 3.900966748312862e-08,
      "loss": 0.0692,
      "step": 321
    },
    {
      "epoch": 2.93,
      "learning_rate": 3.082666266872036e-08,
      "loss": 0.0603,
      "step": 322
    },
    {
      "epoch": 2.94,
      "learning_rate": 2.3604507529843e-08,
      "loss": 0.0667,
      "step": 323
    },
    {
      "epoch": 2.95,
      "learning_rate": 1.7343898152841765e-08,
      "loss": 0.0449,
      "step": 324
    },
    {
      "epoch": 2.95,
      "learning_rate": 1.2045437948275952e-08,
      "loss": 0.0583,
      "step": 325
    },
    {
      "epoch": 2.96,
      "learning_rate": 7.70963759277099e-09,
      "loss": 0.0738,
      "step": 326
    },
    {
      "epoch": 2.97,
      "learning_rate": 4.336914979787832e-09,
      "loss": 0.0706,
      "step": 327
    },
    {
      "epoch": 2.98,
      "learning_rate": 1.9275951793518154e-09,
      "loss": 0.0468,
      "step": 328
    },
    {
      "epoch": 2.99,
      "learning_rate": 4.819104067199653e-10,
      "loss": 0.0531,
      "step": 329
    },
    {
      "epoch": 3.0,
      "learning_rate": 0.0,
      "loss": 0.0516,
      "step": 330
    },
    {
      "epoch": 3.0,
      "step": 330,
      "total_flos": 7827978977280.0,
      "train_loss": 0.10793415416370739,
      "train_runtime": 3315.1857,
      "train_samples_per_second": 6.334,
      "train_steps_per_second": 0.1
    }
  ],
  "logging_steps": 1.0,
  "max_steps": 330,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 111,
  "total_flos": 7827978977280.0,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}