Training in progress, step 2700, checkpoint
last-checkpoint/model.safetensors
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:1fb3a406c3ae2178178694f09a8cfb135844a7fa1e5db83421b6125aaf57c021
 size 2066752

last-checkpoint/optimizer.pt
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:a7e9af3023a55c609b9c260ad3ed3c5b75db9d913adcfd13ef5e442cc958c4d6
 size 4121235

last-checkpoint/rng_state.pth
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:be40c1172aa39c40ee109bbd94cd7cacfb86d0ec63ba2aa8711edd3ba6ea6be7
 size 14391

last-checkpoint/scheduler.pt
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:2d378ac30a5ddd89f2b36d4bb043bc41719f19f23d599820219ac57dbfbf22cc
 size 1401

last-checkpoint/trainer_state.json
CHANGED
@@ -2,9 +2,9 @@
   "best_global_step": null,
   "best_metric": null,
   "best_model_checkpoint": null,
-  "epoch": 0.
+  "epoch": 0.11661052086032651,
   "eval_steps": 100,
-  "global_step":
+  "global_step": 2700,
   "is_hyper_param_search": false,
   "is_local_process_zero": true,
   "is_world_process_zero": true,
@@ -18416,6 +18416,714 @@
       "eval_samples_per_second": 1.551,
       "eval_steps_per_second": 0.194,
       "step": 2600
+    },
+    {
+      "epoch": 0.11233480176211454,
+      "grad_norm": 0.5625,
+      "learning_rate": 0.0009894331102272887,
+      "loss": 8.2588,
+      "step": 2601
+    },
+    {
+      "epoch": 0.11237799084391466,
+      "grad_norm": 0.70703125,
+      "learning_rate": 0.0009894185011967993,
+      "loss": 7.9767,
+      "step": 2602
+    },
+    {
+      "epoch": 0.11242117992571478,
+      "grad_norm": 0.515625,
+      "learning_rate": 0.0009894038821825829,
+      "loss": 8.2055,
+      "step": 2603
+    },
+    {
+      "epoch": 0.1124643690075149,
+      "grad_norm": 0.6796875,
+      "learning_rate": 0.0009893892531849372,
+      "loss": 8.27,
+      "step": 2604
+    },
+    {
+      "epoch": 0.11250755808931502,
+      "grad_norm": 0.5,
+      "learning_rate": 0.0009893746142041612,
+      "loss": 8.2553,
+      "step": 2605
+    },
+    {
+      "epoch": 0.11255074717111514,
+      "grad_norm": 0.427734375,
+      "learning_rate": 0.0009893599652405533,
+      "loss": 8.4115,
+      "step": 2606
+    },
+    {
+      "epoch": 0.11259393625291526,
+      "grad_norm": 0.58984375,
+      "learning_rate": 0.000989345306294412,
+      "loss": 8.1283,
+      "step": 2607
+    },
+    {
+      "epoch": 0.11263712533471539,
+      "grad_norm": 0.578125,
+      "learning_rate": 0.0009893306373660368,
+      "loss": 8.1705,
+      "step": 2608
+    },
+    {
+      "epoch": 0.1126803144165155,
+      "grad_norm": 0.458984375,
+      "learning_rate": 0.0009893159584557266,
+      "loss": 8.4239,
+      "step": 2609
+    },
+    {
+      "epoch": 0.11272350349831563,
+      "grad_norm": 0.421875,
+      "learning_rate": 0.0009893012695637812,
+      "loss": 8.3437,
+      "step": 2610
+    },
+    {
+      "epoch": 0.11276669258011575,
+      "grad_norm": 0.400390625,
+      "learning_rate": 0.0009892865706904999,
+      "loss": 8.4394,
+      "step": 2611
+    },
+    {
+      "epoch": 0.11280988166191587,
+      "grad_norm": 0.439453125,
+      "learning_rate": 0.0009892718618361828,
+      "loss": 8.4852,
+      "step": 2612
+    },
+    {
+      "epoch": 0.11285307074371599,
+      "grad_norm": 0.6328125,
+      "learning_rate": 0.0009892571430011297,
+      "loss": 8.2534,
+      "step": 2613
+    },
+    {
+      "epoch": 0.11289625982551611,
+      "grad_norm": 0.51171875,
+      "learning_rate": 0.000989242414185641,
+      "loss": 8.6993,
+      "step": 2614
+    },
+    {
+      "epoch": 0.11293944890731623,
+      "grad_norm": 0.73828125,
+      "learning_rate": 0.0009892276753900174,
+      "loss": 8.6099,
+      "step": 2615
+    },
+    {
+      "epoch": 0.11298263798911636,
+      "grad_norm": 0.7265625,
+      "learning_rate": 0.000989212926614559,
+      "loss": 8.0905,
+      "step": 2616
+    },
+    {
+      "epoch": 0.11302582707091648,
+      "grad_norm": 0.48046875,
+      "learning_rate": 0.000989198167859567,
+      "loss": 8.3292,
+      "step": 2617
+    },
+    {
+      "epoch": 0.1130690161527166,
+      "grad_norm": 0.546875,
+      "learning_rate": 0.0009891833991253424,
+      "loss": 8.4157,
+      "step": 2618
+    },
+    {
+      "epoch": 0.11311220523451672,
+      "grad_norm": 0.470703125,
+      "learning_rate": 0.0009891686204121866,
+      "loss": 8.4406,
+      "step": 2619
+    },
+    {
+      "epoch": 0.11315539431631684,
+      "grad_norm": 0.7421875,
+      "learning_rate": 0.0009891538317204009,
+      "loss": 8.4709,
+      "step": 2620
+    },
+    {
+      "epoch": 0.11319858339811696,
+      "grad_norm": 0.79296875,
+      "learning_rate": 0.0009891390330502869,
+      "loss": 8.0221,
+      "step": 2621
+    },
+    {
+      "epoch": 0.11324177247991708,
+      "grad_norm": 0.6171875,
+      "learning_rate": 0.000989124224402147,
+      "loss": 8.5957,
+      "step": 2622
+    },
+    {
+      "epoch": 0.1132849615617172,
+      "grad_norm": 0.73046875,
+      "learning_rate": 0.0009891094057762825,
+      "loss": 8.5834,
+      "step": 2623
+    },
+    {
+      "epoch": 0.11332815064351733,
+      "grad_norm": 0.4609375,
+      "learning_rate": 0.0009890945771729965,
+      "loss": 8.6162,
+      "step": 2624
+    },
+    {
+      "epoch": 0.11337133972531745,
+      "grad_norm": 0.93359375,
+      "learning_rate": 0.0009890797385925906,
+      "loss": 8.3808,
+      "step": 2625
+    },
+    {
+      "epoch": 0.11341452880711755,
+      "grad_norm": 0.84375,
+      "learning_rate": 0.0009890648900353683,
+      "loss": 8.176,
+      "step": 2626
+    },
+    {
+      "epoch": 0.11345771788891768,
+      "grad_norm": 0.55078125,
+      "learning_rate": 0.0009890500315016322,
+      "loss": 8.3731,
+      "step": 2627
+    },
+    {
+      "epoch": 0.1135009069707178,
+      "grad_norm": 0.435546875,
+      "learning_rate": 0.0009890351629916853,
+      "loss": 8.1031,
+      "step": 2628
+    },
+    {
+      "epoch": 0.11354409605251792,
+      "grad_norm": 0.57421875,
+      "learning_rate": 0.000989020284505831,
+      "loss": 8.2986,
+      "step": 2629
+    },
+    {
+      "epoch": 0.11358728513431804,
+      "grad_norm": 0.482421875,
+      "learning_rate": 0.0009890053960443727,
+      "loss": 8.685,
+      "step": 2630
+    },
+    {
+      "epoch": 0.11363047421611816,
+      "grad_norm": 0.58984375,
+      "learning_rate": 0.0009889904976076143,
+      "loss": 8.3749,
+      "step": 2631
+    },
+    {
+      "epoch": 0.11367366329791828,
+      "grad_norm": 0.5078125,
+      "learning_rate": 0.0009889755891958596,
+      "loss": 8.1719,
+      "step": 2632
+    },
+    {
+      "epoch": 0.1137168523797184,
+      "grad_norm": 0.546875,
+      "learning_rate": 0.0009889606708094127,
+      "loss": 8.4152,
+      "step": 2633
+    },
+    {
+      "epoch": 0.11376004146151852,
+      "grad_norm": 0.54296875,
+      "learning_rate": 0.000988945742448578,
+      "loss": 8.4868,
+      "step": 2634
+    },
+    {
+      "epoch": 0.11380323054331865,
+      "grad_norm": 0.48046875,
+      "learning_rate": 0.00098893080411366,
+      "loss": 8.3051,
+      "step": 2635
+    },
+    {
+      "epoch": 0.11384641962511877,
+      "grad_norm": 0.7265625,
+      "learning_rate": 0.0009889158558049635,
+      "loss": 7.9139,
+      "step": 2636
+    },
+    {
+      "epoch": 0.11388960870691889,
+      "grad_norm": 0.52734375,
+      "learning_rate": 0.0009889008975227933,
+      "loss": 8.3378,
+      "step": 2637
+    },
+    {
+      "epoch": 0.11393279778871901,
+      "grad_norm": 0.6171875,
+      "learning_rate": 0.0009888859292674545,
+      "loss": 8.3744,
+      "step": 2638
+    },
+    {
+      "epoch": 0.11397598687051913,
+      "grad_norm": 0.53515625,
+      "learning_rate": 0.0009888709510392525,
+      "loss": 8.6874,
+      "step": 2639
+    },
+    {
+      "epoch": 0.11401917595231925,
+      "grad_norm": 0.53515625,
+      "learning_rate": 0.000988855962838493,
+      "loss": 8.4216,
+      "step": 2640
+    },
+    {
+      "epoch": 0.11406236503411937,
+      "grad_norm": 0.578125,
+      "learning_rate": 0.0009888409646654818,
+      "loss": 8.4905,
+      "step": 2641
+    },
+    {
+      "epoch": 0.1141055541159195,
+      "grad_norm": 0.51171875,
+      "learning_rate": 0.0009888259565205243,
+      "loss": 8.1824,
+      "step": 2642
+    },
+    {
+      "epoch": 0.11414874319771962,
+      "grad_norm": 0.47265625,
+      "learning_rate": 0.0009888109384039272,
+      "loss": 8.3441,
+      "step": 2643
+    },
+    {
+      "epoch": 0.11419193227951974,
+      "grad_norm": 0.498046875,
+      "learning_rate": 0.0009887959103159966,
+      "loss": 8.6919,
+      "step": 2644
+    },
+    {
+      "epoch": 0.11423512136131986,
+      "grad_norm": 0.466796875,
+      "learning_rate": 0.0009887808722570393,
+      "loss": 8.5054,
+      "step": 2645
+    },
+    {
+      "epoch": 0.11427831044311998,
+      "grad_norm": 0.5625,
+      "learning_rate": 0.0009887658242273617,
+      "loss": 8.2274,
+      "step": 2646
+    },
+    {
+      "epoch": 0.1143214995249201,
+      "grad_norm": 0.4921875,
+      "learning_rate": 0.000988750766227271,
+      "loss": 8.5321,
+      "step": 2647
+    },
+    {
+      "epoch": 0.11436468860672022,
+      "grad_norm": 0.56640625,
+      "learning_rate": 0.0009887356982570745,
+      "loss": 8.3781,
+      "step": 2648
+    },
+    {
+      "epoch": 0.11440787768852034,
+      "grad_norm": 0.61328125,
+      "learning_rate": 0.0009887206203170794,
+      "loss": 8.1832,
+      "step": 2649
+    },
+    {
+      "epoch": 0.11445106677032046,
+      "grad_norm": 0.74609375,
+      "learning_rate": 0.0009887055324075932,
+      "loss": 8.4196,
+      "step": 2650
+    },
+    {
+      "epoch": 0.11449425585212059,
+      "grad_norm": 0.61328125,
+      "learning_rate": 0.0009886904345289237,
+      "loss": 8.4519,
+      "step": 2651
+    },
+    {
+      "epoch": 0.1145374449339207,
+      "grad_norm": 0.58984375,
+      "learning_rate": 0.0009886753266813788,
+      "loss": 8.1727,
+      "step": 2652
+    },
+    {
+      "epoch": 0.11458063401572083,
+      "grad_norm": 0.470703125,
+      "learning_rate": 0.0009886602088652672,
+      "loss": 8.2705,
+      "step": 2653
+    },
+    {
+      "epoch": 0.11462382309752095,
+      "grad_norm": 0.73046875,
+      "learning_rate": 0.000988645081080897,
+      "loss": 8.1534,
+      "step": 2654
+    },
+    {
+      "epoch": 0.11466701217932107,
+      "grad_norm": 0.515625,
+      "learning_rate": 0.0009886299433285763,
+      "loss": 8.3726,
+      "step": 2655
+    },
+    {
+      "epoch": 0.11471020126112119,
+      "grad_norm": 0.87890625,
+      "learning_rate": 0.0009886147956086147,
+      "loss": 8.0826,
+      "step": 2656
+    },
+    {
+      "epoch": 0.11475339034292131,
+      "grad_norm": 0.5390625,
+      "learning_rate": 0.0009885996379213207,
+      "loss": 8.5905,
+      "step": 2657
+    },
+    {
+      "epoch": 0.11479657942472143,
+      "grad_norm": 0.421875,
+      "learning_rate": 0.0009885844702670035,
+      "loss": 8.4927,
+      "step": 2658
+    },
+    {
+      "epoch": 0.11483976850652156,
+      "grad_norm": 0.6015625,
+      "learning_rate": 0.000988569292645973,
+      "loss": 8.4423,
+      "step": 2659
+    },
+    {
+      "epoch": 0.11488295758832168,
+      "grad_norm": 0.6875,
+      "learning_rate": 0.000988554105058538,
+      "loss": 8.3766,
+      "step": 2660
+    },
+    {
+      "epoch": 0.1149261466701218,
+      "grad_norm": 0.52734375,
+      "learning_rate": 0.000988538907505009,
+      "loss": 8.1138,
+      "step": 2661
+    },
+    {
+      "epoch": 0.11496933575192192,
+      "grad_norm": 0.5390625,
+      "learning_rate": 0.0009885236999856958,
+      "loss": 8.3805,
+      "step": 2662
+    },
+    {
+      "epoch": 0.11501252483372204,
+      "grad_norm": 0.75,
+      "learning_rate": 0.0009885084825009085,
+      "loss": 8.1684,
+      "step": 2663
+    },
+    {
+      "epoch": 0.11505571391552216,
+      "grad_norm": 0.671875,
+      "learning_rate": 0.0009884932550509578,
+      "loss": 8.306,
+      "step": 2664
+    },
+    {
+      "epoch": 0.11509890299732228,
+      "grad_norm": 0.41015625,
+      "learning_rate": 0.000988478017636154,
+      "loss": 8.3887,
+      "step": 2665
+    },
+    {
+      "epoch": 0.1151420920791224,
+      "grad_norm": 0.703125,
+      "learning_rate": 0.000988462770256808,
+      "loss": 7.8748,
+      "step": 2666
+    },
+    {
+      "epoch": 0.11518528116092251,
+      "grad_norm": 0.453125,
+      "learning_rate": 0.0009884475129132311,
+      "loss": 8.3002,
+      "step": 2667
+    },
+    {
+      "epoch": 0.11522847024272263,
+      "grad_norm": 0.46875,
+      "learning_rate": 0.0009884322456057343,
+      "loss": 8.3754,
+      "step": 2668
+    },
+    {
+      "epoch": 0.11527165932452275,
+      "grad_norm": 0.69921875,
+      "learning_rate": 0.000988416968334629,
+      "loss": 8.2877,
+      "step": 2669
+    },
+    {
+      "epoch": 0.11531484840632288,
+      "grad_norm": 0.921875,
+      "learning_rate": 0.0009884016811002273,
+      "loss": 8.4747,
+      "step": 2670
+    },
+    {
+      "epoch": 0.115358037488123,
+      "grad_norm": 0.82421875,
+      "learning_rate": 0.0009883863839028402,
+      "loss": 7.8003,
+      "step": 2671
+    },
+    {
+      "epoch": 0.11540122656992312,
+      "grad_norm": 0.6484375,
+      "learning_rate": 0.0009883710767427806,
+      "loss": 8.1563,
+      "step": 2672
+    },
+    {
+      "epoch": 0.11544441565172324,
+      "grad_norm": 0.4765625,
+      "learning_rate": 0.0009883557596203601,
+      "loss": 8.3803,
+      "step": 2673
+    },
+    {
+      "epoch": 0.11548760473352336,
+      "grad_norm": 0.458984375,
+      "learning_rate": 0.0009883404325358915,
+      "loss": 8.3169,
+      "step": 2674
+    },
+    {
+      "epoch": 0.11553079381532348,
+      "grad_norm": 0.59765625,
+      "learning_rate": 0.0009883250954896877,
+      "loss": 8.2138,
+      "step": 2675
+    },
+    {
+      "epoch": 0.1155739828971236,
+      "grad_norm": 0.65234375,
+      "learning_rate": 0.000988309748482061,
+      "loss": 7.8497,
+      "step": 2676
+    },
+    {
+      "epoch": 0.11561717197892372,
+      "grad_norm": 0.494140625,
+      "learning_rate": 0.0009882943915133246,
+      "loss": 8.3327,
+      "step": 2677
+    },
+    {
+      "epoch": 0.11566036106072385,
+      "grad_norm": 0.8671875,
+      "learning_rate": 0.0009882790245837924,
+      "loss": 8.2778,
+      "step": 2678
+    },
+    {
+      "epoch": 0.11570355014252397,
+      "grad_norm": 0.408203125,
+      "learning_rate": 0.000988263647693777,
+      "loss": 8.4779,
+      "step": 2679
+    },
+    {
+      "epoch": 0.11574673922432409,
+      "grad_norm": 0.8046875,
+      "learning_rate": 0.0009882482608435923,
+      "loss": 8.3256,
+      "step": 2680
+    },
+    {
+      "epoch": 0.11578992830612421,
+      "grad_norm": 0.66796875,
+      "learning_rate": 0.0009882328640335525,
+      "loss": 8.164,
+      "step": 2681
+    },
+    {
+      "epoch": 0.11583311738792433,
+      "grad_norm": 0.48046875,
+      "learning_rate": 0.0009882174572639717,
+      "loss": 8.201,
+      "step": 2682
+    },
+    {
+      "epoch": 0.11587630646972445,
+      "grad_norm": 0.455078125,
+      "learning_rate": 0.0009882020405351639,
+      "loss": 8.3834,
+      "step": 2683
+    },
+    {
+      "epoch": 0.11591949555152457,
+      "grad_norm": 0.6640625,
+      "learning_rate": 0.0009881866138474436,
+      "loss": 8.173,
+      "step": 2684
+    },
+    {
+      "epoch": 0.1159626846333247,
+      "grad_norm": 0.75390625,
+      "learning_rate": 0.0009881711772011254,
+      "loss": 8.4729,
+      "step": 2685
+    },
+    {
+      "epoch": 0.11600587371512482,
+      "grad_norm": 0.69921875,
+      "learning_rate": 0.0009881557305965248,
+      "loss": 8.5104,
+      "step": 2686
+    },
+    {
+      "epoch": 0.11604906279692494,
+      "grad_norm": 0.49609375,
+      "learning_rate": 0.0009881402740339563,
+      "loss": 8.5633,
+      "step": 2687
+    },
+    {
+      "epoch": 0.11609225187872506,
+      "grad_norm": 0.45703125,
+      "learning_rate": 0.0009881248075137353,
+      "loss": 8.3657,
+      "step": 2688
+    },
+    {
+      "epoch": 0.11613544096052518,
+      "grad_norm": 0.625,
+      "learning_rate": 0.0009881093310361772,
+      "loss": 8.0986,
+      "step": 2689
+    },
+    {
+      "epoch": 0.1161786300423253,
+      "grad_norm": 0.67578125,
+      "learning_rate": 0.000988093844601598,
+      "loss": 8.2304,
+      "step": 2690
+    },
+    {
+      "epoch": 0.11622181912412542,
+      "grad_norm": 0.4375,
+      "learning_rate": 0.0009880783482103138,
+      "loss": 8.4729,
+      "step": 2691
+    },
+    {
+      "epoch": 0.11626500820592554,
+      "grad_norm": 0.67578125,
+      "learning_rate": 0.00098806284186264,
+      "loss": 8.4413,
+      "step": 2692
+    },
+    {
+      "epoch": 0.11630819728772566,
+      "grad_norm": 0.53125,
+      "learning_rate": 0.0009880473255588936,
+      "loss": 8.3424,
+      "step": 2693
+    },
+    {
+      "epoch": 0.11635138636952579,
+      "grad_norm": 0.5390625,
+      "learning_rate": 0.0009880317992993907,
+      "loss": 8.1611,
+      "step": 2694
+    },
+    {
+      "epoch": 0.11639457545132591,
+      "grad_norm": 0.671875,
+      "learning_rate": 0.0009880162630844483,
+      "loss": 8.6446,
+      "step": 2695
+    },
+    {
+      "epoch": 0.11643776453312603,
+      "grad_norm": 0.53515625,
+      "learning_rate": 0.000988000716914383,
+      "loss": 8.2999,
+      "step": 2696
+    },
+    {
+      "epoch": 0.11648095361492615,
+      "grad_norm": 0.64453125,
+      "learning_rate": 0.0009879851607895122,
+      "loss": 8.2054,
+      "step": 2697
+    },
+    {
+      "epoch": 0.11652414269672627,
+      "grad_norm": 0.85546875,
+      "learning_rate": 0.000987969594710153,
+      "loss": 8.5063,
+      "step": 2698
+    },
+    {
+      "epoch": 0.11656733177852639,
+      "grad_norm": 0.466796875,
+      "learning_rate": 0.0009879540186766233,
+      "loss": 8.2664,
+      "step": 2699
+    },
+    {
+      "epoch": 0.11661052086032651,
+      "grad_norm": 0.68359375,
+      "learning_rate": 0.0009879384326892406,
+      "loss": 8.3649,
+      "step": 2700
+    },
+    {
+      "epoch": 0.11661052086032651,
+      "eval_loss": 8.321394920349121,
+      "eval_runtime": 14.3806,
+      "eval_samples_per_second": 1.669,
+      "eval_steps_per_second": 0.209,
+      "step": 2700
     }
   ],
   "logging_steps": 1,
@@ -18435,7 +19143,7 @@
       "attributes": {}
     }
   },
-  "total_flos":
+  "total_flos": 8630688153600.0,
   "train_batch_size": 1,
   "trial_name": null,
   "trial_params": null