Commit 2756794 · Parent(s): bfe9f4d
End of training

Files changed:
- README.md +112 -9
- pytorch_model.bin +1 -1
- tokenizer_config.json +4 -0
- training_args.bin +1 -1
README.md CHANGED

@@ -1,6 +1,8 @@
 ---
 tags:
 - generated_from_trainer
+metrics:
+- bleu
 model-index:
 - name: t5-small-finetuned-en-to-fr
   results: []
@@ -13,14 +15,9 @@ should probably proofread and complete it, then remove this comment. -->
 
 This model was trained from scratch on the None dataset.
 It achieves the following results on the evaluation set:
--
--
--
-- eval_runtime: 0.209
-- eval_samples_per_second: 9.568
-- eval_steps_per_second: 4.784
-- epoch: 103.0
-- step: 103
+- Loss: 0.1355
+- Bleu: 0.0
+- Gen Len: 3.0
 
 ## Model description
 
@@ -45,7 +42,113 @@ The following hyperparameters were used during training:
 - seed: 42
 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
 - lr_scheduler_type: linear
-- num_epochs:
+- num_epochs: 100
+
+### Training results
+
+| Training Loss | Epoch | Step | Validation Loss | Bleu | Gen Len |
+|:-------------:|:-----:|:----:|:---------------:|:----:|:-------:|
+| No log | 1.0 | 1 | 0.7383 | 0.0 | 3.0 |
+| No log | 2.0 | 2 | 0.6949 | 0.0 | 3.0 |
+| No log | 3.0 | 3 | 0.6472 | 0.0 | 3.0 |
+| No log | 4.0 | 4 | 0.6009 | 0.0 | 3.0 |
+| No log | 5.0 | 5 | 0.5615 | 0.0 | 3.0 |
+| No log | 6.0 | 6 | 0.5312 | 0.0 | 3.0 |
+| No log | 7.0 | 7 | 0.4994 | 0.0 | 3.0 |
+| No log | 8.0 | 8 | 0.4685 | 0.0 | 3.0 |
+| No log | 9.0 | 9 | 0.4413 | 0.0 | 3.0 |
+| No log | 10.0 | 10 | 0.4179 | 0.0 | 3.0 |
+| No log | 11.0 | 11 | 0.3927 | 0.0 | 3.0 |
+| No log | 12.0 | 12 | 0.3675 | 0.0 | 3.0 |
+| No log | 13.0 | 13 | 0.3460 | 0.0 | 3.0 |
+| No log | 14.0 | 14 | 0.3225 | 0.0 | 3.0 |
+| No log | 15.0 | 15 | 0.3018 | 0.0 | 3.0 |
+| No log | 16.0 | 16 | 0.2821 | 0.0 | 3.0 |
+| No log | 17.0 | 17 | 0.2593 | 0.0 | 3.0 |
+| No log | 18.0 | 18 | 0.2388 | 0.0 | 3.0 |
+| No log | 19.0 | 19 | 0.2215 | 0.0 | 3.0 |
+| No log | 20.0 | 20 | 0.2098 | 0.0 | 3.0 |
+| No log | 21.0 | 21 | 0.2031 | 0.0 | 3.0 |
+| No log | 22.0 | 22 | 0.1964 | 0.0 | 3.0 |
+| No log | 23.0 | 23 | 0.1908 | 0.0 | 3.0 |
+| No log | 24.0 | 24 | 0.1842 | 0.0 | 3.0 |
+| No log | 25.0 | 25 | 0.1765 | 0.0 | 3.0 |
+| No log | 26.0 | 26 | 0.1695 | 0.0 | 3.0 |
+| No log | 27.0 | 27 | 0.1643 | 0.0 | 3.0 |
+| No log | 28.0 | 28 | 0.1588 | 0.0 | 3.0 |
+| No log | 29.0 | 29 | 0.1543 | 0.0 | 3.0 |
+| No log | 30.0 | 30 | 0.1503 | 0.0 | 3.0 |
+| No log | 31.0 | 31 | 0.1490 | 0.0 | 3.0 |
+| No log | 32.0 | 32 | 0.1468 | 0.0 | 3.0 |
+| No log | 33.0 | 33 | 0.1430 | 0.0 | 3.0 |
+| No log | 34.0 | 34 | 0.1408 | 0.0 | 3.0 |
+| No log | 35.0 | 35 | 0.1400 | 0.0 | 3.0 |
+| No log | 36.0 | 36 | 0.1386 | 0.0 | 3.0 |
+| No log | 37.0 | 37 | 0.1363 | 0.0 | 3.0 |
+| No log | 38.0 | 38 | 0.1329 | 0.0 | 3.0 |
+| No log | 39.0 | 39 | 0.1298 | 0.0 | 3.0 |
+| No log | 40.0 | 40 | 0.1268 | 0.0 | 3.0 |
+| No log | 41.0 | 41 | 0.1245 | 0.0 | 3.0 |
+| No log | 42.0 | 42 | 0.1231 | 0.0 | 3.0 |
+| No log | 43.0 | 43 | 0.1219 | 0.0 | 3.0 |
+| No log | 44.0 | 44 | 0.1210 | 0.0 | 3.0 |
+| No log | 45.0 | 45 | 0.1204 | 0.0 | 3.0 |
+| No log | 46.0 | 46 | 0.1204 | 0.0 | 3.0 |
+| No log | 47.0 | 47 | 0.1202 | 0.0 | 3.0 |
+| No log | 48.0 | 48 | 0.1200 | 0.0 | 3.0 |
+| No log | 49.0 | 49 | 0.1202 | 0.0 | 3.0 |
+| No log | 50.0 | 50 | 0.1205 | 0.0 | 3.0 |
+| No log | 51.0 | 51 | 0.1206 | 0.0 | 3.0 |
+| No log | 52.0 | 52 | 0.1205 | 0.0 | 3.0 |
+| No log | 53.0 | 53 | 0.1207 | 0.0 | 3.0 |
+| No log | 54.0 | 54 | 0.1208 | 0.0 | 3.0 |
+| No log | 55.0 | 55 | 0.1211 | 0.0 | 3.0 |
+| No log | 56.0 | 56 | 0.1214 | 0.0 | 3.0 |
+| No log | 57.0 | 57 | 0.1215 | 0.0 | 3.0 |
+| No log | 58.0 | 58 | 0.1216 | 0.0 | 3.0 |
+| No log | 59.0 | 59 | 0.1215 | 0.0 | 3.0 |
+| No log | 60.0 | 60 | 0.1213 | 0.0 | 3.0 |
+| No log | 61.0 | 61 | 0.1210 | 0.0 | 3.0 |
+| No log | 62.0 | 62 | 0.1210 | 0.0 | 3.0 |
+| No log | 63.0 | 63 | 0.1212 | 0.0 | 3.0 |
+| No log | 64.0 | 64 | 0.1216 | 0.0 | 3.0 |
+| No log | 65.0 | 65 | 0.1225 | 0.0 | 3.0 |
+| No log | 66.0 | 66 | 0.1233 | 0.0 | 3.0 |
+| No log | 67.0 | 67 | 0.1243 | 0.0 | 3.0 |
+| No log | 68.0 | 68 | 0.1252 | 0.0 | 3.0 |
+| No log | 69.0 | 69 | 0.1260 | 0.0 | 3.0 |
+| No log | 70.0 | 70 | 0.1272 | 0.0 | 3.0 |
+| No log | 71.0 | 71 | 0.1282 | 0.0 | 3.0 |
+| No log | 72.0 | 72 | 0.1287 | 0.0 | 3.0 |
+| No log | 73.0 | 73 | 0.1291 | 0.0 | 3.0 |
+| No log | 74.0 | 74 | 0.1297 | 0.0 | 3.0 |
+| No log | 75.0 | 75 | 0.1304 | 0.0 | 3.0 |
+| No log | 76.0 | 76 | 0.1309 | 0.0 | 3.0 |
+| No log | 77.0 | 77 | 0.1313 | 0.0 | 3.0 |
+| No log | 78.0 | 78 | 0.1318 | 0.0 | 3.0 |
+| No log | 79.0 | 79 | 0.1321 | 0.0 | 3.0 |
+| No log | 80.0 | 80 | 0.1325 | 0.0 | 3.0 |
+| No log | 81.0 | 81 | 0.1327 | 0.0 | 3.0 |
+| No log | 82.0 | 82 | 0.1330 | 0.0 | 3.0 |
+| No log | 83.0 | 83 | 0.1333 | 0.0 | 3.0 |
+| No log | 84.0 | 84 | 0.1336 | 0.0 | 3.0 |
+| No log | 85.0 | 85 | 0.1339 | 0.0 | 3.0 |
+| No log | 86.0 | 86 | 0.1342 | 0.0 | 3.0 |
+| No log | 87.0 | 87 | 0.1343 | 0.0 | 3.0 |
+| No log | 88.0 | 88 | 0.1344 | 0.0 | 3.0 |
+| No log | 89.0 | 89 | 0.1345 | 0.0 | 3.0 |
+| No log | 90.0 | 90 | 0.1347 | 0.0 | 3.0 |
+| No log | 91.0 | 91 | 0.1349 | 0.0 | 3.0 |
+| No log | 92.0 | 92 | 0.1352 | 0.0 | 3.0 |
+| No log | 93.0 | 93 | 0.1353 | 0.0 | 3.0 |
+| No log | 94.0 | 94 | 0.1354 | 0.0 | 3.0 |
+| No log | 95.0 | 95 | 0.1355 | 0.0 | 3.0 |
+| No log | 96.0 | 96 | 0.1355 | 0.0 | 3.0 |
+| No log | 97.0 | 97 | 0.1355 | 0.0 | 3.0 |
+| No log | 98.0 | 98 | 0.1355 | 0.0 | 3.0 |
+| No log | 99.0 | 99 | 0.1355 | 0.0 | 3.0 |
+| No log | 100.0 | 100 | 0.1355 | 0.0 | 3.0 |
+
 
 ### Framework versions
 
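The card above records the run's hyperparameters (seed 42, Adam, a linear schedule, 100 epochs) but not the data: the dataset field is literally "None". For orientation, here is a minimal sketch of a `Seq2SeqTrainer` setup consistent with those values; the two-sentence toy dataset and the preprocessing are placeholders, not the recipe behind this checkpoint.

```python
from datasets import Dataset
from transformers import (
    AutoModelForSeq2SeqLM,
    AutoTokenizer,
    DataCollatorForSeq2Seq,
    Seq2SeqTrainer,
    Seq2SeqTrainingArguments,
)

tokenizer = AutoTokenizer.from_pretrained("t5-small")
model = AutoModelForSeq2SeqLM.from_pretrained("t5-small")

def preprocess(batch):
    # T5 expects a task prefix on the source side.
    enc = tokenizer(
        ["translate English to French: " + s for s in batch["en"]],
        truncation=True,
    )
    enc["labels"] = tokenizer(text_target=batch["fr"], truncation=True)["input_ids"]
    return enc

# Placeholder data: the card does not record the real dataset.
toy = Dataset.from_dict(
    {"en": ["Hello.", "Thank you very much."],
     "fr": ["Bonjour.", "Merci beaucoup."]}
).map(preprocess, batched=True, remove_columns=["en", "fr"])

args = Seq2SeqTrainingArguments(
    output_dir="t5-small-finetuned-en-to-fr",
    num_train_epochs=100,         # num_epochs: 100
    seed=42,                      # seed: 42
    lr_scheduler_type="linear",   # lr_scheduler_type: linear
    evaluation_strategy="epoch",  # one eval row per epoch, as in the table
    predict_with_generate=True,   # generate during eval so Bleu / Gen Len can be computed
)

trainer = Seq2SeqTrainer(
    model=model,
    args=args,
    train_dataset=toy,
    eval_dataset=toy,
    data_collator=DataCollatorForSeq2Seq(tokenizer, model=model),
    tokenizer=tokenizer,
)
trainer.train()
```

Two things in the results table are worth flagging: the run advances exactly one optimizer step per epoch, which points to a very small training set, and a Bleu of 0.0 with a Gen Len of 3.0 suggests the model was producing near-empty generations on the eval samples throughout.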
pytorch_model.bin CHANGED

@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:82bb02c7e56ce0d60f87199c5b5d7b0e34538e18bae1264d4f709ec1e8d77be3
 size 242069785
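The diff above touches only the `oid` line: on the Hub, `pytorch_model.bin` is stored as a Git LFS pointer file, and the sha256 in the pointer identifies the actual weight blob (the same applies to `training_args.bin` below). A quick way to check a downloaded blob against the pointer, assuming it sits in the current directory:

```python
import hashlib

def lfs_sha256(path: str, chunk_size: int = 1 << 20) -> str:
    """Compute the sha256 digest that Git LFS records as the pointer's oid."""
    digest = hashlib.sha256()
    with open(path, "rb") as f:
        while chunk := f.read(chunk_size):
            digest.update(chunk)
    return digest.hexdigest()

# For the blob this commit points at, this should print
# 82bb02c7e56ce0d60f87199c5b5d7b0e34538e18bae1264d4f709ec1e8d77be3
print(lfs_sha256("pytorch_model.bin"))
```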
tokenizer_config.json CHANGED

@@ -104,8 +104,12 @@
   "clean_up_tokenization_spaces": true,
   "eos_token": "</s>",
   "extra_ids": 100,
+  "max_length": null,
   "model_max_length": 512,
+  "pad_to_multiple_of": null,
   "pad_token": "<pad>",
+  "pad_token_type_id": 0,
+  "padding_side": "right",
   "tokenizer_class": "T5Tokenizer",
   "unk_token": "<unk>"
 }
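The four keys added here (`max_length`, `pad_to_multiple_of`, `pad_token_type_id`, `padding_side`) are padding defaults that newer `transformers` releases serialize explicitly when a tokenizer is saved; the values shown are the stock T5 defaults, not behavior changes. A small illustration of what they control, using the base `t5-small` tokenizer as a stand-in for this checkpoint:

```python
from transformers import AutoTokenizer

# Stand-in: the stock t5-small tokenizer shares the defaults this commit
# now writes out explicitly in tokenizer_config.json.
tok = AutoTokenizer.from_pretrained("t5-small")
print(tok.padding_side)  # "right", matching "padding_side": "right"

batch = tok(["Hello.", "A somewhat longer sentence."], padding=True)
# The shorter sequence is padded on the right with <pad> (token id 0)
# up to the length of the longer one.
print(batch["input_ids"])
```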
training_args.bin CHANGED

@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:4e9aa47a42d271876f281b7c2643e31044a5161b056a1ea226bf8cc3104d03f2
 size 4219
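`training_args.bin` is the pickled `TrainingArguments` object that the `Trainer` writes next to the checkpoint, so its hash changes whenever any argument differs between runs. One way to inspect it, assuming a local checkout of this repo and a `transformers` install compatible with the one that produced the file:

```python
import torch

# The file is a pickled Python object, not a tensor state dict, so recent
# torch versions need weights_only=False to unpickle it.
args = torch.load("training_args.bin", weights_only=False)
print(args.num_train_epochs, args.seed, args.lr_scheduler_type)
```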