include 't5x/examples/t5/byt5/large.gin'
include 'pretrain_cont.gin'
#include 't5x/configs/runs/pretrain.gin'
#include 't5x/configs/runs/finetune.gin'
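
# A typical launch command for this config (a sketch: the gin-file name and
# MODEL_DIR bucket below are placeholders, not this project's actual paths):
#
#   python -m t5x.train \
#     --gin_file="byt5_large_ncc_pretrain_cont.gin" \
#     --gin.MODEL_DIR=\"gs://your-bucket/models/byt5_large_ncc\"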

# Register necessary SeqIO Tasks/Mixtures.
import t5.data.mixtures
import tasks
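
# The local `tasks` module must register the task named below. A sketch of such
# a registration, modeled on ByT5's public span-corruption tasks (the TFDS name
# and the mean_noise_span_length=20 value are assumptions, not this project's
# actual tasks.py):
#
#   import functools
#   import seqio
#   import t5.data.preprocessors as prep
#
#   seqio.TaskRegistry.add(
#       "byt5_ncc_english_span_corruption_stream",
#       source=seqio.TfdsDataSource(tfds_name="c4/en:2.2.0"),  # placeholder
#       preprocessors=[
#           functools.partial(prep.rekey,
#                             key_map={"inputs": None, "targets": "text"}),
#           seqio.preprocessors.tokenize,
#           functools.partial(prep.span_corruption,
#                             mean_noise_span_length=20),
#           seqio.preprocessors.append_eos_after_trim,
#       ],
#       output_features={
#           "inputs": seqio.Feature(vocabulary=seqio.ByteVocabulary()),
#           "targets": seqio.Feature(vocabulary=seqio.ByteVocabulary()),
#       },
#       metric_fns=[])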

MIXTURE_OR_TASK_NAME = "byt5_ncc_english_span_corruption_stream"
TASK_FEATURE_LENGTHS = {"inputs": 512, "targets": 512}
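# Note that ByT5 is token-free: these lengths are measured in UTF-8 bytes, not
# subword tokens, so 512 covers correspondingly less text.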
TRAIN_STEPS = 1_500_000
DROPOUT_RATE = 0.0  # Changed from the default since T5-1.1 recommends this.
INITIAL_CHECKPOINT_PATH = "gs://t5-data/pretrained_models/byt5/large/model.ckpt-1000000"
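# TRAIN_STEPS is the total step count including the 1,000,000 steps already in
# the initial checkpoint, so this run adds 500,000 steps of continued pretraining.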
#PjitPartitioner.num_partitions = 1

# `LOSS_NORMALIZING_FACTOR`: When fine-tuning a model that was pre-trained
# using Mesh Tensorflow (e.g. the public T5 / mT5 / ByT5 models), this should be
# set to `pretraining batch_size` * `target_token_length`. For T5 and T5.1.1:
# `2048 * 114`. For mT5: `1024 * 229`. For ByT5: `1024 * 189`.
# The instructions above are from T5X. Since we start from the converted Mesh
# Tensorflow ByT5 model, this needs to be set.
LOSS_NORMALIZING_FACTOR = 193536
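# Sanity check: 1024 * 189 = 193536, matching the ByT5 figures quoted above.
# (For a checkpoint pretrained with T5X itself, the T5X default of None would
# apply instead.)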