from transformers import PretrainedConfig
from typing import List


class MetaLoRAConfig(PretrainedConfig):
    model_type = "MLoRAModel"

    def __init__(
        self,
        mlora_layers: List[int] = ["init"],  # note: the default is a string sentinel despite the int annotation; pass real layer indices in practice
        base_size: int = 384,
        embd_model: str = "init",            # embedding model identifier ("init" = not set yet)
        llm_tokenizer: str = "init",         # LLM tokenizer identifier ("init" = not set yet)
        hidden_size_coef: float = 0.5,
        **kwargs,
    ):
        # Map the Auto* classes to the remote-code modules in this repo so the
        # config and model can be resolved with trust_remote_code=True.
        self.auto_map = {
            "AutoModel": "Arthur-LAGACHERIE/MetaLoRA-code--modeling_metalora.MLoRAModel",
            "AutoModelForCausalLM": "Arthur-LAGACHERIE/MetaLoRA-code--modeling_metalora.MLoRAModel",
            "AutoConfig": "Arthur-LAGACHERIE/MetaLoRA-code--configuration_metalora.MetaLoRAConfig",
        }
        self.mlora_layers = mlora_layers
        self.base_size = base_size
        self.embd_model = embd_model
        self.llm_tokenizer = llm_tokenizer
        self.hidden_size_coef = hidden_size_coef
        super().__init__(**kwargs)
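

# ----------------------------------------------------------------------------
# Minimal usage sketch (not part of the original file): the layer indices and
# model names below are hypothetical placeholders chosen for illustration; the
# round-trip relies only on the standard PretrainedConfig save/load helpers.
if __name__ == "__main__":
    config = MetaLoRAConfig(
        mlora_layers=[2, 5, 11],                               # assumed layer indices
        base_size=384,
        embd_model="sentence-transformers/all-MiniLM-L6-v2",   # assumed embedder
        llm_tokenizer="HuggingFaceTB/SmolLM-135M",             # assumed tokenizer
        hidden_size_coef=0.5,
    )

    config.save_pretrained("metalora-config")                  # writes config.json
    reloaded = MetaLoRAConfig.from_pretrained("metalora-config")
    assert reloaded.hidden_size_coef == 0.5

    # Because auto_map points AutoConfig at this class, the published
    # checkpoint can also be loaded with remote code enabled:
    #   from transformers import AutoConfig
    #   cfg = AutoConfig.from_pretrained("Arthur-LAGACHERIE/MetaLoRA-code",
    #                                    trust_remote_code=True)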