from typing import List

from transformers import PretrainedConfig


class MetaLoRAConfig(PretrainedConfig):
    """Configuration for the MetaLoRA (MLoRA) model."""

    model_type = "MLoRAModel"
    def __init__(
        self,
        # The "init" strings and the ["init"] list are placeholder defaults;
        # they are expected to be overridden when building a real configuration.
        mlora_layers: List[int] = ["init"],
        base_size: int = 384,
        embd_model: str = "init",
        llm_tokenizer: str = "init",
        hidden_size_coef: float = 0.5,
        **kwargs,
    ):
        # Map the Auto* classes to the custom code shipped with the model repository.
        self.auto_map = {
            "AutoModel": "Arthur-LAGACHERIE/MetaLoRA-code--modeling_metalora.MLoRAModel",
            "AutoModelForCausalLM": "Arthur-LAGACHERIE/MetaLoRA-code--modeling_metalora.MLoRAModel",
            "AutoConfig": "Arthur-LAGACHERIE/MetaLoRA-code--configuration_metalora.MetaLoRAConfig",
        }
        # Store the MetaLoRA-specific fields, then let PretrainedConfig handle the rest.
        self.mlora_layers = mlora_layers
        self.base_size = base_size
        self.embd_model = embd_model
        self.llm_tokenizer = llm_tokenizer
        self.hidden_size_coef = hidden_size_coef
        super().__init__(**kwargs)
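

# Minimal usage sketch (illustrative only; the argument values below are
# assumptions chosen for demonstration, not defaults shipped with the repository).
if __name__ == "__main__":
    config = MetaLoRAConfig(
        mlora_layers=[0, 5, 10],             # hypothetical layer indices
        base_size=384,
        embd_model="some-embedding-model",   # hypothetical embedding model identifier
        llm_tokenizer="some-llm-tokenizer",  # hypothetical tokenizer identifier
        hidden_size_coef=0.5,
    )
    print(config)  # PretrainedConfig renders the configuration as JSON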