MetaLoRA-code / configuration_metalora.py
Arthur-LAGACHERIE's picture
add multiple layers in MetaLoRALayer
94e77ae verified
raw
history blame contribute delete
887 Bytes
from typing import List, Optional

from transformers import PretrainedConfig
class MetaLoRAConfig(PretrainedConfig):
    """Configuration class for MLoRAModel.

    Stores the hyperparameters needed to instantiate a MetaLoRA model and
    registers the custom `auto_map` entries so `AutoModel` / `AutoConfig`
    can resolve the remote-code classes from the Hub repo.

    Args:
        mlora_layers: Indices of the transformer layers that receive a
            MetaLoRA adapter. Defaults to the sentinel ``["init"]``
            (uninitialized), matching the original behavior when omitted.
        base_size: Base hidden size of the adapter projections.
        embd_model: Name/path of the embedding model; ``"init"`` means unset.
        llm_tokenizer: Name/path of the LLM tokenizer; ``"init"`` means unset.
        hidden_size_coef: Multiplier applied to the hidden size when building
            adapter layers.
        **kwargs: Forwarded to ``PretrainedConfig.__init__``.
    """

    model_type = "MLoRAModel"

    def __init__(
        self,
        mlora_layers: Optional[List[int]] = None,
        base_size: int = 384,
        embd_model: str = "init",
        llm_tokenizer: str = "init",
        hidden_size_coef: float = 0.5,
        **kwargs,
    ):
        # Maps the Auto* factories to the remote-code classes in this repo.
        self.auto_map = {"AutoModel": "Arthur-LAGACHERIE/MetaLoRA-code--modeling_metalora.MLoRAModel", "AutoModelForCausalLM": "Arthur-LAGACHERIE/MetaLoRA-code--modeling_metalora.MLoRAModel", "AutoConfig":"Arthur-LAGACHERIE/MetaLoRA-code--configuration_metalora.MetaLoRAConfig"}
        # None sentinel instead of a mutable default argument: the old
        # signature used `=["init"]`, a single list object shared by every
        # call, which any in-place mutation would corrupt globally. A fresh
        # list is built here on each instantiation; the stored default value
        # ("init" sentinel) is unchanged for callers that omit the argument.
        self.mlora_layers = ["init"] if mlora_layers is None else mlora_layers
        self.base_size = base_size
        self.embd_model = embd_model
        self.llm_tokenizer = llm_tokenizer
        self.hidden_size_coef = hidden_size_coef
        super().__init__(**kwargs)