Example from LoRA
- https://github.com/michaelnny/QLoRA-LLM/blob/main/qlora_llm/models/lora.py#L211C1-L243C10
- If a class inherits from two parent classes whose `__init__` signatures differ, you can call each parent's `__init__` explicitly. If you use `super().__init__()` instead, then every class along the MRO must also call `super().__init__()` and forward the remaining arguments (cooperative multiple inheritance; see the sketch after the code below).
```python
from bitsandbytes.nn import Linear4bit  # quantized linear layer from bitsandbytes

# LoRALayer is defined earlier in the same file (see the link above).


class LoRALinear4bit(Linear4bit, LoRALayer):
    def __init__(
        self,
        in_features,
        out_features,
        bias=True,
        compress_statistics=True,
        quant_type='fp4',
        compute_dtype=None,
        device=None,
        r: int = 0,
        lora_scaling: float = 1.0,
        lora_dropout: float = 0.0,
        merge_weights: bool = True,
    ) -> None:
        # The two parents take different parameters and do not cooperate
        # via super(), so each __init__ is called explicitly.
        Linear4bit.__init__(
            self,
            input_features=in_features,
            output_features=out_features,
            bias=bias,
            compute_dtype=compute_dtype,
            compress_statistics=compress_statistics,
            quant_type=quant_type,
            device=device,
        )
        LoRALayer.__init__(
            self,
            r=r,
            lora_scaling=lora_scaling,
            lora_dropout=lora_dropout,
            merge_weights=merge_weights,
        )
```
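For contrast, here is a minimal sketch of the cooperative `super()` route mentioned above. The class names `Quantized`, `LoRAMixin`, and `LoRAQuantizedLinear` are hypothetical and only illustrate the pattern: each class consumes its own keyword arguments and forwards the rest with `super().__init__(**kwargs)`, so a single `super()` call in the child walks the whole MRO.

```python
class Quantized:
    def __init__(self, *, in_features: int, out_features: int, **kwargs) -> None:
        super().__init__(**kwargs)  # continue along the MRO with leftover kwargs
        self.in_features = in_features
        self.out_features = out_features


class LoRAMixin:
    def __init__(self, *, r: int = 0, lora_scaling: float = 1.0, **kwargs) -> None:
        super().__init__(**kwargs)  # eventually reaches object.__init__()
        self.r = r
        self.lora_scaling = lora_scaling


class LoRAQuantizedLinear(Quantized, LoRAMixin):
    def __init__(self, **kwargs) -> None:
        # One super() call runs Quantized.__init__ -> LoRAMixin.__init__
        # -> object.__init__ in MRO order; each class picks off its own kwargs.
        super().__init__(**kwargs)


layer = LoRAQuantizedLinear(in_features=16, out_features=32, r=8)
```

This only works if every class in the hierarchy follows the same convention; a parent like bitsandbytes' `Linear4bit` that does not forward `**kwargs` breaks the chain, which is why the repo calls each parent's `__init__` explicitly instead.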