Spaces:
Running
on
Zero
Running
on
Zero
Revert back to large LoRA layers
Browse files — llama_diffusion_model.py (+1, -1)
llama_diffusion_model.py
CHANGED
@@ -37,7 +37,7 @@ class CustomTransformerModel(PreTrainedModel):
|
|
37 |
param.requires_grad = True
|
38 |
|
39 |
lora_config = LoraConfig(
|
40 |
-
r=
|
41 |
target_modules=["q_proj", "v_proj", "k_proj", "o_proj"],
|
42 |
bias="none", task_type=None
|
43 |
)
|
|
|
37 |
param.requires_grad = True
|
38 |
|
39 |
lora_config = LoraConfig(
|
40 |
+
r=512, lora_alpha=512, lora_dropout=0.0,
|
41 |
target_modules=["q_proj", "v_proj", "k_proj", "o_proj"],
|
42 |
bias="none", task_type=None
|
43 |
)
|