
chore: review suggestions
ariG23498 authored and committed on Oct 8, 2024
Commit 09a1958 (1 parent: c8a2700)
Showing 2 changed files with 11 additions and 19 deletions.
src/peft/tuners/lora/dora.py: 3 changes (0 additions, 3 deletions)
@@ -97,9 +97,6 @@ def forward(self, x, *, lora_A, lora_B, scaling, base_layer, base_result=None):
 
         result_dora = (mag_norm_scale - 1) * base_result + mag_norm_scale * lora_result * scaling
 
-        if bias is not None:
-            result_dora = result_dora + bias
-
         return result_dora
 
     def __repr__(self) -> str:
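Editor's note: the deleted bias re-addition would have double-counted the bias. The caller's base_result already includes the base layer's bias, and with this change the DoRA forward subtracts that bias from base_result before scaling, so the combined output picks the bias up exactly once from the caller. Below is a minimal sketch of that bookkeeping; the tensors and names (W, bias, mag_norm_scale, lora) are illustrative stand-ins, not PEFT's API.

    import torch

    # Hypothetical shapes; this only checks the bias arithmetic implied by the diff.
    W = torch.randn(4, 3)
    bias = torch.randn(4)
    x = torch.randn(3)
    mag_norm_scale = torch.rand(4)   # stands in for magnitude / weight_norm
    lora = torch.randn(4)            # stands in for lora_B(lora_A(x)) * scaling

    base_result = W @ x + bias                 # the caller's result, bias included
    base_no_bias = base_result - bias          # dora.py strips the bias first
    result_dora = (mag_norm_scale - 1) * base_no_bias + mag_norm_scale * lora

    total = base_result + result_dora          # the caller adds the DoRA delta back
    expected = mag_norm_scale * (W @ x + lora) + bias
    assert torch.allclose(total, expected, atol=1e-6)  # bias counted exactly once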
src/peft/tuners/lora/layer.py: 27 changes (11 additions, 16 deletions)
@@ -586,24 +586,19 @@ def forward(self, x: torch.Tensor, *args: Any, **kwargs: Any) -> torch.Tensor:
                     result = result + lora_B(lora_A(dropout(x))) * scaling
                 else:
                     if isinstance(dropout, nn.Identity) or not self.training:
-                        result = self.lora_magnitude_vector[active_adapter](
-                            x,
-                            lora_A=lora_A,
-                            lora_B=lora_B,
-                            scaling=scaling,
-                            base_layer=self.get_base_layer(),
-                            base_result=result,
-                        )
+                        base_result = result
                     else:
                         x = dropout(x)
-                        result = result + self.lora_magnitude_vector[active_adapter](
-                            x,
-                            lora_A=lora_A,
-                            lora_B=lora_B,
-                            scaling=scaling,
-                            base_layer=self.get_base_layer(),
-                            result=None,
-                        )
+                        base_result = None
+
+                    result = result + self.lora_magnitude_vector[active_adapter](
+                        x,
+                        lora_A=lora_A,
+                        lora_B=lora_B,
+                        scaling=scaling,
+                        base_layer=self.get_base_layer(),
+                        base_result=base_result,
+                    )
 
             result = result.to(torch_result_dtype)
 

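Editor's note: this change hoists the duplicated lora_magnitude_vector call out of the two branches; the branches now only decide base_result (the reusable base output when no dropout is active, else None so DoRA recomputes it from the dropped-out x). A minimal sketch of the resulting control flow, where dora_flow and dora_forward are hypothetical stand-ins for PEFT's layer method and DoRA module, not the library's API:

    import torch
    import torch.nn as nn

    def dora_flow(x, result, dropout, training, dora_forward, **kwargs):
        # Mirrors the post-refactor control flow: the branches pick base_result,
        # then a single DoRA call replaces the one-call-per-branch duplication.
        if isinstance(dropout, nn.Identity) or not training:
            # No dropout applied: the base output computed by the caller can be
            # reused inside the DoRA forward instead of being recomputed.
            base_result = result
        else:
            # Dropout alters x, so the cached base output no longer matches;
            # pass None to signal that DoRA must recompute it itself.
            x = dropout(x)
            base_result = None
        return result + dora_forward(x, base_result=base_result, **kwargs)

    # Hypothetical usage with a trivial stub in place of the DoRA module:
    out = dora_flow(
        torch.ones(2), torch.zeros(2), nn.Identity(), False,
        lambda x, base_result=None: x * 0.1,
    )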
