Skip to content

Commit

Permalink
FIX: backward-compatibility (BC) breaking change to the BOFT Conv2d scaling variable (#2127)
Browse files Browse the repository at this point in the history
  • Loading branch information
Zeju1997 authored Oct 7, 2024
1 parent 8d9ecbe commit e6f927b
Showing 1 changed file with 3 additions and 3 deletions.
6 changes: 3 additions & 3 deletions src/peft/tuners/boft/layer.py
Original file line number Diff line number Diff line change
Expand Up @@ -772,7 +772,7 @@ def update_layer(
self.boft_R[adapter_name] = nn.Parameter(
torch.zeros(boft_n_butterfly_factor + 1, boft_block_num, boft_block_size, boft_block_size)
)
self.boft_s[adapter_name] = nn.Parameter(torch.ones(int(self.out_features), 1))
self.boft_s[adapter_name] = nn.Parameter(torch.ones(1, int(self.out_features)))

self.reset_boft_parameters(adapter_name, init_weights)

Expand Down Expand Up @@ -881,7 +881,7 @@ def get_delta_weight(self, adapter) -> tuple[torch.Tensor, torch.Tensor]:
"""

boft_R = self.boft_R[adapter]
boft_s = self.boft_s[adapter]
boft_s = self.boft_s[adapter].transpose(0, 1)

N, D, H, _ = boft_R.shape
boft_R = boft_R.view(N * D, H, H)
Expand Down Expand Up @@ -925,7 +925,7 @@ def forward(self, x: torch.Tensor, *args: Any, **kwargs: Any) -> torch.Tensor:
if active_adapter not in self.boft_R.keys():
continue
boft_R = self.boft_R[active_adapter]
boft_s = self.boft_s[active_adapter]
boft_s = self.boft_s[active_adapter].transpose(0, 1)
dropout = self.boft_dropout[active_adapter]

N, D, H, _ = boft_R.shape
Expand Down

0 comments on commit e6f927b

Please sign in to comment.