
fix formatting
gagika committed Feb 19, 2025
1 parent 66d68e3 commit 8b6d384
Showing 1 changed file with 3 additions and 1 deletion.
4 changes: 3 additions & 1 deletion MaxText/layers/attentions.py
@@ -1386,7 +1386,9 @@ def setup(self):
     super().setup()
 
     # Assert required configuration parameters for MLA attention.
-    assert self.config.attention_type == AttentionType.MLA.value, f"MLA requires MLA attention type {AttentionType.MLA.value}"
+    assert (
+        self.config.attention_type == AttentionType.MLA.value
+    ), f"MLA requires MLA attention type {AttentionType.MLA.value}"
     assert self.kv_lora_rank > 0, "KV LoRA rank must be > 0"
     assert self.qk_nope_head_dim > 0, "QK NoPe head dim must be > 0"
     assert self.qk_rope_head_dim > 0, "QK RoPE head dim must be > 0"
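The change is purely cosmetic: the over-long single-line assert is wrapped in parentheses so the condition can break across lines while the f-string message stays attached, the split a formatter such as black produces. A minimal standalone sketch of the pattern, with a hypothetical Config class and enum value standing in for MaxText's real definitions:

from dataclasses import dataclass
from enum import Enum


class AttentionType(Enum):
  MLA = "mla"  # hypothetical value, for illustration only


@dataclass
class Config:
  attention_type: str


def check_mla_config(config: Config) -> None:
  # Parenthesizing the condition lets the assert span multiple
  # lines without changing its behavior.
  assert (
      config.attention_type == AttentionType.MLA.value
  ), f"MLA requires MLA attention type {AttentionType.MLA.value}"


check_mla_config(Config(attention_type="mla"))  # passes; any other value raises AssertionError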
