Fix a tiny type error in the Llama4 constructor (#6752)

This commit is contained in:
Brayden Zhong
2025-08-09 04:03:59 -04:00
committed by GitHub
parent 1b7afad0dd
commit 4a9f3eef90

View File

@@ -204,7 +204,7 @@ class Llama4Attention(nn.Module):
super().__init__()
self.layer_id = layer_id
self.hidden_size = hidden_size
self.use_rope = int((layer_id + 1) % 4 != 0)
self.use_rope = (layer_id + 1) % 4 != 0
self.use_qk_norm = config.use_qk_norm and self.use_rope
attn_tp_rank = get_attention_tp_rank()