Commit 38ba061

lkm2835 and hmellor authored
[BugFix] Fix EXAONE4 rotary embeddings (vllm-project#23918)
Signed-off-by: lkm2835 <[email protected]>
Signed-off-by: Harry Mellor <[email protected]>
Co-authored-by: Harry Mellor <[email protected]>
1 parent 0a74e9d commit 38ba061

File tree

1 file changed: +3 -3 lines changed

vllm/model_executor/models/exaone4.py

Lines changed: 3 additions & 3 deletions
@@ -164,8 +164,8 @@ def __init__(
         is_sliding = config.layer_types[layer_idx] == "sliding_attention"
         self.sliding_window = config.sliding_window if is_sliding else None

-        # apply rotary embeddings to every layer
-        self.apply_all_layers = not is_sliding
+        # apply rotary embeddings to every layer in full attention models
+        self.apply_rope_all_layers = "sliding_attention" not in config.layer_types

         self.rotary_emb = get_rope(
             self.head_dim,

@@ -201,7 +201,7 @@ def forward(
         k = self.k_norm(k)
         k = k.flatten(-2, -1)

-        if self.sliding_window or self.apply_all_layers:
+        if self.sliding_window or self.apply_rope_all_layers:
             q, k = self.rotary_emb(positions, q, k)
         attn_output = self.attn(q, k, v)
         output, _ = self.o_proj(attn_output)
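For context, the behavioral difference is easiest to see in isolation. The old per-layer flag made the RoPE condition true for every layer, including the full-attention layers of a hybrid model, which this fix implies should skip RoPE; the new model-wide flag applies RoPE to every layer only when the config contains no sliding-attention layers at all. Below is a minimal standalone sketch of the two conditions. The helper names, the example layer-type lists, and the placeholder window size are illustrative assumptions, not vLLM APIs.

# Sketch of the condition change (hypothetical configs, not real EXAONE4 values).

def applies_rope_old(layer_types, layer_idx):
    # Old logic: per-layer flag derived from this layer's own type.
    is_sliding = layer_types[layer_idx] == "sliding_attention"
    sliding_window = 4096 if is_sliding else None  # placeholder window size
    apply_all_layers = not is_sliding
    # One of the two operands is always true, so RoPE was applied everywhere.
    return sliding_window is not None or apply_all_layers

def applies_rope_new(layer_types, layer_idx):
    # New logic: model-wide flag; RoPE on every layer only for
    # full-attention models with no sliding layers at all.
    is_sliding = layer_types[layer_idx] == "sliding_attention"
    sliding_window = 4096 if is_sliding else None  # placeholder window size
    apply_rope_all_layers = "sliding_attention" not in layer_types
    return sliding_window is not None or apply_rope_all_layers

hybrid = ["sliding_attention", "full_attention", "sliding_attention"]
print([applies_rope_old(hybrid, i) for i in range(3)])  # [True, True, True]
print([applies_rope_new(hybrid, i) for i in range(3)])  # [True, False, True]

full = ["full_attention", "full_attention"]
print([applies_rope_new(full, i) for i in range(2)])    # [True, True]

In a hybrid model, the full-attention layers now skip rotary embeddings, while full-attention-only models keep RoPE on every layer, matching the renamed apply_rope_all_layers flag.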
