Commit 212c87e

rope arg changed

In short: the two cross-attention RoPE modules now receive the transformer inner dimensions (inner_dim and audio_inner_dim) instead of audio_cross_attention_dim, and the transformer test no longer passes config=self.config.

1 parent: 4532bcd

2 files changed: 2 additions and 3 deletions

src/maxdiffusion/models/ltx2/transformer_ltx2.py (2 additions, 2 deletions)
@@ -676,7 +676,7 @@ def __init__(
 
     cross_attn_pos_embed_max_pos = max(self.pos_embed_max_pos, self.audio_pos_embed_max_pos)
     self.cross_attn_rope = LTX2RotaryPosEmbed(
-        dim=self.audio_cross_attention_dim,
+        dim=inner_dim,
         patch_size=self.patch_size,
         patch_size_t=self.patch_size_t,
         base_num_frames=cross_attn_pos_embed_max_pos,
@@ -690,7 +690,7 @@ def __init__(
         num_attention_heads=self.num_attention_heads,
     )
     self.cross_attn_audio_rope = LTX2RotaryPosEmbed(
-        dim=self.audio_cross_attention_dim,
+        dim=audio_inner_dim,
         patch_size=self.audio_patch_size,
         patch_size_t=self.audio_patch_size_t,
         base_num_frames=cross_attn_pos_embed_max_pos,
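
Why the argument changed (an editor's note, inferred from the diff rather than stated in the commit): a rotary embedding rotates the query/key features inside attention, so its dim must match the width of those features, i.e. the layer's inner dimension (num_attention_heads * attention_head_dim), not the width of the cross-attention context. A minimal sketch of the shape constraint follows; apply_rope is a generic rotate-half formulation, not LTX2RotaryPosEmbed, and the dimension values are hypothetical:

import jax.numpy as jnp

# Hypothetical dimensions; the real values come from the model config.
num_attention_heads, attention_head_dim = 32, 64
inner_dim = num_attention_heads * attention_head_dim  # width of Q/K after projection
audio_cross_attention_dim = 32                        # context width, not the Q/K width

def apply_rope(x, cos, sin):
    # x: (seq, dim), cos/sin: (seq, dim); standard rotate-half rotary formulation.
    x1, x2 = jnp.split(x, 2, axis=-1)
    rotated = jnp.concatenate([-x2, x1], axis=-1)
    return x * cos + rotated * sin

seq = 16
q = jnp.ones((seq, inner_dim))    # queries produced by the Q projection

cos = jnp.ones((seq, inner_dim))  # tables built with dim=inner_dim ...
sin = jnp.zeros((seq, inner_dim))
out = apply_rope(q, cos, sin)     # ... broadcast cleanly against q

bad_cos = jnp.ones((seq, audio_cross_attention_dim))  # tables built with the old argument
# apply_rope(q, bad_cos, bad_cos)  # would raise: (16, 2048) vs (16, 32)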

src/maxdiffusion/tests/ltx_2_transformer_test.py (0 additions, 1 deletion)
@@ -279,7 +279,6 @@ def test_transformer_3d_model_dot_product_attention(self):
         audio_attention_head_dim=128,
         audio_cross_attention_dim=32,
         num_layers=1,  # Reduced layers for speed
-        config=self.config,
         scan_layers=False,
         mesh=self.mesh,
         attention_kernel="dot_product"
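
A small, self-contained guard one could imagine a test using to catch this class of mismatch; the helper is hypothetical and not part of the repository:

# Hypothetical sanity check: the RoPE table width must match the Q/K width.
def check_rope_dim(rope_dim: int, num_heads: int, head_dim: int) -> None:
    inner_dim = num_heads * head_dim
    assert rope_dim == inner_dim, (
        f"RoPE dim {rope_dim} != inner dim {inner_dim}; "
        "rotary tables would not broadcast against Q/K"
    )

check_rope_dim(rope_dim=2048, num_heads=32, head_dim=64)  # passes
# check_rope_dim(rope_dim=32, num_heads=32, head_dim=64)  # would fail (the old argument)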
