Skip to content

Commit 946a723

Browse files
committed
debug
1 parent 3807a4e commit 946a723

5 files changed

Lines changed: 504 additions & 16 deletions

File tree

src/maxdiffusion/models/ltx2/attention_ltx2.py

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -503,11 +503,13 @@ def __call__(
503503
attn_output = self.attention_op.apply_attention(query=query, key=key, value=value, attention_mask=attention_mask)
504504

505505
if perturbation_mask is not None:
506+
print("DEBUG: Applying perturbation mask")
506507
# value is [B, S, InnerDim]
507508
# attn_output is [B, S, InnerDim]
508509
attn_output = value + perturbation_mask * (attn_output - value)
509510

510511
if getattr(self, "to_gate_logits", None) is not None:
512+
print("DEBUG: Applying gated attention")
511513
gate_logits = self.to_gate_logits(hidden_states)
512514
b, s, _ = attn_output.shape
513515
attn_output = attn_output.reshape(b, s, self.heads, self.dim_head)

0 commit comments

Comments (0)