We read every piece of feedback, and take your input very seriously.
To see all available qualifiers, see our documentation.
1 parent af2b144 · commit 1e79c0b — Copy full SHA for 1e79c0b
1 file changed
src/maxdiffusion/models/wan/wan_utils.py
@@ -283,11 +283,6 @@ def load_base_wan_transformer(
 283       # Rename weight to kernel
 284       if "weight" in renamed_pt_key:
 285         renamed_pt_key = renamed_pt_key.replace("weight", "kernel")
 286  -
 287  -     # CRITICAL FIX: Transpose the weights
 288  -     # PyTorch Linear is (Out, In), JAX Dense is (In, Out).
 289  -     # Ensure 'pt_tensor' is the variable holding your weight tensor.
 290  -     pt_tensor = pt_tensor.T
 291
 292       # 5. Fix for 'norm_added_q' which showed up in your missing keys list
 293       # The error said 'kernel' was missing, implying this specific norm might act like a dense layer
0 commit comments