We read every piece of feedback, and take your input very seriously.
To see all available qualifiers, see our documentation.
1 parent 8d8d1a0 commit a71eb74 — Copy full SHA for a71eb74
1 file changed
src/maxdiffusion/tests/wan_transformer_test.py
@@ -234,7 +234,6 @@ def test_wan_attention(self):
234
)
235
config = pyconfig.config
236
with mesh, nn_partitioning.axis_rules(config.logical_axis_rules):
237
- config.attention = attention_kernel
238
flash_block_sizes = get_flash_block_sizes(config)
239
attention = FlaxWanAttention(
240
rngs=rngs,
0 commit comments