Skip to content

Commit 2f42c23

Browse files
committed
ruff related checks
1 parent 88338f3 commit 2f42c23

3 files changed

Lines changed: 0 additions & 22 deletions

File tree

src/maxdiffusion/generate_ltx2.py

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -83,7 +83,6 @@ def call_pipeline(config, pipeline, prompt, negative_prompt):
8383
# Set default generation arguments
8484
generator = jax.random.key(config.seed) if hasattr(config, "seed") else jax.random.key(0)
8585
guidance_scale = config.guidance_scale if hasattr(config, "guidance_scale") else 3.0
86-
do_classifier_free_guidance = guidance_scale > 1.0
8786

8887
out = pipeline(
8988
prompt=prompt,

src/maxdiffusion/models/ltx2/ltx2_utils.py

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -313,7 +313,6 @@ def load_vae_weights(
313313
flax_key, flax_tensor = rename_key_and_reshape_tensor(pt_tuple_key, tensor, random_flax_state_dict)
314314
flax_key = _tuple_str_to_int(flax_key)
315315

316-
flax_key_str = [str(x) for x in flax_key]
317316

318317
if resnet_index is not None:
319318
if flax_key in flax_state_dict:

src/maxdiffusion/pipelines/ltx2/ltx2_pipeline.py

Lines changed: 0 additions & 20 deletions
Original file line numberDiff line numberDiff line change
@@ -665,26 +665,6 @@ def quantize_transformer(cls, config: HyperParameters, model: Any, pipeline: "LT
665665
max_logging.log("Qwix Quantization complete.")
666666
return quantized_model
667667

668-
@classmethod
669-
def load_transformer(
670-
cls,
671-
devices_array: np.array,
672-
mesh: Mesh,
673-
rngs: nnx.Rngs,
674-
config: HyperParameters,
675-
restored_checkpoint=None,
676-
subfolder="transformer",
677-
):
678-
with mesh:
679-
transformer = create_sharded_logical_transformer(
680-
devices_array=devices_array,
681-
mesh=mesh,
682-
rngs=rngs,
683-
config=config,
684-
restored_checkpoint=restored_checkpoint,
685-
subfolder=subfolder,
686-
)
687-
return transformer
688668

689669
def _get_gemma_prompt_embeds(
690670
self,

0 commit comments

Comments (0)