Skip to content

Commit 43fccc1

Browse files
committed
fix
1 parent 1536e5c commit 43fccc1

3 files changed

Lines changed: 5 additions & 5 deletions

File tree

src/maxdiffusion/generate_wan.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -157,7 +157,7 @@ def run(config, pipeline=None, filename_prefix=""):
   lora_config = config.lora_config

   if len(lora_config["lora_model_name_or_path"]) > 1:
-    max_logging.warning("Found multiple LoRAs in config, but only loading the first one for WAN 2.2.")
+    max_logging.log("Found multiple LoRAs in config, but only loading the first one for WAN 2.2.")

   pipeline = lora_loader.load_lora_weights(
     pipeline,

src/maxdiffusion/loaders/wan_lora_nnx_loader.py

src/maxdiffusion/loaders/wan_lora_nnx_loader.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -51,7 +51,7 @@ def load_lora_weights(
     )
     lora_nnx.merge_lora(pipeline.high_noise_transformer, h_state_dict, scale)
   else:
-    max_logging.warning("high_noise_transformer not found or no weight name provided for LoRA.")
+    max_logging.log("high_noise_transformer not found or no weight name provided for LoRA.")

   # Handle low noise model
   if hasattr(pipeline, "low_noise_transformer") and low_noise_weight_name:
@@ -61,6 +61,6 @@ def load_lora_weights(
     )
     lora_nnx.merge_lora(pipeline.low_noise_transformer, l_state_dict, scale)
   else:
-    max_logging.warning("low_noise_transformer not found or no weight name provided for LoRA.")
+    max_logging.log("low_noise_transformer not found or no weight name provided for LoRA.")

   return pipeline

src/maxdiffusion/models/lora_nnx.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -217,7 +217,7 @@ def merge_lora(model: nnx.Module, state_dict: dict, scale: float):
         lora_params[module_path_str] = {}
       lora_params[module_path_str][weight_type] = jnp.array(v)
     else:
-      max_logging.warning(f"Could not parse LoRA key: {k}")
+      max_logging.log(f"Could not parse LoRA key: {k}")

   assigned_count = 0
   for path, module in nnx.iter_graph(model):
@@ -257,5 +257,5 @@ def merge_lora(model: nnx.Module, state_dict: dict, scale: float):
           f"with kernel_size {module.kernel_size} > 1 is not supported."
         )
       else:
-        max_logging.warning(f"LoRA weights for {matched_key} incomplete.")
+        max_logging.log(f"LoRA weights for {matched_key} incomplete.")
   max_logging.log(f"Merged weights into {assigned_count} layers in {type(model).__name__}.")

0 commit comments

Comments (0)