Skip to content

Commit 717d6e4

Browse files
committed
[lora_conversion] Enhance key handling for OneTrainer components in LORA conversion utility (#11441)
1 parent 8c661ea commit 717d6e4

File tree

1 file changed

+18
-1
lines changed

1 file changed

+18
-1
lines changed

src/diffusers/loaders/lora_conversion_utils.py

+18-1
Original file line number | Diff line number | Diff line change
@@ -727,8 +727,25 @@ def _convert(original_key, diffusers_key, state_dict, new_state_dict):
727727
elif k.startswith("lora_te1_"):
728728
has_te_keys = True
729729
continue
730+
elif k.startswith("lora_transformer_context_embedder"):
731+
diffusers_key = "context_embedder"
732+
elif k.startswith("lora_transformer_norm_out_linear"):
733+
diffusers_key = "norm_out.linear"
734+
elif k.startswith("lora_transformer_proj_out"):
735+
diffusers_key = "proj_out"
736+
elif k.startswith("lora_transformer_x_embedder"):
737+
diffusers_key = "x_embedder"
738+
elif k.startswith("lora_transformer_time_text_embed_guidance_embedder_linear_"):
739+
i = int(k.split("lora_transformer_time_text_embed_guidance_embedder_linear_")[-1])
740+
diffusers_key = f"time_text_embed.guidance_embedder.linear_{i}"
741+
elif k.startswith("lora_transformer_time_text_embed_text_embedder_linear_"):
742+
i = int(k.split("lora_transformer_time_text_embed_text_embedder_linear_")[-1])
743+
diffusers_key = f"time_text_embed.text_embedder.linear_{i}"
744+
elif k.startswith("lora_transformer_time_text_embed_timestep_embedder_linear_"):
745+
i = int(k.split("lora_transformer_time_text_embed_timestep_embedder_linear_")[-1])
746+
diffusers_key = f"time_text_embed.timestep_embedder.linear_{i}"
730747
else:
731-
raise NotImplementedError
748+
raise NotImplementedError(k)
732749

733750
if "attn_" in k:
734751
if "_to_out_0" in k:

0 commit comments

Comments (0)