Skip to content

Commit e89b229

Browse files
authored
Support ModelScope-Trainer/DiffSynth LoRA format for Flux.2 Klein models (Comfy-Org#12042)
1 parent 55bd606 commit e89b229

File tree

1 file changed

+1
-0
lines changed

1 file changed

+1
-0
lines changed

comfy/lora.py

Lines changed: 1 addition & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -260,6 +260,7 @@ def model_lora_keys_unet(model, key_map={}):
260260
key_map["transformer.{}".format(k[:-len(".weight")])] = to #simpletrainer and probably regular diffusers flux lora format
261261
key_map["lycoris_{}".format(k[:-len(".weight")].replace(".", "_"))] = to #simpletrainer lycoris
262262
key_map["lora_transformer_{}".format(k[:-len(".weight")].replace(".", "_"))] = to #onetrainer
263+
key_map[k[:-len(".weight")]] = to #DiffSynth lora format
263264
for k in sdk:
264265
hidden_size = model.model_config.unet_config.get("hidden_size", 0)
265266
if k.endswith(".weight") and ".linear1." in k:

0 commit comments

Comments (0)