Skip to content

Commit 06f85e2

Browse files
authored
Fix text encoder lora loading for wrapped models (Comfy-Org#12852)
1 parent e4b0bb8 commit 06f85e2

1 file changed

Lines changed: 3 additions & 0 deletions

File tree

comfy/lora.py

Lines changed: 3 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -99,6 +99,9 @@ def model_lora_keys_clip(model, key_map={}):
     for k in sdk:
         if k.endswith(".weight"):
             key_map["text_encoders.{}".format(k[:-len(".weight")])] = k #generic lora format without any weird key names
+            tp = k.find(".transformer.") #also map without wrapper prefix for composite text encoder models
+            if tp > 0 and not k.startswith("clip_"):
+                key_map["text_encoders.{}".format(k[tp + 1:-len(".weight")])] = k

     text_model_lora_key = "lora_te_text_model_encoder_layers_{}_{}"
     clip_l_present = False

0 commit comments

Comments (0)