Skip to content

Commit 95e1059

Browse files
authored
fix(ace15): handle missing lm_metadata in memory estimation during checkpoint export Comfy-Org#12669 (Comfy-Org#12686)
1 parent 80d4944 commit 95e1059

1 file changed

Lines changed: 2 additions & 2 deletions

File tree

comfy/text_encoders/ace15.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -328,14 +328,14 @@ def load_sd(self, sd):
328328
return getattr(self, self.lm_model).load_sd(sd)
329329

330330
def memory_estimation_function(self, token_weight_pairs, device=None):
    """Estimate memory (in bytes) needed to run the LM on the given prompt.

    Args:
        token_weight_pairs: dict that may contain "lm_metadata" (dict with a
            "min_tokens" entry) and "lm_prompt" (a list of token sequences).
            Both keys are optional — e.g. during checkpoint export the
            metadata is absent, so `.get` fallbacks are used throughout.
        device: target device, used only to decide whether bf16 halves the
            per-token cost.

    Returns:
        Estimated memory requirement in bytes (num_tokens * constant MiB).
    """
    # Missing metadata (e.g. checkpoint export) must not raise — fall back
    # to an empty dict so min_tokens defaults to 0 below.
    lm_metadata = token_weight_pairs.get("lm_metadata", {})
    constant = self.constant
    if comfy.model_management.should_use_bf16(device):
        # bf16 halves the per-token memory footprint relative to fp32.
        constant *= 0.5

    token_weight_pairs = token_weight_pairs.get("lm_prompt", [])
    # Idiomatic generator expression instead of map(lambda a: len(a), ...).
    num_tokens = sum(len(a) for a in token_weight_pairs)
    num_tokens += lm_metadata.get("min_tokens", 0)
    return num_tokens * constant * 1024 * 1024
340340

341341
def te(dtype_llama=None, llama_quantization_metadata=None, lm_model="qwen3_2b"):

0 commit comments

Comments
 (0)