From 13a965a26c09d6ce3442b431bb6286d8596c74d3 Mon Sep 17 00:00:00 2001
From: Jaret Burkett
Date: Sun, 18 Aug 2024 09:33:31 -0600
Subject: [PATCH] Fixed bad key naming on lora fuse I just pushed

---
 toolkit/stable_diffusion_model.py | 6 ++----
 1 file changed, 2 insertions(+), 4 deletions(-)

diff --git a/toolkit/stable_diffusion_model.py b/toolkit/stable_diffusion_model.py
index d71a5f01..a5c2eefe 100644
--- a/toolkit/stable_diffusion_model.py
+++ b/toolkit/stable_diffusion_model.py
@@ -545,11 +545,9 @@ def load_model(self):
             double_block_key = "transformer.transformer_blocks."
             for key, value in lora_state_dict.items():
                 if single_block_key in key:
-                    new_key = key.replace(single_block_key, "")
-                    single_transformer_lora[new_key] = value
+                    single_transformer_lora[key] = value
                 elif double_block_key in key:
-                    new_key = key.replace(double_block_key, "")
-                    double_transformer_lora[new_key] = value
+                    double_transformer_lora[key] = value
                 else:
                     raise ValueError(f"Unknown lora key: {key}. Cannot load this lora in low vram mode")
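
For reference, below is a minimal standalone sketch (not the repository's code) of the key routing this patch produces. The prefix and dictionary names mirror the diff; the example LoRA keys and values are hypothetical placeholders, since real checkpoints map keys to tensors.

    # Sketch of the post-patch key routing, assuming a FLUX-style LoRA state dict.
    single_block_key = "transformer.single_transformer_blocks."
    double_block_key = "transformer.transformer_blocks."

    # Hypothetical keys; floats stand in for the real weight tensors.
    lora_state_dict = {
        "transformer.single_transformer_blocks.0.attn.to_q.lora_A.weight": 0.1,
        "transformer.transformer_blocks.3.attn.to_k.lora_B.weight": 0.2,
    }

    single_transformer_lora = {}
    double_transformer_lora = {}

    for key, value in lora_state_dict.items():
        if single_block_key in key:
            # Keep the full key; the old code stripped the prefix via
            # key.replace(single_block_key, ""), which broke fusing.
            single_transformer_lora[key] = value
        elif double_block_key in key:
            double_transformer_lora[key] = value
        else:
            raise ValueError(f"Unknown lora key: {key}. Cannot load this lora in low vram mode")

    print(list(single_transformer_lora))  # prefixes are preserved
    print(list(double_transformer_lora))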