diff --git a/hordelib/nodes/comfyui_layerdiffuse/lib_layerdiffusion/attention_sharing.py b/hordelib/nodes/comfyui_layerdiffuse/lib_layerdiffusion/attention_sharing.py
index a511f745..c707a949 100644
--- a/hordelib/nodes/comfyui_layerdiffuse/lib_layerdiffusion/attention_sharing.py
+++ b/hordelib/nodes/comfyui_layerdiffuse/lib_layerdiffusion/attention_sharing.py
@@ -296,7 +296,7 @@ def __init__(self, layer_list):
 class AttentionSharingPatcher(torch.nn.Module):
     def __init__(self, unet, frames=2, use_control=True, rank=256):
         super().__init__()
-        model_management.unload_model_clones(unet)
+        # model_management.unload_model_clones(unet)  # this is now handled implicitly in comfyui
         units = []
         for i in range(32):