Skip to content

Commit

Permalink
Merge pull request #136 from Haidra-Org/main
Browse files Browse the repository at this point in the history
feat: comfy version c78214, extra logging, supports ComfyUI CLI args
  • Loading branch information
tazlin authored Dec 28, 2023
2 parents 493c203 + 5089ac2 commit 77bad29
Show file tree
Hide file tree
Showing 5 changed files with 116 additions and 7 deletions.
57 changes: 55 additions & 2 deletions hordelib/comfy_horde.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@
import copy
import gc
import glob
import hashlib
import json
import os
import re
Expand Down Expand Up @@ -82,6 +83,8 @@
_comfy_cleanup_models: types.FunctionType
_comfy_soft_empty_cache: types.FunctionType

_comfy_recursive_output_delete_if_changed: types.FunctionType

_canny: types.ModuleType
_hed: types.ModuleType
_leres: types.ModuleType
Expand All @@ -93,25 +96,45 @@


# isort: off
def do_comfy_import():
def do_comfy_import(force_normal_vram_mode: bool = False, extra_comfyui_args: list[str] | None = None) -> None:
global _comfy_current_loaded_models
global _comfy_load_models_gpu
global _comfy_nodes, _comfy_PromptExecutor, _comfy_validate_prompt
global _comfy_recursive_output_delete_if_changed
global _comfy_folder_names_and_paths, _comfy_supported_pt_extensions
global _comfy_load_checkpoint_guess_config
global _comfy_get_torch_device, _comfy_get_free_memory, _comfy_get_total_memory
global _comfy_load_torch_file, _comfy_model_loading
global _comfy_free_memory, _comfy_cleanup_models, _comfy_soft_empty_cache
global _canny, _hed, _leres, _midas, _mlsd, _openpose, _pidinet, _uniformer

logger.info("Disabling smart memory")

sys.argv.append("--disable-smart-memory")

if force_normal_vram_mode:
logger.info("Forcing normal vram mode")
sys.argv.append("--normalvram")

if extra_comfyui_args is not None:
sys.argv.extend(extra_comfyui_args)

# Note these imports are intentionally somewhat obfuscated as a reminder to other modules
# that they should never call through this module into comfy directly. All calls into
# comfy should be abstracted through functions in this module.
output_collector = OutputCollector()
with contextlib.redirect_stdout(output_collector), contextlib.redirect_stderr(output_collector):
from comfy.options import enable_args_parsing

enable_args_parsing()
import execution
from execution import nodes as _comfy_nodes
from execution import PromptExecutor as _comfy_PromptExecutor
from execution import validate_prompt as _comfy_validate_prompt
from execution import recursive_output_delete_if_changed

_comfy_recursive_output_delete_if_changed = recursive_output_delete_if_changed # type: ignore
execution.recursive_output_delete_if_changed = recursive_output_delete_if_changed_hijack
from folder_paths import folder_names_and_paths as _comfy_folder_names_and_paths # type: ignore
from folder_paths import supported_pt_extensions as _comfy_supported_pt_extensions # type: ignore
from comfy.sd import load_checkpoint_guess_config as _comfy_load_checkpoint_guess_config
Expand Down Expand Up @@ -147,7 +170,7 @@ def do_comfy_import():
# return torch.device("cpu")

# comfy.model_management.unet_inital_load_device = always_cpu
comfy.model_management.DISABLE_SMART_MEMORY = True
# comfy.model_management.DISABLE_SMART_MEMORY = True
# comfy.model_management.lowvram_available = True

# comfy.model_management.unet_offload_device = _unet_offload_device_hijack
Expand All @@ -165,6 +188,36 @@ def do_comfy_import():

# isort: on

# MD5 fingerprint of the most recent "prompt" pipeline settings seen by
# `recursive_output_delete_if_changed_hijack`; used only to detect and log
# when the pipeline settings change between executions.
_last_pipeline_settings_hash = ""


def recursive_output_delete_if_changed_hijack(prompt: dict, old_prompt, outputs, current_item):
    """Logging wrapper installed over comfy's `recursive_output_delete_if_changed`.

    Emits debug-level fingerprints (MD5 hashes) of the pipeline settings and of
    the positive/negative prompt text so that changes between executions are
    visible in the logs, then delegates to the original comfy implementation.

    Args:
        prompt: The full pipeline/prompt dict being executed.
        old_prompt: The previously executed pipeline dict (may be falsy).
        outputs: Passed through unchanged to the wrapped comfy function.
        current_item: The node key currently being visited.

    Returns:
        Whatever the wrapped comfy `recursive_output_delete_if_changed` returns.
    """
    global _last_pipeline_settings_hash
    global _comfy_recursive_output_delete_if_changed

    def _fingerprint(obj) -> str:
        # MD5 of the JSON serialization — used purely as a change-detection
        # fingerprint for logging, never for security.
        return hashlib.md5(json.dumps(obj).encode("utf-8")).hexdigest()

    if current_item == "prompt":
        pipeline_settings_hash = _fingerprint(prompt)
        logger.debug(f"pipeline_settings_hash: {pipeline_settings_hash}")

        if pipeline_settings_hash != _last_pipeline_settings_hash:
            _last_pipeline_settings_hash = pipeline_settings_hash
            logger.debug("Pipeline settings changed")

        if old_prompt:
            old_pipeline_settings_hash = _fingerprint(old_prompt)
            logger.debug(f"old_pipeline_settings_hash: {old_pipeline_settings_hash}")
            if pipeline_settings_hash != old_pipeline_settings_hash:
                logger.debug("Pipeline settings changed from old_prompt")

    if current_item in ("prompt", "negative_prompt"):
        try:
            # Hash only the text input of the node; missing keys are expected
            # for some pipelines and are silently ignored.
            prompt_hash = hashlib.md5(prompt[current_item]["inputs"]["text"].encode("utf-8")).hexdigest()
            logger.debug(f"{current_item} hash: {prompt_hash}")
        except KeyError:
            pass

    return _comfy_recursive_output_delete_if_changed(prompt, old_prompt, outputs, current_item)


def cleanup():
_comfy_soft_empty_cache()
Expand Down
2 changes: 1 addition & 1 deletion hordelib/consts.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@

from hordelib.config_path import get_hordelib_path

COMFYUI_VERSION = "e1345473413cafdd91e7af702b8950ed54d0556d"
COMFYUI_VERSION = "c782144433e41c21ae2dfd75d0bc28255d2e966d"
"""The exact version of ComfyUI version to load."""

REMOTE_PROXY = ""
Expand Down
21 changes: 19 additions & 2 deletions hordelib/initialisation.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,21 @@ def initialise(
clear_logs=False,
logging_verbosity=3,
process_id: int | None = None,
): # XXX # TODO Do we need `model_managers_to_load`?
force_normal_vram_mode: bool = True,
extra_comfyui_args: list[str] | None = None,
):
"""Initialise hordelib. This is required before using any other hordelib functions.
Args:
setup_logging (bool | None, optional): Whether to use hordelib's loguru logging system. Defaults to True.
clear_logs (bool, optional): Whether logs should be purged when loading loguru. Defaults to False.
logging_verbosity (int, optional): The message level of logging. Defaults to 3.
process_id (int | None, optional): If this is being used in a child process, the identifier. \
Defaults to None.
force_normal_vram_mode (bool, optional): Whether to forcibly disable ComfyUI's high/med vram modes. \
Defaults to True.
extra_comfyui_args (list[str] | None, optional): Any additional CLI args for comfyui that should be used. \
Defaults to None.
"""
global _is_initialised

# Wipe existing logs if requested
Expand Down Expand Up @@ -58,7 +72,10 @@ def initialise(

import hordelib.comfy_horde

hordelib.comfy_horde.do_comfy_import()
hordelib.comfy_horde.do_comfy_import(
force_normal_vram_mode=force_normal_vram_mode,
extra_comfyui_args=extra_comfyui_args,
)

vram_on_start_free = hordelib.comfy_horde.get_torch_free_vram_mb()
vram_total = hordelib.comfy_horde.get_torch_total_vram_mb()
Expand Down
Binary file modified images_expected/sampler_30_steps_k_dpmpp_sde.png
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
43 changes: 41 additions & 2 deletions tests/test_horde_samplers.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,9 +5,9 @@

from hordelib.horde import HordeLib

from .testing_shared_functions import check_single_inference_image_similarity
from .testing_shared_functions import check_single_inference_image_similarity, check_single_lora_image_similarity

SLOW_SAMPLERS = ["k_dpmpp_2s_a", "k_dpmpp_sde", "k_heun", "k_dpm_2", "k_dpm_2_a"]
SLOW_SAMPLERS = ["k_dpmpp_2s_a", "k_heun", "k_dpm_2", "k_dpm_2_a"] # "k_dpmpp_sde",


class TestHordeSamplers:
Expand Down Expand Up @@ -42,6 +42,45 @@ def test_ddim_sampler(
pil_image = hordelib_instance.basic_inference_single_image(data).image
assert pil_image is not None

def test_k_dpmpp_sde_sampler(
    self,
    stable_diffusion_model_name_for_testing: str,
    hordelib_instance: HordeLib,
):
    """Run a 30-step k_dpmpp_sde generation and compare against the expected image.

    Uses the (looser) lora similarity check because this sampler was moved out
    of the shared SLOW_SAMPLERS run into its own dedicated test.
    """
    payload = {
        "sampler_name": "k_dpmpp_sde",
        "cfg_scale": 6.5,
        "denoising_strength": 1.0,
        "seed": 3688490319,
        "height": 512,
        "width": 512,
        "karras": False,
        "tiling": False,
        "hires_fix": False,
        "clip_skip": 1,
        "control_type": None,
        "image_is_control": False,
        "return_control_map": False,
        "prompt": (
            "a woman closeup made out of metal, (cyborg:1.1), realistic skin, (detailed wire:1.3), "
            "(intricate details), hdr, (intricate details, hyperdetailed:1.2), cinematic shot, "
            "vignette, centered"
        ),
        "ddim_steps": 30,
        "n_iter": 1,
        "model": stable_diffusion_model_name_for_testing,
    }

    result_image = hordelib_instance.basic_inference_single_image(payload).image
    assert result_image is not None

    img_filename = "sampler_30_steps_k_dpmpp_sde.png"
    result_image.save(f"images/{img_filename}", quality=100)

    assert check_single_lora_image_similarity(
        f"images_expected/{img_filename}",
        result_image,
    )

def test_samplers(
self,
stable_diffusion_model_name_for_testing: str,
Expand Down

0 comments on commit 77bad29

Please sign in to comment.