diff --git a/ConfigSlider.gd b/ConfigSlider.gd
index 53cb6ca..ca496e7 100644
--- a/ConfigSlider.gd
+++ b/ConfigSlider.gd
@@ -76,6 +76,7 @@ func _ready():
ParamBus.connect("models_changed",self,"_on_models_changed")
# warning-ignore:return_value_discarded
EventBus.connect("kudos_calculated", self, "_on_kudos_calculated")
+ ParamBus.connect("params_changed",self,"_on_params_changed")
func set_value(value) -> void:
$"%HSlider".value = value
@@ -86,12 +87,21 @@ func set_max_value(max_value) -> void:
$"%ConfigValue".text = str(max_value)
$"%HSlider".max_value = max_value
+func set_upfront_limit(_upfront_limit) -> void:
+ upfront_limit = _upfront_limit
+ if not globals.config.get_value("Options", "larger_values", false):
+ $"%HSlider".max_value = upfront_limit
+
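+# Restores the upfront limit from this slider's CONFIG entry, when one is defined.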
+func reset_upfront_limit() -> void:
+ if not CONFIG[config_setting].has('upfront_limit'):
+ return
+ upfront_limit = CONFIG[config_setting].upfront_limit
+ if not globals.config.get_value("Options", "larger_values", false):
+ $"%HSlider".max_value = upfront_limit
+
func reset_max_value() -> void:
$"%HSlider".max_value = CONFIG[config_setting].max
- if CONFIG[config_setting].has('upfront_limit'):
- upfront_limit = CONFIG[config_setting].upfront_limit
- if not globals.config.get_value("Options", "larger_values", false):
- $"%HSlider".max_value = CONFIG[config_setting].upfront_limit
+ reset_upfront_limit()
func _on_HSlider_drag_ended(value_changed):
if not value_changed:
@@ -126,6 +136,7 @@ func _on_setting_changed(setting_name):
$"%HSlider".max_value = CONFIG[config_setting].max
else:
$"%HSlider".max_value = CONFIG[config_setting].upfront_limit
+ _on_params_changed()
# Only called for width/height changes
func _on_config_slider_changed() -> void:
@@ -135,7 +146,7 @@ func _on_config_slider_changed() -> void:
else:
config_value.modulate = Color(1,1,1)
$"%HSlider".modulate = Color(1,1,1)
-
+
# Only called for width/height changes
func _on_wh_changed(sister_slider) -> void:
stored_sister_slider = sister_slider
@@ -146,8 +157,12 @@ func _on_wh_changed(sister_slider) -> void:
upfront_limit = 576
if "stable diffusion 2" in baselines:
upfront_limit = 768
- if "SDXL" in baselines:
+ if "stable_diffusion_xl" in baselines:
upfront_limit = 1024
+ if not globals.config.get_value("Options", "larger_values", false):
+ $"%HSlider".max_value = upfront_limit
+ if int(config_value.text) > upfront_limit:
+ config_value.text = str(upfront_limit)
if sister_slider.h_slider.value * h_slider.value > upfront_limit * upfront_limit and globals.user_kudos < generation_kudos:
for n in [sister_slider, self]:
n.config_value.modulate = Color(1,0,0)
@@ -156,7 +171,7 @@ func _on_wh_changed(sister_slider) -> void:
for n in [sister_slider, self]:
n.config_value.modulate = Color(1,1,1)
n.h_slider.modulate = Color(1,1,1)
-
+
func _on_models_changed(_models) -> void:
if not stored_sister_slider:
@@ -169,3 +184,28 @@ func _on_kudos_calculated(kudos) -> void:
_on_wh_changed(stored_sister_slider)
else:
_on_config_slider_changed()
+
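+# Reacts to ParamBus params changes: tightens the steps and cfg_scale sliders while
+# an LCM payload or a ControlNet is active, and restores the normal maximums otherwise.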
+func _on_params_changed() -> void:
+ if config_setting == "steps":
+ if ParamBus.has_controlnet() or ParamBus.is_lcm_payload():
+ if ParamBus.is_lcm_payload():
+ # Protect the user a bit during switching to LCM
+ if h_slider.value > 40:
+ h_slider.value = 8
+ set_upfront_limit(10)
+ else:
+ reset_upfront_limit()
+ if h_slider.value > 40:
+ h_slider.value = 20
+ set_max_value(40)
+ else:
+ reset_max_value()
+ if config_setting == "cfg_scale":
+ if ParamBus.is_lcm_payload():
+ # Protect the user a bit
+ if h_slider.value > 4:
+ h_slider.value = 2
+ set_max_value(4)
+ else:
+ reset_max_value()
+
diff --git a/ConfigSlider.tscn b/ConfigSlider.tscn
index b0088df..57eb9ff 100644
--- a/ConfigSlider.tscn
+++ b/ConfigSlider.tscn
@@ -35,7 +35,7 @@ align = 2
unique_name_in_owner = true
margin_top = 24.0
margin_right = 300.0
-margin_bottom = 40.0
+margin_bottom = 36.0
mouse_filter = 1
min_value = 1.0
max_value = 20.0
diff --git a/LucidCreations.tscn b/LucidCreations.tscn
index c39ea89..0f56661 100644
--- a/LucidCreations.tscn
+++ b/LucidCreations.tscn
@@ -785,7 +785,7 @@ custom_styles/panel = ExtResource( 39 )
margin_left = 10.0
margin_top = 10.0
margin_right = 614.0
-margin_bottom = 483.0
+margin_bottom = 515.0
[node name="LoraInfoLabel" type="RichTextLabel" parent="Margin/Panel/Display/Panels/Controls/Basic/Lora/SelectedLoras/LoraInfoCard/VBoxContainer"]
unique_name_in_owner = true
@@ -823,6 +823,24 @@ margin_right = 604.0
margin_bottom = 169.0
slider_name = "Clip Strength"
+[node name="LoraVersions" type="HBoxContainer" parent="Margin/Panel/Display/Panels/Controls/Basic/Lora/SelectedLoras/LoraInfoCard/VBoxContainer"]
+margin_right = 40.0
+margin_bottom = 40.0
+
+[node name="LoraVersionName" type="Label" parent="Margin/Panel/Display/Panels/Controls/Basic/Lora/SelectedLoras/LoraInfoCard/VBoxContainer/LoraVersions"]
+margin_right = 40.0
+margin_bottom = 20.0
+text = "Version: "
+
+[node name="LoraVersionSelection" type="MenuButton" parent="Margin/Panel/Display/Panels/Controls/Basic/Lora/SelectedLoras/LoraInfoCard/VBoxContainer/LoraVersions"]
+unique_name_in_owner = true
+margin_left = -118.0
+margin_top = -10.0
+margin_right = -98.0
+margin_bottom = 18.0
+toggle_mode = false
+flat = false
+
[node name="HBoxContainer" type="HBoxContainer" parent="Margin/Panel/Display/Panels/Controls/Basic/Lora/SelectedLoras/LoraInfoCard/VBoxContainer"]
margin_top = 173.0
margin_right = 604.0
@@ -1142,7 +1160,7 @@ margin_top = 218.0
margin_right = 300.0
margin_bottom = 246.0
text = "k_lms"
-items = [ "k_lms", null, false, 0, null, "k_heun", null, false, 1, null, "k_euler", null, false, 2, null, "k_euler_a", null, false, 3, null, "k_dpm_2", null, false, 4, null, "k_dpm_2_a", null, false, 5, null, "k_dpm_fast", null, false, 6, null, "k_dpm_adaptive", null, false, 7, null, "k_dpmpp_2s_a", null, false, 8, null, "k_dpmpp_2m", null, false, 9, null, "dpmsolver", null, false, 10, null, "k_dpmpp_sde", null, false, 11, null ]
+items = [ "k_lms", null, false, 0, null, "k_heun", null, false, 1, null, "k_euler", null, false, 2, null, "k_euler_a", null, false, 3, null, "k_dpm_2", null, false, 4, null, "k_dpm_2_a", null, false, 5, null, "k_dpm_fast", null, false, 6, null, "k_dpm_adaptive", null, false, 7, null, "k_dpmpp_2s_a", null, false, 8, null, "k_dpmpp_2m", null, false, 9, null, "k_dpmpp_sde", null, false, 10, null, "dpmsolver", null, false, 11, null, "lcm", null, false, 12, null ]
selected = 0
[node name="SeedLabel" type="Label" parent="Margin/Panel/Display/Panels/Controls/Advanced"]
diff --git a/Model.gd b/Model.gd
index 51cda8f..9611ef9 100644
--- a/Model.gd
+++ b/Model.gd
@@ -185,7 +185,7 @@ func _get_selected_models() -> Array:
if model_name == "SDXL_beta::stability.ai#6901":
model_defs.append({
"name": "SDXL_beta::stability.ai#6901",
- "baseline": "SDXL",
+ "baseline": "stable_diffusion_xl",
"type": "SDXL",
"version": "beta",
})
diff --git a/StableHordeClient.gd b/StableHordeClient.gd
index 3503622..7178019 100644
--- a/StableHordeClient.gd
+++ b/StableHordeClient.gd
@@ -109,23 +109,35 @@ func _ready():
stable_horde_client.aihorde_url = globals.aihorde_url
_connect_hover_signals()
+ # warning-ignore:return_value_discarded
save.connect("pressed", self, "_on_save_pressed")
+ # warning-ignore:return_value_discarded
save_all.connect("pressed", self, "_on_save_all_pressed")
load_from_disk.connect("gensettings_loaded", self, "_on_load_from_disk_gensettings_loaded")
# warning-ignore:return_value_discarded
generate_button.connect("pressed",self,"_on_GenerateButton_pressed")
# warning-ignore:return_value_discarded
cancel_button.connect("pressed",self,"_on_CancelButton_pressed")
+ # warning-ignore:return_value_discarded
model.connect("prompt_inject_requested",self,"_on_prompt_inject")
+ # warning-ignore:return_value_discarded
lora.connect("prompt_inject_requested",self,"_on_prompt_inject")
+ # warning-ignore:return_value_discarded
ti.connect("prompt_inject_requested",self,"_on_prompt_inject")
# Ratings
+ # warning-ignore:return_value_discarded
EventBus.connect("shared_toggled", self, "_on_shared_toggled")
+ # warning-ignore:return_value_discarded
best_of.connect("toggled",self,"on_bestof_toggled")
+ # warning-ignore:return_value_discarded
aesthetic_rating.connect("item_selected",self,"on_aethetic_rating_selected")
+ # warning-ignore:return_value_discarded
artifacts_rating.connect("item_selected",self,"on_artifacts_rating_selected")
+ # warning-ignore:return_value_discarded
submit_ratings.connect("pressed", self, "_on_submit_ratings_pressed")
+ # warning-ignore:return_value_discarded
stable_horde_rate_generation.connect("generation_rated",self, "_on_generation_rated")
+ # warning-ignore:return_value_discarded
stable_horde_rate_generation.connect("request_failed",self, "_on_generation_rating_failed")
nsfw.connect("toggled", self,"_on_nsfw_toggled")
_on_shared_toggled()
@@ -148,7 +160,6 @@ func _ready():
_on_SamplerMethod_item_selected(sampler_method_id)
var control_type_id = stable_horde_client.get_control_type_id()
control_type.select(control_type_id)
- _on_ControlType_item_selected(control_type_id)
# The stable horde client is set from the Parameters settings
options.set_api_key(stable_horde_client.api_key)
options.login()
@@ -287,7 +298,6 @@ func _on_image_process_update(stats: Dictionary) -> void:
status_text.bbcode_text = "Your queue position is {queue}.".format({"queue":stats.queue_position})
status_text.modulate = Color(0,1,0)
-
func _on_viewport_resized() -> void:
# Disabling now with the tabs
# return
@@ -485,14 +495,7 @@ func _on_SamplerMethod_item_selected(index: int) -> void:
else:
steps_slider.h_slider.editable = true
steps_slider.config_value.text = str(steps_slider.h_slider.value)
-
-func _on_ControlType_item_selected(index: int) -> void:
- # Adaptive doesn't have steps
- if index != 0:
- steps_slider.set_max_value(40)
- else:
- steps_slider.reset_max_value()
-
+
func _connect_hover_signals() -> void:
for node in [
negative_prompt,
@@ -725,7 +728,6 @@ func _on_load_from_disk_gensettings_loaded(settings) -> void:
for idx in range(control_type.get_item_count()):
if control_type.get_item_text(idx) == settings["control_type"]:
control_type.select(idx)
- _on_ControlType_item_selected(idx)
if settings.has("source_image_path"):
if image_preview.load_image_from_path(settings["source_image_path"]):
stable_horde_client.source_image = image_preview.source_image
diff --git a/addons/stable_horde_client/civitai_lora_model_fetch.gd b/addons/stable_horde_client/civitai_lora_model_fetch.gd
index d4c5749..406b4db 100644
--- a/addons/stable_horde_client/civitai_lora_model_fetch.gd
+++ b/addons/stable_horde_client/civitai_lora_model_fetch.gd
@@ -42,11 +42,8 @@ func process_request(json_ret) -> void:
func _parse_civitai_lora_data(civitai_entry) -> Dictionary:
var lora_details = {
"name": civitai_entry["name"],
- "id": int(civitai_entry["id"]),
+ "id": str(civitai_entry["id"]),
"description": civitai_entry["description"],
- "unusable": '',
- "nsfw": civitai_entry["nsfw"],
- "sha256": null,
}
if not lora_details["description"]:
lora_details["description"] = ''
@@ -87,31 +84,38 @@ func _parse_civitai_lora_data(civitai_entry) -> Dictionary:
lora_details["description"] = lora_details["description"].replace(repl,html_to_bbcode[repl])
if lora_details["description"].length() > 500:
lora_details["description"] = lora_details["description"].left(700) + ' [...]'
- var versions = civitai_entry.get("modelVersions", {})
+ var versions : Array = civitai_entry.get("modelVersions", {})
if versions.size() == 0:
return lora_details
- lora_details["triggers"] = versions[0]["trainedWords"]
- lora_details["version"] = versions[0]["name"]
- lora_details["base_model"] = versions[0]["baseModel"]
- for file in versions[0]["files"]:
- if not file.get("name", "").ends_with(".safetensors"):
- continue
- lora_details["size_mb"] = round(file["sizeKB"] / 1024)
- # We only store these two to check if they would be present in the workers
- lora_details["sha256"] = file.get("hashes", {}).get("SHA256")
- lora_details["url"] = file.get("downloadUrl", "")
- # If these two fields are not defined, the workers are not going to download it
- # so we ignore it as well
- var is_default = int(lora_details["id"]) in default_ids
- if not is_default and not lora_details["sha256"]:
- lora_details["unusable"] = 'Attention! This LoRa is unusable because it does not provide file validation.'
- elif not lora_details["url"]:
- lora_details["unusable"] = 'Attention! This LoRa is unusable because it appears to have no valid safetensors upload.'
- elif not is_default and lora_details["size_mb"] > 230:
- lora_details["unusable"] = 'Attention! This LoRa is unusable because is exceeds the max 230Mb filesize we allow on the AI Horde.'
- lora_details["images"] = []
- for img in versions[0]["images"]:
- if img["nsfw"] in ["Mature", "X"]:
- continue
- lora_details["images"].append(img["url"])
+ lora_details["versions"] = {}
+ for version in versions:
+ var version_id := str(version["id"])
+ var new_version := {
+ "unusable": '',
+ }
+ new_version["id"] = version_id
+ new_version["triggers"] = version["trainedWords"]
+ new_version["name"] = version["name"]
+ new_version["base_model"] = version["baseModel"]
+ for file in version["files"]:
+ if not file.get("name", "").ends_with(".safetensors"):
+ continue
+ new_version["size_mb"] = round(file["sizeKB"] / 1024)
+ # We only store these two to check if they would be present in the workers
+ new_version["sha256"] = file.get("hashes", {}).get("SHA256")
+ new_version["url"] = file.get("downloadUrl", "")
+ # If these two fields are not defined, the workers are not going to download it
+ # so we ignore it as well
+ if not new_version.get("sha256"):
+ new_version["unusable"] = 'Attention! This LoRa is unusable because it does not provide file validation.'
+ elif not new_version.get("url"):
+ new_version["unusable"] = 'Attention! This LoRa is unusable because it appears to have no valid safetensors upload.'
+ elif new_version["size_mb"] > 230 and not default_ids.has(lora_details["id"]):
+ new_version["unusable"] = 'Attention! This LoRa is unusable because is exceeds the max 230Mb filesize we allow on the AI Horde.'
+ new_version["images"] = []
+ for img in version["images"]:
+ if img["nsfw"] in ["Mature", "X"]:
+ continue
+ new_version["images"].append(img["url"])
+ lora_details["versions"][version_id] = new_version
return lora_details
diff --git a/addons/stable_horde_client/civitai_lora_reference.gd b/addons/stable_horde_client/civitai_lora_reference.gd
index a4f9616..dac5575 100644
--- a/addons/stable_horde_client/civitai_lora_reference.gd
+++ b/addons/stable_horde_client/civitai_lora_reference.gd
@@ -78,7 +78,8 @@ func fetch_lora_metadata(query) -> void:
# Function to overwrite to process valid return from the horde
func process_request(json_ret) -> void:
if typeof(json_ret) == TYPE_ARRAY:
- default_ids = json_ret
+ for id in json_ret:
+ default_ids.append(str(id))
fetch_lora_metadata(default_ids)
state = States.READY
return
@@ -91,11 +92,11 @@ func process_request(json_ret) -> void:
if not json_ret.has("items"):
# Quick hack to treat individual items the same way
json_ret["items"] = [json_ret]
- for entry in json_ret["items"]:
- if initialized:
- var lora = _parse_civitai_lora_data(entry)
- if lora.has("size_mb"):
- _store_lora(lora)
+# for entry in json_ret["items"]:
+# if initialized:
+# var lora = _parse_civitai_lora_data(entry)
+# if lora.has("versions"):
+# _store_lora(lora)
_store_to_file()
emit_signal("reference_retrieved", lora_reference)
initialized = true
@@ -116,20 +117,41 @@ func _on_lora_info_gathering_finished(fetch_node: CivitAIModelFetch) -> void:
emit_signal("reference_retrieved", lora_reference)
func is_lora(lora_name: String) -> bool:
- if lora_id_index.has(int(lora_name)):
+ if lora_id_index.has(lora_name):
+ return true
+ if _get_all_lora_ids().has(lora_name):
return true
return(lora_reference.has(lora_name))
-func get_lora_info(lora_name: String) -> Dictionary:
- if lora_id_index.has(int(lora_name)):
- return lora_reference[lora_id_index[int(lora_name)]]
+func get_lora_info(lora_name: String, is_version := false) -> Dictionary:
+ if is_version and lora_id_index.has(lora_name):
+ return lora_reference[lora_id_index[lora_name]]
+ var lora_ids := _get_all_lora_ids()
+ if lora_ids.has(lora_name):
+ return lora_reference[lora_ids[lora_name]]
return lora_reference.get(lora_name, {})
-func get_lora_name(lora_name: String) -> String:
- if lora_id_index.has(int(lora_name)):
- return lora_reference[lora_id_index[int(lora_name)]]["name"]
+func get_lora_name(lora_name: String, is_version := false) -> String:
+ if is_version and lora_id_index.has(lora_name):
+ return lora_reference[lora_id_index[lora_name]]["name"]
return lora_reference.get(lora_name, {}).get("name", 'N/A')
+func get_latest_version(lora_name: String) -> String:
+ var versions : Dictionary = lora_reference.get(lora_name, {}).get("versions", {})
+ if len(versions) == 0:
+ return "N/A"
+ var keys := []
+ for k in versions.keys():
+ keys.append(int(k))
+ keys.sort()
+ return str(keys.back())
+
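+# Builds a lookup of top-level CivitAI model id -> LoRA name, so a LoRA can be
+# resolved by its model id as well as by its name.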
+func _get_all_lora_ids() -> Dictionary:
+ var all_l_id = {}
+ for l in lora_reference.values():
+ all_l_id[l['id']] = l['name']
+ return all_l_id
+
func _store_to_file() -> void:
var file = File.new()
file.open("user://civitai_lora_reference", File.WRITE)
@@ -140,10 +162,14 @@ func _load_from_file() -> void:
var file = File.new()
file.open("user://civitai_lora_reference", File.READ)
var filevar = file.get_var()
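+ # Entries cached in the old, pre-versions format are skipped by the migration below.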
+ var old_reference: Dictionary
if filevar:
- lora_reference = filevar
- for lora in lora_reference.values():
- lora_id_index[int(lora["id"])] = lora["name"]
+ old_reference = filevar
+ for lora in old_reference.values():
+ if not lora.has("versions"):
+ continue
+ for version_id in lora["versions"].keys():
+ lora_id_index[version_id] = lora["name"]
lora["cached"] = true
# Temporary while changing approach
var unusable = lora.get("unusable", false)
@@ -151,6 +177,7 @@ func _load_from_file() -> void:
lora["unusable"] = 'Attention! This LoRa is unusable because it does not provide file validation.'
elif typeof(unusable) == TYPE_BOOL:
lora["unusable"] = ''
+ lora_reference[lora["name"]] = lora
file.close()
emit_signal("reference_retrieved", lora_reference)
@@ -162,87 +189,14 @@ func calculate_downloaded_loras() -> int:
total_size += self.lora_reference[lora]["size_mb"]
return total_size
-func _parse_civitai_lora_data(civitai_entry) -> Dictionary:
- var lora_details = {
- "name": civitai_entry["name"],
- "id": int(civitai_entry["id"]),
- "description": civitai_entry["description"],
- "unusable": '',
- }
- if not lora_details["description"]:
- lora_details["description"] = ''
- var html_to_bbcode = {
- "<p>": '',
- "</p>": '\n',
- "</strong>": '[/b]',
- "<strong>": '[b]',
- "</b>": '[/b]',
- "<b>": '[b]',
- "</em>": '[/i]',
- "<em>": '[i]',
- "</i>": '[/i]',
- "<i>": '[i]',
- "<br>": '\n',
- "<br/>": '\n',
- "<br />": '\n',
- "<h1>": '[b][color=yellow]',
- "</h1>": '[/color][/b]\n',
- "<h2>": '[b]',
- "</h2>": '[/b]\n',
- "<span>": '',
- "</span>": '',
- "<u>": '[u]',
- "</u>": '[/u]',
- "<code>": '[code]',
- "</code>": '[/code]',
- "</ul>": '[/ul]',
- "<ol>": '[ol]',
- "</ol>": '[/ol]',
- "<li>": '',
- "</li>": '\n',
- "&lt;": '<',
- "&gt;": '>',
- }
- for repl in html_to_bbcode:
- lora_details["description"] = lora_details["description"].replace(repl,html_to_bbcode[repl])
- if lora_details["description"].length() > 500:
- lora_details["description"] = lora_details["description"].left(700) + ' [...]'
- var versions = civitai_entry.get("modelVersions", {})
- if versions.size() == 0:
- return lora_details
- lora_details["triggers"] = versions[0]["trainedWords"]
- lora_details["version"] = versions[0]["name"]
- lora_details["base_model"] = versions[0]["baseModel"]
- for file in versions[0]["files"]:
- if not file.get("name", "").ends_with(".safetensors"):
- continue
- lora_details["size_mb"] = round(file["sizeKB"] / 1024)
- # We only store these two to check if they would be present in the workers
- lora_details["sha256"] = file.get("hashes", {}).get("SHA256")
- lora_details["url"] = file.get("downloadUrl", "")
- # If these two fields are not defined, the workers are not going to download it
- # so we ignore it as well
- if not lora_details["sha256"]:
- lora_details["unusable"] = 'Attention! This LoRa is unusable because it does not provide file validation.'
- elif not lora_details["url"]:
- lora_details["unusable"] = 'Attention! This LoRa is unusable because it appears to have no valid safetensors upload.'
- elif lora_details["size_mb"] > 230:
- lora_details["unusable"] = 'Attention! This LoRa is unusable because is exceeds the max 230Mb filesize we allow on the AI Horde.'
- lora_details["images"] = []
- for img in versions[0]["images"]:
- if img["nsfw"] in ["Mature", "X"]:
- continue
- lora_details["images"].append(img["url"])
- return lora_details
-
func set_nsfw(value) -> void:
nsfw = value
func _store_lora(lora_data: Dictionary) -> void:
var lora_name = lora_data["name"]
lora_reference[lora_name] = lora_data
- lora_id_index[int(lora_data["id"])] = lora_name
+ for version_id in lora_data.get("versions", {}).keys():
+ lora_id_index[version_id] = lora_name
func wipe_cache() -> void:
var dir = Directory.new()
diff --git a/addons/stable_horde_client/civitai_showcase.gd b/addons/stable_horde_client/civitai_showcase.gd
index 556fc6f..fac91ab 100644
--- a/addons/stable_horde_client/civitai_showcase.gd
+++ b/addons/stable_horde_client/civitai_showcase.gd
@@ -2,10 +2,12 @@ class_name CivitAIShowcase
extends HTTPRequest
signal showcase_retrieved(img, model_name)
+signal showcase_failed
var model_reference := {}
var texture: ImageTexture
var model_name: String
+var used_image_index: int
export(int) var showcase_index := 0
func _ready():
@@ -13,11 +15,20 @@ func _ready():
timeout = 2
connect("request_completed",self,"_on_request_completed")
-func get_model_showcase(_model_reference) -> void:
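+# version_id selects which LoRA version's image list to use; force_index (null by
+# default) overrides the exported showcase_index for this request.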
+func get_model_showcase(
+ _model_reference: Dictionary,
+ version_id: String,
+ force_index = null
+ ) -> void:
+ if force_index != null:
+ used_image_index = force_index
+ else:
+ used_image_index = showcase_index
model_reference = _model_reference
- if model_reference["images"].size() <= showcase_index:
+ if model_reference["versions"][version_id]["images"].size() <= used_image_index:
+ emit_signal("showcase_failed")
return
- var showcase_url = model_reference["images"][showcase_index]
+ var showcase_url = model_reference["versions"][version_id]["images"][used_image_index]
var error = request(showcase_url, [], false, HTTPClient.METHOD_GET)
if error != OK:
var error_msg := "Something went wrong when initiating the request"
@@ -29,20 +40,25 @@ func get_model_showcase(_model_reference) -> void:
func _on_request_completed(_result, response_code, _headers, body):
if response_code == 0:
var error_msg := "Model showcase address cannot be resolved!"
+ emit_signal("showcase_failed")
push_error(error_msg)
return
if response_code == 404:
var error_msg := "Bad showcase URL. Please contact the developer of this addon"
+ emit_signal("showcase_failed")
push_error(error_msg)
return
var image = Image.new()
var image_error = image.load_webp_from_buffer(body)
if image_error != OK:
- image_error = image.load_jpg_from_buffer(body)
+ image_error = image.load_png_from_buffer(body)
if image_error != OK:
- var error_msg := "Download showcase image could not be loaded. Please contact the developer of this addon."
- push_error(error_msg)
- return
+ image_error = image.load_jpg_from_buffer(body)
+ if image_error != OK:
+ var error_msg := "Download showcase image could not be loaded. Please contact the developer of this addon."
+ emit_signal("showcase_failed")
+ push_error(error_msg)
+ return
texture = ImageTexture.new()
texture.create_from_image(image)
emit_signal("showcase_retrieved",texture,model_name)
diff --git a/addons/stable_horde_client/stable_horde_client.gd b/addons/stable_horde_client/stable_horde_client.gd
index b4ce5b0..02f8760 100644
--- a/addons/stable_horde_client/stable_horde_client.gd
+++ b/addons/stable_horde_client/stable_horde_client.gd
@@ -18,6 +18,7 @@ enum SamplerMethods {
k_dpmpp_2m
k_dpmpp_sde
dpmsolver
+ lcm
}
enum ControlTypes {
@@ -56,7 +57,7 @@ export(int,64,1024,64) var height := 512
# Generally there's usually no reason to go above 50 unless you know what you're doing.
export(int,1,100) var steps := 30
# Advanced: The sampler used to generate. Provides slight variations on the same prompt.
-export(String, "k_lms", "k_heun", "k_euler", "k_euler_a", "k_dpm_2", "k_dpm_2_a", "k_dpm_fast", "k_dpm_adaptive", "k_dpmpp_2s_a", "k_dpmpp_2m", "k_dpmpp_sde", "dpmsolver") var sampler_name := "k_euler_a"
+export(String, "k_lms", "k_heun", "k_euler", "k_euler_a", "k_dpm_2", "k_dpm_2_a", "k_dpm_fast", "k_dpm_adaptive", "k_dpmpp_2s_a", "k_dpmpp_2m", "k_dpmpp_sde", "dpmsolver", "lcm") var sampler_name := "k_euler_a"
# How closely to follow the prompt given
export(float,0,30,0.5) var cfg_scale := 7.5
# The number of CLIP language processor layers to skip.
diff --git a/addons/stable_horde_client/stable_horde_httpclient.gd b/addons/stable_horde_client/stable_horde_httpclient.gd
index 20b2a43..85daa98 100644
--- a/addons/stable_horde_client/stable_horde_httpclient.gd
+++ b/addons/stable_horde_client/stable_horde_httpclient.gd
@@ -49,7 +49,7 @@ func _on_request_completed(_result, response_code, _headers, body):
emit_signal("request_failed",error_msg)
state = States.READY
return
- if json_ret.has('message'):
+ if typeof(json_ret) == TYPE_DICTIONARY and json_ret.has('message'):
emit_signal("request_warning", json_ret['message'])
process_request(json_ret)
diff --git a/bus.gd b/bus.gd
index ed6e094..b8bb40b 100644
--- a/bus.gd
+++ b/bus.gd
@@ -24,6 +24,7 @@ signal model_selected(model_details)
signal worker_selected(worker_details)
signal kudos_calculated(kudos)
signal generation_completed
+# warning-ignore:unused_signal
signal cache_wipe_requested
func _on_node_hovered(node: Control):
diff --git a/src/Lora/Lora.gd b/src/Lora/Lora.gd
index db96497..81ad31e 100644
--- a/src/Lora/Lora.gd
+++ b/src/Lora/Lora.gd
@@ -7,6 +7,11 @@ enum LoraCompatible {
MAYBE
}
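+# CivitAI model ids of known LCM LoRAs; selecting any of these marks the payload as LCM.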
+const LCM_LORAS := [
+ "216190",
+ "195519",
+]
+
signal prompt_inject_requested(tokens)
signal loras_modified(loras_list)
@@ -30,34 +35,51 @@ onready var show_all_loras = $"%ShowAllLoras"
onready var lora_model_strength = $"%LoraModelStrength"
onready var lora_clip_strength = $"%LoraClipStrength"
onready var fetch_from_civitai = $"%FetchFromCivitAI"
+onready var lora_version_selection = $"%LoraVersionSelection"
func _ready():
# warning-ignore:return_value_discarded
EventBus.connect("model_selected",self,"on_model_selection_changed")
+ # warning-ignore:return_value_discarded
EventBus.connect("cache_wipe_requested",self,"on_cache_wipe_requested")
lora_reference_node = CivitAILoraReference.new()
lora_reference_node.nsfw = globals.config.get_value("Parameters", "nsfw")
# warning-ignore:return_value_discarded
lora_reference_node.connect("reference_retrieved",self, "_on_reference_retrieved")
+ # warning-ignore:return_value_discarded
lora_reference_node.connect("cache_wiped",self, "_on_cache_wiped")
add_child(lora_reference_node)
# warning-ignore:return_value_discarded
- # warning-ignore:return_value_discarded
lora_auto_complete.connect("item_selected", self,"_on_lora_selected")
# warning-ignore:return_value_discarded
lora_trigger_selection.connect("id_pressed", self,"_on_trigger_selection_id_pressed")
# warning-ignore:return_value_discarded
+ lora_version_selection.get_popup().connect("id_pressed", self, "_on_lora_version_selected")
+ # warning-ignore:return_value_discarded
civitai_showcase0.connect("showcase_retrieved",self, "_on_showcase0_retrieved")
+ # warning-ignore:return_value_discarded
civitai_showcase1.connect("showcase_retrieved",self, "_on_showcase1_retrieved")
# warning-ignore:return_value_discarded
+ civitai_showcase0.connect("showcase_failed",self, "_on_showcase0_failed")
+ # warning-ignore:return_value_discarded
+ civitai_showcase1.connect("showcase_failed",self, "_on_showcase1_failed")
+ # warning-ignore:return_value_discarded
selected_loras.connect("meta_clicked",self,"_on_selected_loras_meta_clicked")
+ # warning-ignore:return_value_discarded
selected_loras.connect("meta_hover_started",self,"_on_selected_loras_meta_hover_started")
+ # warning-ignore:return_value_discarded
selected_loras.connect("meta_hover_ended",self,"_on_selected_loras_meta_hover_ended")
+ # warning-ignore:return_value_discarded
lora_info_label.connect("meta_clicked",self,"_on_lora_info_label_meta_clicked")
+ # warning-ignore:return_value_discarded
show_all_loras.connect("pressed",self,"_on_show_all_loras_pressed")
+ # warning-ignore:return_value_discarded
lora_info_card.connect("hide",self,"_on_lora_info_card_hide")
+ # warning-ignore:return_value_discarded
lora_model_strength.connect("value_changed",self,"_on_lora_model_strength_value_changed")
+ # warning-ignore:return_value_discarded
lora_clip_strength.connect("value_changed",self,"_on_lora_clip_strength_value_changed")
+ # warning-ignore:return_value_discarded
fetch_from_civitai.connect("pressed",self,"_on_fetch_from_civitai_pressed")
_on_reference_retrieved(lora_reference_node.lora_reference)
selected_loras_list = globals.config.get_value("Parameters", "loras", [])
@@ -66,19 +88,37 @@ func _ready():
func replace_loras(loras: Array) -> void:
selected_loras_list = loras
for lora in selected_loras_list:
- lora["name"] = lora_reference_node.get_lora_name(lora["name"])
+ var is_version = lora.get("is_version", false)
+ lora["name"] = lora_reference_node.get_lora_name(
+ lora["name"],
+ is_version
+ )
+ if not is_version:
+ lora["id"] = lora_reference_node.get_latest_version(lora["name"])
+ lora["is_version"] = true
+ lora["lora_id"] = lora_reference_node.get_lora_info(lora["id"], true)["id"]
update_selected_loras_label()
emit_signal("loras_modified", selected_loras_list)
-func _on_lora_selected(lora_name: String) -> void:
+func _on_lora_selected(lora_name: String, is_version = false) -> void:
if selected_loras_list.size() >= 5:
return
+ var version_id: String
+ var final_lora_name: String
+ if is_version:
+ version_id = lora_name
+ final_lora_name = lora_reference_node.get_lora_name(lora_name)
+ else:
+ version_id = lora_reference_node.get_latest_version(lora_name)
+ final_lora_name = lora_name
selected_loras_list.append(
{
- "name": lora_name,
+ "name": final_lora_name,
"model": 1.0,
"clip": 1.0,
- "id": lora_reference_node.get_lora_info(lora_name)["id"],
+ "id": version_id, # ID holds just version ID. We always send version IDs
+ "lora_id": lora_reference_node.get_lora_info(version_id, true)["id"],
+ "is_version": true,
}
)
update_selected_loras_label()
@@ -92,34 +132,42 @@ func _on_reference_retrieved(model_reference: Dictionary):
civitai_search_initiated = false
lora_auto_complete.initiate_search()
-func _show_lora_details(lora_name: String) -> void:
- var lora_reference := lora_reference_node.get_lora_info(lora_name)
+func update_lora_details_texts(lora_reference, version_id) -> void:
+ var fmt = {
+ "name": lora_reference['name'],
+ "description": lora_reference['description'],
+ "trigger": ", ".join(lora_reference['versions'][version_id]['triggers']),
+ "url": "https://civitai.com/models/" + str(lora_reference['id']) + "?modelVersionId=" + str(version_id),
+ "unusable": "",
+ }
+ var compatibility = check_baseline_compatibility(version_id)
+ if lora_reference['versions'][version_id].get("unusable"):
+ fmt["unusable"] = "[color=red]" + lora_reference['versions'][version_id].get("unusable") + "[/color]\n"
+ elif compatibility == LoraCompatible.NO:
+ fmt["unusable"] = "[color=red]This LoRa base model version is impatible with the selected Model[/color]\n"
+ elif compatibility == LoraCompatible.MAYBE:
+ fmt["unusable"] = "[color=yellow]You have selected multiple models of varying base versions. This LoRa is not compatible with all of them and will be ignored by the incompatible ones.[/color]\n"
+ elif not lora_reference_node.nsfw and lora_reference['versions'][version_id].get("nsfw", false):
+ fmt["unusable"] = "[color=#FF00FF]SFW workers which pick up the request, will ignore this LoRA.[/color]\n"
+ var label_text = "{unusable}[b]Name: {name}[/b]\nDescription: {description}\n".format(fmt)
+ label_text += "\nTriggers: {trigger}".format(fmt)
+ label_text += "\nCivitAI page: [url={url}]{url}[/url]".format(fmt)
+ lora_info_label.bbcode_text = label_text
+
+
+func _show_lora_details(version_id: String) -> void:
+ var lora_reference := lora_reference_node.get_lora_info(version_id, true)
if lora_reference.empty():
lora_info_label.bbcode_text = "No lora info could not be retrieved at this time."
else:
- civitai_showcase0.get_model_showcase(lora_reference)
- civitai_showcase1.get_model_showcase(lora_reference)
- var fmt = {
- "name": lora_reference['name'],
- "description": lora_reference['description'],
- "version": lora_reference['version'],
- "trigger": ", ".join(lora_reference['triggers']),
- "url": "https://civitai.com/models/" + str(lora_reference['id']),
- "unusable": "",
- }
- var compatibility = check_baseline_compatibility(lora_name)
- if lora_reference.get("unusable"):
- fmt["unusable"] = "[color=red]" + lora_reference.get("unusable") + "[/color]\n"
- elif compatibility == LoraCompatible.NO:
- fmt["unusable"] = "[color=red]This LoRa base model version is impatible with the selected Model[/color]\n"
- elif compatibility == LoraCompatible.MAYBE:
- fmt["unusable"] = "[color=yellow]You have selected multiple models of varying base versions. This LoRa is not compatible with all of them and will be ignored by the incompatible ones.[/color]\n"
- elif not lora_reference_node.nsfw and lora_reference.get("nsfw", false):
- fmt["unusable"] = "[color=#FF00FF]SFW workers which pick up the request, will ignore this LoRA.[/color]\n"
- var label_text = "{unusable}[b]Name: {name}[/b]\nDescription: {description}\nVersion: {version}\n".format(fmt)
- label_text += "\nTriggers: {trigger}".format(fmt)
- label_text += "\nCivitAI page: [url={url}]{url}[/url]".format(fmt)
- lora_info_label.bbcode_text = label_text
+ civitai_showcase0.get_model_showcase(lora_reference, version_id)
+ civitai_showcase1.get_model_showcase(lora_reference, version_id)
+ update_lora_details_texts(lora_reference, version_id)
+ var lora_versions_popup :PopupMenu = lora_version_selection.get_popup()
+ lora_versions_popup.clear()
+ for version in lora_reference['versions'].values():
+ lora_versions_popup.add_item(version['name'], int(version['id']))
+ lora_version_selection.text = lora_reference['versions'][version_id]['name']
lora_info_card.rect_size = Vector2(0,0)
lora_info_card.popup()
lora_info_card.rect_global_position = get_global_mouse_position() + Vector2(30,-lora_info_card.rect_size.y/2)
@@ -131,7 +179,7 @@ func _on_selected_loras_meta_clicked(meta) -> void:
viewed_lora_index = int(meta_split[1])
lora_model_strength.set_value(selected_loras_list[viewed_lora_index]["model"])
lora_clip_strength.set_value(selected_loras_list[viewed_lora_index]["clip"])
- _show_lora_details(selected_loras_list[viewed_lora_index]["name"])
+ _show_lora_details(selected_loras_list[viewed_lora_index]["id"])
"delete":
selected_loras_list.remove(int(meta_split[1]))
update_selected_loras_label()
@@ -161,18 +209,20 @@ func update_selected_loras_label() -> void:
var bbtext := []
var indexes_to_remove = []
for index in range(selected_loras_list.size()):
+ var selected_lora : Dictionary = selected_loras_list[index]
+ var version_id : String = selected_lora['id']
var lora_text = "[url={lora_hover}]{lora_name}[/url]{strengths} ([url={lora_trigger}]T[/url])([url={lora_remove}]X[/url])"
- var lora_name = selected_loras_list[index]["name"]
+ var lora_name = selected_lora["name"]
# This might happen for example when we added a NSFW lora
# but then disabled NSFW which refreshed loras to only show SFW
if not lora_reference_node.is_lora(lora_name):
indexes_to_remove.append(index)
continue
- var lora_reference = lora_reference_node.get_lora_info(lora_name)
- if lora_reference["triggers"].size() == 0:
+ var lora_reference = lora_reference_node.get_lora_info(version_id, true)
+ if lora_reference["versions"][version_id]["triggers"].size() == 0:
lora_text = "[url={lora_hover}]{lora_name}[/url]{strengths} ([url={lora_remove}]X[/url])"
- var compatibility = check_baseline_compatibility(lora_name)
- if lora_reference.get("unusable"):
+ var compatibility = check_baseline_compatibility(version_id)
+ if lora_reference["versions"][version_id].get("unusable"):
lora_text = "[color=red]" + lora_text + "[/color]"
elif compatibility == LoraCompatible.NO:
lora_text = "[color=red]" + lora_text + "[/color]"
@@ -204,13 +254,14 @@ func update_selected_loras_label() -> void:
selected_loras.hide()
func _on_lora_trigger_pressed(index: int) -> void:
- var lora_reference := lora_reference_node.get_lora_info(selected_loras_list[index]["name"])
+ var version_id: String = selected_loras_list[index]["id"]
+ var lora_reference := lora_reference_node.get_lora_info(version_id, true)
var selected_triggers: Array = []
- if lora_reference['triggers'].size() == 1:
- selected_triggers = [lora_reference['triggers'][0]]
+ if lora_reference['versions'][version_id]['triggers'].size() == 1:
+ selected_triggers = [lora_reference['versions'][version_id]['triggers'][0]]
else:
lora_trigger_selection.clear()
- for t in lora_reference['triggers']:
+ for t in lora_reference['versions'][version_id]['triggers']:
lora_trigger_selection.add_check_item(t)
lora_trigger_selection.add_item("Select")
lora_trigger_selection.popup()
@@ -233,10 +284,16 @@ func _on_showcase0_retrieved(img:ImageTexture, _model_name) -> void:
lora_showcase0.texture = img
lora_showcase0.rect_min_size = Vector2(300,300)
+func _on_showcase0_failed() -> void:
+ lora_showcase0.texture = null
+
func _on_showcase1_retrieved(img:ImageTexture, _model_name) -> void:
lora_showcase1.texture = img
lora_showcase1.rect_min_size = Vector2(300,300)
+func _on_showcase1_failed() -> void:
+ lora_showcase1.texture = null
+
func clear_textures() -> void:
lora_showcase1.texture = null
lora_showcase0.texture = null
@@ -265,7 +322,7 @@ func on_model_selection_changed(models_list) -> void:
current_models = models_list
update_selected_loras_label()
-func check_baseline_compatibility(lora_name) -> int:
+func check_baseline_compatibility(version_id: String) -> int:
var baselines = []
for model in current_models:
if not model["baseline"] in baselines:
@@ -279,7 +336,11 @@ func check_baseline_compatibility(lora_name) -> int:
"SD 2.1 512": "stable diffusion 2",
"Other": null,
}
- var lora_baseline = lora_to_model_baseline_map[lora_reference_node.get_lora_info(lora_name)["base_model"]]
+ var lora_details := lora_reference_node.get_lora_info(version_id, true)
+ var curr_baseline = lora_details["versions"][version_id]["base_model"]
+ if not lora_to_model_baseline_map.has(curr_baseline):
+ return LoraCompatible.NO
+ var lora_baseline = lora_to_model_baseline_map[curr_baseline]
if lora_baseline == null:
return LoraCompatible.NO
if lora_baseline in baselines:
@@ -294,3 +355,29 @@ func _on_cache_wiped() -> void:
func on_cache_wipe_requested() -> void:
lora_reference_node.wipe_cache()
+
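+# Called when a version is picked from the LoraVersionSelection popup; switches the
+# matching selected LoRA to that version and refreshes the showcase and labels.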
+func _on_lora_version_selected(id: int) -> void:
+ var version_id := str(id)
+ var lora_details = lora_reference_node.get_lora_info(version_id, true)
+ var lora_name: String = lora_details["name"]
+ for slora in selected_loras_list:
+ if slora["id"] == version_id:
+ return
+ if slora["name"] == lora_name:
+ slora["id"] = version_id
+ civitai_showcase0.get_model_showcase(lora_details, version_id)
+ civitai_showcase1.get_model_showcase(lora_details, version_id)
+ update_lora_details_texts(lora_details, version_id)
+ update_selected_loras_label()
+ EventBus.emit_signal("lora_selected", lora_details)
+ emit_signal("loras_modified", selected_loras_list)
+ lora_version_selection.text = lora_details['versions'][version_id]['name']
+ return
+ # We expect the lora whose version is changing to always exist in the list
+ # We should never be adding to the list by changing versions
+
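+# True when any currently selected LoRA is one of the known LCM LoRAs.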
+func has_lcm_loras() -> bool:
+ for lora in selected_loras_list:
+ if LCM_LORAS.has(lora["lora_id"]):
+ return true
+ return false
diff --git a/src/Lora/TextualInversion.gd b/src/Lora/TextualInversion.gd
index c3216a9..f658879 100644
--- a/src/Lora/TextualInversion.gd
+++ b/src/Lora/TextualInversion.gd
@@ -34,30 +34,40 @@ onready var fetch_tis_from_civitai = $"%FetchTIsFromCivitAI"
func _ready():
# warning-ignore:return_value_discarded
EventBus.connect("model_selected",self,"on_model_selection_changed")
+ # warning-ignore:return_value_discarded
EventBus.connect("cache_wipe_requested",self,"on_cache_wipe_requested")
ti_reference_node = CivitAITIReference.new()
ti_reference_node.nsfw = globals.config.get_value("Parameters", "nsfw")
# warning-ignore:return_value_discarded
ti_reference_node.connect("reference_retrieved",self, "_on_reference_retrieved")
+ # warning-ignore:return_value_discarded
ti_reference_node.connect("cache_wiped",self, "_on_cache_wiped")
add_child(ti_reference_node)
# warning-ignore:return_value_discarded
- # warning-ignore:return_value_discarded
ti_auto_complete.connect("item_selected", self,"_on_ti_selected")
# warning-ignore:return_value_discarded
ti_trigger_selection.connect("id_pressed", self,"_on_trigger_selection_id_pressed")
# warning-ignore:return_value_discarded
civitai_showcase0.connect("showcase_retrieved",self, "_on_showcase0_retrieved")
+ # warning-ignore:return_value_discarded
civitai_showcase1.connect("showcase_retrieved",self, "_on_showcase1_retrieved")
# warning-ignore:return_value_discarded
selected_tis.connect("meta_clicked",self,"_on_selected_tis_meta_clicked")
+ # warning-ignore:return_value_discarded
selected_tis.connect("meta_hover_started",self,"_on_selected_tis_meta_hover_started")
+ # warning-ignore:return_value_discarded
selected_tis.connect("meta_hover_ended",self,"_on_selected_tis_meta_hover_ended")
+ # warning-ignore:return_value_discarded
ti_info_label.connect("meta_clicked",self,"_on_ti_info_label_meta_clicked")
+ # warning-ignore:return_value_discarded
show_all_tis.connect("pressed",self,"_on_show_all_tis_pressed")
+ # warning-ignore:return_value_discarded
ti_info_card.connect("hide",self,"_on_ti_info_card_hide")
+ # warning-ignore:return_value_discarded
ti_model_strength.connect("value_changed",self,"_on_ti_model_strength_value_changed")
+ # warning-ignore:return_value_discarded
ti_inject.connect("value_changed",self,"_on_ti_inject_value_changed")
+ # warning-ignore:return_value_discarded
fetch_tis_from_civitai.connect("pressed",self,"_on_fetch_tis_from_civitai_pressed")
_on_reference_retrieved(ti_reference_node.ti_reference)
selected_tis_list = globals.config.get_value("Parameters", "tis", [])
diff --git a/src/ParamBus.gd b/src/ParamBus.gd
index 1f33bda..8143959 100644
--- a/src/ParamBus.gd
+++ b/src/ParamBus.gd
@@ -46,6 +46,9 @@ signal control_type_changed(text)
signal loras_changed(list)
# warning-ignore:unused_signal
signal tis_changed(list)
+# warning-ignore:unused_signal
+signal img2img_changed(source_image)
+
var api_key_node: LineEdit
var prompt_node: TextEdit
@@ -156,6 +159,11 @@ func setup(
# warning-ignore:return_value_discarded
loras_node.connect("loras_modified",self,"_on_listnode_changed", [loras_node])
tis_node.connect("tis_modified",self,"_on_listnode_changed", [tis_node])
+ for obutton in [
+ control_type_node,
+ sampler_name_node,
+ ]:
+ obutton.connect("item_selected",self,"_on_option_changed", [obutton])
func get_prompt() -> String:
@@ -258,6 +266,14 @@ func _on_hslider_changed(hslider: ConfigSlider) -> void:
emit_signal("denoising_strength_changed", get_denoising_strength())
emit_signal("params_changed")
+func _on_option_changed(_index: int, option_button: OptionButton) -> void:
+ match option_button:
+ sampler_name_node:
+ emit_signal("sampler_name_changed")
+ control_type_node:
+ emit_signal("control_type_changed")
+ emit_signal("params_changed")
+
func _on_cbutton_changed(cbutton: CheckButton) -> void:
match cbutton:
karras_node:
@@ -287,3 +303,20 @@ func _on_listnode_changed(_thing_list: Array, thing_node: Node) -> void:
tis_node:
emit_signal("tis_changed", get_tis())
emit_signal("params_changed")
+
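+# A payload counts as LCM when an LCM LoRA is selected or the "lcm" sampler is chosen.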
+func is_lcm_payload() -> bool:
+ if loras_node.has_lcm_loras():
+ return true
+ if sampler_name_node.get_item_text(sampler_name_node.selected) == 'lcm':
+ return true
+ return false
+
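+# True when any selected model reports the "stable_diffusion_xl" baseline.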
+func is_sdxl_payload() -> bool:
+ return models_node.get_all_baselines().has("stable_diffusion_xl")
+
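+# True when an img2img source image is set and a control type is selected.
+# Assumes index 0 of the control type OptionButton is "None", matching the removed
+# _on_ControlType_item_selected handler.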
+func has_controlnet() -> bool:
+ if not img2img_node.pressed:
+ return false
+ return control_type_node.selected != 0