From 39669453cda5bbbbdb322246beda195d0ae46af6 Mon Sep 17 00:00:00 2001
From: Manuel Schmid <9307310+mashb1t@users.noreply.github.com>
Date: Mon, 11 Mar 2024 17:59:58 +0100
Subject: [PATCH] feat: allow to add disabled LoRAs in config on application
 start (#2507)

add LoRA checkbox enable/disable handling to all necessary occurrences
---
 modules/config.py      |  7 ++++++-
 modules/core.py        | 13 ++++++++-----
 presets/anime.json     |  5 +++++
 presets/default.json   |  5 +++++
 presets/lcm.json       |  5 +++++
 presets/realistic.json |  5 +++++
 presets/sai.json       |  5 +++++
 webui.py               |  8 ++++----
 8 files changed, 43 insertions(+), 10 deletions(-)

diff --git a/modules/config.py b/modules/config.py
index 83590a2..8fec8e0 100644
--- a/modules/config.py
+++ b/modules/config.py
@@ -275,27 +275,32 @@ default_loras = get_config_item_or_set_default(
     key='default_loras',
     default_value=[
         [
+            True,
             "None",
             1.0
         ],
         [
+            True,
             "None",
             1.0
         ],
         [
+            True,
             "None",
             1.0
         ],
         [
+            True,
             "None",
             1.0
         ],
         [
+            True,
             "None",
             1.0
         ]
     ],
-    validator=lambda x: isinstance(x, list) and all(len(y) == 2 and isinstance(y[0], str) and isinstance(y[1], numbers.Number) for y in x)
+    validator=lambda x: isinstance(x, list) and all(len(y) == 3 and isinstance(y[0], bool) and isinstance(y[1], str) and isinstance(y[2], numbers.Number) for y in x)
 )
 default_max_lora_number = get_config_item_or_set_default(
     key='default_max_lora_number',
diff --git a/modules/core.py b/modules/core.py
index bfc4496..e8e1939 100644
--- a/modules/core.py
+++ b/modules/core.py
@@ -73,14 +73,17 @@ class StableDiffusionModel:
 
         loras_to_load = []
 
-        for name, weight in loras:
-            if name == 'None':
+        for enabled, filename, weight in loras:
+            if not enabled:
                 continue
 
-            if os.path.exists(name):
-                lora_filename = name
+            if filename == 'None':
+                continue
+
+            if os.path.exists(filename):
+                lora_filename = filename
             else:
-                lora_filename = get_file_from_folder_list(name, modules.config.paths_loras)
+                lora_filename = get_file_from_folder_list(filename, modules.config.paths_loras)
 
             if not os.path.exists(lora_filename):
                 print(f'Lora file not found: {lora_filename}')
diff --git a/presets/anime.json b/presets/anime.json
index 8bd2813..1f2b26a 100644
--- a/presets/anime.json
+++ b/presets/anime.json
@@ -4,22 +4,27 @@
     "default_refiner_switch": 0.5,
     "default_loras": [
         [
+            true,
             "None",
             1.0
         ],
         [
+            true,
             "None",
             1.0
         ],
         [
+            true,
             "None",
             1.0
         ],
         [
+            true,
             "None",
             1.0
         ],
         [
+            true,
             "None",
             1.0
         ]
diff --git a/presets/default.json b/presets/default.json
index 7930c92..963f7a6 100644
--- a/presets/default.json
+++ b/presets/default.json
@@ -4,22 +4,27 @@
     "default_refiner_switch": 0.5,
     "default_loras": [
         [
+            false,
             "sd_xl_offset_example-lora_1.0.safetensors",
             0.1
         ],
         [
+            true,
             "None",
             1.0
         ],
         [
+            true,
             "None",
             1.0
         ],
         [
+            true,
             "None",
             1.0
         ],
         [
+            true,
             "None",
             1.0
         ]
diff --git a/presets/lcm.json b/presets/lcm.json
index 3897f88..6713fdd 100644
--- a/presets/lcm.json
+++ b/presets/lcm.json
@@ -4,22 +4,27 @@
     "default_refiner_switch": 0.5,
     "default_loras": [
         [
+            true,
             "None",
             1.0
         ],
         [
+            true,
             "None",
             1.0
         ],
         [
+            true,
             "None",
             1.0
         ],
         [
+            true,
             "None",
             1.0
         ],
         [
+            true,
             "None",
             1.0
         ]
diff --git a/presets/realistic.json b/presets/realistic.json
index 7799c96..95f8b6e 100644
--- a/presets/realistic.json
+++ b/presets/realistic.json
@@ -4,22 +4,27 @@
     "default_refiner_switch": 0.5,
     "default_loras": [
         [
+            true,
             "SDXL_FILM_PHOTOGRAPHY_STYLE_BetaV0.4.safetensors",
             0.25
         ],
         [
+            true,
             "None",
             1.0
         ],
         [
+            true,
             "None",
             1.0
         ],
         [
+            true,
             "None",
             1.0
         ],
         [
+            true,
             "None",
             1.0
         ]
diff --git a/presets/sai.json b/presets/sai.json
index fecf047..918028f 100644
--- a/presets/sai.json
+++ b/presets/sai.json
@@ -4,22 +4,27 @@
     "default_refiner_switch": 0.75,
     "default_loras": [
         [
+            true,
             "sd_xl_offset_example-lora_1.0.safetensors",
             0.5
         ],
         [
+            true,
             "None",
             1.0
         ],
         [
+            true,
             "None",
             1.0
         ],
         [
+            true,
             "None",
             1.0
         ],
         [
+            true,
             "None",
             1.0
         ]
diff --git a/webui.py b/webui.py
index 832cc19..7fe10f1 100644
--- a/webui.py
+++ b/webui.py
@@ -353,15 +353,15 @@ with shared.gradio_root:
                 with gr.Group():
                     lora_ctrls = []
 
-                    for i, (n, v) in enumerate(modules.config.default_loras):
+                    for i, (enabled, filename, weight) in enumerate(modules.config.default_loras):
                         with gr.Row():
-                            lora_enabled = gr.Checkbox(label='Enable', value=True,
+                            lora_enabled = gr.Checkbox(label='Enable', value=enabled,
                                                        elem_classes=['lora_enable', 'min_check'], scale=1)
                             lora_model = gr.Dropdown(label=f'LoRA {i + 1}',
-                                                     choices=['None'] + modules.config.lora_filenames, value=n,
+                                                     choices=['None'] + modules.config.lora_filenames, value=filename,
                                                      elem_classes='lora_model', scale=5)
                             lora_weight = gr.Slider(label='Weight', minimum=modules.config.default_loras_min_weight,
-                                                    maximum=modules.config.default_loras_max_weight, step=0.01, value=v,
+                                                    maximum=modules.config.default_loras_max_weight, step=0.01, value=weight,
                                                     elem_classes='lora_weight', scale=5)
                             lora_ctrls += [lora_enabled, lora_model, lora_weight]
 
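
Note (illustration, not part of the patch): the sketch below, runnable with a plain Python 3 interpreter and no Fooocus imports, mirrors the two behavioural changes in this commit. The helper names valid_default_loras and select_loras_to_load are made up for the example; the real logic lives in the validator lambda in modules/config.py and in the LoRA loading loop in modules/core.py.

    import numbers

    def valid_default_loras(x):
        # Same shape check as the new validator lambda in modules/config.py:
        # every entry must be [enabled (bool), filename (str), weight (number)].
        return isinstance(x, list) and all(
            len(y) == 3
            and isinstance(y[0], bool)
            and isinstance(y[1], str)
            and isinstance(y[2], numbers.Number)
            for y in x
        )

    def select_loras_to_load(loras):
        # Simplified version of the new loop in modules/core.py: disabled entries
        # and 'None' placeholders are skipped before any filename resolution.
        selected = []
        for enabled, filename, weight in loras:
            if not enabled:
                continue
            if filename == 'None':
                continue
            selected.append((filename, weight))
        return selected

    loras = [
        [False, "sd_xl_offset_example-lora_1.0.safetensors", 0.1],  # shipped disabled in presets/default.json
        [True, "None", 1.0],
    ]
    print(valid_default_loras(loras))            # True  -> new three-element format passes
    print(valid_default_loras([["None", 1.0]]))  # False -> old two-element format is rejected
    print(select_loras_to_load(loras))           # []    -> disabled and placeholder entries are skipped

With this change a preset can ship an entry such as [false, "sd_xl_offset_example-lora_1.0.safetensors", 0.1] (as presets/default.json now does): the LoRA appears in the UI with its checkbox unchecked and is not loaded until the user enables it.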