multiple loras in preset

lllyasviel 2023-11-15 02:41:49 -08:00
parent a662567f6c
commit 3a9c3c07d1
7 changed files with 14 additions and 26 deletions


@@ -1 +1 @@
-version = '2.1.814'
+version = '2.1.815'


@@ -159,15 +159,10 @@ default_refiner_switch = get_config_item_or_set_default(
     default_value=0.5,
     validator=lambda x: isinstance(x, float)
 )
-default_lora_name = get_config_item_or_set_default(
-    key='default_lora',
-    default_value='sd_xl_offset_example-lora_1.0.safetensors',
-    validator=lambda x: isinstance(x, str)
-)
-default_lora_weight = get_config_item_or_set_default(
-    key='default_lora_weight',
-    default_value=0.1,
-    validator=lambda x: isinstance(x, float)
+default_loras = get_config_item_or_set_default(
+    key='default_loras',
+    default_value=[['sd_xl_offset_example-lora_1.0.safetensors', 0.1]],
+    validator=lambda x: isinstance(x, list) and all(len(y) == 2 and isinstance(y[0], str) and isinstance(y[1], float) for y in x)
 )
 default_cfg_scale = get_config_item_or_set_default(
     key='default_cfg_scale',
@@ -301,6 +296,7 @@ os.makedirs(path_outputs, exist_ok=True)
 model_filenames = []
 lora_filenames = []
+default_loras = default_loras[:5] + [['None', 1.0] for _ in range(5 - len(default_loras))]
 def get_model_filenames(folder_path, name_filter=None):
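
For context, a minimal standalone sketch of what the two config hunks above amount to; the function name load_default_loras and the fallback value are illustrative only, not the repository's get_config_item_or_set_default. A preset's default_loras entry is accepted only if it is a list of [name, weight] pairs, then trimmed or padded to the five LoRA slots shown in the UI:

# Illustrative sketch only; assumes `preset` is a dict already loaded from a preset JSON.
def load_default_loras(preset):
    fallback = [['sd_xl_offset_example-lora_1.0.safetensors', 0.1]]
    loras = preset.get('default_loras', fallback)
    # Same shape check as the validator above: a list of [name, weight] pairs.
    valid = isinstance(loras, list) and all(
        len(y) == 2 and isinstance(y[0], str) and isinstance(y[1], float)
        for y in loras
    )
    if not valid:
        loras = fallback
    # Same padding as above: trim to five entries, fill the remaining slots with 'None'.
    return loras[:5] + [['None', 1.0] for _ in range(5 - len(loras))]

# The filenames in this call are made-up examples.
print(load_default_loras({'default_loras': [['a.safetensors', 0.5], ['b.safetensors', 0.25]]}))
# -> [['a.safetensors', 0.5], ['b.safetensors', 0.25], ['None', 1.0], ['None', 1.0], ['None', 1.0]]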


@@ -231,13 +231,7 @@ def refresh_everything(refiner_model_name, base_model_name, loras, base_model_ad
 refresh_everything(
     refiner_model_name=modules.config.default_refiner_model_name,
     base_model_name=modules.config.default_base_model_name,
-    loras=[
-        (modules.config.default_lora_name, modules.config.default_lora_weight),
-        ('None', modules.config.default_lora_weight),
-        ('None', modules.config.default_lora_weight),
-        ('None', modules.config.default_lora_weight),
-        ('None', modules.config.default_lora_weight)
-    ]
+    loras=modules.config.default_loras
 )


@@ -1,9 +1,8 @@
 {
     "default_model": "bluePencilXL_v050.safetensors",
     "default_refiner": "DreamShaper_8_pruned.safetensors",
-    "default_lora": "sd_xl_offset_example-lora_1.0.safetensors",
+    "default_loras": [["sd_xl_offset_example-lora_1.0.safetensors", 0.5]],
     "default_refiner_switch": 0.667,
-    "default_lora_weight": 0.5,
     "default_cfg_scale": 7.0,
     "default_sampler": "dpmpp_2m_sde_gpu",
     "default_scheduler": "karras",


@@ -1,8 +1,7 @@
 {
     "default_model": "realisticStockPhoto_v10.safetensors",
     "default_refiner": "",
-    "default_lora": "SDXL_FILM_PHOTOGRAPHY_STYLE_BetaV0.4.safetensors",
-    "default_lora_weight": 0.25,
+    "default_loras": [["SDXL_FILM_PHOTOGRAPHY_STYLE_BetaV0.4.safetensors", 0.25]],
     "default_cfg_scale": 3.0,
     "default_sampler": "dpmpp_2m_sde_gpu",
     "default_scheduler": "karras",


@@ -1,8 +1,7 @@
 {
     "default_model": "sd_xl_base_1.0_0.9vae.safetensors",
     "default_refiner": "sd_xl_refiner_1.0_0.9vae.safetensors",
-    "default_lora": "sd_xl_offset_example-lora_1.0.safetensors",
-    "default_lora_weight": 0.5,
+    "default_loras": [["sd_xl_offset_example-lora_1.0.safetensors", 0.5]],
     "default_cfg_scale": 7.0,
     "default_sampler": "dpmpp_2m_sde_gpu",
     "default_scheduler": "karras",


@@ -275,10 +275,11 @@ with shared.gradio_root:
 with gr.Accordion(label='LoRAs (SDXL or SD 1.5)', open=True):
     lora_ctrls = []
-    for i in range(5):
+    for i, (n, v) in enumerate(modules.config.default_loras):
         with gr.Row():
-            lora_model = gr.Dropdown(label=f'LoRA {i+1}', choices=['None'] + modules.config.lora_filenames, value=modules.config.default_lora_name if i == 0 else 'None')
-            lora_weight = gr.Slider(label='Weight', minimum=-2, maximum=2, step=0.01, value=modules.config.default_lora_weight)
+            lora_model = gr.Dropdown(label=f'LoRA {i+1}', choices=['None'] + modules.config.lora_filenames, value=n)
+            lora_weight = gr.Slider(label='Weight', minimum=-2, maximum=2, step=0.01, value=v)
             lora_ctrls += [lora_model, lora_weight]
 with gr.Row():
     model_refresh = gr.Button(label='Refresh', value='\U0001f504 Refresh All Files', variant='secondary', elem_classes='refresh_button')
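
To illustrate the effect of the UI hunk above, here is a Gradio-free sketch of how the five LoRA rows are now seeded from the config; the two filenames are simply reused from the presets above, and the list is assumed to have already been padded to five entries as in the config hunk:

# Illustrative only; mimics the new webui loop without Gradio.
default_loras = [
    ['sd_xl_offset_example-lora_1.0.safetensors', 0.1],
    ['SDXL_FILM_PHOTOGRAPHY_STYLE_BetaV0.4.safetensors', 0.25],
    ['None', 1.0],
    ['None', 1.0],
    ['None', 1.0],
]
for i, (n, v) in enumerate(default_loras):
    # Each entry seeds one dropdown/slider pair: slot label, model name, weight.
    print(f'LoRA {i + 1}: model={n}, weight={v}')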