This commit is contained in:
lvmin 2023-08-10 12:02:05 -07:00
parent 398766d40f
commit db9f0a1dfa
2 changed files with 3 additions and 3 deletions

View File

@@ -83,7 +83,7 @@ def close_all_preview():
@torch.no_grad()
-def ksampler(model, positive, negative, latent, seed=None, steps=30, cfg=9.0, sampler_name='euler_ancestral',
+def ksampler(model, positive, negative, latent, seed=None, steps=30, cfg=7.0, sampler_name='euler_ancestral',
scheduler='normal', denoise=1.0, disable_noise=False, start_step=None, last_step=None,
force_full_denoise=False):
seed = seed if isinstance(seed, int) else random.randint(1, 2 ** 64)
@@ -147,7 +147,7 @@ def ksampler(model, positive, negative, latent, seed=None, steps=30, cfg=9.0, sa
@torch.no_grad()
def ksampler_with_refiner(model, positive, negative, refiner, refiner_positive, refiner_negative, latent,
-                         seed=None, steps=30, refiner_switch_step=20, cfg=9.0, sampler_name='euler_ancestral',
+                         seed=None, steps=30, refiner_switch_step=20, cfg=7.0, sampler_name='euler_ancestral',
scheduler='normal', denoise=1.0, disable_noise=False, start_step=None, last_step=None,
force_full_denoise=False):
seed = seed if isinstance(seed, int) else random.randint(1, 2 ** 64)

View File

@@ -14,7 +14,7 @@ del xl_base.vae
@torch.no_grad()
-def process(positive_prompt, negative_prompt, width=1280, height=960, batch_size=1):
+def process(positive_prompt, negative_prompt, width=1024, height=1024, batch_size=1):
positive_conditions = core.encode_prompt_condition(clip=xl_base.clip, prompt=positive_prompt)
negative_conditions = core.encode_prompt_condition(clip=xl_base.clip, prompt=negative_prompt)