From ac8002d2a43080052289e4a16bd23cd50d9c367c Mon Sep 17 00:00:00 2001
From: lllyasviel
Date: Sun, 12 Nov 2023 09:12:09 -0800
Subject: [PATCH] speed up lcm again

---
 fooocus_version.py          |  2 +-
 modules/async_worker.py     |  8 ++++++--
 modules/default_pipeline.py | 19 +++++++++++++++++++
 3 files changed, 26 insertions(+), 3 deletions(-)

diff --git a/fooocus_version.py b/fooocus_version.py
index 3981e67..393af6c 100644
--- a/fooocus_version.py
+++ b/fooocus_version.py
@@ -1 +1 @@
-version = '2.1.799'
+version = '2.1.800'
diff --git a/modules/async_worker.py b/modules/async_worker.py
index 4f4c65a..00d40d6 100644
--- a/modules/async_worker.py
+++ b/modules/async_worker.py
@@ -383,8 +383,12 @@ def worker():
             t['c'] = pipeline.clip_encode(texts=t['positive'], pool_top_k=t['positive_top_k'])
 
         for i, t in enumerate(tasks):
-            progressbar(10, f'Encoding negative #{i + 1} ...')
-            t['uc'] = pipeline.clip_encode(texts=t['negative'], pool_top_k=t['negative_top_k'])
+            if abs(float(cfg_scale) - 1.0) < 1e-4:
+                progressbar(10, f'Skipped negative #{i + 1} ...')
+                t['uc'] = pipeline.clone_cond(t['c'])
+            else:
+                progressbar(10, f'Encoding negative #{i + 1} ...')
+                t['uc'] = pipeline.clip_encode(texts=t['negative'], pool_top_k=t['negative_top_k'])
 
         if len(goals) > 0:
             progressbar(13, 'Image processing ...')
diff --git a/modules/default_pipeline.py b/modules/default_pipeline.py
index 0026f12..c828644 100644
--- a/modules/default_pipeline.py
+++ b/modules/default_pipeline.py
@@ -132,6 +132,25 @@ def clip_encode_single(clip, text, verbose=False):
     return result
 
 
+@torch.no_grad()
+@torch.inference_mode()
+def clone_cond(conds):
+    results = []
+
+    for c, p in conds:
+        p = p["pooled_output"]
+
+        if isinstance(c, torch.Tensor):
+            c = c.clone()
+
+        if isinstance(p, torch.Tensor):
+            p = p.clone()
+
+        results.append([c, {"pooled_output": p}])
+
+    return results
+
+
 @torch.no_grad()
 @torch.inference_mode()
 def clip_encode(texts, pool_top_k=1):