speed up lcm again

lllyasviel committed 2023-11-12 09:12:09 -08:00
parent 649f45a6df
commit ac8002d2a4
3 changed files with 26 additions and 3 deletions

@@ -1 +1 @@
-version = '2.1.799'
+version = '2.1.800'

@@ -383,8 +383,12 @@ def worker():
             t['c'] = pipeline.clip_encode(texts=t['positive'], pool_top_k=t['positive_top_k'])
 
         for i, t in enumerate(tasks):
-            progressbar(10, f'Encoding negative #{i + 1} ...')
-            t['uc'] = pipeline.clip_encode(texts=t['negative'], pool_top_k=t['negative_top_k'])
+            if abs(float(cfg_scale) - 1.0) < 1e-4:
+                progressbar(10, f'Skipped negative #{i + 1} ...')
+                t['uc'] = pipeline.clone_cond(t['c'])
+            else:
+                progressbar(10, f'Encoding negative #{i + 1} ...')
+                t['uc'] = pipeline.clip_encode(texts=t['negative'], pool_top_k=t['negative_top_k'])
 
         if len(goals) > 0:
             progressbar(13, 'Image processing ...')
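Note: the early-out is safe because of how classifier-free guidance combines the two text conditions. The sampler computes eps = eps_uncond + cfg_scale * (eps_cond - eps_uncond), and at cfg_scale == 1.0 the unconditional term cancels, so the negative prompt can never affect the output; LCM sampling runs at that scale, which is where the saved encode comes from. A minimal sketch of the identity (the cfg_blend helper and tensor shapes below are illustrative, not Fooocus code):

import torch

def cfg_blend(eps_cond, eps_uncond, cfg_scale):
    # Classifier-free guidance: move from the unconditional prediction
    # toward the conditional one. At cfg_scale == 1.0 this reduces to
    # eps_cond, so the negative-prompt branch is dead weight.
    return eps_uncond + cfg_scale * (eps_cond - eps_uncond)

eps_cond = torch.randn(1, 4, 8, 8)
eps_uncond = torch.randn(1, 4, 8, 8)

# At scale 1.0 the blend equals the positive prediction exactly, which is
# why the commit can clone t['c'] instead of encoding t['negative'].
assert torch.allclose(cfg_blend(eps_cond, eps_uncond, 1.0), eps_cond)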

@@ -132,6 +132,25 @@ def clip_encode_single(clip, text, verbose=False):
     return result
 
 
+@torch.no_grad()
+@torch.inference_mode()
+def clone_cond(conds):
+    results = []
+
+    for c, p in conds:
+        p = p["pooled_output"]
+
+        if isinstance(c, torch.Tensor):
+            c = c.clone()
+
+        if isinstance(p, torch.Tensor):
+            p = p.clone()
+
+        results.append([c, {"pooled_output": p}])
+
+    return results
+
+
 @torch.no_grad()
 @torch.inference_mode()
 def clip_encode(texts, pool_top_k=1):
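Note: clone_cond exists so the skipped branch still hands the sampler an independent copy of the conditioning. clip_encode returns a list of [cond_tensor, {"pooled_output": pooled_tensor}] pairs, and cloning both tensors keeps t['uc'] from aliasing t['c']'s storage. A quick usage sketch, assuming the clone_cond added above is in scope; the stand-in conditioning and its shapes are illustrative, not real clip_encode output:

import torch

# Stand-in for a clip_encode() result: one [cond, {"pooled_output": pooled}] pair.
cond = [[torch.randn(1, 77, 2048), {"pooled_output": torch.randn(1, 1280)}]]

copy = clone_cond(cond)

# Same structure and values, but backed by separate storage, so neither
# side can accidentally mutate the other during sampling.
assert torch.equal(copy[0][0], cond[0][0])
assert copy[0][0].data_ptr() != cond[0][0].data_ptr()
assert copy[0][1]["pooled_output"].data_ptr() != cond[0][1]["pooled_output"].data_ptr()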