speed up lcm again
parent 649f45a6df
commit ac8002d2a4
@@ -1 +1 @@
-version = '2.1.799'
+version = '2.1.800'
@@ -383,6 +383,10 @@ def worker():
             t['c'] = pipeline.clip_encode(texts=t['positive'], pool_top_k=t['positive_top_k'])
 
         for i, t in enumerate(tasks):
-            progressbar(10, f'Encoding negative #{i + 1} ...')
-            t['uc'] = pipeline.clip_encode(texts=t['negative'], pool_top_k=t['negative_top_k'])
+            if abs(float(cfg_scale) - 1.0) < 1e-4:
+                progressbar(10, f'Skipped negative #{i + 1} ...')
+                t['uc'] = pipeline.clone_cond(t['c'])
+            else:
+                progressbar(10, f'Encoding negative #{i + 1} ...')
+                t['uc'] = pipeline.clip_encode(texts=t['negative'], pool_top_k=t['negative_top_k'])
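The worker() change is the whole speedup: when cfg_scale is numerically 1.0, as in the LCM preset, the negative prompt cannot influence the result, so the second CLIP pass is skipped and the positive conditioning is cloned into t['uc'] instead. A minimal sketch of the reasoning, using the usual classifier-free-guidance combination (not code from this repository):

# Sketch only (not code from this repository): why 'uc' stops mattering at cfg_scale == 1.
# Samplers typically combine the two predictions as
#     out = uncond_pred + cfg_scale * (cond_pred - uncond_pred)
# At cfg_scale = 1 the uncond term cancels, so the negative conditioning, and hence the
# negative CLIP pass, has no effect on the output; cloning the positive cond into t['uc']
# simply feeds the sampler something of the right shape.
import torch

cond_pred = torch.randn(1, 4, 64, 64)    # stand-in for the positive-prompt prediction
uncond_pred = torch.randn(1, 4, 64, 64)  # what a real negative-prompt pass might give

cfg_scale = 1.0
out_with_negative = uncond_pred + cfg_scale * (cond_pred - uncond_pred)
out_without = cond_pred                  # what the combination collapses to

print(torch.allclose(out_with_negative, out_without))  # True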
@@ -132,6 +132,25 @@ def clip_encode_single(clip, text, verbose=False):
     return result
 
 
+@torch.no_grad()
+@torch.inference_mode()
+def clone_cond(conds):
+    results = []
+
+    for c, p in conds:
+        p = p["pooled_output"]
+
+        if isinstance(c, torch.Tensor):
+            c = c.clone()
+
+        if isinstance(p, torch.Tensor):
+            p = p.clone()
+
+        results.append([c, {"pooled_output": p}])
+
+    return results
+
+
 @torch.no_grad()
 @torch.inference_mode()
 def clip_encode(texts, pool_top_k=1):
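clone_cond mirrors the structure that clip_encode hands back, a list of [tensor, {"pooled_output": tensor}] pairs (that much is implied by clone_cond(t['c']) in the worker hunk above), and copies the tensors so downstream code can treat 'uc' as independent of 'c'. A hypothetical usage sketch, assuming the clone_cond defined above is in scope and with made-up tensor shapes:

import torch

# Shapes are illustrative only; clone_cond itself is shape-agnostic.
conds = [[torch.randn(1, 77, 2048), {"pooled_output": torch.randn(1, 1280)}]]
cloned = clone_cond(conds)

assert cloned[0][0] is not conds[0][0]         # a new tensor object ...
assert torch.equal(cloned[0][0], conds[0][0])  # ... with identical values
assert torch.equal(cloned[0][1]["pooled_output"], conds[0][1]["pooled_output"])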