speed up lcm again
This commit is contained in:
parent
649f45a6df
commit
ac8002d2a4
@ -1 +1 @@
|
|||||||
version = '2.1.799'
|
version = '2.1.800'
|
||||||
|
@ -383,8 +383,12 @@ def worker():
|
|||||||
t['c'] = pipeline.clip_encode(texts=t['positive'], pool_top_k=t['positive_top_k'])
|
t['c'] = pipeline.clip_encode(texts=t['positive'], pool_top_k=t['positive_top_k'])
|
||||||
|
|
||||||
for i, t in enumerate(tasks):
|
for i, t in enumerate(tasks):
|
||||||
progressbar(10, f'Encoding negative #{i + 1} ...')
|
if abs(float(cfg_scale) - 1.0) < 1e-4:
|
||||||
t['uc'] = pipeline.clip_encode(texts=t['negative'], pool_top_k=t['negative_top_k'])
|
progressbar(10, f'Skipped negative #{i + 1} ...')
|
||||||
|
t['uc'] = pipeline.clone_cond(t['c'])
|
||||||
|
else:
|
||||||
|
progressbar(10, f'Encoding negative #{i + 1} ...')
|
||||||
|
t['uc'] = pipeline.clip_encode(texts=t['negative'], pool_top_k=t['negative_top_k'])
|
||||||
|
|
||||||
if len(goals) > 0:
|
if len(goals) > 0:
|
||||||
progressbar(13, 'Image processing ...')
|
progressbar(13, 'Image processing ...')
|
||||||
|
@ -132,6 +132,25 @@ def clip_encode_single(clip, text, verbose=False):
|
|||||||
return result
|
return result
|
||||||
|
|
||||||
|
|
||||||
|
@torch.inference_mode()
def clone_cond(conds):
    """Return an independent copy of a list of conditioning pairs.

    Each element of *conds* is a ``[c, {"pooled_output": p}]`` pair as
    produced by ``clip_encode``.  Tensors are copied with ``.clone()`` so the
    caller can mutate the result without aliasing the originals; non-tensor
    entries are passed through unchanged.

    Parameters
    ----------
    conds : iterable of [c, dict]
        Conditioning pairs; each dict must carry a ``"pooled_output"`` key.

    Returns
    -------
    list of [c, {"pooled_output": p}]
        New list with cloned tensors.
    """
    results = []

    for c, p in conds:
        # Unwrap the pooled tensor; the wrapping dict is rebuilt below so the
        # returned structure never shares dict objects with the input either.
        p = p["pooled_output"]

        if isinstance(c, torch.Tensor):
            c = c.clone()

        if isinstance(p, torch.Tensor):
            p = p.clone()

        results.append([c, {"pooled_output": p}])

    return results
|
|
||||||
|
|
||||||
@torch.no_grad()
|
@torch.no_grad()
|
||||||
@torch.inference_mode()
|
@torch.inference_mode()
|
||||||
def clip_encode(texts, pool_top_k=1):
|
def clip_encode(texts, pool_top_k=1):
|
||||||
|
Loading…
Reference in New Issue
Block a user