From 0f09b61ce533be212ce91508e2757b2edb9de49c Mon Sep 17 00:00:00 2001 From: lvmin Date: Wed, 13 Sep 2023 17:28:25 -0700 Subject: [PATCH] remove attention log --- modules/patch.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/modules/patch.py b/modules/patch.py index efdafc4..f953921 100644 --- a/modules/patch.py +++ b/modules/patch.py @@ -5,6 +5,7 @@ import comfy.samplers import comfy.k_diffusion.external import comfy.model_management import modules.anisotropic as anisotropic +import comfy.ldm.modules.attention from comfy.k_diffusion import utils @@ -75,6 +76,8 @@ def text_encoder_device_patched(): def patch_all(): + comfy.ldm.modules.attention.print = lambda *args, **kwargs: None + comfy.model_management.text_encoder_device = text_encoder_device_patched print(f'Fooocus Text Processing Pipelines are retargeted to {str(comfy.model_management.text_encoder_device())}')