fix cross_attention import of diffusers

XDUWQ
2023-08-31 10:11:48 +08:00
parent 0405fc95a9
commit d1478db172


@@ -11,7 +11,7 @@ import torch.nn.functional as F
 from diffusers import (AutoencoderKL, DDPMScheduler, DiffusionPipeline,
                        DPMSolverMultistepScheduler, UNet2DConditionModel,
                        utils)
-from diffusers.models import cross_attention
+from diffusers.models import attention
 from diffusers.utils import deprecation_utils
 from swift import AdapterConfig, LoRAConfig, PromptConfig, Swift
 from transformers import CLIPTextModel, CLIPTokenizer
@@ -30,7 +30,7 @@ from .control_sd_lora import ControlLoRATuner
 utils.deprecate = lambda *arg, **kwargs: None
 deprecation_utils.deprecate = lambda *arg, **kwargs: None
-cross_attention.deprecate = lambda *arg, **kwargs: None
+attention.deprecate = lambda *arg, **kwargs: None
 __tuner_MAP__ = {'lora': LoRATuner, 'control_lora': ControlLoRATuner}
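
The diff swaps diffusers.models.cross_attention for diffusers.models.attention, tracking the module rename in newer diffusers releases, while the surrounding code replaces the library's deprecate helper with a no-op in three places. Patching all three is necessary because Python binds imported names at import time: any module that did "from ...utils import deprecate" keeps its own reference to the function, so rebinding diffusers.utils.deprecate alone would not silence calls made through those copies. A minimal standalone sketch of the pattern, assuming a diffusers release where these module paths exist (they are version-dependent):

# Hypothetical standalone sketch of the no-op monkey-patch, not the
# repository's exact file. Module paths assume a recent diffusers release.
from diffusers import utils
from diffusers.models import attention
from diffusers.utils import deprecation_utils

def _noop_deprecate(*args, **kwargs):
    # Swallow deprecation notices instead of warning or raising.
    return None

# Each module holds its own binding of deprecate, so patch all of them.
utils.deprecate = _noop_deprecate
deprecation_utils.deprecate = _noop_deprecate
attention.deprecate = _noop_deprecate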