diff --git a/api/onnx_web/diffusers/patches/vae.py b/api/onnx_web/diffusers/patches/vae.py
index d7e3e1f7..8019a243 100644
--- a/api/onnx_web/diffusers/patches/vae.py
+++ b/api/onnx_web/diffusers/patches/vae.py
@@ -4,11 +4,10 @@ from typing import Union
 import numpy as np
 import torch
 from diffusers import OnnxRuntimeModel
-from diffusers.models.autoencoder_kl import AutoencoderKLOutput
-from diffusers.models.vae import DecoderOutput
 from diffusers.pipelines.onnx_utils import ORT_TO_NP_TYPE
 
 from ...server import ServerContext
+from ..version_safe_diffusers import AutoencoderKLOutput, DecoderOutput
 
 logger = getLogger(__name__)
 
diff --git a/api/onnx_web/diffusers/version_safe_diffusers.py b/api/onnx_web/diffusers/version_safe_diffusers.py
index 8c4da406..e268dcf8 100644
--- a/api/onnx_web/diffusers/version_safe_diffusers.py
+++ b/api/onnx_web/diffusers/version_safe_diffusers.py
@@ -1,11 +1,4 @@
-import diffusers
 from diffusers import *  # NOQA
-from packaging import version
-
-is_diffusers_0_15 = version.parse(
-    version.parse(diffusers.__version__).base_version
-) >= version.parse("0.15")
-
 
 try:
     from diffusers import DEISMultistepScheduler
@@ -27,8 +20,17 @@ try:
 except ImportError:
     from ..diffusers.stub_scheduler import StubScheduler as UniPCMultistepScheduler
 
+try:
+    from diffusers.models.modeling_outputs import AutoencoderKLOutput
+except ImportError:
+    from diffusers.models.autoencoder_kl import AutoencoderKLOutput
 
-if is_diffusers_0_15:
+try:
+    from diffusers.models.autoencoders.vae import DecoderOutput
+except ImportError:
+    from diffusers.models.vae import DecoderOutput
+
+try:
     from diffusers.models.attention_processor import AttnProcessor
-else:
+except ImportError:
     from diffusers.models.cross_attention import CrossAttnProcessor as AttnProcessor