from logging import getLogger
from typing import Any, Optional, Tuple

import numpy as np
from diffusers import (
    DDIMScheduler,
    DDPMScheduler,
    DiffusionPipeline,
    DPMSolverMultistepScheduler,
    DPMSolverSinglestepScheduler,
    EulerAncestralDiscreteScheduler,
    EulerDiscreteScheduler,
    HeunDiscreteScheduler,
    IPNDMScheduler,
    KarrasVeScheduler,
    KDPM2AncestralDiscreteScheduler,
    KDPM2DiscreteScheduler,
    LMSDiscreteScheduler,
    PNDMScheduler,
)

try:
    from diffusers import DEISMultistepScheduler
except ImportError:
    from .stub_scheduler import StubScheduler as DEISMultistepScheduler

from ..params import DeviceParams, Size
from ..utils import ServerContext, run_gc

logger = getLogger(__name__)
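
# Stable Diffusion latents have 4 channels at 1/8 of the image resolution, so
# latent tensors are shaped (batch, latent_channels, height // 8, width // 8).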
latent_channels = 4
latent_factor = 8
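
# mapping from user-facing scheduler names to diffusers scheduler classes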
pipeline_schedulers = {
    "ddim": DDIMScheduler,
    "ddpm": DDPMScheduler,
    "deis-multi": DEISMultistepScheduler,
    "dpm-multi": DPMSolverMultistepScheduler,
    "dpm-single": DPMSolverSinglestepScheduler,
    "euler": EulerDiscreteScheduler,
    "euler-a": EulerAncestralDiscreteScheduler,
    "heun": HeunDiscreteScheduler,
    "ipndm": IPNDMScheduler,
    "k-dpm-2-a": KDPM2AncestralDiscreteScheduler,
    "k-dpm-2": KDPM2DiscreteScheduler,
    "karras-ve": KarrasVeScheduler,
    "lms-discrete": LMSDiscreteScheduler,
    "pndm": PNDMScheduler,
}
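

# Reverse lookup: find the short name for a scheduler class or class name, if any.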
def get_scheduler_name(scheduler: Any) -> Optional[str]:
    for k, v in pipeline_schedulers.items():
        if scheduler == v or scheduler == v.__name__:
            return k

    return None
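

# get_latents_from_seed produces the initial noise for a diffusion run; for example,
# a 512x512 request with batch 1 yields a float32 array of shape (1, 4, 64, 64),
# and the same seed always produces the same latents.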
def get_latents_from_seed(seed: int, size: Size, batch: int = 1) -> np.ndarray:
    """
    From https://www.travelneil.com/stable-diffusion-updates.html.
    This one needs to use np.random because of the return type.
    """
    latents_shape = (
        batch,
        latent_channels,
        size.height // latent_factor,
        size.width // latent_factor,
    )
    rng = np.random.default_rng(seed)
    image_latents = rng.standard_normal(latents_shape).astype(np.float32)
    return image_latents
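

# Slice a square tile out of the full latent tensor, converting the pixel-space
# origin and tile size into latent coordinates via latent_factor.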
def get_tile_latents(
    full_latents: np.ndarray, dims: Tuple[int, int, int]
) -> np.ndarray:
    x, y, tile = dims
    t = tile // latent_factor
    x = x // latent_factor
    y = y // latent_factor
    xt = x + t
    yt = y + t

    return full_latents[:, :, y:yt, x:xt]
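

# Load or reuse a diffusion pipeline and its scheduler, caching both on the server
# so repeated requests with the same model, device, and options skip the reload.
# Hypothetical usage (caller names assumed, not defined in this module):
#   pipe = load_pipeline(server, OnnxStableDiffusionPipeline, model_path,
#                        pipeline_schedulers["ddim"], device, lpw=False)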
def load_pipeline(
    server: ServerContext,
    pipeline: DiffusionPipeline,
    model: str,
    scheduler_type: Any,
    device: DeviceParams,
    lpw: bool,
):
    pipe_key = (pipeline, model, device.device, device.provider, lpw)
    scheduler_key = (scheduler_type,)

    cache_pipe = server.cache.get("diffusion", pipe_key)

    if cache_pipe is not None:
        logger.debug("reusing existing diffusion pipeline")
        pipe = cache_pipe

        # the pipeline can be reused, but the scheduler may still need to change
        cache_scheduler = server.cache.get("scheduler", scheduler_key)
        if cache_scheduler is None:
            logger.debug("loading new diffusion scheduler")
            scheduler = scheduler_type.from_pretrained(
                model,
                provider=device.provider,
                provider_options=device.options,
                subfolder="scheduler",
            )

            if device is not None and hasattr(scheduler, "to"):
                scheduler = scheduler.to(device.torch_device())

            pipe.scheduler = scheduler
            server.cache.set("scheduler", scheduler_key, scheduler)
            run_gc()

    else:
        # drop any previously cached pipeline before loading a new one
        logger.debug("unloading previous diffusion pipeline")
        server.cache.drop("diffusion", pipe_key)
        run_gc()

        if lpw:
            custom_pipeline = "./onnx_web/diffusion/lpw_stable_diffusion_onnx.py"
        else:
            custom_pipeline = None

        logger.debug("loading new diffusion pipeline from %s", model)
        scheduler = scheduler_type.from_pretrained(
            model,
            provider=device.provider,
            provider_options=device.options,
            subfolder="scheduler",
        )
        pipe = pipeline.from_pretrained(
            model,
            custom_pipeline=custom_pipeline,
            provider=device.provider,
            provider_options=device.options,
            revision="onnx",
            safety_checker=None,
            scheduler=scheduler,
        )

        if device is not None and hasattr(pipe, "to"):
            pipe = pipe.to(device.torch_device())

        server.cache.set("diffusion", pipe_key, pipe)
        server.cache.set("scheduler", scheduler_key, scheduler)

    return pipe