# onnx-web/api/onnx_web/chain/upscale_outpaint.py
# Standard library
from logging import getLogger
from typing import Callable, Optional, Tuple

# Third-party numeric / imaging libraries
import numpy as np
import torch
from PIL import Image

# Project-local: pipeline loading and prompt/latent utilities
from ..diffusers.load import load_pipeline
from ..diffusers.utils import (
    encode_prompt,
    get_latents_from_seed,
    get_tile_latents,
    parse_prompt,
)
from ..image import mask_filter_none, noise_source_histogram
from ..output import save_image
from ..params import Border, ImageParams, Size, SizeChart, StageParams
from ..server import ServerContext
from ..utils import is_debug
from ..worker import ProgressCallback, WorkerContext
from .base import BaseStage
from .result import StageResult

# Module-level logger named after this module, per stdlib logging convention.
logger = getLogger(__name__)
class UpscaleOutpaintStage(BaseStage):
    """Chain stage that runs an inpainting pipeline over tiled sources to fill
    outpainted (masked) regions.

    Tiles whose mask is entirely black are passed through unchanged; all other
    tiles are run through the loaded inpaint pipeline.
    """

    # Maximum tile size this stage accepts; value comes from the project's
    # SizeChart enum (semantics defined elsewhere — not visible here).
    max_tile = SizeChart.max

    def run(
        self,
        worker: WorkerContext,
        server: ServerContext,
        stage: StageParams,
        params: ImageParams,
        sources: StageResult,
        *,
        border: Border,
        dims: Tuple[int, int, int],
        tile_mask: Image.Image,
        fill_color: str = "white",
        mask_filter: Callable = mask_filter_none,
        noise_source: Callable = noise_source_histogram,
        latents: Optional[np.ndarray] = None,
        callback: Optional[ProgressCallback] = None,
        stage_source: Optional[Image.Image] = None,
        stage_mask: Optional[Image.Image] = None,
        **kwargs,
    ) -> StageResult:
        """Inpaint each source image using ``tile_mask`` and return the results.

        Parameters
        ----------
        worker: provides the compute device for pipeline loading.
        server: server context, used for pipeline loading and debug image output.
        stage: stage parameters (accepted for the stage interface; not read here).
        params: image/diffusion parameters (seed, steps, cfg, batch, pipeline type).
        border: outpaint border (accepted for the stage interface; not read in
            this method — NOTE(review): presumably consumed by the tiling layer
            before this stage runs; confirm against callers).
        dims: tile position/size triple used to slice existing latents.
        tile_mask: mask for the current tile; an all-black mask skips the tile.
        fill_color, mask_filter, noise_source: accepted but not referenced in
            this method body — NOTE(review): likely used by callers or sibling
            stages; verify before removing.
        latents: optional pre-generated latents; when given, a tile-sized slice
            is taken. NOTE(review): this local is reassigned inside the loop, so
            after the first source the generated/sliced latents feed later
            iterations — confirm this cross-iteration reuse is intended.
        callback: optional progress callback forwarded to the pipeline.
        stage_source, stage_mask: accepted but unused here.

        Returns
        -------
        StageResult wrapping the inpainted (or passed-through) images.
        """
        # Split the prompt into pairs/loras/inversions plus the plain
        # positive/negative prompt strings (helper defined in diffusers.utils).
        prompt_pairs, loras, inversions, (prompt, negative_prompt) = parse_prompt(
            params
        )

        # Resolve and load an inpaint-capable pipeline on the worker's device.
        pipe_type = params.get_valid_pipeline("inpaint", params.pipeline)
        pipe = load_pipeline(
            server,
            params,
            pipe_type,
            worker.get_device(),
            embeddings=inversions,
            loras=loras,
        )

        outputs = []
        for source in sources.as_image():
            if is_debug():
                # Dump the tile inputs for inspection when debug mode is on.
                save_image(server, "tile-source.png", source)
                save_image(server, "tile-mask.png", tile_mask)

            # if the tile mask is all black, skip processing this tile
            # (PIL's getbbox() returns None for an all-zero image)
            if not tile_mask.getbbox():
                outputs.append(source)
                continue

            # Clamp the latent size to the UNet tile size.
            tile_size = params.unet_tile
            size = Size(*source.size)
            latent_size = size.min(tile_size, tile_size)

            # generate new latents or slice existing
            if latents is None:
                latents = get_latents_from_seed(params.seed, latent_size, params.batch)
            else:
                latents = get_tile_latents(latents, params.seed, latent_size, dims)

            if params.is_lpw():
                # Long-prompt-weighting pipeline path: torch RNG, explicit
                # inpaint() entry point with positional (image, mask, prompt).
                logger.debug("using LPW pipeline for inpaint")
                rng = torch.manual_seed(params.seed)
                result = pipe.inpaint(
                    source,
                    tile_mask,
                    prompt,
                    negative_prompt=negative_prompt,
                    height=latent_size.height,
                    width=latent_size.width,
                    num_inference_steps=params.steps,
                    guidance_scale=params.cfg,
                    generator=rng,
                    latents=latents,
                    callback=callback,
                )
            else:
                # encode and record alternative prompts outside of LPW
                if not params.is_xl():
                    prompt_embeds = encode_prompt(
                        pipe, prompt_pairs, params.batch, params.do_cfg()
                    )
                    pipe.unet.set_prompts(prompt_embeds)

                # Non-LPW path uses a NumPy RandomState and the pipeline's
                # default __call__ with (prompt, image, mask) ordering.
                rng = np.random.RandomState(params.seed)
                result = pipe(
                    prompt,
                    source,
                    tile_mask,
                    negative_prompt=negative_prompt,
                    height=latent_size.height,
                    width=latent_size.width,
                    num_inference_steps=params.steps,
                    guidance_scale=params.cfg,
                    generator=rng,
                    latents=latents,
                    callback=callback,
                )

            # Pipeline results expose their images via .images (diffusers style).
            outputs.extend(result.images)

        return StageResult(images=outputs)