2023-02-26 16:15:12 +00:00
|
|
|
from logging import getLogger
|
|
|
|
from typing import Tuple
|
|
|
|
|
|
|
|
import numpy as np
|
|
|
|
from flask import request
|
|
|
|
|
2023-04-13 04:11:53 +00:00
|
|
|
from ..diffusers.load import get_available_pipelines, get_pipeline_schedulers
|
2023-04-01 17:06:31 +00:00
|
|
|
from ..params import (
|
|
|
|
Border,
|
|
|
|
DeviceParams,
|
|
|
|
HighresParams,
|
|
|
|
ImageParams,
|
|
|
|
Size,
|
|
|
|
UpscaleParams,
|
|
|
|
)
|
2023-04-28 18:56:36 +00:00
|
|
|
from ..utils import (
|
|
|
|
get_and_clamp_float,
|
|
|
|
get_and_clamp_int,
|
|
|
|
get_boolean,
|
|
|
|
get_from_list,
|
|
|
|
get_not_empty,
|
|
|
|
)
|
2023-03-05 13:19:48 +00:00
|
|
|
from .context import ServerContext
|
2023-03-05 04:13:09 +00:00
|
|
|
from .load import (
|
2023-02-26 20:15:30 +00:00
|
|
|
get_available_platforms,
|
|
|
|
get_config_value,
|
|
|
|
get_correction_models,
|
2023-04-01 19:26:45 +00:00
|
|
|
get_highres_methods,
|
2023-04-12 13:51:16 +00:00
|
|
|
get_network_models,
|
2023-02-26 20:15:30 +00:00
|
|
|
get_upscaling_models,
|
2023-02-26 16:15:12 +00:00
|
|
|
)
|
2023-02-26 20:15:30 +00:00
|
|
|
from .utils import get_model_path
|
2023-02-26 16:15:12 +00:00
|
|
|
|
|
|
|
logger = getLogger(__name__)
|
|
|
|
|
|
|
|
|
2023-02-26 20:15:30 +00:00
|
|
|
def pipeline_from_request(
    server: ServerContext,
    default_pipeline: str = "txt2img",
) -> Tuple[DeviceParams, ImageParams, Size]:
    """Parse the diffusion pipeline parameters from the current Flask request.

    Numeric args default to the server config value and are clamped to the
    configured min/max. Returns the selected device (or None for any device),
    the assembled ImageParams, and the output Size.
    """
    user = request.remote_addr

    def clamped_int(key: str) -> int:
        # int request arg with config-driven default, max, and min
        return get_and_clamp_int(
            request.args,
            key,
            get_config_value(key),
            get_config_value(key, "max"),
            get_config_value(key, "min"),
        )

    def clamped_float(key: str) -> float:
        # float request arg with config-driven default, max, and min
        return get_and_clamp_float(
            request.args,
            key,
            get_config_value(key),
            get_config_value(key, "max"),
            get_config_value(key, "min"),
        )

    # platform stuff
    device = None
    device_name = request.args.get("platform")

    if device_name is not None and device_name != "any":
        for platform in get_available_platforms():
            if platform.device == device_name:
                device = platform
                # stop at the first match; previously the last duplicate won
                break

    # diffusion model
    model = get_not_empty(request.args, "model", get_config_value("model"))
    model_path = get_model_path(server, model)

    control = None
    control_name = request.args.get("control")
    for network in get_network_models():
        if network.name == control_name:
            control = network
            # stop at the first match; previously the last duplicate won
            break

    # pipeline stuff
    pipeline = get_from_list(
        request.args, "pipeline", get_available_pipelines(), default_pipeline
    )
    scheduler = get_from_list(request.args, "scheduler", get_pipeline_schedulers())

    if scheduler is None:
        scheduler = get_config_value("scheduler")

    # prompt does not come from config
    prompt = request.args.get("prompt", "")
    negative_prompt = request.args.get("negativePrompt", None)

    # treat a blank negative prompt the same as an absent one
    if negative_prompt is not None and negative_prompt.strip() == "":
        negative_prompt = None

    # image params
    batch = clamped_int("batch")
    cfg = clamped_float("cfg")
    eta = clamped_float("eta")
    loopback = clamped_int("loopback")
    steps = clamped_int("steps")
    height = clamped_int("height")
    width = clamped_int("width")
    tiled_vae = get_boolean(request.args, "tiledVAE", get_config_value("tiledVAE"))
    tiles = clamped_int("tiles")
    overlap = clamped_float("overlap")
    stride = clamped_int("stride")

    if stride > tiles:
        logger.info("limiting stride to tile size, %s > %s", stride, tiles)
        stride = tiles

    seed = int(request.args.get("seed", -1))
    if seed == -1:
        # this one can safely use np.random because it produces a single value
        seed = np.random.randint(np.iinfo(np.int32).max)

    logger.info(
        "request from %s: %s steps of %s using %s in %s on %s, %sx%s, %s, %s - %s",
        user,
        steps,
        scheduler,
        model_path,
        pipeline,
        device or "any device",
        width,
        height,
        cfg,
        seed,
        prompt,
    )

    params = ImageParams(
        model_path,
        pipeline,
        scheduler,
        prompt,
        cfg,
        steps,
        seed,
        eta=eta,
        negative_prompt=negative_prompt,
        batch=batch,
        control=control,
        loopback=loopback,
        tiled_vae=tiled_vae,
        tiles=tiles,
        overlap=overlap,
        stride=stride,
    )
    size = Size(width, height)
    return (device, params, size)
|
|
|
|
|
|
|
|
|
|
|
|
def border_from_request() -> Border:
    """Read the four outpainting border sizes from the current request args."""

    def read_edge(edge: str) -> int:
        # each edge defaults to the config value and is clamped to config min/max
        return get_and_clamp_int(
            request.args,
            edge,
            get_config_value(edge),
            get_config_value(edge, "max"),
            get_config_value(edge, "min"),
        )

    return Border(
        read_edge("left"),
        read_edge("right"),
        read_edge("top"),
        read_edge("bottom"),
    )
|
|
|
|
|
|
|
|
|
|
|
|
def upscale_from_request() -> UpscaleParams:
    """Read the upscaling and face-correction parameters from the request."""

    def clamp_arg(key, clamp):
        # pull one request arg with config-driven default, max, and min,
        # using the given int or float clamp helper
        return clamp(
            request.args,
            key,
            get_config_value(key),
            get_config_value(key, "max"),
            get_config_value(key, "min"),
        )

    denoise = clamp_arg("denoise", get_and_clamp_float)
    scale = clamp_arg("scale", get_and_clamp_int)
    outscale = clamp_arg("outscale", get_and_clamp_int)
    upscaling = get_from_list(request.args, "upscaling", get_upscaling_models())
    correction = get_from_list(request.args, "correction", get_correction_models())
    faces = get_not_empty(request.args, "faces", "false") == "true"
    face_outscale = clamp_arg("faceOutscale", get_and_clamp_int)
    face_strength = clamp_arg("faceStrength", get_and_clamp_float)
    upscale_order = request.args.get("upscaleOrder", "correction-first")

    return UpscaleParams(
        upscaling,
        correction_model=correction,
        denoise=denoise,
        faces=faces,
        face_outscale=face_outscale,
        face_strength=face_strength,
        format="onnx",
        outscale=outscale,
        scale=scale,
        upscale_order=upscale_order,
    )
|
2023-04-01 16:26:10 +00:00
|
|
|
|
|
|
|
|
|
|
|
def highres_from_request() -> HighresParams:
    """Read the highres parameters from the current request args."""

    def int_arg(key: str) -> int:
        # int request arg with config-driven default, max, and min
        return get_and_clamp_int(
            request.args,
            key,
            get_config_value(key),
            get_config_value(key, "max"),
            get_config_value(key, "min"),
        )

    iterations = int_arg("highresIterations")
    method = get_from_list(request.args, "highresMethod", get_highres_methods())
    scale = int_arg("highresScale")
    steps = int_arg("highresSteps")
    strength = get_and_clamp_float(
        request.args,
        "highresStrength",
        get_config_value("highresStrength"),
        get_config_value("highresStrength", "max"),
        get_config_value("highresStrength", "min"),
    )

    return HighresParams(
        scale,
        steps,
        strength,
        method=method,
        iterations=iterations,
    )
|