2023-02-26 16:15:12 +00:00
|
|
|
from io import BytesIO
|
|
|
|
from logging import getLogger
|
|
|
|
from os import path
|
2024-01-09 04:14:32 +00:00
|
|
|
from typing import Any, Dict, List, Optional
|
2023-02-26 16:15:12 +00:00
|
|
|
|
|
|
|
from flask import Flask, jsonify, make_response, request, url_for
|
|
|
|
from jsonschema import validate
|
|
|
|
from PIL import Image
|
|
|
|
|
2023-02-26 20:15:30 +00:00
|
|
|
from ..chain import CHAIN_STAGES, ChainPipeline
|
2024-01-04 01:09:18 +00:00
|
|
|
from ..chain.result import ImageMetadata, StageResult
|
2023-04-13 04:14:45 +00:00
|
|
|
from ..diffusers.load import get_available_pipelines, get_pipeline_schedulers
|
2023-03-05 04:25:49 +00:00
|
|
|
from ..diffusers.run import (
|
2023-02-26 16:15:12 +00:00
|
|
|
run_blend_pipeline,
|
|
|
|
run_img2img_pipeline,
|
|
|
|
run_inpaint_pipeline,
|
|
|
|
run_txt2img_pipeline,
|
|
|
|
run_upscale_pipeline,
|
|
|
|
)
|
2023-07-04 21:41:54 +00:00
|
|
|
from ..diffusers.utils import replace_wildcards
|
2024-01-04 02:54:11 +00:00
|
|
|
from ..output import make_job_name, make_output_names
|
2024-01-05 01:16:40 +00:00
|
|
|
from ..params import Size, StageParams, TileOrder
|
2023-03-05 04:13:09 +00:00
|
|
|
from ..transformers.run import run_txt2txt_pipeline
|
2023-02-26 16:15:12 +00:00
|
|
|
from ..utils import (
|
|
|
|
base_join,
|
|
|
|
get_and_clamp_float,
|
|
|
|
get_and_clamp_int,
|
2023-07-11 03:16:17 +00:00
|
|
|
get_boolean,
|
2023-02-26 16:15:12 +00:00
|
|
|
get_from_list,
|
|
|
|
get_from_map,
|
2024-01-15 01:24:50 +00:00
|
|
|
get_list,
|
2023-02-26 16:15:12 +00:00
|
|
|
get_not_empty,
|
|
|
|
get_size,
|
2023-05-04 03:15:17 +00:00
|
|
|
load_config,
|
|
|
|
load_config_str,
|
2023-03-01 03:56:12 +00:00
|
|
|
sanitize_name,
|
2023-02-26 16:15:12 +00:00
|
|
|
)
|
2024-01-05 01:16:40 +00:00
|
|
|
from ..worker.command import JobStatus, JobType, Progress
|
2023-02-26 20:15:30 +00:00
|
|
|
from ..worker.pool import DevicePoolExecutor
|
2023-03-05 13:19:48 +00:00
|
|
|
from .context import ServerContext
|
2023-03-05 04:13:09 +00:00
|
|
|
from .load import (
|
2023-02-26 20:15:30 +00:00
|
|
|
get_available_platforms,
|
|
|
|
get_config_params,
|
|
|
|
get_config_value,
|
|
|
|
get_correction_models,
|
|
|
|
get_diffusion_models,
|
2023-03-05 05:01:06 +00:00
|
|
|
get_extra_strings,
|
2023-02-26 20:15:30 +00:00
|
|
|
get_mask_filters,
|
2023-03-19 00:14:24 +00:00
|
|
|
get_network_models,
|
2023-02-26 20:15:30 +00:00
|
|
|
get_noise_sources,
|
2023-04-14 01:06:33 +00:00
|
|
|
get_source_filters,
|
2023-02-26 20:15:30 +00:00
|
|
|
get_upscaling_models,
|
2023-07-04 21:41:54 +00:00
|
|
|
get_wildcard_data,
|
2023-02-26 20:15:30 +00:00
|
|
|
)
|
2023-04-01 17:06:31 +00:00
|
|
|
from .params import (
|
2023-09-13 22:27:44 +00:00
|
|
|
build_border,
|
|
|
|
build_highres,
|
|
|
|
build_upscale,
|
|
|
|
pipeline_from_json,
|
2023-04-01 17:06:31 +00:00
|
|
|
pipeline_from_request,
|
|
|
|
)
|
2023-02-26 20:15:30 +00:00
|
|
|
from .utils import wrap_route
|
2023-02-26 16:15:12 +00:00
|
|
|
|
|
|
|
logger = getLogger(__name__)
|
|
|
|
|
|
|
|
|
2023-03-18 20:16:41 +00:00
|
|
|
def ready_reply(
    ready: bool = False,
    cancelled: bool = False,
    failed: bool = False,
    pending: bool = False,
    progress: int = 0,
):
    """Build the JSON reply describing the readiness state of a job."""
    payload = {
        "cancelled": cancelled,
        "failed": failed,
        "pending": pending,
        "progress": progress,
        "ready": ready,
    }
    return jsonify(payload)
|
|
|
|
|
|
|
|
|
|
|
|
def error_reply(err: str):
    """Build a JSON error response with HTTP status 400."""
    body = jsonify({"error": err})
    response = make_response(body)
    response.status_code = 400
    return response
|
|
|
|
|
|
|
|
|
2024-01-07 14:16:13 +00:00
|
|
|
# shared zero-progress value, used as the default for all progress fields
EMPTY_PROGRESS = Progress(0, 0)
|
|
|
|
|
|
|
|
|
|
|
|
def job_reply(name: str, queue: int = 0):
    """Build the JSON reply returned when a new job has been accepted."""
    reply = {
        "name": name,
        "queue": Progress(queue, queue).tojson(),
        "status": JobStatus.PENDING,
        "stages": EMPTY_PROGRESS.tojson(),
        "steps": EMPTY_PROGRESS.tojson(),
        "tiles": EMPTY_PROGRESS.tojson(),
    }
    return jsonify(reply)
|
|
|
|
|
|
|
|
|
|
|
|
def image_reply(
    server: ServerContext,
    name: str,
    status: str,
    queue: Optional[Progress] = None,
    stages: Optional[Progress] = None,
    steps: Optional[Progress] = None,
    tiles: Optional[Progress] = None,
    outputs: Optional[List[str]] = None,
    metadata: Optional[List[ImageMetadata]] = None,
    reason: Optional[str] = None,
) -> Dict[str, Any]:
    """Build the status dict for a single job.

    fix: the Progress parameters defaulted to None without Optional
    annotations.

    NOTE(review): when outputs/metadata are inconsistent this returns a Flask
    error response rather than a dict, despite the return annotation; callers
    (see job_status) appear to embed the result in a JSON array, so the error
    path should be treated as terminal — confirm before tightening the type.
    """
    # default any missing progress values to the shared zero-progress marker
    queue = queue if queue is not None else EMPTY_PROGRESS
    stages = stages if stages is not None else EMPTY_PROGRESS
    steps = steps if steps is not None else EMPTY_PROGRESS
    tiles = tiles if tiles is not None else EMPTY_PROGRESS

    data = {
        "name": name,
        "status": status,
        "queue": queue.tojson(),
        "stages": stages.tojson(),
        "steps": steps.tojson(),
        "tiles": tiles.tojson(),
    }

    if reason is not None:
        data["reason"] = reason

    if outputs is not None:
        # outputs and metadata must be provided together and in lockstep
        if metadata is None:
            logger.error("metadata is required with outputs")
            return error_reply("metadata is required with outputs")

        if len(metadata) != len(outputs):
            logger.error("metadata and outputs must be the same length")
            return error_reply("metadata and outputs must be the same length")

        data["metadata"] = [m.tojson(server, [o]) for m, o in zip(metadata, outputs)]
        data["outputs"] = outputs

    return data
|
2024-01-04 01:09:18 +00:00
|
|
|
|
|
|
|
|
2024-01-13 05:41:16 +00:00
|
|
|
def multi_image_reply(results: List[Dict[str, Any]]):
    """Wrap a list of per-job reply dicts in a single JSON response."""
    return jsonify({"results": results})
|
|
|
|
|
|
|
|
|
2023-02-26 16:15:12 +00:00
|
|
|
def url_from_rule(rule) -> str:
    """Render a Flask routing rule as a template URL with :arg placeholders."""
    placeholders = {arg: ":%s" % (arg) for arg in rule.arguments}
    return url_for(rule.endpoint, **placeholders)
|
|
|
|
|
|
|
|
|
2023-04-10 01:33:03 +00:00
|
|
|
def introspect(server: ServerContext, app: Flask):
    """Describe the server and every route registered on the app."""
    routes = [
        {
            "path": url_from_rule(rule),
            "methods": list(rule.methods or []),
        }
        for rule in app.url_map.iter_rules()
    ]
    return {
        "name": "onnx-web",
        "routes": routes,
    }
|
|
|
|
|
|
|
|
|
2023-04-10 01:33:03 +00:00
|
|
|
def list_extra_strings(server: ServerContext):
    """Return the extra strings as JSON."""
    strings = get_extra_strings()
    return jsonify(strings)
|
|
|
|
|
|
|
|
|
2023-04-14 01:59:26 +00:00
|
|
|
def list_filters(server: ServerContext):
    """List the available mask and source filter names."""
    filters = {
        "mask": list(get_mask_filters().keys()),
        "source": list(get_source_filters().keys()),
    }
    return jsonify(filters)
|
2023-04-14 01:59:26 +00:00
|
|
|
|
|
|
|
|
2023-04-10 01:33:03 +00:00
|
|
|
def list_mask_filters(server: ServerContext):
    """List only the mask filter names (deprecated in favor of list_filters)."""
    logger.info("dedicated list endpoint for mask filters is deprecated")
    names = list(get_mask_filters().keys())
    return jsonify(names)
|
|
|
|
|
|
|
|
|
2023-04-10 01:33:03 +00:00
|
|
|
def list_models(server: ServerContext):
    """List the available models, grouped by type."""
    models = {
        "correction": get_correction_models(),
        "diffusion": get_diffusion_models(),
        "networks": [model.tojson() for model in get_network_models()],
        "upscaling": get_upscaling_models(),
    }
    return jsonify(models)
|
|
|
|
|
|
|
|
|
2023-04-10 01:33:03 +00:00
|
|
|
def list_noise_sources(server: ServerContext):
    """List the available noise source names."""
    names = list(get_noise_sources().keys())
    return jsonify(names)
|
|
|
|
|
|
|
|
|
2023-04-10 01:33:03 +00:00
|
|
|
def list_params(server: ServerContext):
    """Return the config parameters as JSON."""
    params = get_config_params()
    return jsonify(params)
|
|
|
|
|
|
|
|
|
2023-04-13 04:14:45 +00:00
|
|
|
def list_pipelines(server: ServerContext):
    """Return the available pipelines as JSON."""
    pipelines = get_available_pipelines()
    return jsonify(pipelines)
|
|
|
|
|
|
|
|
|
2023-04-10 01:33:03 +00:00
|
|
|
def list_platforms(server: ServerContext):
    """List the device names of the available platforms."""
    devices = [p.device for p in get_available_platforms()]
    return jsonify(devices)
|
|
|
|
|
|
|
|
|
2023-04-10 01:33:03 +00:00
|
|
|
def list_schedulers(server: ServerContext):
    """Return the pipeline schedulers as JSON."""
    schedulers = get_pipeline_schedulers()
    return jsonify(schedulers)
|
2023-02-26 16:15:12 +00:00
|
|
|
|
|
|
|
|
2023-12-16 21:17:28 +00:00
|
|
|
def list_wildcards(server: ServerContext):
    """List the available wildcard names."""
    names = list(get_wildcard_data().keys())
    return jsonify(names)
|
|
|
|
|
|
|
|
|
2023-04-10 01:33:03 +00:00
|
|
|
def img2img(server: ServerContext, pool: DevicePoolExecutor):
    """Queue an img2img job from the uploaded source image and request params."""
    upload = request.files.get("source")
    if upload is None:
        return error_reply("source image is required")

    image = Image.open(BytesIO(upload.read())).convert("RGB")
    size = Size(image.width, image.height)

    # parse the remaining parameters from the request
    device, params, _size = pipeline_from_request(server, "img2img")
    upscale = build_upscale()
    highres = build_highres()
    source_filter = get_from_list(
        request.args, "sourceFilter", list(get_source_filters().keys())
    )
    strength = get_and_clamp_float(
        request.args,
        "strength",
        get_config_value("strength"),
        get_config_value("strength", "max"),
        get_config_value("strength", "min"),
    )

    replace_wildcards(params, get_wildcard_data())

    job_name = make_job_name("img2img", params, size, extras=[strength])
    queue = pool.submit(
        job_name,
        JobType.IMG2IMG,
        run_img2img_pipeline,
        server,
        params,
        upscale,
        highres,
        image,
        strength,
        needs_device=device,
        source_filter=source_filter,
    )

    logger.info("img2img job queued for: %s", job_name)
    return job_reply(job_name, queue=queue)
|
2023-02-26 16:15:12 +00:00
|
|
|
|
|
|
|
|
2023-04-10 01:33:03 +00:00
|
|
|
def txt2img(server: ServerContext, pool: DevicePoolExecutor):
    """Queue a txt2img job from the request parameters."""
    device, params, size = pipeline_from_request(server, "txt2img")
    highres = build_highres()
    upscale = build_upscale()

    replace_wildcards(params, get_wildcard_data())

    job_name = make_job_name("txt2img", params, size)
    queue = pool.submit(
        job_name,
        JobType.TXT2IMG,
        run_txt2img_pipeline,
        server,
        params,
        size,
        upscale,
        highres,
        needs_device=device,
    )

    logger.info("txt2img job queued for: %s", job_name)
    return job_reply(job_name, queue=queue)
|
2023-02-26 16:15:12 +00:00
|
|
|
|
|
|
|
|
2023-04-10 01:33:03 +00:00
|
|
|
def inpaint(server: ServerContext, pool: DevicePoolExecutor):
    """Queue an inpaint job from uploaded source and mask images.

    fix: removed the no-op `mask.convert(mode="L")` call; PIL's `convert`
    returns a new image rather than converting in place, so its result was
    silently discarded and the mask stayed RGBA. If a grayscale mask is
    actually required downstream, assign the result instead — confirm against
    run_inpaint_pipeline before changing the mask mode.
    """
    source_file = request.files.get("source")
    if source_file is None:
        return error_reply("source image is required")

    mask_file = request.files.get("mask")
    if mask_file is None:
        return error_reply("mask image is required")

    source = Image.open(BytesIO(source_file.read())).convert("RGBA")
    size = Size(source.width, source.height)

    # flatten the uploaded mask onto an opaque black background
    mask_top_layer = Image.open(BytesIO(mask_file.read())).convert("RGBA")
    mask = Image.new("RGBA", mask_top_layer.size, color=(0, 0, 0, 255))
    mask.alpha_composite(mask_top_layer)

    full_res_inpaint = get_boolean(
        request.args, "fullresInpaint", get_config_value("fullresInpaint")
    )
    full_res_inpaint_padding = get_and_clamp_float(
        request.args,
        "fullresInpaintPadding",
        get_config_value("fullresInpaintPadding"),
        get_config_value("fullresInpaintPadding", "max"),
        get_config_value("fullresInpaintPadding", "min"),
    )

    device, params, _size = pipeline_from_request(server, "inpaint")
    expand = build_border()
    upscale = build_upscale()
    highres = build_highres()

    fill_color = get_not_empty(request.args, "fillColor", "white")
    mask_filter = get_from_map(request.args, "filter", get_mask_filters(), "none")
    noise_source = get_from_map(request.args, "noise", get_noise_sources(), "histogram")
    tile_order = get_from_list(
        request.args, "tileOrder", [TileOrder.grid, TileOrder.kernel, TileOrder.spiral]
    )
    # NOTE(review): this overrides the parsed request value, making the
    # tileOrder parameter above dead code — remove the override or the
    # parameter once the intent is confirmed.
    tile_order = TileOrder.spiral

    replace_wildcards(params, get_wildcard_data())

    job_name = make_job_name(
        "inpaint",
        params,
        size,
        extras=[
            expand.left,
            expand.right,
            expand.top,
            expand.bottom,
            mask_filter.__name__,
            noise_source.__name__,
            fill_color,
            tile_order,
        ],
    )

    queue = pool.submit(
        job_name,
        JobType.INPAINT,
        run_inpaint_pipeline,
        server,
        params,
        size,
        upscale,
        highres,
        source,
        mask,
        expand,
        noise_source,
        mask_filter,
        fill_color,
        tile_order,
        full_res_inpaint,
        full_res_inpaint_padding,
        needs_device=device,
    )

    logger.info("inpaint job queued for: %s", job_name)

    return job_reply(job_name, queue=queue)
|
2023-02-26 16:15:12 +00:00
|
|
|
|
|
|
|
|
2023-04-10 01:33:03 +00:00
|
|
|
def upscale(server: ServerContext, pool: DevicePoolExecutor):
    """Queue an upscale job for the uploaded source image."""
    upload = request.files.get("source")
    if upload is None:
        return error_reply("source image is required")

    image = Image.open(BytesIO(upload.read())).convert("RGB")

    device, params, size = pipeline_from_request(server)
    upscale_params = build_upscale()
    highres = build_highres()

    replace_wildcards(params, get_wildcard_data())

    job_name = make_job_name("upscale", params, size)
    queue = pool.submit(
        job_name,
        JobType.UPSCALE,
        run_upscale_pipeline,
        server,
        params,
        size,
        upscale_params,
        highres,
        image,
        needs_device=device,
    )

    logger.info("upscale job queued for: %s", job_name)
    return job_reply(job_name, queue=queue)
|
2023-02-26 16:15:12 +00:00
|
|
|
|
|
|
|
|
2023-09-13 22:27:44 +00:00
|
|
|
# keys that are specially parsed by params and should not show up in with_args
# (they are popped from each stage's kwargs before submission; see chain())
CHAIN_POP_KEYS = ["model", "control"]
|
|
|
|
|
|
|
|
|
2023-04-10 01:33:03 +00:00
|
|
|
def chain(server: ServerContext, pool: DevicePoolExecutor):
    """Build and queue a chain pipeline from a JSON or multipart request.

    fix: the form/file body was parsed unconditionally, clobbering the body
    read by `request.get_json()` and rejecting pure-JSON requests that carry
    no `chain` form field; the form/file parse now only runs when the request
    is not JSON.
    """
    if request.is_json:
        logger.debug("chain pipeline request with JSON body")
        data = request.get_json()
    else:
        logger.debug(
            "chain pipeline request: %s, %s", request.form.keys(), request.files.keys()
        )

        body = request.form.get("chain") or request.files.get("chain")
        if body is None:
            return error_reply("chain pipeline must have a body")

        data = load_config_str(body)

    schema = load_config("./schemas/chain.yaml")
    logger.debug("validating chain request: %s against %s", data, schema)
    validate(data, schema)

    device, base_params, base_size = pipeline_from_json(
        server, data=data.get("defaults")
    )

    # start building the pipeline
    pipeline = ChainPipeline()
    for stage_data in data.get("stages", []):
        stage_class = CHAIN_STAGES[stage_data.get("type")]
        kwargs: Dict[str, Any] = stage_data.get("params", {})
        logger.info("request stage: %s, %s", stage_class.__name__, kwargs)

        # TODO: combine base params with stage params
        _device, params, size = pipeline_from_json(server, data=kwargs)
        replace_wildcards(params, get_wildcard_data())

        # remove parsed keys, like model names (which become paths)
        for pop_key in CHAIN_POP_KEYS:
            if pop_key in kwargs:
                kwargs.pop(pop_key)

        # a seed of -1 means "pick one"; drop it so it is not pinned
        if "seed" in kwargs and kwargs["seed"] == -1:
            kwargs.pop("seed")

        # replace kwargs with parsed versions
        kwargs["params"] = params
        kwargs["size"] = size

        border = build_border(kwargs)
        kwargs["border"] = border

        upscale = build_upscale(kwargs)
        kwargs["upscale"] = upscale

        # prepare the stage metadata
        stage = StageParams(
            stage_data.get("name", stage_class.__name__),
            tile_size=get_size(kwargs.get("tiles")),
            outscale=get_and_clamp_int(kwargs, "outscale", 1, 4),
        )

        # load any images related to this stage
        stage_source_name = "source:%s" % (stage.name)
        stage_mask_name = "mask:%s" % (stage.name)

        if stage_source_name in request.files:
            logger.debug(
                "loading source image %s for pipeline stage %s",
                stage_source_name,
                stage.name,
            )
            source_file = request.files.get(stage_source_name)
            if source_file is not None:
                source = Image.open(BytesIO(source_file.read())).convert("RGB")
                kwargs["stage_source"] = source

        if stage_mask_name in request.files:
            logger.debug(
                "loading mask image %s for pipeline stage %s",
                stage_mask_name,
                stage.name,
            )
            mask_file = request.files.get(stage_mask_name)
            if mask_file is not None:
                mask = Image.open(BytesIO(mask_file.read())).convert("RGB")
                kwargs["stage_mask"] = mask

        pipeline.append((stage_class(), stage, kwargs))

    logger.info("running chain pipeline with %s stages", len(pipeline.stages))

    job_name = make_job_name("chain", base_params, base_size)

    # build and run chain pipeline
    queue = pool.submit(
        job_name,
        JobType.CHAIN,
        pipeline,
        server,
        base_params,
        StageResult.empty(),
        size=base_size,
        needs_device=device,
    )

    return job_reply(job_name, queue=queue)
|
2023-02-26 16:15:12 +00:00
|
|
|
|
|
|
|
|
2023-04-10 01:33:03 +00:00
|
|
|
def blend(server: ServerContext, pool: DevicePoolExecutor):
    """Queue a blend job combining up to two source images with a mask.

    fix: the success log message said "upscale job queued" (copy-paste from
    the upscale handler); it now names the blend pipeline.
    """
    mask_file = request.files.get("mask")
    if mask_file is None:
        return error_reply("mask image is required")

    mask = Image.open(BytesIO(mask_file.read())).convert("RGBA")

    max_sources = 2
    sources = []

    # collect the numbered source uploads; missing slots are skipped
    for i in range(max_sources):
        source_file = request.files.get("source:%s" % (i))
        if source_file is None:
            logger.warning("missing source %s", i)
        else:
            source = Image.open(BytesIO(source_file.read())).convert("RGB")
            sources.append(source)

    device, params, size = pipeline_from_request(server)
    upscale = build_upscale()

    job_name = make_job_name("blend", params, size)
    queue = pool.submit(
        job_name,
        JobType.BLEND,
        run_blend_pipeline,
        server,
        params,
        size,
        upscale,
        # TODO: highres
        sources,
        mask,
        needs_device=device,
    )

    logger.info("blend job queued for: %s", job_name)

    return job_reply(job_name, queue=queue)
|
2023-02-26 16:15:12 +00:00
|
|
|
|
|
|
|
|
2023-04-10 01:33:03 +00:00
|
|
|
def txt2txt(server: ServerContext, pool: DevicePoolExecutor):
    """Queue a txt2txt job from the request parameters.

    fix: the log message said "upscale job queued" (copy-paste from the
    upscale handler) and was emitted before the job was submitted; it now
    names the txt2txt pipeline and is logged after submission, consistent
    with every other handler in this module.
    """
    device, params, size = pipeline_from_request(server)

    job_name = make_job_name("txt2txt", params, size)

    queue = pool.submit(
        job_name,
        JobType.TXT2TXT,
        run_txt2txt_pipeline,
        server,
        params,
        size,
        needs_device=device,
    )

    logger.info("txt2txt job queued for: %s", job_name)

    return job_reply(job_name, queue=queue)
|
2023-02-26 16:15:12 +00:00
|
|
|
|
|
|
|
|
2023-04-10 01:33:03 +00:00
|
|
|
def cancel(server: ServerContext, pool: DevicePoolExecutor):
    """Cancel a job by its output name (deprecated single-job endpoint)."""
    output_file = request.args.get("output", None)
    if output_file is None:
        return error_reply("output name is required")

    safe_name = sanitize_name(output_file)
    was_cancelled = pool.cancel(safe_name)

    return ready_reply(cancelled=was_cancelled)
|
2023-02-26 16:15:12 +00:00
|
|
|
|
|
|
|
|
2023-04-10 01:33:03 +00:00
|
|
|
def ready(server: ServerContext, pool: DevicePoolExecutor):
    """Report whether a named output is ready (deprecated single-job endpoint)."""
    output_file = request.args.get("output", None)
    if output_file is None:
        return error_reply("output name is required")

    output_file = sanitize_name(output_file)
    status, progress, _queue = pool.status(output_file)

    if status == JobStatus.PENDING:
        return ready_reply(pending=True)

    if progress is None:
        # no progress record: fall back to checking for the output on disk
        output = base_join(server.output_path, output_file)
        if path.exists(output):
            return ready_reply(ready=True)

        # is a missing image really an error? yes will display the retry button
        return ready_reply(ready=True, failed=True)

    return ready_reply(
        ready=(status == JobStatus.SUCCESS),
        progress=progress.steps.current,
        failed=(status == JobStatus.FAILED),
        cancelled=(status == JobStatus.CANCELLED),
    )
|
2023-02-26 16:15:12 +00:00
|
|
|
|
|
|
|
|
2024-01-04 01:15:33 +00:00
|
|
|
def job_create(server: ServerContext, pool: DevicePoolExecutor):
    """Create a new job; alias for the chain pipeline endpoint."""
    return chain(server, pool)
|
|
|
|
|
|
|
|
|
2024-01-04 01:09:18 +00:00
|
|
|
def job_cancel(server: ServerContext, pool: DevicePoolExecutor):
    """Cancel up to ten named jobs and report the resulting status of each."""
    requested = get_list(request.args, "jobs")

    # accept the older single-job query parameter as well
    legacy_name = request.args.get("job", None)
    if legacy_name is not None:
        requested.append(legacy_name)

    if not requested:
        return error_reply("at least one job name is required")
    if len(requested) > 10:
        return error_reply("too many jobs")

    results: List[Dict[str, str]] = []
    for raw_name in requested:
        safe_name = sanitize_name(raw_name)
        cancelled = pool.cancel(safe_name)
        results.append(
            {
                "name": safe_name,
                "status": JobStatus.CANCELLED if cancelled else JobStatus.PENDING,
            }
        )

    return multi_image_reply(results)
|
|
|
|
|
|
|
|
|
|
|
|
def job_status(server: ServerContext, pool: DevicePoolExecutor):
    """Report the status and progress of up to ten named jobs."""
    requested = get_list(request.args, "jobs")

    # accept the older single-job query parameter as well
    legacy_name = request.args.get("job", None)
    if legacy_name is not None:
        requested.append(legacy_name)

    if not requested:
        return error_reply("at least one job name is required")
    if len(requested) > 10:
        return error_reply("too many jobs")

    records = []
    for raw_name in requested:
        safe_name = sanitize_name(raw_name)
        status, progress, queue = pool.status(safe_name)

        if progress is None:
            records.append(image_reply(server, safe_name, status, queue=queue))
            continue

        outputs = None
        metadata = None
        if progress.result is not None and len(progress.result) > 0:
            # TODO: the names should be attached to the result when it is saved rather than recomputing them
            outputs = make_output_names(server, safe_name, len(progress.result))
            metadata = progress.result.metadata

        records.append(
            image_reply(
                server,
                safe_name,
                status,
                stages=progress.stages,
                steps=progress.steps,
                tiles=progress.tiles,
                outputs=outputs,
                metadata=metadata,
                reason=progress.reason,
            )
        )

    return jsonify(records)
|
2024-01-04 01:09:18 +00:00
|
|
|
|
|
|
|
|
2023-04-10 01:33:03 +00:00
|
|
|
def register_api_routes(app: Flask, server: ServerContext, pool: DevicePoolExecutor):
    """Register every API route on the Flask app and return the bound handlers.

    Routes are grouped as: introspection, job management, settings listings,
    legacy per-pipeline job routes, and deprecated cancel/ready endpoints.
    """
    return [
        app.route("/api")(wrap_route(introspect, server, app=app)),
        # job routes
        app.route("/api/job", methods=["POST"])(
            wrap_route(job_create, server, pool=pool)
        ),
        app.route("/api/job/cancel", methods=["PUT"])(
            wrap_route(job_cancel, server, pool=pool)
        ),
        app.route("/api/job/status")(wrap_route(job_status, server, pool=pool)),
        # settings routes
        app.route("/api/settings/filters")(wrap_route(list_filters, server)),
        app.route("/api/settings/masks")(wrap_route(list_mask_filters, server)),
        app.route("/api/settings/models")(wrap_route(list_models, server)),
        app.route("/api/settings/noises")(wrap_route(list_noise_sources, server)),
        app.route("/api/settings/params")(wrap_route(list_params, server)),
        app.route("/api/settings/pipelines")(wrap_route(list_pipelines, server)),
        app.route("/api/settings/platforms")(wrap_route(list_platforms, server)),
        app.route("/api/settings/schedulers")(wrap_route(list_schedulers, server)),
        app.route("/api/settings/strings")(wrap_route(list_extra_strings, server)),
        app.route("/api/settings/wildcards")(wrap_route(list_wildcards, server)),
        # legacy job routes
        app.route("/api/img2img", methods=["POST"])(
            wrap_route(img2img, server, pool=pool)
        ),
        app.route("/api/txt2img", methods=["POST"])(
            wrap_route(txt2img, server, pool=pool)
        ),
        app.route("/api/txt2txt", methods=["POST"])(
            wrap_route(txt2txt, server, pool=pool)
        ),
        app.route("/api/inpaint", methods=["POST"])(
            wrap_route(inpaint, server, pool=pool)
        ),
        app.route("/api/upscale", methods=["POST"])(
            wrap_route(upscale, server, pool=pool)
        ),
        app.route("/api/chain", methods=["POST"])(wrap_route(chain, server, pool=pool)),
        app.route("/api/blend", methods=["POST"])(wrap_route(blend, server, pool=pool)),
        # deprecated routes
        app.route("/api/cancel", methods=["PUT"])(
            wrap_route(cancel, server, pool=pool)
        ),
        app.route("/api/ready")(wrap_route(ready, server, pool=pool)),
    ]
|