fix imports

Sean Sube 2023-02-18 20:28:21 -06:00
parent 3fd90a667d
commit 25c56c7d5c
Signed by: ssube
GPG Key ID: 3EED7B957D362AF1
27 changed files with 113 additions and 109 deletions

View File

@@ -36,14 +36,14 @@ from .server import (
     DeviceParams,
     DevicePoolExecutor,
     ModelCache,
+    ServerContext,
     apply_patch_basicsr,
     apply_patch_codeformer,
     apply_patch_facexlib,
     apply_patches,
-    run_upscale_correction,
 )
+from .upscale import run_upscale_correction
 from .utils import (
-    ServerContext,
     base_join,
     get_and_clamp_float,
     get_and_clamp_int,

View File

@@ -7,8 +7,8 @@ from PIL import Image
 from ..output import save_image
 from ..params import ImageParams, StageParams
-from ..server.device_pool import JobContext, ProgressCallback
-from ..utils import ServerContext, is_debug
+from ..server import JobContext, ProgressCallback, ServerContext
+from ..utils import is_debug
 from .utils import process_tile_order
 logger = getLogger(__name__)

View File

@@ -7,8 +7,7 @@ from PIL import Image
 from ..diffusion.load import load_pipeline
 from ..params import ImageParams, StageParams
-from ..server.device_pool import JobContext, ProgressCallback
-from ..utils import ServerContext
+from ..server import JobContext, ProgressCallback, ServerContext
 logger = getLogger(__name__)

View File

@@ -10,8 +10,8 @@ from ..diffusion.load import get_latents_from_seed, load_pipeline
 from ..image import expand_image, mask_filter_none, noise_source_histogram
 from ..output import save_image
 from ..params import Border, ImageParams, Size, SizeChart, StageParams
-from ..server.device_pool import JobContext, ProgressCallback
-from ..utils import ServerContext, is_debug
+from ..server import JobContext, ProgressCallback, ServerContext
+from ..utils import is_debug
 from .utils import process_tile_order
 logger = getLogger(__name__)

View File

@@ -7,8 +7,8 @@ from onnx_web.image import valid_image
 from onnx_web.output import save_image
 from ..params import ImageParams, StageParams
-from ..server.device_pool import JobContext, ProgressCallback
-from ..utils import ServerContext, is_debug
+from ..server import JobContext, ProgressCallback, ServerContext
+from ..utils import is_debug
 logger = getLogger(__name__)

View File

@@ -3,8 +3,7 @@ from logging import getLogger
 from PIL import Image
 from ..params import ImageParams, StageParams, UpscaleParams
-from ..server.device_pool import JobContext
-from ..utils import ServerContext
+from ..server import JobContext, ServerContext
 logger = getLogger(__name__)

View File

@@ -5,8 +5,8 @@ import numpy as np
 from PIL import Image
 from ..params import DeviceParams, ImageParams, StageParams, UpscaleParams
-from ..server.device_pool import JobContext
-from ..utils import ServerContext, run_gc
+from ..server import JobContext, ServerContext
+from ..utils import run_gc
 logger = getLogger(__name__)

View File

@@ -4,8 +4,7 @@ from PIL import Image
 from ..output import save_image
 from ..params import ImageParams, StageParams
-from ..server.device_pool import JobContext
-from ..utils import ServerContext
+from ..server import JobContext, ServerContext
 logger = getLogger(__name__)

View File

@@ -5,8 +5,7 @@ from boto3 import Session
 from PIL import Image
 from ..params import ImageParams, StageParams
-from ..server.device_pool import JobContext
-from ..utils import ServerContext
+from ..server import JobContext, ServerContext
 logger = getLogger(__name__)

View File

@@ -3,8 +3,7 @@ from logging import getLogger
 from PIL import Image
 from ..params import ImageParams, Size, StageParams
-from ..server.device_pool import JobContext
-from ..utils import ServerContext
+from ..server import JobContext, ServerContext
 logger = getLogger(__name__)

View File

@@ -3,8 +3,7 @@ from logging import getLogger
 from PIL import Image
 from ..params import ImageParams, Size, StageParams
-from ..server.device_pool import JobContext
-from ..utils import ServerContext
+from ..server import JobContext, ServerContext
 logger = getLogger(__name__)

View File

@@ -4,8 +4,7 @@ from typing import Callable
 from PIL import Image
 from ..params import ImageParams, Size, StageParams
-from ..server.device_pool import JobContext
-from ..utils import ServerContext
+from ..server import JobContext, ServerContext
 logger = getLogger(__name__)

View File

@@ -7,8 +7,7 @@ from PIL import Image
 from ..diffusion.load import get_latents_from_seed, load_pipeline
 from ..params import ImageParams, Size, StageParams
-from ..server.device_pool import JobContext, ProgressCallback
-from ..utils import ServerContext
+from ..server import JobContext, ProgressCallback, ServerContext
 logger = getLogger(__name__)

View File

@@ -10,8 +10,8 @@ from ..diffusion.load import get_latents_from_seed, get_tile_latents, load_pipel
 from ..image import expand_image, mask_filter_none, noise_source_histogram
 from ..output import save_image
 from ..params import Border, ImageParams, Size, SizeChart, StageParams
-from ..server.device_pool import JobContext, ProgressCallback
-from ..utils import ServerContext, is_debug
+from ..server import JobContext, ProgressCallback, ServerContext
+from ..utils import is_debug
 from .utils import process_tile_grid, process_tile_order
 logger = getLogger(__name__)

View File

@@ -6,8 +6,8 @@ from PIL import Image
 from ..onnx import OnnxNet
 from ..params import DeviceParams, ImageParams, StageParams, UpscaleParams
-from ..server.device_pool import JobContext
-from ..utils import ServerContext, run_gc
+from ..server import JobContext, ServerContext
+from ..utils import run_gc
 logger = getLogger(__name__)

View File

@@ -10,8 +10,8 @@ from ..diffusion.pipeline_onnx_stable_diffusion_upscale import (
     OnnxStableDiffusionUpscalePipeline,
 )
 from ..params import DeviceParams, ImageParams, StageParams, UpscaleParams
-from ..server.device_pool import JobContext, ProgressCallback
-from ..utils import ServerContext, run_gc
+from ..server import JobContext, ProgressCallback, ServerContext
+from ..utils import run_gc
 logger = getLogger(__name__)

View File

@@ -11,7 +11,7 @@ import torch
 from huggingface_hub.utils.tqdm import tqdm
 from yaml import safe_load
-from ..utils import ServerContext
+from ..server import ServerContext
 logger = getLogger(__name__)

View File

@@ -26,7 +26,8 @@ except ImportError:
     from .stub_scheduler import StubScheduler as DEISMultistepScheduler
 from ..params import DeviceParams, Size
-from ..utils import ServerContext, run_gc
+from ..server import ServerContext
+from ..utils import run_gc
 logger = getLogger(__name__)

View File

@@ -11,10 +11,10 @@ from onnx_web.chain.base import ChainProgress
 from ..chain import upscale_outpaint
 from ..output import save_image, save_params
-from ..params import Border, ImageParams, Size, StageParams
-from ..server.device_pool import JobContext
-from ..server.upscale import UpscaleParams, run_upscale_correction
-from ..utils import ServerContext, run_gc
+from ..params import Border, ImageParams, Size, StageParams, UpscaleParams
+from ..server import JobContext, ServerContext
+from ..upscale import run_upscale_correction
+from ..utils import run_gc
 from .load import get_latents_from_seed, load_pipeline
 logger = getLogger(__name__)

View File

@@ -5,7 +5,7 @@ import numpy as np
 import torch
 from onnxruntime import InferenceSession, SessionOptions
-from ..utils import ServerContext
+from ..server import ServerContext
 class OnnxTensor:

View File

@@ -10,7 +10,8 @@ from PIL import Image
 from .diffusion.load import get_scheduler_name
 from .params import Border, ImageParams, Param, Size, UpscaleParams
-from .utils import ServerContext, base_join
+from .server import ServerContext
+from .utils import base_join
 logger = getLogger(__name__)

View File

@@ -63,10 +63,8 @@ from .params import (
     TileOrder,
     UpscaleParams,
 )
-from .server.device_pool import DevicePoolExecutor
-from .server.hacks import apply_patches
+from .server import DevicePoolExecutor, ServerContext, apply_patches
 from .utils import (
-    ServerContext,
     base_join,
     get_and_clamp_float,
     get_and_clamp_int,

View File

@@ -1,4 +1,10 @@
-from .device_pool import DeviceParams, DevicePoolExecutor
+from .device_pool import (
+    DeviceParams,
+    DevicePoolExecutor,
+    Job,
+    JobContext,
+    ProgressCallback,
+)
 from .hacks import (
     apply_patch_basicsr,
     apply_patch_codeformer,
@@ -6,4 +12,4 @@ from .hacks import (
     apply_patches,
 )
 from .model_cache import ModelCache
-from .upscale import run_upscale_correction
+from .context import ServerContext
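
With these re-exports in place, downstream stages can pull everything job-related from the package root instead of reaching into submodules. A minimal sketch, assuming an installed onnx_web package; the stage function and its body are hypothetical and not part of this commit:

from logging import getLogger

from onnx_web.server import JobContext, ProgressCallback, ServerContext

logger = getLogger(__name__)


def example_stage(job: JobContext, server: ServerContext, callback: ProgressCallback = None) -> None:
    # Hypothetical stage body: log where outputs for this job would be written.
    logger.info("writing outputs to %s", server.output_path)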

View File

@@ -0,0 +1,66 @@
+from logging import getLogger
+from os import environ, path
+from typing import List
+from ..utils import get_boolean
+from .model_cache import ModelCache
+logger = getLogger(__name__)
+class ServerContext:
+    def __init__(
+        self,
+        bundle_path: str = ".",
+        model_path: str = ".",
+        output_path: str = ".",
+        params_path: str = ".",
+        cors_origin: str = "*",
+        num_workers: int = 1,
+        any_platform: bool = True,
+        block_platforms: List[str] = [],
+        default_platform: str = None,
+        image_format: str = "png",
+        cache: ModelCache = None,
+        cache_path: str = None,
+        show_progress: bool = True,
+        optimizations: List[str] = [],
+    ) -> None:
+        self.bundle_path = bundle_path
+        self.model_path = model_path
+        self.output_path = output_path
+        self.params_path = params_path
+        self.cors_origin = cors_origin
+        self.num_workers = num_workers
+        self.any_platform = any_platform
+        self.block_platforms = block_platforms
+        self.default_platform = default_platform
+        self.image_format = image_format
+        self.cache = cache or ModelCache(num_workers)
+        self.cache_path = cache_path or path.join(model_path, ".cache")
+        self.show_progress = show_progress
+        self.optimizations = optimizations
+    @classmethod
+    def from_environ(cls):
+        num_workers = int(environ.get("ONNX_WEB_NUM_WORKERS", 1))
+        cache_limit = int(environ.get("ONNX_WEB_CACHE_MODELS", num_workers + 2))
+        return cls(
+            bundle_path=environ.get(
+                "ONNX_WEB_BUNDLE_PATH", path.join("..", "gui", "out")
+            ),
+            model_path=environ.get("ONNX_WEB_MODEL_PATH", path.join("..", "models")),
+            output_path=environ.get("ONNX_WEB_OUTPUT_PATH", path.join("..", "outputs")),
+            params_path=environ.get("ONNX_WEB_PARAMS_PATH", "."),
+            # others
+            cors_origin=environ.get("ONNX_WEB_CORS_ORIGIN", "*").split(","),
+            num_workers=num_workers,
+            any_platform=get_boolean(environ, "ONNX_WEB_ANY_PLATFORM", True),
+            block_platforms=environ.get("ONNX_WEB_BLOCK_PLATFORMS", "").split(","),
+            default_platform=environ.get("ONNX_WEB_DEFAULT_PLATFORM", None),
+            image_format=environ.get("ONNX_WEB_IMAGE_FORMAT", "png"),
+            cache=ModelCache(limit=cache_limit),
+            show_progress=get_boolean(environ, "ONNX_WEB_SHOW_PROGRESS", True),
+            optimizations=environ.get("ONNX_WEB_OPTIMIZATIONS", "").split(","),
+        )
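
The relocated ServerContext can be constructed directly or from environment variables via from_environ(). A minimal usage sketch, assuming the onnx_web package is importable and using only the fields defined above; the printed attributes are examples:

from onnx_web.server import ServerContext

context = ServerContext.from_environ()
print(context.num_workers)  # ONNX_WEB_NUM_WORKERS, defaults to 1
print(context.cache_path)   # defaults to <model_path>/.cache when not set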

View File

@@ -8,7 +8,7 @@ import basicsr.utils.download_util
 import codeformer.facelib.utils.misc
 import facexlib.utils
-from ..utils import ServerContext
+from .context import ServerContext
 logger = getLogger(__name__)

View File

@@ -2,16 +2,15 @@ from logging import getLogger
 from PIL import Image
-from ..chain import (
+from .chain import (
     ChainPipeline,
     correct_codeformer,
     correct_gfpgan,
     upscale_resrgan,
     upscale_stable_diffusion,
 )
-from ..params import ImageParams, SizeChart, StageParams, UpscaleParams
-from ..utils import ServerContext
-from .device_pool import JobContext, ProgressCallback
+from .params import ImageParams, SizeChart, StageParams, UpscaleParams
+from .server import JobContext, ProgressCallback, ServerContext
 logger = getLogger(__name__)

View File

@@ -7,69 +7,10 @@ from typing import Any, Dict, List, Optional, Union
 import torch
 from .params import DeviceParams, SizeChart
-from .server.model_cache import ModelCache
 logger = getLogger(__name__)
-class ServerContext:
-    def __init__(
-        self,
-        bundle_path: str = ".",
-        model_path: str = ".",
-        output_path: str = ".",
-        params_path: str = ".",
-        cors_origin: str = "*",
-        num_workers: int = 1,
-        any_platform: bool = True,
-        block_platforms: List[str] = [],
-        default_platform: str = None,
-        image_format: str = "png",
-        cache: ModelCache = None,
-        cache_path: str = None,
-        show_progress: bool = True,
-        optimizations: List[str] = [],
-    ) -> None:
-        self.bundle_path = bundle_path
-        self.model_path = model_path
-        self.output_path = output_path
-        self.params_path = params_path
-        self.cors_origin = cors_origin
-        self.num_workers = num_workers
-        self.any_platform = any_platform
-        self.block_platforms = block_platforms
-        self.default_platform = default_platform
-        self.image_format = image_format
-        self.cache = cache or ModelCache(num_workers)
-        self.cache_path = cache_path or path.join(model_path, ".cache")
-        self.show_progress = show_progress
-        self.optimizations = optimizations
-    @classmethod
-    def from_environ(cls):
-        num_workers = int(environ.get("ONNX_WEB_NUM_WORKERS", 1))
-        cache_limit = int(environ.get("ONNX_WEB_CACHE_MODELS", num_workers + 2))
-        return cls(
-            bundle_path=environ.get(
-                "ONNX_WEB_BUNDLE_PATH", path.join("..", "gui", "out")
-            ),
-            model_path=environ.get("ONNX_WEB_MODEL_PATH", path.join("..", "models")),
-            output_path=environ.get("ONNX_WEB_OUTPUT_PATH", path.join("..", "outputs")),
-            params_path=environ.get("ONNX_WEB_PARAMS_PATH", "."),
-            # others
-            cors_origin=environ.get("ONNX_WEB_CORS_ORIGIN", "*").split(","),
-            num_workers=num_workers,
-            any_platform=get_boolean(environ, "ONNX_WEB_ANY_PLATFORM", True),
-            block_platforms=environ.get("ONNX_WEB_BLOCK_PLATFORMS", "").split(","),
-            default_platform=environ.get("ONNX_WEB_DEFAULT_PLATFORM", None),
-            image_format=environ.get("ONNX_WEB_IMAGE_FORMAT", "png"),
-            cache=ModelCache(limit=cache_limit),
-            show_progress=get_boolean(environ, "ONNX_WEB_SHOW_PROGRESS", True),
-            optimizations=environ.get("ONNX_WEB_OPTIMIZATIONS", "").split(","),
-        )
 def base_join(base: str, tail: str) -> str:
     tail_path = path.relpath(path.normpath(path.join("/", tail)), "/")
     return path.join(base, tail_path)
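
base_join stays in utils and clamps the tail inside the base directory by normalizing it against the filesystem root before joining. A small sketch of that behavior on POSIX paths, with the function copied from the hunk above; the example paths are illustrative only:

from os import path


def base_join(base: str, tail: str) -> str:
    # Normalize the tail against "/" so ".." segments cannot escape the base.
    tail_path = path.relpath(path.normpath(path.join("/", tail)), "/")
    return path.join(base, tail_path)


print(base_join("outputs", "image.png"))  # outputs/image.png
print(base_join("outputs", "../secret"))  # outputs/secret, traversal removed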