diff --git a/api/onnx_web/chain/base.py b/api/onnx_web/chain/base.py
index 45644496..06d10ffd 100644
--- a/api/onnx_web/chain/base.py
+++ b/api/onnx_web/chain/base.py
@@ -104,7 +104,7 @@ class ChainPipeline:
             kwargs = stage_kwargs or {}
             kwargs = {**pipeline_kwargs, **kwargs}

-            logger.info(
+            logger.debug(
                 "running stage %s on image with dimensions %sx%s, %s",
                 name,
                 image.width,
@@ -145,7 +145,7 @@ class ChainPipeline:
                     [stage_tile],
                 )
             else:
-                logger.info("image within tile size, running stage")
+                logger.debug("image within tile size, running stage")
                 image = stage_pipe(
                     job,
                     server,
@@ -156,7 +156,7 @@ class ChainPipeline:
                 **kwargs
             )

-            logger.info(
+            logger.debug(
                 "finished stage %s, result size: %sx%s", name, image.width, image.height
             )

diff --git a/api/onnx_web/chain/persist_s3.py b/api/onnx_web/chain/persist_s3.py
index 4c620c4c..2af5a515 100644
--- a/api/onnx_web/chain/persist_s3.py
+++ b/api/onnx_web/chain/persist_s3.py
@@ -38,7 +38,7 @@ def persist_s3(
     try:
         s3.upload_fileobj(data, bucket, output)
         logger.info("saved image to %s/%s", bucket, output)
-    except Exception as err:
-        logger.error("error saving image to S3: %s", err)
+    except Exception:
+        logger.exception("error saving image to S3")

     return source
diff --git a/api/onnx_web/chain/utils.py b/api/onnx_web/chain/utils.py
index 81317226..f0799020 100644
--- a/api/onnx_web/chain/utils.py
+++ b/api/onnx_web/chain/utils.py
@@ -32,7 +32,7 @@ def process_tile_grid(
             idx = (y * tiles_x) + x
             left = x * tile
             top = y * tile
-            logger.info("processing tile %s of %s, %s.%s", idx + 1, total, y, x)
+            logger.debug("processing tile %s of %s, %s.%s", idx + 1, total, y, x)
             tile_image = source.crop((left, top, left + tile, top + tile))

             for filter in filters:
@@ -80,7 +80,7 @@ def process_tile_spiral(
         top = center_y + int(top)

         counter += 1
-        logger.info("processing tile %s of %s, %sx%s", counter, len(tiles), left, top)
+        logger.debug("processing tile %s of %s, %sx%s", counter, len(tiles), left, top)

         # TODO: only valid for scale == 1, resize source for others
         tile_image = image.crop((left, top, left + tile, top + tile))
diff --git a/api/onnx_web/convert/__main__.py b/api/onnx_web/convert/__main__.py
index b701c5ae..697a3b29 100644
--- a/api/onnx_web/convert/__main__.py
+++ b/api/onnx_web/convert/__main__.py
@@ -3,7 +3,6 @@
 from argparse import ArgumentParser
 from logging import getLogger
 from os import makedirs, path
 from sys import exit
-from traceback import format_exception
 from typing import Any, Dict, List, Optional, Tuple
 from urllib.parse import urlparse
@@ -199,8 +198,8 @@ def convert_models(ctx: ConversionContext, args, models: Models):
             try:
                 dest = fetch_model(ctx, name, source, model_format=model_format)
                 logger.info("finished downloading source: %s -> %s", source, dest)
-            except Exception as e:
-                logger.error("error fetching source %s: %s", name, e)
+            except Exception:
+                logger.exception("error fetching source %s", name)

     if args.diffusion and "diffusion" in models:
         for model in models.get("diffusion"):
@@ -246,11 +245,10 @@ def convert_models(ctx: ConversionContext, args, models: Models):
                             base_token=inversion.get("token"),
                         )

-            except Exception as e:
-                logger.error(
-                    "error converting diffusion model %s: %s",
+            except Exception:
+                logger.exception(
+                    "error converting diffusion model %s",
                     name,
-                    format_exception(type(e), e, e.__traceback__),
                 )

     if args.upscaling and "upscaling" in models:
@@ -268,11 +266,10 @@ def convert_models(ctx: ConversionContext, args, models: Models):
                     ctx, name, model["source"], model_format=model_format
                 )
                 convert_upscale_resrgan(ctx, model, source)
-            except Exception as e:
-                logger.error(
-                    "error converting upscaling model %s: %s",
+            except Exception:
+                logger.exception(
+                    "error converting upscaling model %s",
                     name,
-                    format_exception(type(e), e, e.__traceback__),
                 )

     if args.correction and "correction" in models:
@@ -289,11 +286,10 @@ def convert_models(ctx: ConversionContext, args, models: Models):
                     ctx, name, model["source"], model_format=model_format
                 )
                 convert_correction_gfpgan(ctx, model, source)
-            except Exception as e:
-                logger.error(
-                    "error converting correction model %s: %s",
+            except Exception:
+                logger.exception(
+                    "error converting correction model %s",
                     name,
-                    format_exception(type(e), e, e.__traceback__),
                 )


@@ -375,10 +371,10 @@ def main() -> int:
             validate(data, extra_schema)
             logger.info("converting extra models")
             convert_models(ctx, args, data)
-        except ValidationError as err:
-            logger.error("invalid data in extras file: %s", err)
-        except Exception as err:
-            logger.error("error converting extra models: %s", err)
+        except ValidationError:
+            logger.exception("invalid data in extras file")
+        except Exception:
+            logger.exception("error converting extra models")

     return 0

diff --git a/api/onnx_web/convert/diffusion/lora.py b/api/onnx_web/convert/diffusion/lora.py
index 8945a573..6c869a18 100644
--- a/api/onnx_web/convert/diffusion/lora.py
+++ b/api/onnx_web/convert/diffusion/lora.py
@@ -160,10 +160,10 @@ def blend_loras(
     fixed_initializer_names = [
         fix_initializer_name(node.name) for node in base_model.graph.initializer
     ]
-    # logger.info("fixed initializer names: %s", fixed_initializer_names)
+    logger.trace("fixed initializer names: %s", fixed_initializer_names)

     fixed_node_names = [fix_node_name(node.name) for node in base_model.graph.node]
-    # logger.info("fixed node names: %s", fixed_node_names)
+    logger.trace("fixed node names: %s", fixed_node_names)

     for base_key, weights in blended.items():
         conv_key = base_key + "_Conv"
diff --git a/api/onnx_web/convert/diffusion/original.py b/api/onnx_web/convert/diffusion/original.py
index 334b0411..0b13e28e 100644
--- a/api/onnx_web/convert/diffusion/original.py
+++ b/api/onnx_web/convert/diffusion/original.py
@@ -17,8 +17,6 @@
 import json
 import os
 import re
 import shutil
-import sys
-import traceback
 from logging import getLogger
 from typing import Dict, List
@@ -1395,7 +1393,6 @@ def extract_checkpoint(
     image_size = 512 if is_512 else 768
     # Needed for V2 models so we can create the right text encoder.
     upcast_attention = False
-    msg = None

     # Create empty config
     db_config = TrainingConfig(
@@ -1607,15 +1604,13 @@ def extract_checkpoint(
             scheduler=scheduler,
         )
     except Exception:
-        logger.error(
-            "exception setting up output: %s",
-            traceback.format_exception(*sys.exc_info()),
+        logger.exception(
+            "error setting up output",
         )
         pipe = None

     if pipe is None or db_config is None:
-        msg = "pipeline or config is not set, unable to continue."
-        logger.error(msg)
+        logger.error("pipeline or config is not set, unable to continue")
         return
     else:
         logger.info("saving diffusion model")
diff --git a/api/onnx_web/server/load.py b/api/onnx_web/server/load.py
index a62496b7..c8183bb4 100644
--- a/api/onnx_web/server/load.py
+++ b/api/onnx_web/server/load.py
@@ -133,8 +133,8 @@ def load_extras(context: ServerContext):
                 logger.debug("validating extras file %s", data)
                 try:
                     validate(data, extra_schema)
-                except ValidationError as err:
-                    logger.error("invalid data in extras file: %s", err)
+                except ValidationError:
+                    logger.exception("invalid data in extras file")
                     continue

                 if "strings" in data:
@@ -166,8 +166,8 @@ def load_extras(context: ServerContext):
                                 f"inversion-{inversion_name}"
                             ] = inversion["label"]

-            except Exception as err:
-                logger.error("error loading extras file: %s", err)
+            except Exception:
+                logger.exception("error loading extras file")

     logger.debug("adding labels to strings: %s", labels)
     merge(
diff --git a/api/onnx_web/server/utils.py b/api/onnx_web/server/utils.py
index 56cc33e0..4280b43f 100644
--- a/api/onnx_web/server/utils.py
+++ b/api/onnx_web/server/utils.py
@@ -32,6 +32,9 @@ def register_routes(


 def wrap_route(func, *args, **kwargs):
+    """
+    From http://louistiao.me/posts/adding-__name__-and-__doc__-attributes-to-functoolspartial-objects/
+    """
     partial_func = partial(func, *args, **kwargs)
     update_wrapper(partial_func, func)
     return partial_func
diff --git a/api/onnx_web/worker/pool.py b/api/onnx_web/worker/pool.py
index 3ac6a3fc..e03a4687 100644
--- a/api/onnx_web/worker/pool.py
+++ b/api/onnx_web/worker/pool.py
@@ -131,8 +131,8 @@ class DevicePoolExecutor:
                     pass
                 except ValueError:
                     break
-                except Exception as err:
-                    logger.error("error in log worker: %s", err)
+                except Exception:
+                    logger.exception("error in log worker")

         logger_thread = Thread(
             name="onnx-web logger", target=logger_worker, args=(self.logs,), daemon=True
@@ -159,8 +159,8 @@ class DevicePoolExecutor:
                     pass
                 except ValueError:
                     break
-                except Exception as err:
-                    logger.error("error in progress worker: %s", err)
+                except Exception:
+                    logger.exception("error in progress worker")

         progress_thread = Thread(
             name="onnx-web progress",
@@ -189,8 +189,8 @@ class DevicePoolExecutor:
                     pass
                 except ValueError:
                     break
-                except Exception as err:
-                    logger.error("error in finished worker: %s", err)
+                except Exception:
+                    logger.exception("error in finished worker")

         finished_thread = Thread(
             name="onnx-web finished",
diff --git a/api/onnx_web/worker/worker.py b/api/onnx_web/worker/worker.py
index 43007ef0..740d234b 100644
--- a/api/onnx_web/worker/worker.py
+++ b/api/onnx_web/worker/worker.py
@@ -65,7 +65,6 @@ def worker_main(context: WorkerContext, server: ServerContext):
                 logger.error("detected out-of-memory error, exiting: %s", e)
                 exit(EXIT_MEMORY)
             else:
-                logger.error(
-                    "error while running job: %s",
-                    format_exception(type(e), e, e.__traceback__),
+                logger.exception(
+                    "error while running job",
                 )
diff --git a/api/scripts/test-release.py b/api/scripts/test-release.py
index 59dd596f..894dbd61 100644
--- a/api/scripts/test-release.py
+++ b/api/scripts/test-release.py
@@ -342,8 +342,7 @@ def main():
                 logger.warning("test failed: %s", test.name)
                 failed.append(test.name)
         except Exception as e:
-            traceback.print_exception(type(e), e, e.__traceback__)
-            logger.error("error running test for %s: %s", test.name, e)
+            logger.exception("error running test for %s", test.name)
             failed.append(test.name)

     logger.info("%s of %s tests passed", len(passed), len(TEST_DATA))
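
A minimal sketch of the logging change this patch applies throughout (the save_image names below are illustrative, not functions from onnx-web): Logger.exception logs at ERROR level and appends the active traceback automatically, so it only works inside an except block, and the handler no longer needs to bind the exception to a name or format the traceback by hand.

    from logging import basicConfig, getLogger

    basicConfig(level="INFO")
    logger = getLogger(__name__)


    def save_image_before(data: bytes) -> None:
        try:
            raise IOError("disk full")
        except Exception as err:
            # old pattern: message only, traceback lost unless formatted manually
            logger.error("error saving image: %s", err)


    def save_image_after(data: bytes) -> None:
        try:
            raise IOError("disk full")
        except Exception:
            # new pattern: same ERROR-level record, traceback attached automatically
            logger.exception("error saving image")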