From 55576ae87cd6e9f59e790f6884d3772ad7dee8b6 Mon Sep 17 00:00:00 2001
From: Sean Sube
Date: Sun, 5 Mar 2023 21:50:01 -0600
Subject: [PATCH] fix(api): include cache keys in logs

---
 api/onnx_web/server/model_cache.py | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/api/onnx_web/server/model_cache.py b/api/onnx_web/server/model_cache.py
index 138f3a8d..31213436 100644
--- a/api/onnx_web/server/model_cache.py
+++ b/api/onnx_web/server/model_cache.py
@@ -21,10 +21,10 @@ class ModelCache:
     def get(self, tag: str, key: Any) -> Any:
         for t, k, v in self.cache:
             if tag == t and key == k:
-                logger.debug("found cached model: %s", tag)
+                logger.debug("found cached model: %s %s", tag, key)
                 return v
 
-        logger.debug("model not found in cache: %s", tag)
+        logger.debug("model not found in cache: %s %s", tag, key)
         return None
 
     def set(self, tag: str, key: Any, value: Any) -> None:
@@ -35,11 +35,11 @@ class ModelCache:
         for i in range(len(self.cache)):
             t, k, v = self.cache[i]
             if tag == t and key != k:
-                logger.debug("updating model cache: %s", tag)
+                logger.debug("updating model cache: %s %s", tag, key)
                 self.cache[i] = (tag, key, value)
                 return
 
-        logger.debug("adding new model to cache: %s", tag)
+        logger.debug("adding new model to cache: %s %s", tag, key)
         self.cache.append((tag, key, value))
 
         self.prune()
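
Note (illustration only, not part of the patch): a minimal, self-contained sketch of the patched logging behavior. The ModelCache below is a simplified stand-in that mirrors the methods in the diff; the real class also enforces a size limit via prune(), which is omitted here, and the tag/key values in the usage lines are hypothetical. The point of the change is visible in the output: two lookups that share a tag but differ by key now produce distinguishable debug lines.

    import logging
    from typing import Any, List, Tuple

    logging.basicConfig(level=logging.DEBUG)
    logger = logging.getLogger("model_cache_demo")

    class ModelCache:
        # Simplified stand-in mirroring the patched methods; the real class
        # also prunes the cache to a configured size limit.
        def __init__(self) -> None:
            self.cache: List[Tuple[str, Any, Any]] = []

        def get(self, tag: str, key: Any) -> Any:
            for t, k, v in self.cache:
                if tag == t and key == k:
                    logger.debug("found cached model: %s %s", tag, key)
                    return v
            logger.debug("model not found in cache: %s %s", tag, key)
            return None

        def set(self, tag: str, key: Any, value: Any) -> None:
            for i in range(len(self.cache)):
                t, k, _ = self.cache[i]
                if tag == t and key != k:
                    # same tag, different key: replace the existing entry
                    logger.debug("updating model cache: %s %s", tag, key)
                    self.cache[i] = (tag, key, value)
                    return
            logger.debug("adding new model to cache: %s %s", tag, key)
            self.cache.append((tag, key, value))

    cache = ModelCache()
    cache.set("diffusion", ("model-a", "cpu"), object())
    # -> adding new model to cache: diffusion ('model-a', 'cpu')
    cache.get("diffusion", ("model-a", "cuda"))
    # -> model not found in cache: diffusion ('model-a', 'cuda')
    # Before the patch, both lines showed only "diffusion", hiding which key missed.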