from logging import getLogger
from typing import Any, List, Tuple

logger = getLogger(__name__)

# Module-level storage so every ModelCache instance shares the same entries.
cache: List[Tuple[str, Any, Any]] = []


class ModelCache:
    """Bounded cache of loaded models, keyed by (tag, key).

    Entries live in the shared module-level ``cache`` list, oldest first.
    At most one entry is kept per tag; ``prune`` drops the oldest entries
    once the list grows past ``limit``.
    """

    # Maximum number of models to keep; 0 disables caching entirely.
    limit: int

    def __init__(self, limit: int) -> None:
        self.limit = limit
        logger.debug("creating model cache with limit of %s models", limit)

    def drop(self, tag: str, key: Any) -> int:
        """Remove every entry matching (tag, key).

        Returns the number of entries removed.
        """
        logger.debug("dropping item from cache: %s %s", tag, key)

        removed = [entry for entry in cache if entry[0] == tag and entry[1] == key]
        for entry in removed:
            cache.remove(entry)

        return len(removed)

    def get(self, tag: str, key: Any) -> Any:
        """Return the cached value for (tag, key), or None when absent."""
        for t, k, v in cache:
            if tag == t and key == k:
                logger.debug("found cached model: %s %s", tag, key)
                return v

        logger.debug("model not found in cache: %s %s", tag, key)
        return None

    def set(self, tag: str, key: Any, value: Any) -> None:
        """Cache value under (tag, key), keeping at most one entry per tag."""
        if self.limit == 0:
            logger.debug("cache limit set to 0, not caching model: %s", tag)
            return

        for i, (t, _k, _v) in enumerate(cache):
            # Replace any existing entry for this tag, whether or not the key
            # matches. Matching on the tag alone also fixes stale reads: the
            # previous `key != k` test appended a duplicate when the same
            # (tag, key) was set again, and get() returned the old first match.
            if tag == t:
                logger.debug("updating model cache: %s %s", tag, key)
                cache[i] = (tag, key, value)
                return

        logger.debug("adding new model to cache: %s %s", tag, key)
        cache.append((tag, key, value))
        self.prune()

    def clear(self):
        """Remove all entries from the shared cache."""
        cache.clear()

    def prune(self):
        """Discard the oldest entries until the cache is within limit."""
        total = len(cache)
        overage = total - self.limit

        if overage > 0:
            removed = cache[:overage]
            logger.info(
                "removing %s of %s models from cache, %s",
                overage,
                total,
                [m[0] for m in removed],
            )
            # del over the head slice, not `cache[:] = cache[-limit:]`:
            # with a limit of 0 the old slice was a no-op (cache[-0:] is
            # the whole list) while claiming everything was removed.
            del cache[:overage]
        else:
            logger.debug("model cache below limit, %s of %s", total, self.limit)

    @property
    def size(self):
        """Number of entries currently in the shared cache."""
        return len(cache)