Added a little caching to model info
All checks were successful
continuous-integration/drone/push Build is passing
This commit is contained in:
parent c5500012ce
commit 188a31508e
@@ -9,6 +9,8 @@ from typing import Tuple
 from urllib.parse import urljoin
 
 from cnn_classifier import Classifier
+from config import Config
+import time
 
 
 class ClassifierCache:
@@ -20,6 +22,8 @@ class ClassifierCache:
         self._current_classifier = None  # Latest classifier is a classifier that uses the $default model
         self._downloaded_files = []
 
+        self._last_fetch_time = 0
+
         self._session = SessionTracing(propagate=True)
 
     def _cleanup(self):
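Note that _last_fetch_time starts at 0 (the Unix epoch), so the very first call to get_default_classifier() always sees an expired cache and fetches model info. A minimal sketch of the check this initial value feeds into (constants inlined here purely for illustration):

    import time

    last_fetch_time = 0            # value set in __init__ above
    MODEL_CACHE_LIFETIME_SEC = 15  # default from config.py below

    # time.time() is far larger than the lifetime, so the first check always passes
    needs_refresh = (time.time() - last_fetch_time) > MODEL_CACHE_LIFETIME_SEC
    assert needs_refresh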
@@ -63,16 +67,27 @@ class ClassifierCache:
         self._current_classifier = Classifier(model_file_path, weights_file_path)
 
     def get_default_classifier(self) -> Tuple[dict, Classifier]:
-        logging.debug("Fetching model info...")
-        r = self._session.get(self._model_info_url)
-        r.raise_for_status()
+        if ((time.time() - self._last_fetch_time) > Config.MODEL_CACHE_LIFETIME_SEC) or \
+                (not self._current_model_details):
+            logging.debug("Fetching model info...")
+            r = self._session.get(self._model_info_url)
+            r.raise_for_status()
+            self._last_fetch_time = time.time()
 
-        model_details = r.json()
+            model_details = r.json()
 
-        if (not self._current_model_details) or (self._current_model_details['id'] != model_details['id']):
-            # If the currently loaded model is not the default... then load it
-            self._cleanup()  # delete/unload everything
-            self._download_and_load_model(model_details['files']['model'], model_details['files']['weights'])
-            self._current_model_details = model_details
+            if (not self._current_model_details) or (self._current_model_details['id'] != model_details['id']):
+                logging.info(f"Model needs to be loaded (local: {self._current_model_details['id']} model service default: {model_details['id']})")
+                # If the currently loaded model is not the default... then load it
+                self._cleanup()  # delete/unload everything
+                self._download_and_load_model(model_details['files']['model'], model_details['files']['weights'])
+                self._current_model_details = model_details
+
+            else:
+                logging.debug(f"Currently loaded model seems up to date ({self._current_model_details['id']} == {model_details['id']})")
+
+        else:
+            logging.debug("Cache is still valid. Not fetching model info")
+
 
         return self._current_model_details, self._current_classifier
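The refresh logic above is a plain time-to-live cache around the model-info request: the model service is only queried when the cache lifetime has elapsed or nothing has been cached yet, and the model is only reloaded when the returned id differs from the one already loaded. A self-contained sketch of the same pattern, using requests in place of SessionTracing and a placeholder URL (both are assumptions for illustration, not part of this commit):

    import time
    import requests

    MODEL_INFO_URL = "http://model-service/models/default"  # placeholder, not the real endpoint
    CACHE_LIFETIME_SEC = 15

    _cached_details = None
    _last_fetch_time = 0.0

    def get_model_details() -> dict:
        """Return cached model details, re-fetching only when the TTL has expired."""
        global _cached_details, _last_fetch_time
        if (time.time() - _last_fetch_time) > CACHE_LIFETIME_SEC or not _cached_details:
            r = requests.get(MODEL_INFO_URL)
            r.raise_for_status()
            _last_fetch_time = time.time()
            _cached_details = r.json()
        return _cached_details

With a 15 second lifetime, repeated calls inside the window return the cached dict without touching the network.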
@@ -16,6 +16,8 @@ class Config:
     RELEASE_ID = os.environ.get('RELEASE_ID', 'test')
     RELEASEMODE = os.environ.get('RELEASEMODE', 'dev')
 
+    MODEL_CACHE_LIFETIME_SEC = int(os.environ.get("MODEL_CACHE_LIFETIME_SEC", 15))
+
     LOG_LEVEL = logging.DEBUG if (
         '--debug' in sys.argv
     ) or (
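Because MODEL_CACHE_LIFETIME_SEC is read from the environment with a default of 15, the cache window can be tuned per deployment without a code change. A small usage sketch, assuming config.py is importable as config exactly as in the import added above (the value 60 is only an example):

    import os

    # Must be set before config is imported, since the class attribute
    # is evaluated at import time
    os.environ["MODEL_CACHE_LIFETIME_SEC"] = "60"

    from config import Config
    assert Config.MODEL_CACHE_LIFETIME_SEC == 60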