Skip to content

Commit

Permalink
🎨 refactor: update ModelCache to use model_cache variable
Browse files Browse the repository at this point in the history
- Renamed `_model_cache` to `model_cache` for consistency
- Added `clear` method to `ModelCache` class
  • Loading branch information
sudoskys committed Sep 29, 2024
1 parent 1bbf7cd commit 3ad7d45
Showing 1 changed file with 7 additions and 4 deletions.
11 changes: 7 additions & 4 deletions src/fast_langdetect/ft_detect/infer.py
Original file line number Diff line number Diff line change
Expand Up @@ -38,8 +38,11 @@ def get_model(self, model_type: ModelType) -> Optional["fasttext.FastText._FastT
def set_model(self, model_type: ModelType, model: "fasttext.FastText._FastText"):
self._models[model_type] = model

    def clear(self):
        """Evict all cached models.

        Clears the mapping in place (rather than rebinding it) so any
        external references to the internal dict stay valid; subsequent
        lookups will trigger a fresh model load.
        """
        self._models.clear()

_model_cache = ModelCache()

# Module-level singleton cache; load_model() reads and populates it so that
# repeated calls reuse an already-loaded fasttext model.
model_cache = ModelCache()


class DetectError(Exception):
Expand All @@ -62,15 +65,15 @@ def load_model(low_memory: bool = False,
model_type = ModelType.LOW_MEMORY if low_memory else ModelType.HIGH_MEMORY

# If the model is already loaded, return it
cached_model = _model_cache.get_model(model_type)
cached_model = model_cache.get_model(model_type)
if cached_model:
return cached_model

def load_local_small_model():
"""Try to load the local small model."""
try:
_loaded_model = fasttext.load_model(str(LOCAL_SMALL_MODEL_PATH))
_model_cache.set_model(ModelType.LOW_MEMORY, _loaded_model)
model_cache.set_model(ModelType.LOW_MEMORY, _loaded_model)
return _loaded_model
except Exception as e:
logger.error(f"Failed to load the local small model '{LOCAL_SMALL_MODEL_PATH}': {e}")
Expand All @@ -80,7 +83,7 @@ def load_large_model():
"""Try to load the large model."""
try:
loaded_model = fasttext.load_model(str(model_path))
_model_cache.set_model(ModelType.HIGH_MEMORY, loaded_model)
model_cache.set_model(ModelType.HIGH_MEMORY, loaded_model)
return loaded_model
except Exception as e:
logger.error(f"Failed to load the large model '{model_path}': {e}")
Expand Down

0 comments on commit 3ad7d45

Please sign in to comment.