# models/model_loader.py
from functools import lru_cache

from transformers import pipeline

from .logging_config import logger


@lru_cache(maxsize=10)
def load_model(task, model_name):
    """Load a Hugging Face pipeline, caching up to 10 distinct (task, model_name) pairs."""
    try:
        logger.info(f"Loading model: {model_name} for task: {task}")
        # device=-1 keeps inference on the CPU.
        return pipeline(task, model=model_name, device=-1)
    except Exception as e:
        logger.error(f"Error loading model {model_name}: {str(e)}")
        raise
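A minimal usage sketch, assuming the file lives in a `models` package as the path comment suggests; the task and model name below are illustrative, not taken from the original file:

# example_usage.py (illustrative sketch, not part of the original repository)
from models.model_loader import load_model

# Hypothetical task/model pair; any pipeline task works the same way.
classifier = load_model("sentiment-analysis", "distilbert-base-uncased-finetuned-sst-2-english")
print(classifier("The loader keeps the pipeline in memory after the first call."))

# A second call with identical arguments returns the cached pipeline object
# instead of reloading the weights, because load_model is wrapped in lru_cache.
same_classifier = load_model("sentiment-analysis", "distilbert-base-uncased-finetuned-sst-2-english")
assert classifier is same_classifier

Because lru_cache keys on the positional arguments, repeated requests for the same (task, model_name) pair hit the cache, while up to 10 distinct pairs stay resident before the least recently used one is evicted.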