# models/model_loader.py
from functools import lru_cache

from transformers import pipeline

from .logging_config import logger

@lru_cache(maxsize=10)
def load_model(task, model_name):
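    """Load and cache a Hugging Face pipeline for the given task and model.

    Calls are memoized with lru_cache, so repeated requests for the same
    (task, model_name) pair reuse the already-loaded pipeline instead of
    reloading the weights. device=-1 runs the pipeline on CPU.
    """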
    try:
        logger.info(f"Loading model: {model_name} for task: {task}")
        return pipeline(task, model=model_name, device=-1)
    except Exception as e:
        logger.error(f"Error loading model {model_name}: {str(e)}")
        raise
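

# Example usage (a minimal sketch; the task/model pair below is an assumption,
# not something the rest of this repo necessarily uses). Run as a module so the
# relative import of `logger` resolves, e.g. `python -m models.model_loader`.
if __name__ == "__main__":
    # The first call loads the pipeline; a second identical call hits the cache.
    classifier = load_model(
        "sentiment-analysis",
        "distilbert-base-uncased-finetuned-sst-2-english",
    )
    print(classifier("Caching model pipelines keeps repeated requests fast."))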