# utils/pandasai_setup.py
import logging
import os
from typing import Optional

import pandasai as pai
from pandasai_litellm import LiteLLM  # Ensure this import matches your installed library

# Configure logger for this module
logger = logging.getLogger(__name__)

# It's good practice to define constants at the top or in a config file
DEFAULT_PANDASAI_MODEL = "gemini/gemini-2.5-flash-preview-05-20"  # Default model if none is specified


def configure_pandasai(api_key: str, model_name: Optional[str] = None) -> None:
    """
    Configures PandasAI with LiteLLM using the provided API key and model.

    Args:
        api_key: The Google API key.
        model_name: The specific model to use (e.g., "gemini/gemini-1.5-flash-latest").
            If None, uses DEFAULT_PANDASAI_MODEL.
    """
    if not api_key:
        logger.error("PandasAI Configuration Error: API key is missing.")
        # Depending on strictness, you might raise an error here instead of only logging:
        # raise ValueError("API key must be provided for PandasAI configuration")
        return

    selected_model = model_name if model_name else DEFAULT_PANDASAI_MODEL

    try:
        llm = LiteLLM(
            model=selected_model,  # Use the selected model
            api_key=api_key,
            # You might need to pass additional parameters to LiteLLM depending on the
            # provider, e.g. if the model is not served through LiteLLM's gemini provider.
        )

        # PandasAI configuration
        pai.config.set({
            "llm": llm,
            "verbose": os.environ.get("PANDASAI_VERBOSE", "False").lower() == "true",
            "enable_cache": True,
            "enforce_privacy": False,  # Be cautious with this in production
            "save_charts": False,  # Set to True if you want to save charts locally
            # "save_charts_path": "charts_output",  # Define path if saving charts
            "custom_whitelisted_dependencies": [],  # Add any custom dependencies if needed
            "max_retries": 3,  # Default retries for PandasAI operations
            "temperature": 0.3,  # Lower temperature for more deterministic/factual outputs
            # "open_charts": False,  # Whether to automatically open charts
        })
        logger.info(f"PandasAI configured successfully with model: {selected_model}")
        logger.info(f"PandasAI LLM object: {llm}")
    except ImportError:
        logger.error("PandasAI or pandasai_litellm is not installed. Please install the required packages.")
    except Exception as e:
        logger.error(f"Error configuring PandasAI: {e}", exc_info=True)