rohit committed
Commit 1d87783 · 1 Parent(s): 628bcda

Fix module import by moving FastAPI app to root main.py with proper imports
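For context: the old root-level main.py only re-exported the app (`from app.main import app`) so that Hugging Face Spaces could find it, and that indirection is what this commit replaces by defining the FastAPI app directly in main.py. A minimal launcher sketch, assuming a uvicorn-based start command pointed at the root module (the launcher itself and its defaults are assumptions, not part of this commit; the 8000 fallback matches the PORT default logged by the code below):

import os
import uvicorn

if __name__ == "__main__":
    # Serve the root-level module as "main:app"; PORT is supplied by the hosting environment.
    uvicorn.run("main:app", host="0.0.0.0", port=int(os.getenv("PORT", "8000")))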

Files changed (1)
  1. main.py +127 -4
main.py CHANGED
@@ -1,5 +1,128 @@
- # Import the FastAPI app from our app module
- from app.main import app
-
- # This file is required by Hugging Face Spaces
- # It exposes our FastAPI app at the root level
+ from fastapi import FastAPI, HTTPException
+ from pydantic import BaseModel
+ import os
+ import logging
+ import sys
+ from app.config import DATASET_CONFIGS
+ # Lazy imports to avoid blocking startup
+ # from .pipeline import RAGPipeline  # Will import when needed
+ # import umap  # Will import when needed for visualization
+ # import plotly.express as px  # Will import when needed for visualization
+ # import plotly.graph_objects as go  # Will import when needed for visualization
+ # from plotly.subplots import make_subplots  # Will import when needed for visualization
+ # import numpy as np  # Will import when needed for visualization
+ # from sklearn.preprocessing import normalize  # Will import when needed for visualization
+ # import pandas as pd  # Will import when needed for visualization
+ import json
+
+ # Configure logging
+ logging.basicConfig(
+     level=logging.INFO,
+     format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
+     handlers=[
+         logging.StreamHandler(sys.stdout)
+     ]
+ )
+ logger = logging.getLogger(__name__)
+
+ app = FastAPI(title="RAG Pipeline API", description="Multi-dataset RAG API", version="1.0.0")
+
+ # Initialize pipelines for all datasets
+ pipelines = {}
+ google_api_key = os.getenv("GOOGLE_API_KEY")
+
+ logger.info("Starting RAG Pipeline API")
+ logger.info(f"Port from env: {os.getenv('PORT', 'Not set - will use 8000')}")
+ logger.info(f"Google API Key present: {'Yes' if google_api_key else 'No'}")
+ logger.info(f"Available datasets: {list(DATASET_CONFIGS.keys())}")
+
+ # Don't load datasets during startup - do it asynchronously after the server starts
+ logger.info("RAG Pipeline API is ready to serve requests - datasets will load in background")
+
+ # Visualization function disabled to speed up startup
+ # def create_3d_visualization(pipeline):
+ #     ... (commented out for faster startup)
+
+ class Question(BaseModel):
+     text: str
+     dataset: str = "developer-portfolio"  # Default dataset
+
+ @app.post("/answer")
+ async def get_answer(question: Question):
+     try:
+         # Check if any pipelines are loaded
+         if not pipelines:
+             return {
+                 "answer": "RAG Pipeline is running but datasets are still loading in the background. Please try again in a moment, or check /health for loading status.",
+                 "dataset": question.dataset,
+                 "status": "datasets_loading"
+             }
+
+         # Select the appropriate pipeline based on dataset
+         if question.dataset not in pipelines:
+             raise HTTPException(status_code=400, detail=f"Dataset '{question.dataset}' not available. Available datasets: {list(pipelines.keys())}")
+
+         selected_pipeline = pipelines[question.dataset]
+         answer = selected_pipeline.answer_question(question.text)
+         return {"answer": answer, "dataset": question.dataset}
+     except Exception as e:
+         raise HTTPException(status_code=500, detail=str(e))
+
+ @app.get("/datasets")
+ async def list_datasets():
+     """List all available datasets"""
+     return {"datasets": list(pipelines.keys())}
+
+ async def load_datasets_background():
+     """Load datasets in background after server starts"""
+     global pipelines
+     if google_api_key:
+         # Import RAGPipeline only when needed
+         from app.pipeline import RAGPipeline
+         # Only load developer-portfolio to save memory
+         dataset_name = "developer-portfolio"
+         try:
+             logger.info(f"Loading dataset: {dataset_name}")
+             pipeline = RAGPipeline.from_preset(
+                 google_api_key=google_api_key,
+                 preset_name=dataset_name
+             )
+             pipelines[dataset_name] = pipeline
+             logger.info(f"Successfully loaded {dataset_name}")
+         except Exception as e:
+             logger.error(f"Failed to load {dataset_name}: {e}")
+         logger.info(f"Background loading complete - {len(pipelines)} datasets loaded")
+     else:
+         logger.warning("No Google API key provided - running in demo mode without datasets")
+
+ @app.on_event("startup")
+ async def startup_event():
+     logger.info("FastAPI application startup complete")
+     logger.info(f"Server should be running on port: {os.getenv('PORT', '8000')}")
+
+     # Start loading datasets in background (non-blocking)
+     import asyncio
+     asyncio.create_task(load_datasets_background())
+
+ @app.on_event("shutdown")
+ async def shutdown_event():
+     logger.info("FastAPI application shutting down")
+
+ @app.get("/")
+ async def root():
+     """Root endpoint"""
+     return {"status": "ok", "message": "RAG Pipeline API", "version": "1.0.0", "datasets": list(pipelines.keys())}
+
+ @app.get("/health")
+ async def health_check():
+     """Health check endpoint"""
+     logger.info("Health check called")
+     loading_status = "complete" if "developer-portfolio" in pipelines else "loading"
+     return {
+         "status": "healthy",
+         "datasets_loaded": len(pipelines),
+         "total_datasets": 1,  # Only loading developer-portfolio
+         "loading_status": loading_status,
+         "port": os.getenv('PORT', '8000')
+     }
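For a quick local check of the endpoints introduced above, a minimal smoke-test sketch (hypothetical, not part of this commit; it assumes the repo's app/ package is present and that fastapi plus httpx are installed so TestClient works):

# smoke_test.py - hypothetical helper, not included in the commit
from fastapi.testclient import TestClient

from main import app

# Entering the context manager fires the startup event, which schedules the
# background dataset load; until it finishes, /answer returns the
# "datasets_loading" fallback defined in main.py.
with TestClient(app) as client:
    print(client.get("/health").json())
    print(client.post("/answer", json={"text": "What projects are in the portfolio?"}).json())

Without GOOGLE_API_KEY set, the app stays in the demo mode logged by load_datasets_background, so /health reports zero datasets loaded and /answer keeps returning the loading fallback.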