Venkat V committed on
Commit 28b59e0 · 1 Parent(s): 0b65367

lazy loading of model

Files changed (2)
  1. api_backend.py +55 -16
  2. combined_app.py +6 -5
api_backend.py CHANGED
@@ -19,24 +19,61 @@ import json
 import base64
 import os
 
-# 🔧 Import local processing modules
-from yolo_module import run_yolo
-from ocr_module import extract_text, count_elements, validate_structure
-from graph_module import map_arrows, build_flowchart_json
-from summarizer_module import summarize_flowchart
-
 # 🔥 Initialize FastAPI app
 app = FastAPI()
 
-# 🔓 Enable CORS to allow frontend (e.g., Streamlit on localhost) to connect
+# 🔓 Enable CORS with more specific configuration for Hugging Face Spaces
 app.add_middleware(
     CORSMiddleware,
-    allow_origins=["*"],  # In production, replace with allowed frontend domain
+    allow_origins=["*", "https://venkatviswa-flowchart-to-text.hf.space"],  # Include your specific domain
     allow_credentials=True,
-    allow_methods=["*"],
+    allow_methods=["GET", "POST", "OPTIONS"],  # Explicitly allow methods
     allow_headers=["*"],
+    expose_headers=["*"],
 )
 
+# Add a health check endpoint
+@app.get("/")
+async def health_check():
+    """Health check endpoint to verify API is running."""
+    return {"status": "ok", "message": "API is running"}
+
+
+@app.options("/process-image")
+async def options_process_image():
+    """Handle OPTIONS requests for the process-image endpoint."""
+    return {}
+
+
+# Lazy-loading for ML modules to avoid startup issues
+SKIP_MODEL_LOADING = os.getenv("SKIP_MODEL_LOADING", "0") == "1"
+yolo_module = None
+ocr_module = None
+graph_module = None
+summarizer_module = None
+
+def load_modules():
+    global yolo_module, ocr_module, graph_module, summarizer_module
+
+    if yolo_module is None:
+        # Only import these when needed, not during startup
+        from yolo_module import run_yolo as yolo_run
+        from ocr_module import extract_text as ocr_extract, count_elements, validate_structure
+        from graph_module import map_arrows, build_flowchart_json
+        from summarizer_module import summarize_flowchart
+
+        yolo_module = {"run_yolo": yolo_run}
+        ocr_module = {
+            "extract_text": ocr_extract,
+            "count_elements": count_elements,
+            "validate_structure": validate_structure
+        }
+        graph_module = {
+            "map_arrows": map_arrows,
+            "build_flowchart_json": build_flowchart_json
+        }
+        summarizer_module = {"summarize_flowchart": summarize_flowchart}
+
 
 @app.post("/process-image")
 async def process_image(
@@ -54,6 +91,9 @@ async def process_image(
     Returns:
         JSONResponse: Contains flowchart structure, summary, debug output, and optional YOLO overlay.
     """
+    # Lazy load modules when first request comes in
+    load_modules()
+
     debug_mode = debug.lower() == "true"
     debug_log = []
 
@@ -69,25 +109,24 @@ async def process_image(
     print("✅ Image converted to RGB")
 
     # 📦 YOLO Detection for boxes and arrows
-    boxes, arrows, vis_debug = run_yolo(image)
+    boxes, arrows, vis_debug = yolo_module["run_yolo"](image)
     if debug_mode:
         debug_log.append(f"📦 Detected {len(boxes)} boxes, {len(arrows)} arrows")
 
     # 🔍 Run OCR on each detected box
     for box in boxes:
-        box["text"] = extract_text(image, box["bbox"], debug=debug_mode)
+        box["text"] = ocr_module["extract_text"](image, box["bbox"], debug=debug_mode)
         print(f"🔍 OCR for {box['id']}: {box['text']}")
         if debug_mode:
            debug_log.append(f"🔍 {box['id']}: {box['text']}")
 
-
     # 🧠 Build structured JSON from nodes and edges
-    flowchart_json = build_flowchart_json(boxes, arrows)
+    flowchart_json = graph_module["build_flowchart_json"](boxes, arrows)
     print("🧠 Flowchart JSON:", json.dumps(flowchart_json, indent=2))
 
     # ✅ Validate structure
-    structure_info = count_elements(boxes, arrows, debug=debug_mode)
-    validation = validate_structure(
+    structure_info = ocr_module["count_elements"](boxes, arrows, debug=debug_mode)
+    validation = ocr_module["validate_structure"](
        flowchart_json,
        expected_boxes=structure_info["box_count"],
        expected_arrows=len(arrows),
@@ -97,7 +136,7 @@ async def process_image(
         debug_log.append(f"🧾 Validation: {validation}")
 
     # ✍️ Generate plain-English summary
-    summary = summarize_flowchart(flowchart_json)
+    summary = summarizer_module["summarize_flowchart"](flowchart_json)
     print("📝 Summary:", summary)
 
     # 🖼️ Encode YOLO debug image (if debug enabled)
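With this change, the model-backed modules are imported on the first request to /process-image rather than at startup, so the new GET / health check responds before any model has been loaded and the first /process-image call pays the one-time load_modules() cost. A minimal smoke-test sketch, assuming the backend is reachable on port 7860 (the usual Hugging Face Spaces port) and that the image is sent as a multipart field named "file" — both are assumptions, not taken from this diff:

import requests

BASE_URL = "http://localhost:7860"  # assumed host/port; adjust to your deployment

# Health check added in this commit: expects {"status": "ok", "message": "API is running"}
print(requests.get(f"{BASE_URL}/", timeout=10).json())

# The first call triggers the lazy load_modules(), so allow generous time for model startup.
with open("flowchart.png", "rb") as f:  # hypothetical sample image
    resp = requests.post(
        f"{BASE_URL}/process-image",
        files={"file": ("flowchart.png", f, "image/png")},  # field name is an assumption
        data={"debug": "true"},
        timeout=300,
    )
print(resp.status_code)
print(resp.json())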
combined_app.py CHANGED
@@ -1,10 +1,7 @@
 """
 combined_app.py - A unified approach that runs both FastAPI and Streamlit in a single process.
 This is specifically designed for Hugging Face Spaces deployments.
-
-Usage:
-1. Make this your entry point in Hugging Face Spaces
-2. It will start the FastAPI backend and then launch Streamlit in the same process
+Uses lazy loading to avoid loading models during startup.
 """
 
 import streamlit as st
@@ -20,7 +17,11 @@ import subprocess
 # Use environment variable to determine if we're in Spaces
 IS_SPACE = "SPACE_ID" in os.environ
 
-# Import FastAPI app from your backend module
+# Import FastAPI app but avoid importing modules that load models
+# This is critical to prevent infinite loops during deployment
+os.environ["SKIP_MODEL_LOADING"] = "1"
+
+# Now import the FastAPI app
 from api_backend import app as fastapi_app
 
 # Add a route for checking if the API is running
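For reference, the environment variable is set before `from api_backend import app` because api_backend.py evaluates os.getenv("SKIP_MODEL_LOADING") at import time; the heavy imports themselves are already deferred by the `if yolo_module is None` check inside load_modules(). A minimal sketch of how the flag could additionally gate load_modules() — the early return is an illustrative assumption, not something this commit adds:

import os

# Mirrors the module-level flag defined in api_backend.py.
SKIP_MODEL_LOADING = os.getenv("SKIP_MODEL_LOADING", "0") == "1"

yolo_module = None  # placeholder, as in api_backend.py

def load_modules():
    global yolo_module

    if SKIP_MODEL_LOADING:
        # Hypothetical guard: skip the model-loading imports entirely when the
        # combined Streamlit/FastAPI entry point sets SKIP_MODEL_LOADING=1.
        return

    if yolo_module is None:
        # The heavy imports would run here exactly as in the api_backend.py diff above.
        from yolo_module import run_yolo as yolo_run
        yolo_module = {"run_yolo": yolo_run}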