Update app.py
Browse files
app.py
CHANGED
@@ -23,7 +23,7 @@ import base64
|
|
23 |
import cv2
|
24 |
import matplotlib.pyplot as plt
|
25 |
from peft import PeftModel
|
26 |
-
from gradcam_xception import
|
27 |
warnings.filterwarnings("ignore", category=UserWarning)
|
28 |
|
29 |
# Define Xception transform function directly in app.py
|
@@ -192,14 +192,20 @@ with st.sidebar:
|
|
192 |
if not st.session_state.xception_model_loaded:
|
193 |
if st.button("🔥 Load Xception Model", type="primary"):
|
194 |
# Load Xception model
|
195 |
-
|
196 |
-
|
197 |
-
|
198 |
-
|
199 |
-
|
200 |
-
|
201 |
-
|
202 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
203 |
else:
|
204 |
st.success("✅ Xception model loaded")
|
205 |
|
|
|
23 |
import cv2
|
24 |
import matplotlib.pyplot as plt
|
25 |
from peft import PeftModel
|
26 |
+
from gradcam_xception import generate_smoothgrad_visualizations_xception
|
27 |
warnings.filterwarnings("ignore", category=UserWarning)
|
28 |
|
29 |
# Define Xception transform function directly in app.py
|
|
|
192 |
if not st.session_state.xception_model_loaded:
|
193 |
if st.button("🔥 Load Xception Model", type="primary"):
|
194 |
# Load Xception model
|
195 |
+
try:
|
196 |
+
from gradcam_xception import load_xception_model
|
197 |
+
model = load_xception_model()
|
198 |
+
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
|
199 |
+
|
200 |
+
if model is not None:
|
201 |
+
st.session_state.xception_model = model
|
202 |
+
st.session_state.device = device
|
203 |
+
st.session_state.xception_model_loaded = True
|
204 |
+
st.success("✅ Xception model loaded!")
|
205 |
+
else:
|
206 |
+
st.error("❌ Failed to load Xception model.")
|
207 |
+
except Exception as e:
|
208 |
+
st.error(f"Error loading model: {str(e)}")
|
209 |
else:
|
210 |
st.success("✅ Xception model loaded")
|
211 |
|