Update app.py
app.py CHANGED

@@ -1,4 +1,4 @@
-# Multi-agent UI generator with
+# Multi-agent UI generator with token handling and model load debug support
 
 import streamlit as st
 import time
@@ -33,7 +33,7 @@ AGENT_MODEL_CONFIG = {
         "adapter": "spandana30/software-engineer-codellama"
     },
     "qa": {
-        "base": "codellama/
+        "base": "codellama/CodeLLaMA-7b-Instruct-hf",
         "adapter": "spandana30/software-engineer-codellama"
     },
 }
@@ -41,12 +41,21 @@ AGENT_MODEL_CONFIG = {
 @st.cache_resource
 
 def load_agent_model(base_id, adapter_id):
-
-    base_id
-
-
-
-
+    try:
+        st.write(f"🔍 Loading: {base_id} with adapter {adapter_id}")
+        st.write(f"🔐 Using token: {'Yes' if HF_TOKEN else 'No'}")
+
+        base_model = AutoModelForCausalLM.from_pretrained(
+            base_id, torch_dtype=torch.float16, device_map="auto", token=HF_TOKEN
+        )
+        model = PeftModel.from_pretrained(base_model, adapter_id, token=HF_TOKEN)
+        tokenizer = AutoTokenizer.from_pretrained(adapter_id, token=HF_TOKEN)
+
+        return pipeline("text-generation", model=model, tokenizer=tokenizer, max_new_tokens=1024)
+
+    except Exception as e:
+        st.error(f"❌ Model load failed!\nBase: {base_id}\nAdapter: {adapter_id}\nError: {e}")
+        raise
 
 AGENT_PIPELINES = {
     role: load_agent_model(cfg["base"], cfg["adapter"])