Spaces:
Build error
Build error
Update app.py
Browse files
app.py
CHANGED
@@ -4,12 +4,13 @@ import time
|
|
4 |
import base64
|
5 |
from typing import Dict, List, TypedDict
|
6 |
from langgraph.graph import StateGraph, END
|
7 |
-
from
|
8 |
|
9 |
-
|
10 |
-
|
11 |
-
|
12 |
-
)
|
|
|
13 |
|
14 |
class AgentState(TypedDict):
|
15 |
messages: List[Dict[str, str]]
|
@@ -57,18 +58,12 @@ Check for:
|
|
57 |
Reply "APPROVED" if perfect, or suggest improvements."""
|
58 |
|
59 |
def call_model(prompt: str, max_retries=3) -> str:
|
60 |
-
|
61 |
-
|
62 |
-
|
63 |
-
|
64 |
-
|
65 |
-
|
66 |
-
return_full_text=False
|
67 |
-
)
|
68 |
-
except Exception as e:
|
69 |
-
st.error(f"Model call failed (attempt {attempt+1}): {str(e)}")
|
70 |
-
st.write("\n\n**Full Error:**", e)
|
71 |
-
st.stop() # Force exit to show error
|
72 |
return "<html><body><h1>Error generating UI</h1></body></html>"
|
73 |
|
74 |
def time_agent(agent_func, state: AgentState, label: str):
|
@@ -135,8 +130,6 @@ def main():
|
|
135 |
with st.sidebar:
|
136 |
max_iter = st.slider("Max QA Iterations", 1, 5, 2)
|
137 |
|
138 |
-
st.write("\n🔑 HF_TOKEN found:", st.secrets.get("HF_TOKEN", "❌ Missing!"))
|
139 |
-
|
140 |
prompt = st.text_area("📝 Describe the UI you want:", "A coffee shop landing page with hero, menu, and contact form.", height=150)
|
141 |
|
142 |
if st.button("🚀 Generate UI"):
|
|
|
4 |
import base64
|
5 |
from typing import Dict, List, TypedDict
|
6 |
from langgraph.graph import StateGraph, END
|
7 |
+
from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline
|
8 |
|
9 |
+
# Load CodeLLaMA locally
|
10 |
+
model_id = "codellama/CodeLlama-7b-hf"
|
11 |
+
model = AutoModelForCausalLM.from_pretrained(model_id, device_map="auto")
|
12 |
+
tokenizer = AutoTokenizer.from_pretrained(model_id)
|
13 |
+
generator = pipeline("text-generation", model=model, tokenizer=tokenizer, device_map="auto")
|
14 |
|
15 |
class AgentState(TypedDict):
|
16 |
messages: List[Dict[str, str]]
|
|
|
58 |
Reply "APPROVED" if perfect, or suggest improvements."""
|
59 |
|
60 |
def call_model(prompt: str, max_retries=3) -> str:
|
61 |
+
try:
|
62 |
+
outputs = generator(prompt, max_new_tokens=1000, temperature=0.3)
|
63 |
+
return outputs[0]["generated_text"]
|
64 |
+
except Exception as e:
|
65 |
+
st.error(f"Local model call failed: {str(e)}")
|
66 |
+
st.stop()
|
|
|
|
|
|
|
|
|
|
|
|
|
67 |
return "<html><body><h1>Error generating UI</h1></body></html>"
|
68 |
|
69 |
def time_agent(agent_func, state: AgentState, label: str):
|
|
|
130 |
with st.sidebar:
|
131 |
max_iter = st.slider("Max QA Iterations", 1, 5, 2)
|
132 |
|
|
|
|
|
133 |
prompt = st.text_area("📝 Describe the UI you want:", "A coffee shop landing page with hero, menu, and contact form.", height=150)
|
134 |
|
135 |
if st.button("🚀 Generate UI"):
|