Update app.py
app.py CHANGED
@@ -22,7 +22,6 @@ warnings.filterwarnings("ignore", category=DeprecationWarning)
 # Load environment variables
 load_dotenv()
 GROQ_API_KEY=os.getenv("GROQ_API_KEY")
-print(GROQ_API_KEY)
 llm = ChatGroq(model="llama-3.1-70b-versatile")
 
 PROMPT_TEMPLATE = """
@@ -45,6 +44,10 @@ Instructions:
 Begin extraction.
 """
 def get_llm_response(entity, query):
+    # Ensure entity and query are not None
+    entity = entity or "Unknown Entity"
+    query = query or "Information not provided"
+
     # Format the prompt with the entity and query
     formatted_prompt = PROMPT_TEMPLATE.format(entity=entity, query=query)
 
@@ -161,7 +164,7 @@ elif selected == "Define Query":
     if st.button("Extract Information"):
         if entity and query:
             response_text = get_llm_response(entity, query)
-            st.write(
+            st.write(response_text)
 
 # Extract Information Section with Progress Bar
 elif selected == "Extract Information":
@@ -181,9 +184,6 @@ elif selected == "Extract Information":
         user_message = st.session_state["query_template"].replace("{company}", str(entity))
         formatted_prompt = PROMPT_TEMPLATE.format(entity=entity, query=user_message)
 
-        # Append user message to the flow history
-        st.session_state.flowmessages.append(HumanMessage(content=user_message))
-
         # Generate response from the model
         response = llm([SystemMessage(content=formatted_prompt)])
 
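The first hunk drops the print(GROQ_API_KEY) debug line, so the secret no longer ends up in the Space logs. If a startup check is still wanted, a minimal sketch (not part of this commit) could verify the key is present without ever echoing it:

import os
from dotenv import load_dotenv

load_dotenv()
GROQ_API_KEY = os.getenv("GROQ_API_KEY")

# Fail fast when the key is missing, without writing the secret itself to the logs
if not GROQ_API_KEY:
    raise RuntimeError("GROQ_API_KEY is not set; add it to .env or the Space secrets")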
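For reference, a minimal sketch of how the updated get_llm_response could read end to end. Everything after the formatted_prompt line is not shown in this diff and is assumed here; it relies on llm, PROMPT_TEMPLATE, and SystemMessage already defined or imported in app.py, and mirrors the llm([...]) message-passing style used elsewhere in the file:

def get_llm_response(entity, query):
    # Ensure entity and query are not None
    entity = entity or "Unknown Entity"
    query = query or "Information not provided"

    # Format the prompt with the entity and query
    formatted_prompt = PROMPT_TEMPLATE.format(entity=entity, query=query)

    # Assumed continuation: send the prompt to the ChatGroq model and return the reply text
    response = llm([SystemMessage(content=formatted_prompt)])
    return response.content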