Update app.py
app.py CHANGED
@@ -12,7 +12,7 @@ model = AutoModelForSeq2SeqLM.from_pretrained(model_name)
 
 # Constants for enhanced organization
 GITHUB_API_BASE_URL = "https://api.github.com/repos"
-DEFAULT_MODEL = "
+DEFAULT_MODEL = "apple/OpenELM"
 MAX_RELATED_ISSUES = 3
 
 # Load a pre-trained model for sentence similarity
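A note on the new default: OpenELM checkpoints are decoder-only causal language models, while line 12 of this file loads models through AutoModelForSeq2SeqLM. A minimal sketch of how an OpenELM checkpoint is usually loaded, assuming a size-specific repo id (apple/OpenELM-270M-Instruct is an illustration, not something this commit pins):

from transformers import AutoModelForCausalLM

# Sketch only: OpenELM ships custom modeling code on the Hub, so
# trust_remote_code=True is normally required, and the decoder-only
# architecture is loaded with AutoModelForCausalLM rather than Seq2SeqLM.
model = AutoModelForCausalLM.from_pretrained(
    "apple/OpenELM-270M-Instruct",  # assumed checkpoint, for illustration only
    trust_remote_code=True,
)

If the same DEFAULT_MODEL is ever routed into the Seq2SeqLM loader above, that class mismatch is worth double-checking.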
@@ -95,6 +95,7 @@ class MyChatbot(gr.Chatbot):
     def __init__(self, fn, **kwargs):
         super().__init__(fn, **kwargs)
         self.issues = []  # Store fetched issues
+        self.current_issue = None  # Store the currently selected issue
 
     def postprocess(self, y):
         """Post-processes the response to handle commands and display results."""
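Worth keeping in mind for both self.issues and the new self.current_issue: a Gradio component instance is created once when the Blocks are built and is shared by every visitor, so attributes stored on it are global rather than per-session. If per-user isolation matters, gr.State is the usual alternative; a minimal sketch with placeholder names, declared inside the gr.Blocks() context:

# Sketch only: per-session storage via gr.State instead of attributes on the
# Chatbot subclass; each browser session gets its own copy of these values.
issues_state = gr.State([])
current_issue_state = gr.State(None)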
@@ -123,18 +124,26 @@ class MyChatbot(gr.Chatbot):
         elif y['command'].isdigit() and self.issues:
             try:
                 issue_number = int(y['command']) - 1
-
-                issue_text =
+                self.current_issue = self.issues[issue_number]  # Store the selected issue
+                issue_text = self.current_issue['title'] + "\n\n" + self.current_issue['body']
                 resolution = analyze_issues(issue_text, y['selected_model'], y['severity'], y['programming_language'])
                 related_issues = find_related_issues(issue_text, self.issues)
                 related_issue_text = "\n".join(
                     [f"- {issue['title']} (Similarity: {similarity:.2f})" for issue, similarity in related_issues]
                 )
-                return f"Resolution for Issue '{
+                return f"Resolution for Issue '{self.current_issue['title']}':\n{resolution['assistant_response']}\n\nRelated Issues:\n{related_issue_text}"
             except Exception as e:
                 return f"Error analyzing issue: {e}"
+        elif y['command'].startswith("/"):
+            # Handle commands like `/generate_code`, `/explain_concept`, etc.
+            if self.current_issue:
+                # Use the current issue's context for these commands
+                issue_text = self.current_issue['title'] + "\n\n" + self.current_issue['body']
+                return analyze_issues(issue_text, y['selected_model'], y['severity'], y['programming_language'])['assistant_response']
+            else:
+                return "Please select an issue first using `/github`."
         else:
-            # For
+            # For free-form text, simply display the assistant's response
             return assistant_response
 
 with gr.Blocks() as demo:
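For reference, this override assumes y arrives as a dict carrying the command plus the sidebar settings, rather than the chat value (a list of message pairs) that gr.Chatbot.postprocess normally receives. A sketch of the assumed payload, with illustrative values:

# Assumed shape of the payload read by MyChatbot.postprocess above; only the
# keys are taken from the diff, the values are placeholders.
y = {
    "command": "2",                     # "/github", a digit string, or free-form text
    "selected_model": "apple/OpenELM",
    "severity": "medium",
    "programming_language": "Python",
}

A digit higher than the number of fetched issues makes self.issues[issue_number] raise IndexError, which the surrounding try/except turns into the "Error analyzing issue" message.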
@@ -150,9 +159,9 @@ with gr.Blocks() as demo:
 
     model_dropdown = gr.Dropdown(
         choices=[
-            "
-            "
-
+            "apple/OpenELM",
+            "Gabriel/Swe-review-setfit-model",
+            "acecalisto3/InstructiPhi"
         ],
         label="Select Model for Issue Resolution",
         value=DEFAULT_MODEL,
@@ -166,22 +175,9 @@ with gr.Blocks() as demo:
 
     programming_language_textbox = gr.Textbox(label="Programming Language")
 
-    command_dropdown = gr.Dropdown(
-        choices=[
-            "/github",
-            "/help",
-            "/generate_code",
-            "/explain_concept",
-            "/write_documentation",
-            "/translate_code",
-        ],
-        label="Select Command",
-    )
-
     chatbot = MyChatbot(
         respond,
         additional_inputs=[
-            command_dropdown,
             system_message,
             gr.Slider(minimum=1, maximum=8192, value=2048, step=1, label="Max new tokens"),
             gr.Slider(minimum=0.1, maximum=4.0, value=0.71, step=0.1, label="Temperature"),
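One thing to double-check in the block above: a response callback plus additional_inputs is the constructor signature of gr.ChatInterface, while gr.Chatbot is only the display component and does not support either argument. If the stock chat loop is what is intended here, a sketch of that variant (respond, system_message and the sliders are the app's own objects; only the wrapper class changes):

# Sketch only: gr.ChatInterface accepts a response function and additional_inputs;
# the command handling would then live in the respond callback rather than in a
# Chatbot subclass's postprocess.
chat = gr.ChatInterface(
    fn=respond,
    additional_inputs=[
        system_message,
        gr.Slider(minimum=1, maximum=8192, value=2048, step=1, label="Max new tokens"),
        gr.Slider(minimum=0.1, maximum=4.0, value=0.71, step=0.1, label="Temperature"),
    ],
)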
@@ -201,6 +197,17 @@ with gr.Blocks() as demo:
         ],
     )
 
+    # Add a button to fetch GitHub issues
+    fetch_issues_button = gr.Button(label="Fetch Issues")
+    fetch_issues_button.click(fn=lambda github_api_token, github_username, github_repository: chatbot.issues, inputs=[github_api_token, github_username, github_repository], outputs=[chatbot])
+
+    # Add a dropdown to select an issue
+    issue_dropdown = gr.Dropdown(label="Select Issue", choices=[], interactive=True)
+    issue_dropdown.change(fn=lambda issue_number, chatbot: chatbot.postprocess(issue_number), inputs=[issue_dropdown, chatbot], outputs=[chatbot])
+
+    # Connect the chatbot input to the issue dropdown
+    chatbot.input.change(fn=lambda chatbot, github_api_token, github_username, github_repository: chatbot.postprocess("/github"), inputs=[chatbot, github_api_token, github_username, github_repository], outputs=[chatbot])
+
 if __name__ == "__main__":
     demo.queue().launch(
         share=True,
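A few of the new hooks appear to rely on attributes the stock components don't expose: gr.Button takes its text via value rather than label, a Chatbot instance has no .input event, and returning chatbot.issues into outputs=[chatbot] hands the Chatbot raw issue dicts instead of chat messages. A minimal sketch of a more conventional wiring for the fetch-and-select flow, placed inside the gr.Blocks() context and assuming a hypothetical fetch_github_issues helper plus the github_api_token / github_username / github_repository textboxes defined elsewhere in the file:

# Sketch only: fetch_github_issues stands in for whatever helper the app uses to
# call the GitHub API; everything else reuses the components from the diff.
def load_issues(token, username, repository):
    issues = fetch_github_issues(token, username, repository)
    chatbot.issues = issues  # keep MyChatbot's issue list in sync
    # Offer 1-based labels so the selection lines up with the digit commands
    # that postprocess already understands.
    return gr.update(choices=[f"{i + 1}: {issue['title']}" for i, issue in enumerate(issues)])

fetch_issues_button = gr.Button(value="Fetch Issues")
fetch_issues_button.click(
    fn=load_issues,
    inputs=[github_api_token, github_username, github_repository],
    outputs=[issue_dropdown],
)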