import gradio as gr
from transformers import pipeline
from sentence_transformers import SentenceTransformer, util
import os
import requests
from transformers import AutoModelForCausalLM, AutoTokenizer
import torch
from huggingface_hub import InferenceClient, HfApi
import git
import gitdb

# If a GPU is available, run models on it
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")

# Default settings for the chat UI and text generation
DEFAULT_SYSTEM_MESSAGE = "You are GitBot, the GitHub project guardian angel. You resolve issues and propose implementations of feature requests."
DEFAULT_MAX_TOKENS = 2048
DEFAULT_TEMPERATURE = 0.71
DEFAULT_TOP_P = 0.95


class InferenceClient:
    """Placeholder manager for Hugging Face Inference Endpoints.

    Note: this local stub shadows the InferenceClient imported from huggingface_hub above;
    every method is currently a no-op.
    """

    def __init__(self):
        pass

    def create_endpoint(self, repo_id, handler_path, model_id, task, description, hyperparameters):
        pass

    def update_endpoint(self, repo_id, handler_path, model_id, task, description, hyperparameters):
        pass

    def delete_endpoint(self, repo_id, handler_path):
        pass

    def list_endpoints(self):
        pass

    def get_endpoint_status(self, repo_id, handler_path):
        pass

    def get_endpoint_logs(self, repo_id, handler_path, num_lines):
        pass

    def get_endpoint_metrics(self, repo_id, handler_path):
        pass
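

# fetch_issues() inside the Blocks below calls fetch_github_issues(), which is not defined in
# this file. A minimal sketch using the public GitHub REST API via `requests`; pagination and
# error handling are simplified, and the 'state=open' filter is an assumption.
def fetch_github_issues(api_token, username, repo):
    """Return open issues for username/repo as a list of dicts (each has at least a 'title')."""
    url = f"https://api.github.com/repos/{username}/{repo}/issues"
    headers = {
        "Authorization": f"token {api_token}",
        "Accept": "application/vnd.github+json",
    }
    response = requests.get(url, headers=headers, params={"state": "open"}, timeout=30)
    response.raise_for_status()
    # The issues endpoint also returns pull requests; keep plain issues only.
    return [issue for issue in response.json() if "pull_request" not in issue]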


class MyChatbot(gr.Chatbot):
    """Custom Chatbot class for enhanced functionality."""

    def __init__(self, **kwargs):
        super().__init__(type="messages", **kwargs)  # Use the 'messages' (role/content) format
        self.issues = []
        self.current_issue = None

    def postprocess(self, y):
        """Post-processes the response to handle commands and display results."""
        if not y:
            return []
        if isinstance(y, dict):
            assistant_response = y.get("assistant_response", "")
            command = y.get("command", "")  # extracted for command handling, not yet used
            return [
                {
                    "role": "assistant",
                    "content": assistant_response,
                }
            ]
        # Regular lists of {'role', 'content'} messages are handled by the parent class.
        return super().postprocess(y)
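

# user() and bot() are referenced by the chat handlers inside the Blocks below but are not
# defined in this file. These are minimal sketches under that assumption: user() appends the
# message to the history in the 'messages' (role/content) format, and bot() asks the
# huggingface_hub InferenceClient for a chat completion from the selected model. The prompt
# construction and the way severity / programming language are folded into the system message
# are illustrative choices, not the original implementation.
def user(user_message, history):
    """Append the user's message to the chat history and clear the input box."""
    history = (history or []) + [{"role": "user", "content": user_message}]
    return "", history


def bot(history, system_msg, max_new_tokens, temp, top_p_value,
        api_token, username, repo, model_id, severity, programming_language):
    """Generate an assistant reply for the conversation so far.

    The GitHub fields are accepted to match the event wiring below but are not used here.
    """
    # Import under an alias so the local InferenceClient stub above is not used by mistake.
    from huggingface_hub import InferenceClient as HFInferenceClient

    if severity or programming_language:
        system_msg = (
            f"{system_msg}\nSeverity: {severity or 'unspecified'}. "
            f"Language: {programming_language or 'unspecified'}."
        )

    client = HFInferenceClient(model=model_id)
    messages = [{"role": "system", "content": system_msg}] + (history or [])
    try:
        result = client.chat_completion(
            messages=messages,
            max_tokens=int(max_new_tokens),
            temperature=temp,
            top_p=top_p_value,
        )
        reply = result.choices[0].message.content
    except Exception as exc:
        reply = f"Inference request failed: {exc}"
    return (history or []) + [{"role": "assistant", "content": reply}]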


with gr.Blocks() as demo:
    with gr.Row():
        github_api_token = gr.Textbox(label="GitHub API Token", type="password")
        github_username = gr.Textbox(label="GitHub Username")
        github_repository = gr.Textbox(label="GitHub Repository")
    system_message = gr.Textbox(
        value=DEFAULT_SYSTEM_MESSAGE,
        label="System message",
    )
    model_dropdown = gr.Dropdown(
        choices=[
            "mistralai/Mixtral-8x7B-Instruct-v0.1",
            "Gabriel/Swe-review-setfit-model",
            "OpenBMB/multilingual-codeparrot",
        ],
        label="Select Model for Issue Resolution",
        value="OpenBMB/multilingual-codeparrot",
    )
    severity_dropdown = gr.Dropdown(
        choices=["Critical", "Major", "Minor", "Trivial"],
        label="Severity",
        value=None,
    )
    programming_language_textbox = gr.Textbox(label="Programming Language")
    # Create the chatbot instance
    chatbot = MyChatbot()

    # Create input textbox for user messages
    msg = gr.Textbox(label="Message")

    # Create state for storing conversation history
    state = gr.State([])

    # Action buttons
    fetch_issues_button = gr.Button("Fetch Issues")
    resolve_issue_button = gr.Button("Resolve Issue")
    clear_button = gr.Button("Clear Chat")
    submit_button = gr.Button("Submit Message")

    # Add a dropdown to select an issue
    issue_dropdown = gr.Dropdown(
        choices=[],
        label="Select Issue",
        interactive=True,
    )
    def clear_chat():
        return [], []
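
    # select_issue() is wired to issue_dropdown.change() below but is not defined in this
    # file. A minimal sketch: it parses the "N. title" label produced by fetch_issues(),
    # remembers the chosen issue on the chatbot instance, and confirms the selection in chat.
    def select_issue(selected, history):
        if not selected:
            return history
        index = int(selected.split(".", 1)[0]) - 1
        if 0 <= index < len(chatbot.issues):
            chatbot.current_issue = chatbot.issues[index]
            history = (history or []) + [{
                "role": "assistant",
                "content": f"Selected issue: {chatbot.current_issue.get('title', '')}",
            }]
        return history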
    def fetch_issues(api_token, username, repo):
        issues = fetch_github_issues(api_token, username, repo)
        chatbot.issues = issues
        return gr.Dropdown(choices=[f"{i+1}. {issue['title']}" for i, issue in enumerate(issues)])
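
    # resolve_selected_issue() is wired to the "Resolve Issue" button below but is not defined
    # in this file. A minimal sketch: it builds a prompt from the currently selected issue and
    # reuses bot() to ask the model for a proposed fix. It reads the model dropdown's initial
    # value (not the live selection) and does not push any change back to GitHub.
    def resolve_selected_issue(api_token, username, repo, history):
        if chatbot.current_issue is None:
            return (history or []) + [{
                "role": "assistant",
                "content": "Please fetch and select an issue first.",
            }]
        issue = chatbot.current_issue
        prompt = (
            f"Repository: {username}/{repo}\n"
            f"Issue: {issue.get('title', '')}\n\n"
            f"{issue.get('body') or ''}\n\n"
            "Propose a resolution for this issue."
        )
        request_history = (history or []) + [{"role": "user", "content": prompt}]
        return bot(
            request_history,
            DEFAULT_SYSTEM_MESSAGE,
            DEFAULT_MAX_TOKENS,
            DEFAULT_TEMPERATURE,
            DEFAULT_TOP_P,
            api_token,
            username,
            repo,
            model_dropdown.value,
            severity_dropdown.value,
            programming_language_textbox.value,
        )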
    # Connect button events
    fetch_issues_button.click(
        fetch_issues,
        inputs=[github_api_token, github_username, github_repository],
        outputs=[issue_dropdown],
    )
    clear_button.click(
        clear_chat,
        outputs=[chatbot, state],
    )
    # Generation-parameter sliders, defined once and shared by the submit button and the
    # message textbox handlers
    max_tokens_slider = gr.Slider(minimum=1, maximum=8192, value=DEFAULT_MAX_TOKENS, step=1, label="Max new tokens")
    temperature_slider = gr.Slider(minimum=0.1, maximum=4.0, value=DEFAULT_TEMPERATURE, step=0.1, label="Temperature")
    top_p_slider = gr.Slider(minimum=0.1, maximum=1.0, value=DEFAULT_TOP_P, step=0.01, label="Top-p")

    bot_inputs = [
        chatbot,
        system_message,
        max_tokens_slider,
        temperature_slider,
        top_p_slider,
        github_api_token,
        github_username,
        github_repository,
        model_dropdown,
        severity_dropdown,
        programming_language_textbox,
    ]

    submit_button.click(
        user,
        [msg, chatbot],
        [msg, chatbot],
    ).then(
        bot,
        bot_inputs,
        [chatbot],
    )

    # Add message input handler
    msg.submit(
        user,
        [msg, chatbot],
        [msg, chatbot],
    ).then(
        bot,
        bot_inputs,
        [chatbot],
    )
    # Issue selection handler
    issue_dropdown.change(
        select_issue,
        inputs=[issue_dropdown, chatbot],
        outputs=[chatbot],
    )

    # Issue resolution handler
    resolve_issue_button.click(
        resolve_selected_issue,
        inputs=[
            github_api_token,
            github_username,
            github_repository,
            chatbot,
        ],
        outputs=[chatbot],
    )

if __name__ == "__main__":
    demo.queue().launch(
        share=True,
        server_name="0.0.0.0",
        server_port=7860,
    )