# GitBot — app.py
# (Removed non-Python web-page residue from the Hugging Face file viewer:
#  "acecalisto3's picture / Update app.py / f1f508e verified / raw / history blame / 3.37 kB")
import gradio as gr
from huggingface_hub import InferenceClient
import os
import requests
# Hugging Face Inference Client — used for the free-form chat path in respond().
client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")
# GitHub API details — placeholder owner/repo; replace with real values before use.
GITHUB_USERNAME = "YOUR_USERNAME"
GITHUB_REPOSITORY = "YOUR_REPOSITORY"
# Read from the environment; None when unset (the Authorization header then
# carries the literal string "Bearer None" — TODO: fail fast if missing).
GITHUB_API_TOKEN = os.getenv("GITHUB_API_TOKEN")
# Function to fetch GitHub issues
def fetch_github_issues():
    """Fetch the issues of the configured GitHub repository.

    Returns:
        list[dict]: Parsed JSON array of issue objects from the GitHub REST API.

    Raises:
        Exception: If the API responds with a non-200 status code.
        requests.RequestException: On network failure or timeout.
    """
    url = f"https://api.github.com/repos/{GITHUB_USERNAME}/{GITHUB_REPOSITORY}/issues"
    headers = {
        "Authorization": f"Bearer {GITHUB_API_TOKEN}",
        "Accept": "application/vnd.github.v3+json",
    }
    # timeout added: the original call had none and could block the app forever.
    response = requests.get(url, headers=headers, timeout=10)
    if response.status_code == 200:
        return response.json()
    raise Exception(f"Error fetching issues: {response.status_code}")
# Function to analyze issues and provide solutions
def analyze_issues(issue_text, model_name):
    """Generate a suggested resolution for an issue via text generation.

    Args:
        issue_text: The issue title and body to analyze.
        model_name: Hugging Face model id to use for generation (e.g. "gpt2").

    Returns:
        str: The model's generated text.
    """
    # BUG FIX: the original called pipeline(...), but `pipeline` was never
    # imported anywhere in this file, so this function always raised NameError.
    # Use the already-imported huggingface_hub InferenceClient instead, which
    # also avoids downloading the model locally.
    model_client = InferenceClient(model_name)
    return model_client.text_generation(issue_text)
# Function to handle chat responses
def respond(
    message,
    history: list[tuple[str, str]],
    system_message,
    max_tokens,
    temperature,
    top_p,
):
    """Chat handler for the Gradio ChatInterface (generator — yields partial text).

    Routing:
      * "/github"       -> list the repository's open issues.
      * a bare number   -> analyze the corresponding issue from the list.
      * anything else   -> stream a chat completion from the Zephyr model.

    Args:
        message: The latest user message.
        history: Prior (user, assistant) message pairs from the UI.
        system_message: System prompt prepended to the conversation.
        max_tokens / temperature / top_p: Sampling parameters for the model.

    Yields:
        str: Progressively longer response text for the UI to display.
    """
    messages = [{"role": "system", "content": system_message}]
    for user_turn, assistant_turn in history:
        if user_turn:
            messages.append({"role": "user", "content": user_turn})
        if assistant_turn:
            messages.append({"role": "assistant", "content": assistant_turn})

    if message.startswith("/github"):
        try:
            issues = fetch_github_issues()
            issue_list = "\n".join(
                f"{i+1}. {issue['title']}" for i, issue in enumerate(issues)
            )
            yield f"Available GitHub Issues:\n{issue_list}\n\nEnter the issue number to analyze:"
        except Exception as e:
            yield f"Error fetching GitHub issues: {e}"
    elif message.isdigit():
        try:
            issue_number = int(message) - 1
            issues = fetch_github_issues()
            issue = issues[issue_number]
            # BUG FIX: issue['body'] is None for empty issues; the original
            # `title + "\n\n" + body` raised TypeError in that case.
            issue_text = issue["title"] + "\n\n" + (issue.get("body") or "")
            resolution = analyze_issues(issue_text, "gpt2")  # Default to gpt2 for now
            yield f"Resolution for Issue '{issue['title']}':\n{resolution}"
        except Exception as e:
            yield f"Error analyzing issue: {e}"
    else:
        messages.append({"role": "user", "content": message})
        response = ""
        # BUG FIX: the original used `for message in client.chat_completion(...)`,
        # shadowing the `message` parameter mid-function; renamed to `chunk`.
        for chunk in client.chat_completion(
            messages,
            max_tokens=max_tokens,
            stream=True,
            temperature=temperature,
            top_p=top_p,
        ):
            token = chunk.choices[0].delta.content
            # BUG FIX: delta.content can be None on some stream chunks;
            # the original `response += token` then raised TypeError.
            if token:
                response += token
                yield response
# Gradio UI: a ChatInterface wired to respond(), with the extra sampling
# controls exposed as additional inputs (passed positionally after history).
with gr.Blocks() as demo:
    chatbot = gr.ChatInterface(
        respond,
        additional_inputs=[
            gr.Textbox(value="You are a friendly Chatbot.", label="System message"),
            gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens"),
            gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature"),
            gr.Slider(
                minimum=0.1,
                maximum=1.0,
                value=0.95,
                step=0.05,
                label="Top-p (nucleus sampling)",
            ),
        ],
    )

if __name__ == "__main__":
    # queue() enables streaming generator responses; 0.0.0.0/7860 is the
    # standard binding for a Hugging Face Space container.
    demo.queue().launch(share=True, server_name="0.0.0.0", server_port=7860)