acecalisto3 commited on
Commit
73622f6
·
verified ·
1 Parent(s): 2ec88b8

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +116 -56
app.py CHANGED
@@ -1,14 +1,10 @@
1
  import gradio as gr
2
- import requests
3
- from transformers import pipeline, AutoTokenizer, AutoModelForSeq2SeqLM
4
  from sentence_transformers import SentenceTransformer, util
 
 
5
 
6
- # Initialize models and tokenizers
7
- model_name = "enricoros/big-agi"
8
- tokenizer = AutoTokenizer.from_pretrained(model_name)
9
- model = AutoModelForSeq2SeqLM.from_pretrained(model_name)
10
-
11
- # Constants
12
  GITHUB_API_BASE_URL = "https://api.github.com/repos"
13
  DEFAULT_MODEL = "microsoft/CodeBERT-base"
14
  MAX_RELATED_ISSUES = 3
@@ -16,51 +12,91 @@ MAX_RELATED_ISSUES = 3
16
  # Load a pre-trained model for sentence similarity
17
  similarity_model = SentenceTransformer('all-mpnet-base-v2')
18
 
19
- def analyze_issues(issue_text: str, model_name: str, severity: str = None, programming_language: str = None):
20
- # Generate a response using the loaded model
21
- generator = pipeline("text-generation", model=model, tokenizer=tokenizer)
22
- response = generator(issue_text, max_length=512, num_return_sequences=1)[0]['generated_text']
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
23
 
24
- return response
 
25
 
26
- def find_related_issues(issue_text: str, issues: list):
 
 
 
 
 
 
 
27
  issue_embedding = similarity_model.encode(issue_text)
28
- related_issues = []
29
- for issue in issues:
30
- title_embedding = similarity_model.encode(issue['title'])
31
- similarity = util.cos_sim(issue_embedding, title_embedding)[0][0]
32
- related_issues.append((issue, similarity.item()))
33
 
34
- related_issues.sort(key=lambda x: x[1], reverse=True)
35
- return [issue for issue, _ in related_issues[:MAX_RELATED_ISSUES]]
36
 
37
- def fetch_github_issues(github_api_token: str, github_username: str, github_repository: str):
 
 
 
38
  headers = {'Authorization': f'token {github_api_token}'}
39
  url = f"{GITHUB_API_BASE_URL}/{github_username}/{github_repository}/issues"
40
  response = requests.get(url, headers=headers)
41
- if response.status_code == 200:
42
- return response.json()
43
- else:
44
- raise Exception(f"Failed to fetch issues: {response.text}")
 
45
 
46
  def respond(
47
- command,
48
- history,
49
- system_message,
50
- max_tokens,
51
- temperature,
52
- top_p,
53
- github_api_token,
54
- github_username,
55
- github_repository,
56
- selected_model,
57
- severity,
58
- programming_language,
59
- ):
60
- # Processing the command and generating a response
61
- generator = pipeline("text-generation", model=model, tokenizer=tokenizer)
62
- response = generator(f"{system_message}\n{command}\n{history}", max_length=max_tokens, num_return_sequences=1)[0]['generated_text']
63
- return response
 
 
 
 
 
 
 
 
64
 
65
  with gr.Blocks() as demo:
66
  with gr.Row():
@@ -68,29 +104,53 @@ with gr.Blocks() as demo:
68
  github_username = gr.Textbox(label="GitHub Username")
69
  github_repository = gr.Textbox(label="GitHub Repository")
70
 
71
- system_message = gr.Textbox(value="You are GitBot, the Github project guardian angel.", label="System message")
72
- model_dropdown = gr.Dropdown(choices=[DEFAULT_MODEL, "enricoros/big-agi"], label="Select Model for Issue Resolution", value=DEFAULT_MODEL)
73
- severity_dropdown = gr.Dropdown(choices=["Critical", "Major", "Minor", "Trivial"], label="Severity")
 
 
 
 
 
 
 
 
 
 
 
 
 
 
74
  programming_language_textbox = gr.Textbox(label="Programming Language")
75
- command_dropdown = gr.Dropdown(choices=["/github", "/help", "/generate_code"], label="Select Command")
76
 
77
- chatbot = gr.Interface(
78
- fn=respond,
79
- inputs=[
 
 
 
 
 
 
 
 
 
 
 
 
80
  command_dropdown,
81
  system_message,
82
- gr.Slider(minimum=1, maximum=8192, value=2048, label="Max new tokens"),
83
- gr.Slider(minimum=0.1, maximum=4.0, value=0.71, label="Temperature"),
84
- gr.Slider(minimum=0.1, maximum=1.0, value=0.95, label="Top-p (nucleus sampling)"),
85
  github_api_token,
86
  github_username,
87
  github_repository,
88
  model_dropdown,
89
  severity_dropdown,
90
- programming_language_textbox
91
  ],
92
- outputs="text"
93
  )
94
 
95
  if __name__ == "__main__":
96
- demo.launch(share=True, server_name="0.0.0.0", server_port=7860)
 
import os

import gradio as gr
import requests
from sentence_transformers import SentenceTransformer, util
from transformers import AutoModelForSeq2SeqLM, AutoTokenizer, pipeline

# Constants for enhanced organization
GITHUB_API_BASE_URL = "https://api.github.com/repos"
DEFAULT_MODEL = "microsoft/CodeBERT-base"
MAX_RELATED_ISSUES = 3

# Load a pre-trained model for sentence similarity (used by
# find_related_issues below).
similarity_model = SentenceTransformer('all-mpnet-base-v2')

# Define models for issue analysis.
# NOTE(review): these globals are loaded eagerly at import time but the
# functions below build fresh `pipeline(...)` objects instead of using
# them — confirm whether this startup cost is intentional.
model_name = "enricoros/big-agi"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForSeq2SeqLM.from_pretrained(model_name)
20
def analyze_issues(issue_text: str, model_name: str, severity: str = None, programming_language: str = None) -> dict:
    """Analyze a GitHub issue with a text-generation model.

    Args:
        issue_text: Raw text of the issue to analyze.
        model_name: Hugging Face model identifier used for generation.
        severity: Optional pre-assigned severity; overridden when the
            model output contains a "Severity:" line.
        programming_language: Optional pre-assigned language; overridden
            when the model output contains a "Programming Language:" line.

    Returns:
        dict with keys 'assistant_response', 'severity' and
        'programming_language'.
    """
    # Use a local name so we do not shadow the module-level `model` global.
    generator = pipeline("text-generation", model=model_name)

    # Generate a response
    response = generator(
        f"{issue_text}\nAssistant: ",
        max_length=512,
        do_sample=True,
        temperature=0.7,
        top_k=50,
        top_p=0.9,
    )

    # Extract the assistant's response
    assistant_response = response[0]['generated_text'].strip()

    # Parse labelled fields line by line. The previous code split the whole
    # response on its FIRST colon, so both fields received the same (wrong)
    # fragment; split(":", 1) on the matching line gets the actual value.
    for line in assistant_response.splitlines():
        if line.startswith("Programming Language") and ":" in line:
            programming_language = line.split(":", 1)[1].strip()
        elif line.startswith("Severity") and ":" in line:
            severity = line.split(":", 1)[1].strip()

    return {
        'assistant_response': assistant_response,
        'severity': severity,
        'programming_language': programming_language,
    }
49
+
50
def find_related_issues(issue_text: str, issues: list) -> list:
    """Return up to MAX_RELATED_ISSUES issues most similar to issue_text.

    Args:
        issue_text: Text of the issue to compare against.
        issues: List of issue dicts, each with a 'title' key.

    Returns:
        The most similar issues, ordered by descending cosine similarity
        of their titles to `issue_text`.
    """
    # Guard the empty case so the encode/similarity calls never see an
    # empty batch.
    if not issues:
        return []

    issue_embedding = similarity_model.encode(issue_text)
    # One batched encode call instead of one model call per issue.
    title_embeddings = similarity_model.encode([issue['title'] for issue in issues])

    # cos_sim returns a 1 x N tensor; .item() converts each score to a
    # plain float so the sort key is a number, not a tensor object.
    scores = util.cos_sim(issue_embedding, title_embeddings)[0]
    ranked = sorted(range(len(issues)), key=lambda i: scores[i].item(), reverse=True)

    return [issues[i] for i in ranked[:MAX_RELATED_ISSUES]]
62
+
63
def fetch_github_issues(github_api_token: str, github_username: str, github_repository: str) -> list:
    """Fetch the issues of a repository from the GitHub REST API.

    Args:
        github_api_token: Personal access token used for authentication.
        github_username: Owner of the repository.
        github_repository: Repository name.

    Returns:
        List of issue dicts as returned by the GitHub API.

    Raises:
        requests.HTTPError: If the API responds with a non-2xx status
            (e.g. bad token or unknown repository).
        requests.Timeout: If the API does not respond within 30 seconds.
    """
    headers = {'Authorization': f'token {github_api_token}'}
    url = f"{GITHUB_API_BASE_URL}/{github_username}/{github_repository}/issues"
    response = requests.get(url, headers=headers, timeout=30)

    # Fail loudly on auth/404 errors instead of returning the error payload
    # as if it were a list of issues (the check this revision dropped).
    response.raise_for_status()

    return response.json()
73
 
74
def respond(
    command, history, system_message, max_tokens, temperature, top_p,
    github_api_token, github_username, github_repository,
    selected_model, severity, programming_language, *args, **kwargs
) -> dict:
    """Generate an assistant reply for a chat command.

    Args:
        command: Slash command selected in the UI (e.g. "/github").
        history: Prior conversation history, interpolated into the prompt.
        system_message: System prompt prepended to the generation input.
        max_tokens: Maximum generation length (passed as max_length).
        temperature: Sampling temperature.
        top_p: Nucleus-sampling cutoff.
        github_api_token: GitHub token (currently unused here; kept for
            interface compatibility with the UI wiring).
        github_username / github_repository: Repo identifiers woven into
            the prompt.
        selected_model: Model id chosen in the UI dropdown.
        severity / programming_language: Issue metadata echoed back.

    Returns:
        dict with keys 'assistant_response', 'severity' and
        'programming_language'.
    """
    # Honor the model the user selected in the UI; previously this was
    # hard-coded to "enricoros/big-agi", silently ignoring `selected_model`.
    generator = pipeline("text-generation", model=selected_model or DEFAULT_MODEL)

    # Generate a response
    response = generator(
        f"{system_message}\n{command}\n{history}\n{github_username}/{github_repository}\nSeverity: {severity}\nProgramming Language: {programming_language}\nAssistant: ",
        max_length=max_tokens,
        do_sample=True,
        temperature=temperature,
        top_k=50,
        top_p=top_p,
    )

    # Extract the assistant's response
    assistant_response = response[0]['generated_text'].strip()

    return {
        'assistant_response': assistant_response,
        'severity': severity,
        'programming_language': programming_language,
    }
100
 
101
  with gr.Blocks() as demo:
102
  with gr.Row():
 
104
  github_username = gr.Textbox(label="GitHub Username")
105
  github_repository = gr.Textbox(label="GitHub Repository")
106
 
107
+ system_message = gr.Textbox(
108
+ value="You are GitBot, the Github project guardian angel. You resolve issues and propose implementation of feature requests",
109
+ label="System message",
110
+ )
111
+
112
+ model_dropdown = gr.Dropdown(
113
+ choices=["microsoft/CodeBERT-base", "Salesforce/codegen-45M-mono"],
114
+ label="Select Model for Issue Resolution",
115
+ value=DEFAULT_MODEL,
116
+ )
117
+
118
+ severity_dropdown = gr.Dropdown(
119
+ choices=["Critical", "Major", "Minor", "Trivial"],
120
+ label="Severity",
121
+ value=None,
122
+ )
123
+
124
  programming_language_textbox = gr.Textbox(label="Programming Language")
 
125
 
126
+ command_dropdown = gr.Dropdown(
127
+ choices=[
128
+ "/github",
129
+ "/help",
130
+ "/generate_code",
131
+ "/explain_concept",
132
+ "/write_documentation",
133
+ "/translate_code",
134
+ ],
135
+ label="Select Command",
136
+ )
137
+
138
+ chatbot = gr.Chatbot(
139
+ respond,
140
+ additional_inputs=[
141
  command_dropdown,
142
  system_message,
143
+ gr.Slider(minimum=1, maximum=8192, value=2048, step=1, label="Max new tokens"),
144
+ gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature"),
145
+ gr.Slider(minimum=0.1, maximum=1.0, value=0.9, step=0.1, label="Top-p (nucleus sampling)"),
146
  github_api_token,
147
  github_username,
148
  github_repository,
149
  model_dropdown,
150
  severity_dropdown,
151
+ programming_language_textbox,
152
  ],
 
153
  )
154
 
155
# Queue requests so concurrent users don't contend for the generation pipeline.
if __name__ == "__main__":
    demo.queue().launch(share=True, server_name="0.0.0.0", server_port=7860)