Update app.py
app.py
CHANGED
@@ -137,7 +137,7 @@ class WebhookHandler(BaseHTTPRequestHandler):

         event = self.headers.get('X-GitHub-Event')
         delivery_id = self.headers.get('X-GitHub-Delivery')
-        logger.info(f"Received GitHub webhook event: {event}
+        logger.info(f"Received GitHub webhook event: {event} (Delivery ID: {delivery_id})")

         if event == 'issues' and WebhookHandler.manager_instance and WebhookHandler.main_loop:
             action = payload.get('action')
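For orientation, the hunk above sits inside a `do_POST` webhook handler. A minimal sketch of that flow, assuming standard-library `http.server` (the body-parsing details here are illustrative, not the Space's exact code):

    import json
    from http.server import BaseHTTPRequestHandler

    class WebhookSketch(BaseHTTPRequestHandler):
        def do_POST(self):
            # GitHub identifies the event type and delivery via these headers.
            event = self.headers.get('X-GitHub-Event')
            delivery_id = self.headers.get('X-GitHub-Delivery')
            # Read and parse the JSON payload (hypothetical; production code
            # should also verify X-Hub-Signature-256 before trusting the body).
            length = int(self.headers.get('Content-Length', 0))
            payload = json.loads(self.rfile.read(length) or b'{}')
            if event == 'issues':
                action = payload.get('action')  # e.g. 'opened', 'closed'
            # Always acknowledge quickly so GitHub does not retry the delivery.
            self.send_response(200)
            self.send_header("Content-type", "text/plain")
            self.end_headers()
            self.wfile.write(b"OK")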
@@ -158,7 +158,7 @@ class WebhookHandler(BaseHTTPRequestHandler):
         elif event == 'ping':
             logger.info("Received GitHub webhook ping.")
         else:
-            logger.warning(f"Unhandled event type: {event}
+            logger.warning(f"Unhandled event type: {event} or manager/loop not initialized.")

         self.send_response(200)
         self.send_header("Content-type", "text/plain")
@@ -248,7 +248,7 @@ class IssueManager:
                 break

         if not found_issue:
-            logger.error(f"Could not find issue data for hash {issue_hash}
+            logger.error(f"Could not find issue data for hash {issue_hash} in current state. Suggestion might be based on outdated info if generated.")
             return "Error: Issue data for this suggestion request (hash) not found in current state. The issue might have been updated or closed. Please re-select the issue."

         if model_key not in HF_MODELS:
@@ -282,7 +282,7 @@ class IssueManager:
         significant_change = False  # Flag for changes affecting clustering/content/AI caches

         if action == 'closed':
-            logger.info(f"Webhook: Removing closed issue {issue_number}
+            logger.info(f"Webhook: Removing closed issue {issue_number} from active list.")
             if self.issues.pop(issue_number, None):
                 needs_ui_update = True
                 significant_change = True
@@ -297,7 +297,7 @@ class IssueManager:
             self.code_editors.pop(issue_number, None)

         elif action in ['opened', 'reopened', 'edited', 'assigned', 'unassigned', 'labeled', 'unlabeled', 'milestoned', 'demilestoned']:
-            logger.info(f"Webhook: Adding/Updating issue {issue_number}
+            logger.info(f"Webhook: Adding/Updating issue {issue_number} (action: {action}).")
             processed_data = self._process_issue_data(issue_data)

             old_issue = self.issues.get(issue_number)
@@ -306,7 +306,7 @@ class IssueManager:
                old_issue.get('title') != processed_data.get('title') or \
                set(old_issue.get('labels', [])) != set(processed_data.get('labels', [])):
                 significant_change = True
-                logger.info(f"Significant change detected for issue {issue_number}
+                logger.info(f"Significant change detected for issue {issue_number} (content/labels).")
                 # Invalidate ALL precomputed AI state on significant edit
                 self.precomputed_context.pop(issue_number, None)
                 self.precomputed_summaries.pop(issue_number, None)
@@ -318,12 +318,12 @@ class IssueManager:
                old_issue.get('updated_at') != processed_data.get('updated_at') or \
                old_issue.get('assignee') != processed_data.get('assignee') or \
                set(old_issue.get('labels', [])) != set(processed_data.get('labels', [])):
-                logger.debug(f"State-related change detected for issue {issue_number}
+                logger.debug(f"State-related change detected for issue {issue_number} (update time, assignee, labels). Idle loop will re-evaluate.")

             self.issues[issue_number] = processed_data
             needs_ui_update = True
         else:
-            logger.info(f"Ignoring webhook action '{action}' for issue {issue_number}
+            logger.info(f"Ignoring webhook action '{action}' for issue {issue_number} (already filtered).")

         # --- Track changes for idle processing ---
         if needs_ui_update:
@@ -351,7 +351,7 @@ class IssueManager:
             "body": issue_data.get('body', ''),
             "state": issue_data.get('state', 'unknown'),
             "labels": sorted([label['name'] for label in issue_data.get('labels', [])]),
-            "assignee": issue_data.get('assignee', {
+            "assignee": issue_data.get('assignee', {}).get('login') if issue_data.get('assignee') else None,
             "url": issue_data.get('html_url', '#'),
             "created_at": issue_data.get('created_at'),
             "updated_at": issue_data.get('updated_at'),
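The assignee change in the hunk above guards against GitHub returning an explicit null for unassigned issues; `.get('assignee', {})` does not replace a stored None, so chaining `.get('login')` would raise. A two-line illustration with hypothetical data:

    issue_data = {"assignee": None}  # unassigned issue, as the GitHub API returns it
    # The default {} only applies when the key is absent, not when it is None,
    # hence the explicit conditional in the new line:
    assignee = issue_data.get('assignee', {}).get('login') if issue_data.get('assignee') else None
    assert assignee is None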
@@ -417,7 +417,7 @@ class IssueManager:
             remote_url = next((r.url for r in self.repo.remotes.origin.urls), None)
             expected_urls = [self.repo_url, self.repo_url + ".git"]
             if remote_url not in expected_urls:
-                logger.warning(f"Existing repo path {self.repo_local_path} has different remote URL ('{remote_url}' vs '{self.repo_url}'). Re-cloning.")
+                logger.warning(f"Existing repo path {self.repo_local_path} has different remote URL ('{remote_url}' vs '{self.repo_url}'). Re-cloning.")
                 shutil.rmtree(self.repo_local_path)
                 self.repo = Repo.clone_from(self.repo_url, self.repo_local_path, progress=lambda op, cur, tot, msg: logger.debug(f"Clone progress: {msg}"))
             else:
@@ -428,7 +428,7 @@ class IssueManager:
                     logger.info("Repository is shallow, unshallowing...")
                     self.repo.git.fetch('--unshallow')
                 else:
-                    logger.warning(f"Existing repo at {self.repo_local_path}
+                    logger.warning(f"Existing repo at {self.repo_local_path} has no remotes defined. Re-cloning.")
                     shutil.rmtree(self.repo_local_path)
                     self.repo = Repo.clone_from(self.repo_url, self.repo_local_path, progress=lambda op, cur, tot, msg: logger.debug(f"Clone progress: {msg}"))

@@ -442,7 +442,7 @@ class IssueManager:
             try: self.repo = Repo(self.repo_local_path)
             except Exception: logger.error("Failed to even load existing repo after pull error.")
         else:
-            logger.info(f"Cloning repository {self.repo_url}
+            logger.info(f"Cloning repository {self.repo_url} to {self.repo_local_path}")
             self.repo = Repo.clone_from(self.repo_url, self.repo_local_path, progress=lambda op, cur, tot, msg: logger.debug(f"Clone progress: {msg}"))

         logger.info("Repository clone/update process finished.")
@@ -482,7 +482,7 @@ class IssueManager:
                 issues_page_data = await response.json()
                 if not issues_page_data: break

-                logger.info(f"Fetched page {page}
+                logger.info(f"Fetched page {page} with {len(issues_page_data)} items.")
                 all_issues_data.extend(issues_page_data)

                 link_header = response.headers.get('Link')
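The pagination loop in the hunk above reads GitHub's `Link` response header to decide whether another page exists. A minimal parser for the rel="next" entry (an assumed helper for illustration, not taken from app.py):

    from typing import Optional

    def next_page_url(link_header: Optional[str]) -> Optional[str]:
        """Return the rel="next" URL from a GitHub Link header, if any."""
        if not link_header:
            return None
        for part in link_header.split(','):
            url_part, _, params = part.partition(';')
            if 'rel="next"' in params:
                return url_part.strip().strip('<>')
        return None

    # next_page_url('<https://api.github.com/repositories/1/issues?page=2>; rel="next"')
    # -> 'https://api.github.com/repositories/1/issues?page=2'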
@@ -499,7 +499,7 @@ class IssueManager:
             if 'pull_request' not in issue_data
         }

-        logger.info(f"Filtered out pull requests, {len(self.issues)}
+        logger.info(f"Filtered out pull requests, {len(self.issues)} actual open issues remaining.")

         empty_fig = go.Figure()
         empty_fig.update_layout(title="Issue Severity Distribution", xaxis={"visible": False}, yaxis={"visible": False},
@@ -532,7 +532,7 @@ class IssueManager:
                 if 0 <= index < len(self.issue_list_for_clustering):
                     index_to_cluster_id[index] = cluster_id
                 else:
-                    logger.warning(f"Clustering returned invalid index {index}
+                    logger.warning(f"Clustering returned invalid index {index} for list of length {len(self.issue_list_for_clustering)}")

             for i, issue in enumerate(self.issue_list_for_clustering):
                 severity = self._determine_severity(issue['labels'])
@@ -552,14 +552,14 @@ class IssueManager:
             self.start_broadcast_loop()
             self.start_idle_processing()

-            success_msg = f"Found {len(self.issues)}
+            success_msg = f"Found {len(self.issues)} open issues. Clustered into {len(self.issue_clusters)} groups. Repo ready. Background analysis started."
             logger.info(success_msg)
             # Return both plots
             return dataframe_data, stats_fig, success_msg, stats_fig  # Mypy may complain about return type mismatch if not explicitly handled

         except aiohttp.ClientResponseError as e:
             logger.error(f"GitHub API request failed: Status={e.status}, Message='{e.message}', URL='{e.request_info.url}'")
-            error_msg = f"Error fetching issues: {e.status}
+            error_msg = f"Error fetching issues: {e.status} - {e.message}. Check token/URL."
             if e.status == 404: error_msg = f"Error: Repository not found at {self.repo_url}."
             elif e.status == 401: error_msg = "Error: Invalid GitHub token or insufficient permissions for this repository."
             elif e.status == 403:
@@ -628,20 +628,20 @@ class IssueManager:
             return

         num_issues = len(self.issue_list_for_clustering)
-        logger.info(f"Generating embeddings for {num_issues}
+        logger.info(f"Generating embeddings for {num_issues} issues for clustering...")
         try:
             texts_to_embed = [
-                f"Title: {i.get('title','')} Body: {i.get('body','')[:1500]}"
+                f"Title: {i.get('title','')} Body: {i.get('body','')[:1500]}"
                 for i in self.issue_list_for_clustering
             ]
             embeddings = await self._generate_embeddings(texts_to_embed)

             if embeddings is None or not isinstance(embeddings, list) or len(embeddings) != num_issues:
-                logger.error(f"Failed to generate valid embeddings for clustering. Expected {num_issues}, got {type(embeddings)} len {len(embeddings) if embeddings else 'N/A'}.")
+                logger.error(f"Failed to generate valid embeddings for clustering. Expected {num_issues}, got {type(embeddings)} len {len(embeddings) if embeddings else 'N/A'}.")
                 self.issue_clusters = {}
                 return

-            logger.info(f"Generated {len(embeddings)}
+            logger.info(f"Generated {len(embeddings)} embeddings. Running HDBSCAN clustering...")
             clusterer = HDBSCAN(min_cluster_size=2, metric='cosine', allow_single_cluster=True, gen_min_span_tree=True)
             clusters = clusterer.fit_predict(embeddings)

@@ -657,7 +657,7 @@ class IssueManager:
                 new_issue_clusters[cluster_id_int].append(i)

             self.issue_clusters = new_issue_clusters
-            logger.info(f"Clustering complete. Found {len(self.issue_clusters)}
+            logger.info(f"Clustering complete. Found {len(self.issue_clusters)} clusters (min size 2) with {noise_count} noise points.")

             # Reset the change counter and flag after successful clustering
             self._webhook_change_count = 0
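The hunks above run HDBSCAN over issue embeddings and then group list indices by cluster label. A library-independent sketch of that grouping step (HDBSCAN labels noise points -1, which are skipped):

    from collections import defaultdict
    from typing import Dict, List

    def group_by_cluster(labels: List[int]) -> Dict[int, List[int]]:
        """Map cluster id -> list of issue indices; drop HDBSCAN noise (-1)."""
        clusters: Dict[int, List[int]] = defaultdict(list)
        for index, label in enumerate(labels):
            if label != -1:
                clusters[int(label)].append(index)
        return dict(clusters)

    # group_by_cluster([0, 0, -1, 1]) -> {0: [0, 1], 1: [3]}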
@@ -680,7 +680,7 @@ class IssueManager:
             for i, issue in enumerate(self.issue_list_for_clustering):
                 issue_id = issue.get('id')
                 if issue_id is None:
-                    logger.warning(f"Issue at index {i}
+                    logger.warning(f"Issue at index {i} in clustering list is missing an ID.")
                     continue
                 index_to_id[i] = issue_id
         except Exception as e:
@@ -694,7 +694,7 @@ class IssueManager:
             for issue_id in cluster_issue_ids:
                 self.potential_duplicates[issue_id] = [other_id for other_id in cluster_issue_ids if other_id != issue_id]

-        logger.info(f"Identified potential duplicates for {len(self.potential_duplicates)}
+        logger.info(f"Identified potential duplicates for {len(self.potential_duplicates)} issues based on clustering.")

     async def _generate_embeddings(self, texts: List[str]):
         """Generates sentence embeddings using Hugging Face Inference API."""
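Building on the grouping above, every issue in a cluster is flagged as a potential duplicate of each other member. A sketch of that mapping (index_to_id mirrors the dict built in the @@ -680 hunk; the helper name is hypothetical):

    def map_duplicates(issue_clusters, index_to_id):
        """issue_clusters: {cluster_id: [indices]}; index_to_id: {index: issue_id}."""
        potential_duplicates = {}
        for indices in issue_clusters.values():
            ids = [index_to_id[i] for i in indices if i in index_to_id]
            for issue_id in ids:
                potential_duplicates[issue_id] = [o for o in ids if o != issue_id]
        return potential_duplicates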
@@ -710,7 +710,7 @@ class IssueManager:
         headers = {"Authorization": f"Bearer {self.hf_token}"}
         timeout = aiohttp.ClientTimeout(total=180)

-        logger.info(f"Requesting embeddings from {api_url}
+        logger.info(f"Requesting embeddings from {api_url} for {len(texts)} texts.")
         async with aiohttp.ClientSession(headers=headers, timeout=timeout) as session:
             try:
                 payload = {"inputs": texts, "options": {"wait_for_model": True}}
@@ -722,7 +722,7 @@ class IssueManager:

                 if isinstance(result, list) and all(isinstance(emb, list) and all(isinstance(f, float) for f in emb) for emb in result):
                     if len(result) == len(texts):
-                        logger.info(f"Successfully received {len(result)}
+                        logger.info(f"Successfully received {len(result)} embeddings of expected dimension.")
                         return result
                     else:
                         logger.error(f"HF Embedding API returned wrong number of embeddings: Got {len(result)}, expected {len(texts)}.")
@@ -740,7 +740,7 @@ class IssueManager:
                 logger.error(f"HF Inference API embedding request failed: Status={e.status}, Message='{e.message}'. Body: {error_body[:500]}")
                 return None
             except asyncio.TimeoutError:
-                logger.error(f"HF Inference API embedding request timed out after {timeout.total}
+                logger.error(f"HF Inference API embedding request timed out after {timeout.total} seconds.")
                 return None
             except Exception as e:
                 logger.exception(f"Unexpected error during embedding generation: {e}")
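The embedding hunks above call the HF Inference API with wait_for_model and a 180 s total timeout. A self-contained sketch of that request; the payload shape and headers match the diff, while the default api_url/model here is an assumption for illustration:

    import aiohttp

    async def fetch_embeddings(texts, hf_token,
                               api_url="https://api-inference.huggingface.co/models/sentence-transformers/all-MiniLM-L6-v2"):
        headers = {"Authorization": f"Bearer {hf_token}"}
        timeout = aiohttp.ClientTimeout(total=180)
        payload = {"inputs": texts, "options": {"wait_for_model": True}}
        async with aiohttp.ClientSession(headers=headers, timeout=timeout) as session:
            async with session.post(api_url, json=payload) as response:
                response.raise_for_status()  # raises aiohttp.ClientResponseError on 4xx/5xx
                result = await response.json()
        # Expect one embedding (a list of floats) per input text.
        return result if isinstance(result, list) and len(result) == len(texts) else None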
@@ -749,7 +749,7 @@ class IssueManager:
     async def generate_code_patch(self, issue_number: int, model_key: str) -> dict:
         """Generates a code patch suggestion using a selected AI model."""
         if issue_number not in self.issues:
-            return {"error": f"Issue {issue_number}
+            return {"error": f"Issue {issue_number} not found."}
         if not self.hf_token:
             return {"error": "Hugging Face token not set."}
         if model_key not in HF_MODELS:
@@ -759,7 +759,7 @@ class IssueManager:

         issue = self.issues[issue_number]
         model_id = HF_MODELS[model_key]
-        logger.info(f"Generating patch for issue {issue_number} ('{issue.get('title', 'N/A')[:50]}...') using model {model_id}")
+        logger.info(f"Generating patch for issue {issue_number} ('{issue.get('title', 'N/A')[:50]}...') using model {model_id}")

         # --- Context Gathering ---
         context_str = "Context gathering failed or not available."
@@ -774,11 +774,11 @@ class IssueManager:
             elif context_data.get("content"):
                 context_str = context_data["content"]
                 num_files = len(context_data.get('files',[]))
-                context_source = f"Pre-computed ({num_files}
+                context_source = f"Pre-computed ({num_files} files @ {timestamp_str})"
             else:
                 context_str = "Pre-computed context was empty or unavailable."
                 context_source = f"Pre-computed (Empty @ {timestamp_str})"
-            logger.info(f"Using pre-computed context for issue {issue_number}
+            logger.info(f"Using pre-computed context for issue {issue_number} (Source: {context_source})")
         else:
             logger.info(f"No pre-computed context found for issue {issue_number}, computing now.")
             context_source = "Computed On-Demand"
@@ -788,7 +788,7 @@ class IssueManager:
             context_source += " (Error)"
         else:
             context_str = context_result.get("content", "No specific context found.")
-            context_source += f" ({len(context_result.get('files',[]))} files)"
             self.precomputed_context[issue_number] = {
                 "content": context_str,
                 "files": context_result.get("files", []),
@@ -849,7 +849,7 @@ class IssueManager:

         if result and isinstance(result, list) and 'generated_text' in result[0]:
             generated_text = result[0].get('generated_text', '').strip()
-            logger.info(f"Received patch suggestion from {model_id}

             diff_match = re.search(r"```diff\n(.*?)```", generated_text, re.DOTALL | re.IGNORECASE)
             explanation = generated_text.split("```diff")[0].strip() if diff_match else generated_text
@@ -858,7 +858,7 @@ class IssueManager:
         if diff_match:
             patch_content = diff_match.group(1).strip()
             if not re.search(r'^(--- |\+\+\+ |@@ )', patch_content, re.MULTILINE):
-                logger.warning(f"Generated patch for issue {issue_number}
             return {"explanation": explanation, "patch": patch_content, "model_used": model_id}
         else:
             if re.search(r"(insufficient context|cannot generate|unable to create patch|context required)", explanation, re.IGNORECASE):
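The patch-handling hunks above split the model output into an explanation and a fenced diff block, then sanity-check the block for unified-diff markers. The same two regexes, extracted as a standalone helper (the function name is hypothetical):

    import re

    def extract_patch(generated_text: str):
        """Split model output into (explanation, patch, looks_like_diff)."""
        diff_match = re.search(r"```diff\n(.*?)```", generated_text, re.DOTALL | re.IGNORECASE)
        if not diff_match:
            return generated_text.strip(), None, False
        explanation = generated_text.split("```diff")[0].strip()
        patch = diff_match.group(1).strip()
        # A usable unified diff should contain ---/+++/@@ markers somewhere.
        looks_like_diff = bool(re.search(r'^(--- |\+\+\+ |@@ )', patch, re.MULTILINE))
        return explanation, patch, looks_like_diff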
@@ -874,7 +874,7 @@ class IssueManager:
             logger.error(f"HF Inference API patch error for issue {issue_number}: {error_msg}" + (f" (Est: {estimated_time}s)" if estimated_time else ""))
             return {"error": f"AI model error: {error_msg}"}
         else:
-            logger.error(f"Unexpected patch response format from {model_id}
             return {"error": "Unexpected response format from AI model."}
     except aiohttp.ClientResponseError as e:
         error_body = await e.response.text()
@@ -919,7 +919,7 @@ class IssueManager:
         if not potential_files:
             return {"content": "No file paths matching common patterns found in the issue title or body.", "files": [], "error": None}

-        logger.info(f"Found {len(potential_files)}
         context_content = ""
         max_context_length = 6000
         files_included = []
@@ -942,13 +942,13 @@ class IssueManager:
                         context_content += content_snippet
                         files_included.append(str(relative_path))
                     else:
-                        logger.warning(f"Skipping file {relative_path}
                         files_skipped_length.append(str(relative_path))
                 except OSError as e:
-                    logger.warning(f"Could not read file {full_path}
                     files_read_error.append(str(relative_path))
                 except Exception as e:
-                    logger.warning(f"Unexpected error reading file {full_path}
                     files_read_error.append(str(relative_path))
             else:
                 logger.info(f"Potential path '{relative_path}' (from '{file_path_str}') not found or not a file in local repo for issue {issue_id}.")
@@ -958,10 +958,10 @@ class IssueManager:
         error_status = None
         if files_included:
             final_content = context_content.strip()
-            logger.info(f"Included context from {len(files_included)}
         else:
             final_content = "No content could be retrieved from the potential file paths found."
-            logger.warning(f"Context generation for issue {issue_id}
             if potential_files:  # If paths were found but none included
                 error_status = "No readable or found files among potential paths."

@@ -994,7 +994,7 @@ class IssueManager:

         model_id = HF_MODELS[model_key]
         issue_id = issue.get('id','N/A')
-        logger.info(f"Requesting resolution suggestion for issue {issue_id} ('{issue.get('title', 'N/A')[:50]}...') using {model_id}")

         # --- Get Pre-computed Info ---
         summary_text = self._get_precomputed_text(issue_id, self.precomputed_summaries, "summary", "Summary")
@@ -1051,7 +1051,7 @@ Based on *all* the information provided above, outline a potential plan:

         if result and isinstance(result, list) and 'generated_text' in result[0]:
             suggestion = result[0].get('generated_text', 'AI Error: No suggestion text generated.').strip()
-            logger.info(f"Received suggestion from {model_id}
             return suggestion
         elif isinstance(result, dict) and 'error' in result:
             error_msg = result['error']
@@ -1059,7 +1059,7 @@ Based on *all* the information provided above, outline a potential plan:
             logger.error(f"HF Inference API suggestion error for issue {issue_id}: {error_msg}" + (f" (Est: {estimated_time}s)" if estimated_time else ""))
             return f"Error: AI model returned an error: {error_msg}"
         else:
-            logger.error(f"Unexpected suggestion response format from {model_id}
             return "Error: Received unexpected response format from AI model."
     except aiohttp.ClientResponseError as e:
         error_body = await e.response.text()
@@ -1080,16 +1080,16 @@ Based on *all* the information provided above, outline a potential plan:
             is_recent = time.time() - timestamp < self.idle_processing_interval * 2

             if entry.get("error"):
-                return f"{name} Error (at {datetime.fromtimestamp(timestamp).strftime('%H:%M:%S')}): {entry['error']}"
             elif entry.get(key) is not None:  # Check key exists and is not None
                 return entry[key]
             else:  # No error, but key might be missing or None
                 if is_recent:
-                    return f"({name}
                 else:
-                    return f"({name}
         else:
-            return f"({name}

     def _get_duplicate_info_text(self, issue_id: int) -> str:
         """Formats duplicate info text."""
@@ -1120,7 +1120,7 @@ Based on *all* the information provided above, outline a potential plan:
                 try:
                     tasks.append(client.send(status_payload))
                 except (ConnectionClosed, ConnectionAbortedError, ConnectionResetError) as e:
-                    logger.warning(f"Client {getattr(client, 'client_id', client.remote_address)} seems disconnected before send: {e}. Marking for removal.")
                     disconnected_clients.append(client)
                 except Exception as e:
                     logger.error(f"Unexpected error preparing send to client {getattr(client, 'client_id', client.remote_address)}: {e}. Marking for removal.")
@@ -1140,7 +1140,7 @@ Based on *all* the information provided above, outline a potential plan:

             if disconnected_clients:
                 unique_disconnected = list(set(disconnected_clients))
-                logger.info(f"Removing {len(unique_disconnected)}
                 for client in unique_disconnected:
                     self.remove_ws_client(client)

@@ -1160,16 +1160,16 @@ Based on *all* the information provided above, outline a potential plan:
             logger.warning(f"Received code update for non-existent editor instance for issue {issue_num}. Ignoring.")
             return
         if issue_num not in self.issues:
-            logger.warning(f"Received code update for non-existent issue {issue_num}
             return

-        logger.warning(f"Handling code editor update for issue {issue_num}
                        "WARNING: NO OT IMPLEMENTED - Last write wins / potential conflicts.")

         try:
             delta_obj = json.loads(delta_str)
             self.code_editors[issue_num].apply_delta(delta_obj)
-            logger.info(f"Applied delta for issue {issue_num}

             update_payload = json.dumps({
                 "type": "code_update",
@@ -1192,7 +1192,7 @@ Based on *all* the information provided above, outline a potential plan:
                 try:
                     tasks.append(client.send(update_payload))
                 except (ConnectionClosed, ConnectionAbortedError, ConnectionResetError) as e:
-                    logger.warning(f"Client {getattr(client, 'client_id', client.remote_address)} seems disconnected before send: {e}. Marking for removal.")
                     disconnected_clients.append(client)
                 except Exception as e:
                     logger.error(f"Unexpected error preparing send to client {getattr(client, 'client_id', client.remote_address)}: {e}. Marking for removal.")
@@ -1200,7 +1200,7 @@ Based on *all* the information provided above, outline a potential plan:


             if tasks:
-                logger.debug(f"Broadcasting code update for issue {issue_num}
                 results = await asyncio.gather(*tasks, return_exceptions=True)
                 for i, result in enumerate(results):
                     if isinstance(result, Exception):
@@ -1213,14 +1213,14 @@ Based on *all* the information provided above, outline a potential plan:

             if disconnected_clients:
                 unique_disconnected = list(set(disconnected_clients))
-                logger.info(f"Removing {len(unique_disconnected)}
                 for client in unique_disconnected:
                     if client: self.remove_ws_client(client)

         except json.JSONDecodeError:
-            logger.error(f"Received invalid JSON delta for issue {issue_num}
         except Exception as e:
-            logger.exception(f"Error handling code editor update for issue {issue_num}

     async def broadcast_issue_update(self):
         """Notifies clients that the issue list/data has changed (e.g., due to webhook)."""
@@ -1240,7 +1240,7 @@ Based on *all* the information provided above, outline a potential plan:
             try:
                 tasks.append(client.send(update_payload))
             except (ConnectionClosed, ConnectionAbortedError, ConnectionResetError) as e:
-                logger.warning(f"Client {getattr(client, 'client_id', client.remote_address)} seems disconnected before send: {e}. Marking for removal.")
                 disconnected_clients.append(client)
             except Exception as e:
                 logger.error(f"Unexpected error preparing send to client {getattr(client, 'client_id', client.remote_address)}: {e}. Marking for removal.")
@@ -1260,7 +1260,7 @@ Based on *all* the information provided above, outline a potential plan:

         if disconnected_clients:
             unique_disconnected = list(set(disconnected_clients))
-            logger.info(f"Removing {len(unique_disconnected)}
             for client in unique_disconnected:
                 self.remove_ws_client(client)

@@ -1268,16 +1268,16 @@ Based on *all* the information provided above, outline a potential plan:
         """Safely removes a client from the list and collaborator dict."""
         client_id = getattr(client_to_remove, 'client_id', None)
         client_addr = client_to_remove.remote_address
-        client_desc = f"{client_id or 'Unknown ID'} ({client_addr})"
         removed_from_list = False
         removed_from_collab = False

         try:
             self.ws_clients.remove(client_to_remove)
             removed_from_list = True
-            logger.info(f"Removed WebSocket client from list: {client_desc}
         except ValueError:
-            logger.debug(f"Client {client_desc}
             pass

         if client_id and client_id in self.collaborators:
@@ -1302,7 +1302,7 @@ Based on *all* the information provided above, outline a potential plan:
             try:
                 tasks.append(client.send(status_payload))
             except (ConnectionClosed, ConnectionAbortedError, ConnectionResetError) as e:
-                logger.warning(f"Client {getattr(client, 'client_id', client.remote_address)} seems disconnected before send: {e}. Marking for removal.")
                 disconnected_clients.append(client)
             except Exception as e:
                 logger.error(f"Unexpected error preparing send to client {getattr(client, 'client_id', client.remote_address)}: {e}. Marking for removal.")
@@ -1324,7 +1324,7 @@ Based on *all* the information provided above, outline a potential plan:

         if disconnected_clients:
             unique_disconnected = list(set(disconnected_clients))
-            logger.info(f"Removing {len(unique_disconnected)}
             for client in unique_disconnected:
                 self.remove_ws_client(client)

@@ -1343,7 +1343,7 @@ Based on *all* the information provided above, outline a potential plan:
                     self.stale_issues.append(issue_id)
             except (ValueError, TypeError) as e:
                 logger.warning(f"Could not parse 'updated_at' ('{updated_at_str}') for issue {issue_id}: {e}")
-        logger.info(f"Identified {len(self.stale_issues)}

     def _identify_high_priority_candidates(self):
         """Identifies high-priority issues (e.g., Critical/High severity)."""
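The staleness pass above parses each issue's updated_at and compares its age to a day threshold. A sketch of that check (GitHub timestamps end in 'Z', which older datetime.fromisoformat versions reject, hence the replace; the 30-day default is illustrative, not the Space's configured value):

    from datetime import datetime, timezone

    def is_stale(updated_at_str: str, threshold_days: int = 30) -> bool:
        """True if the issue's last update is older than threshold_days."""
        updated = datetime.fromisoformat(updated_at_str.replace('Z', '+00:00'))
        age = datetime.now(timezone.utc) - updated
        return age.days > threshold_days

    # is_stale('2023-01-01T00:00:00Z') -> True for any recent "now"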
@@ -1352,7 +1352,7 @@ Based on *all* the information provided above, outline a potential plan:
             severity = self._determine_severity(issue_data.get('labels', []))
             if severity in ["Critical", "High"]:
                 self.high_priority_candidates.append(issue_id)
-        logger.info(f"Identified {len(self.high_priority_candidates)}

     async def _compute_and_store_summary(self, issue_id: int):
         """Generates and stores a summary for a given issue using an LLM (Idle Task)."""
@@ -1366,7 +1366,7 @@ Based on *all* the information provided above, outline a potential plan:
         try:
             issue = self.issues[issue_id]
             model_id = DEFAULT_IDLE_MODEL_ID  # Use designated idle model
-            logger.info(f"Idle Task: Generating summary for issue {issue_id}
             start_time = time.time()
             prompt = f"""Concisely summarize the following GitHub issue in 1-2 sentences. Focus on the core problem or request reported by the user.
Issue Title: {issue.get('title', 'N/A')}
@@ -1391,7 +1391,7 @@ Summary:"""
             if result and isinstance(result, list) and 'generated_text' in result[0]:
                 summary = result[0].get('generated_text', '').strip() or "(AI generated empty summary)"
                 self.precomputed_summaries[issue_id] = {"summary": summary, "error": None, "timestamp": time.time()}
-                logger.info(f"Stored summary for issue {issue_id}
             elif isinstance(result, dict) and 'error' in result:
                 raise ValueError(f"API Error: {result['error']}")
             else:
@@ -1412,7 +1412,7 @@ Summary:"""
         try:
             issue = self.issues[issue_id]
             model_id = DEFAULT_IDLE_MODEL_ID  # Use cheap model
-            logger.info(f"Idle Task: Identifying missing info for issue {issue_id}
             start_time = time.time()

             prompt = f"""Analyze the following GitHub issue description. Identify critical information potentially missing for effective debugging or resolution. List the missing items concisely (e.g., "Steps to reproduce", "Error logs", "Expected vs. Actual behavior", "Environment details"). If the description seems reasonably complete, respond with ONLY the word "None".
@@ -1441,7 +1441,7 @@ Missing Information:"""
                 if info_needed.lower() == "none" or not info_needed:
                     info_needed = "None needed."
                 self.precomputed_missing_info[issue_id] = {"info_needed": info_needed, "error": None, "timestamp": time.time()}
-                logger.info(f"Stored missing info analysis for issue {issue_id} (took {duration:.2f}s): '{info_needed[:50]}...'")
             elif isinstance(result, dict) and 'error' in result:
                 raise ValueError(f"API Error: {result['error']}")
             else:
@@ -1462,7 +1462,7 @@ Missing Information:"""
         try:
             issue = self.issues[issue_id]
             model_id = DEFAULT_IDLE_MODEL_ID  # Use cheap model
-            logger.info(f"Idle Task: Generating preliminary analysis for issue {issue_id}
             start_time = time.time()

             prompt = f"""Analyze the GitHub issue below. Provide a single, concise sentence hypothesizing the root cause OR the main goal. Start with "Hypothesis:". If unsure, respond ONLY with "Hypothesis: Further investigation needed.".
@@ -1494,7 +1494,7 @@ Response:"""
                     hypothesis = "Hypothesis: (Analysis failed or too short)"

                 self.precomputed_analysis[issue_id] = {"hypothesis": hypothesis, "error": None, "timestamp": time.time()}
-                logger.info(f"Stored preliminary analysis for issue {issue_id} (took {duration:.2f}s): '{hypothesis[:60]}...'")
             elif isinstance(result, dict) and 'error' in result:
                 raise ValueError(f"API Error: {result['error']}")
             else:
@@ -1583,7 +1583,7 @@ Response:"""
                 cycle_tasks.append(self._compute_and_store_context(issue_id))
                 context_computed_count += 1
             else: break
-        if context_computed_count > 0: logger.info(f"Scheduled {context_computed_count}

         # 4. Schedule Summary Generation (LLM - Medium Cost)
         summary_computed_count = 0
@@ -1592,7 +1592,7 @@ Response:"""
                 cycle_tasks.append(self._compute_and_store_summary(issue_id))
                 summary_computed_count += 1
             else: break
-        if summary_computed_count > 0: logger.info(f"Scheduled {summary_computed_count}

         # 5. Schedule Missing Info Analysis (LLM - Low Cost)
         missing_info_count = 0
@@ -1601,7 +1601,7 @@ Response:"""
                 cycle_tasks.append(self._compute_and_store_missing_info(issue_id))
                 missing_info_count += 1
             else: break
-        if missing_info_count > 0: logger.info(f"Scheduled {missing_info_count}

         # 6. Schedule Preliminary Analysis (LLM - Low Cost)
         analysis_count = 0
@@ -1610,11 +1610,11 @@ Response:"""
                 cycle_tasks.append(self._compute_and_store_preliminary_analysis(issue_id))
                 analysis_count += 1
             else: break
-        if analysis_count > 0: logger.info(f"Scheduled {analysis_count}

         # --- Execute Scheduled Async Tasks ---
         if cycle_tasks:
-            logger.info(f"Executing {len(cycle_tasks)}
             results = await asyncio.gather(*cycle_tasks, return_exceptions=True)
             num_errors = 0
             for i, result in enumerate(results):
@@ -1622,7 +1622,7 @@ Response:"""
                     num_errors += 1
                     logger.error(f"Error encountered in background idle task {i+1}/{len(cycle_tasks)}: {result}", exc_info=False)  # Keep log cleaner
             cycle_duration = time.time() - start_time_cycle
-            logger.info(f"Idle processing cycle finished in {cycle_duration:.2f}
         else:
             logger.info("No async idle tasks to perform in this cycle.")
         logger.info(f"--- Finished idle processing cycle ---")
@@ -1655,13 +1655,13 @@ Response:"""
             }
             self.precomputed_context[issue_id] = computed_data

-            log_msg = f"Stored context result for issue {issue_id} (found {len(computed_data['files'])} files, took {duration:.2f}s)."
             if computed_data['error']:
                 log_msg += f" Error: {computed_data['error']}"
             logger.info(log_msg)

         except Exception as e:
-            logger.exception(f"Failed to compute context for issue {issue_id}
             self.precomputed_context[issue_id] = {"error": f"Unexpected computation error: {e}", "timestamp": time.time(), "content": None, "files": []}

     async def _run_clustering_and_duplicates_async(self):
@@ -1707,7 +1707,7 @@ def create_ui(manager: IssueManager) -> gr.Blocks:

         status_indicators = []
         if issue_num in manager.stale_issues:
-            status_indicators.append(f"<span title='No updates in >{manager.stale_issue_threshold_days} days' style='color: #b91c1c; font-weight: bold; font-size: 0.9em; background-color: #fee2e2; padding: 1px 4px; border-radius: 3px;'>[Stale]</span>")
         if issue_num in manager.high_priority_candidates:
             severity = manager._determine_severity(issue.get('labels', []))
             if severity == "Critical": color, bgcolor = "#ef4444", "#fee2e2"
@@ -1761,7 +1761,7 @@ def create_ui(manager: IssueManager) -> gr.Blocks:

         if duplicate_text:
             dup_ids = manager.potential_duplicates.get(issue_num, [])
-            dup_links = ", ".join([f"<span style='cursor: help; color: #4338ca; text-decoration: underline dotted;' title='Issue #{dup_id} has similar content'>#{dup_id}</span>" for dup_id in dup_ids])
             ai_sections.append(f"""
             <div style="font-size: 0.9em; margin-top: 8px; background-color: #fffbeb; padding: 6px 10px; border-radius: 4px; border: 1px solid #fef3c7;">
                 <strong style="color: #d97706;">⚠️ Potential Duplicates:</strong> {dup_links}
@@ -1773,8 +1773,8 @@ def create_ui(manager: IssueManager) -> gr.Blocks:
         preview_html = f"""
         <div style="border: 1px solid #e5e7eb; padding: 15px; border-radius: 8px; background-color: #ffffff; font-family: 'Inter', sans-serif; display: flex; flex-direction: column; max-height: 80vh;">
             <h4 style="margin-top: 0; margin-bottom: 10px; font-size: 1.1em; display: flex; justify-content: space-between; align-items: center;">
-                <a href='{issue.get('url', '#')}' target='_blank' style='color: #6d28d9; text-decoration: none; font-weight: 600;' title="Open issue #{issue['id']} on GitHub">
-                    #{issue['id']} - {gr.Textbox.sanitize_html(issue.get('title', 'N/A'))}
                 </a>
                 <span style="margin-left: 10px; flex-shrink: 0;">{status_html}</span>
             </h4>
@@ -1811,7 +1811,7 @@ def create_ui(manager: IssueManager) -> gr.Blocks:

         issue = manager.issues[issue_num]
         issue_hash = manager._get_issue_hash(issue)
-        logger.info(f"Requesting suggestion for issue {issue_num}

         try:
             progress(0.3, desc=f"Querying {model_key}...")
@@ -1835,10 +1835,10 @@ def create_ui(manager: IssueManager) -> gr.Blocks:
         if not manager.repo:
             return "❌ Error: Repository not loaded. Please scan the repository first."

-        logger.info(f"Requesting patch for issue {issue_num}
         progress(0.1, desc="Gathering code context (using cache if available)...")
         try:
-            progress(0.4, desc=f"Querying {model_key}
             result = await manager.generate_code_patch(issue_num, model_key)
             progress(1, desc="Patch result received.")

@@ -1852,7 +1852,7 @@ def create_ui(manager: IssueManager) -> gr.Blocks:

         if patch_content:
             patch_content_sanitized = patch_content.replace('`', '\\`')
-            logger.info(f"Successfully generated patch for issue {issue_num}
             return f"""**🩹 Patch Suggestion from {model_used}:**
**Explanation:**
{explanation}
@@ -1899,10 +1899,10 @@ def create_ui(manager: IssueManager) -> gr.Blocks:
         logger.info(f"Issue selected via Dataframe: ID {selected_id}")

         if selected_id not in manager.issues:
-            logger.error(f"Selected issue ID {selected_id} not found in manager's issue list.")
             return {
                 **default_response,
-                "issue_preview_html": gr.update(value=f"<p style='color: red; font-weight: bold;'>Error: Issue {selected_id} not found in the current list. Try re-scanning.</p>"),
             }

         issue_data = manager.issues[selected_id]
@@ -1917,8 +1917,8 @@ def create_ui(manager: IssueManager) -> gr.Blocks:
             context_source_msg = f"Pre-computed (Failed @ {timestamp_str})"
             files_content["error_context.txt"] = f"# Error loading pre-computed context:\n# {context_data['error']}"
         elif context_data.get("files"):
-            context_source_msg = f"Pre-computed ({len(context_data['files'])} files @ {timestamp_str})"
-            logger.info(f"Loading {len(context_data['files'])} files from pre-computed context for issue {selected_id}: {context_data['files']}")
             loaded_count = 0
             for file_path_str in context_data["files"]:
                 full_path = manager.repo_local_path / file_path_str
@@ -1926,7 +1926,7 @@ def create_ui(manager: IssueManager) -> gr.Blocks:
                     files_content[file_path_str] = full_path.read_text(encoding='utf-8', errors='ignore')
                     loaded_count += 1
                 except Exception as e:
-                    logger.warning(f"Error reading pre-computed file {full_path}
                     files_content[file_path_str] = f"# Error reading file: {e}"
             if loaded_count == 0 and context_data["files"]:
                 files_content["error_reading_files.txt"] = "# Precomputed context found file references, but failed to read any file content."
@@ -1949,8 +1949,8 @@ def create_ui(manager: IssueManager) -> gr.Blocks:
             context_source_msg += f" (Error: {context_result['error']})"
             files_content["error_context.txt"] = f"# Error loading context on demand:\n# {context_result['error']}"
         elif context_result.get("files"):
-            context_source_msg += f" ({len(context_result['files'])} files)"
-            logger.info(f"Loading {len(context_result['files'])} files computed on-demand for issue {selected_id}: {context_result['files']}")
             loaded_count = 0
             for file_path_str in context_result["files"]:
                 full_path = manager.repo_local_path / file_path_str
@@ -1958,7 +1958,7 @@ def create_ui(manager: IssueManager) -> gr.Blocks:
                     files_content[file_path_str] = full_path.read_text(encoding='utf-8', errors='ignore')
                     loaded_count += 1
                 except Exception as e:
-                    logger.warning(f"Error reading on-demand file {full_path}
                     files_content[file_path_str] = f"# Error reading file: {e}"
             if loaded_count == 0 and context_result["files"]:
                 files_content["error_reading_files.txt"] = "# Context computation found file references, but failed to read any file content."
@@ -1972,7 +1972,7 @@ def create_ui(manager: IssueManager) -> gr.Blocks:
         if not files_content:
             files_content["placeholder.txt"] = f"# No relevant files found or context failed to load for issue {selected_id}."
         manager.code_editors[selected_id] = OTCodeEditor(initial_value=files_content)
-        logger.info(f"Initialized/Updated OT editor state for issue {selected_id}

         updates = {
             "selected_issue_id_state": gr.update(value=selected_id),
@@ -1992,7 +1992,7 @@ def create_ui(manager: IssueManager) -> gr.Blocks:
     }

     # --- Gradio Blocks ---
-    with gr.Blocks(theme=theme, title="AI Issue Resolver Pro", css="#collab-list .collab-item { margin-bottom: 4px; font-size: 0.9em; } .gradio-container { max-width: 1600px !important; }") as demo_app:
         gr.Markdown("""
         <div style="text-align: center; margin-bottom: 20px;">
             <h1 style="color: #6d28d9; font-weight: 800;">🚀 AI Issue Resolver Pro</h1>
@@ -2179,7 +2179,7 @@ def create_ui(manager: IssueManager) -> gr.Blocks:
            if (hostname === 'localhost' || hostname === '127.0.0.1' || hostname.endsWith('.gradio.live')) {{
                wsUrl = `${{protocol}}//${{hostname}}:${{wsPort}}`;
                console.log('Detected local/gradio.live environment, using direct WebSocket URL:', wsUrl);
-            }}
                const wsPath = '/ws';
                wsUrl = `${{protocol}}//${{window.location.host}}${{wsPath}}`;
                console.log('Detected non-local environment, assuming proxied WebSocket URL:', wsUrl);
@@ -2209,7 +2209,7 @@ def create_ui(manager: IssueManager) -> gr.Blocks:
                    <span style="color: #555;">${{info.status || 'Idle'}}</span>
                </div>`)
                .join('');
-            }}
                collabListDiv.innerHTML = '<span style="color: #6b7280;">You are the only active user.</span>';
            }}
        }}
@@ -2227,11 +2227,11 @@ def create_ui(manager: IssueManager) -> gr.Blocks:
            console.log(`WebSocket already ${{(collabWs.readyState === WebSocket.OPEN) ? 'open' : 'connecting'}}. State: ${{collabWs.readyState}}`);
            return;
        }}
-        console.log(`Attempting WebSocket connection to ${{wsUrl}}
        updateStatusBar(`Connecting collaboration service (Attempt ${{reconnectAttempts + 1}})...`);
        try {{
            collabWs = new WebSocket(wsUrl);
-        }}
            console.error("WebSocket constructor failed:", e);
            updateStatusBar("Collaboration connection failed (init error).", true);
            // Handle reconnection attempt here as well
@@ -2252,17 +2252,17 @@ def create_ui(manager: IssueManager) -> gr.Blocks:
            const data = JSON.parse(event.data);
            if (data.type === 'collaboration_status') {{
                updateCollabList(data.collaborators);
-            }}
                const receivedIssueNum = parseInt(data.issue_num, 10);
                if (aceEditorInstance && receivedIssueNum === currentIssueId && data.senderId !== clientId) {{
-                    console.warn(`Applying remote delta for issue ${{receivedIssueNum}}
                    try {{
                        const delta = JSON.parse(data.delta);
                        // Add ignore flag for local change listener
                        aceEditorInstance.getSession().getDocument().applyDeltas([{{...delta, ignore: true}}]);
-                    }}
                }}
-            }}
                console.log('Received notification: Issue list updated on server.');
                updateStatusBar('Issue list updated on server. Refreshing the page or re-scanning is recommended.');
                const crawlButton = document.getElementById('crawl_btn');
@@ -2270,10 +2270,10 @@ def create_ui(manager: IssueManager) -> gr.Blocks:
                crawlButton.style.backgroundColor = '#fef08a';
                setTimeout(() => {{ crawlButton.style.backgroundColor = '' }}, 2000);
            }}
-        }}
            console.warn("Received unknown WebSocket message type:", data.type, data);
        }}
-    }}
        console.error('Failed to parse WebSocket message or update UI:', e, event.data);
    }}
}};
@@ -2296,10 +2296,10 @@ def create_ui(manager: IssueManager) -> gr.Blocks:
        editorChangeListenerAttached = false; // Allow re-attaching on reconnect
        if (reconnectAttempts < maxReconnectAttempts) {{
            const delay = Math.pow(2, reconnectAttempts) * 1500 + Math.random() * 1000;
-            console.log(`Attempting to reconnect WebSocket in approx. ${{Math.round(delay / 1000)}}
            setTimeout(connectWebSocket, delay);
            reconnectAttempts++;
-        }}
        console.error('Max WebSocket reconnection attempts reached.');
        updateStatusBar('Collaboration failed - Max reconnect attempts reached.', true);
    }}
@@ -2308,10 +2308,10 @@ def create_ui(manager: IssueManager) -> gr.Blocks:
    if (collabWs && collabWs.readyState === WebSocket.OPEN) {{
        try {{
            collabWs.send(JSON.stringify(message));
-        }}
            console.error("Failed to stringify or send WebSocket message:", e, message);
        }}
-    }}
        console.warn('WebSocket not connected. Cannot send message:', message);
    }}
    }}
@@ -2325,7 +2325,7 @@ def create_ui(manager: IssueManager) -> gr.Blocks:
        console.warn(`Ace library or editor element not found yet. Retrying editor setup (${{editorSetupAttempts + 1}}/${{maxEditorSetupAttempts}})...`);
        editorSetupAttempts++;
        setTimeout(setupCodeEditorListener, 1500 + Math.random() * 500);
-    }}
        console.error("Ace library or editor element not found after multiple attempts. Code editor collaboration disabled.");
        updateStatusBar("Code editor library or UI element failed to load.", true);
    }}
@@ -2347,7 +2347,7 @@ def create_ui(manager: IssueManager) -> gr.Blocks:
    console.log("Attaching Ace editor 'change' listener...");
    aceEditorInstance.getSession().on('change', function(delta) {{
        // Check for the custom 'ignore' flag added when applying remote deltas
-        if (delta.ignore) {{ return; }}
        if (currentIssueId !== null) {{
            const now = Date.now();
            // Simple debounce to avoid flooding server
@@ -2378,7 +2378,7 @@ def create_ui(manager: IssueManager) -> gr.Blocks:
        }}
        editorSetupAttempts = 0; // Reset attempts on success
        updateTrackedIssueId(); // Ensure correct issue ID is tracked after setup
-    }}
        console.error('Failed to initialize Ace editor instance or attach listeners:', e);
        if (editorElement) delete editorElement.dataset.collabInitialized; // Allow retry
        aceEditorInstance = null;
@@ -2387,7 +2387,7 @@ def create_ui(manager: IssueManager) -> gr.Blocks:
        console.warn(`Retrying editor setup after error (${{editorSetupAttempts + 1}}/${{maxEditorSetupAttempts}})...`);
        editorSetupAttempts++;
        setTimeout(setupCodeEditorListener, 2000 + Math.random() * 500);
-    }}
        console.error("Max editor setup attempts failed after error. Collaboration disabled.");
        updateStatusBar("Code editor setup failed repeatedly.", true);
    }}
@@ -2418,13 +2418,13 @@ def create_ui(manager: IssueManager) -> gr.Blocks:
                newIssueId = parseInt(value, 10);
                // Found a potential ID, stop searching
                break;
-            }}
                console.debug("Could not parse hidden input value as int:", value, e);
            }}
        }}
    }}
    if (newIssueId !== currentIssueId) {{
-        console.log(`Updating tracked issue ID: from ${{currentIssueId}}
        currentIssueId = newIssueId;
        const status = currentIssueId !== null ? `Viewing Issue #${{currentIssueId}}` : 'Idle';
        sendWsMessage({{ type: 'status_update', clientId: clientId, status: status}});
@@ -2480,7 +2480,7 @@ def create_ui(manager: IssueManager) -> gr.Blocks:
    if (observerTargetNode) {{
        console.log("Starting MutationObserver to detect Gradio UI changes (incl. hidden state).");
        observer.observe(observerTargetNode, observerConfig);
-    }}
        console.error("Could not find observer target node (document.body).");
    }}
    // Initial setup attempts
@@ -2500,7 +2500,7 @@ async def handle_ws_connection(websocket: WebSocketServerProtocol, path: str, ma
    client_id = f"client_{hashlib.sha1(os.urandom(16)).hexdigest()[:8]}"
    setattr(websocket, 'client_id', client_id)
    remote_addr = websocket.remote_address
-    logger.info(f"WebSocket client connected: {remote_addr}

    manager.ws_clients.append(websocket)
    logger.info(f"Client list size: {len(manager.ws_clients)}")
@@ -2512,12 +2512,12 @@ async def handle_ws_connection(websocket: WebSocketServerProtocol, path: str, ma
            msg_type = data.get("type")
            sender_id = client_id

-            logger.debug(f"Received WS message type '{msg_type}' from {sender_id} ({remote_addr})")

            if msg_type == "join":
                client_name = data.get("name", f"User_{sender_id[:4]}")
                manager.collaborators[sender_id] = {"name": client_name, "status": "Connected"}
-                logger.info(f"Client {sender_id}
                await manager.broadcast_collaboration_status_once()

            elif msg_type == "code_update":
@@ -2534,25 +2534,25 @@ async def handle_ws_connection(websocket: WebSocketServerProtocol, path: str, ma
                    manager.collaborators[sender_id]["status"] = status
                    await manager.broadcast_collaboration_status_once()
                else:
-                    logger.warning(f"Received status update from client {sender_id}
-                    manager.collaborators[sender_id] = {"name": f"User_{sender_id[:4]}
                    await manager.broadcast_collaboration_status_once()

            else:
-                logger.warning(f"Unknown WebSocket message type '{msg_type}' received from {sender_id} ({remote_addr}). Message: {str(message)[:200]}")

        except json.JSONDecodeError:
-            logger.error(f"Received invalid JSON over WebSocket from {sender_id}
        except Exception as e:
-            logger.exception(f"Error processing WebSocket message from {sender_id}

    # Catch standard socket exceptions for disconnects
    except (ConnectionClosed, ConnectionClosedOK, ConnectionAbortedError, ConnectionResetError) as e:
-        logger.info(f"WebSocket client {client_id} ({remote_addr}) disconnected: Code={getattr(e, 'code', 'N/A')}, Reason='{getattr(e, 'reason', 'N/A')}'")
    except Exception as e:
-        logger.exception(f"Unexpected error in WebSocket handler for {client_id}
    finally:
-        logger.info(f"Cleaning up connection for client {client_id}
        manager.remove_ws_client(websocket)


@@ -2583,7 +2583,7 @@ async def start_websocket_server(manager: IssueManager, port: int):
    except OSError as e:
        logger.error(f"Failed to start WebSocket server on port {port}: {e}. Is the port already in use?")
-        raise SystemExit(f"WebSocket Port {port}
    except asyncio.CancelledError:
        logger.info("WebSocket server task cancelled.")
    except Exception as e:
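For context on the WebSocket hunks above: the handler consumes JSON messages in a loop until the connection closes, then cleans up in finally. A minimal skeleton with the same shape, using the websockets library (the (websocket, path, manager) signature follows the diff; newer websockets versions pass only the connection, and the manager wiring here is elided):

    import json
    from websockets.exceptions import ConnectionClosed

    async def handle_ws_connection(websocket, path, manager):
        manager.ws_clients.append(websocket)
        try:
            async for message in websocket:       # iterates until the socket closes
                try:
                    data = json.loads(message)
                except json.JSONDecodeError:
                    continue                      # skip (and ideally log) malformed frames
                if data.get("type") == "join":
                    manager.collaborators[getattr(websocket, 'client_id', '?')] = {
                        "name": data.get("name", "User"), "status": "Connected"}
        except ConnectionClosed:
            pass                                  # normal disconnect path
        finally:
            manager.remove_ws_client(websocket)   # always clean up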
|
|
137 |
|
138 |
event = self.headers.get('X-GitHub-Event')
|
139 |
delivery_id = self.headers.get('X-GitHub-Delivery')
|
140 |
+
logger.info(f"Received GitHub webhook event: {{}event}} (Delivery ID: {{delivery_id}})")
|
141 |
|
142 |
if event == 'issues' and WebhookHandler.manager_instance and WebhookHandler.main_loop:
|
143 |
action = payload.get('action')
|
|
|
158 |
elif event == 'ping':
|
159 |
logger.info("Received GitHub webhook ping.")
|
160 |
else:
|
161 |
+
logger.warning(f"Unhandled event type: {{event}} or manager/loop not initialized.")
|
162 |
|
163 |
self.send_response(200)
|
164 |
self.send_header("Content-type", "text/plain")
|
|
|
248 |
break
|
249 |
|
250 |
if not found_issue:
|
251 |
+
logger.error(f"Could not find issue data for hash {{issue_hash}} in current state. Suggestion might be based on outdated info if generated.")
|
252 |
return "Error: Issue data for this suggestion request (hash) not found in current state. The issue might have been updated or closed. Please re-select the issue."
|
253 |
|
254 |
if model_key not in HF_MODELS:
|
|
|
282 |
significant_change = False # Flag for changes affecting clustering/content/AI caches
|
283 |
|
284 |
if action == 'closed':
|
285 |
+
logger.info(f"Webhook: Removing closed issue {{issue_number}} from active list.")
|
286 |
if self.issues.pop(issue_number, None):
|
287 |
needs_ui_update = True
|
288 |
significant_change = True
|
|
|
297 |
self.code_editors.pop(issue_number, None)
|
298 |
|
299 |
elif action in ['opened', 'reopened', 'edited', 'assigned', 'unassigned', 'labeled', 'unlabeled', 'milestoned', 'demilestoned']:
|
300 |
+
logger.info(f"Webhook: Adding/Updating issue {{issue_number}} (action: {{action}}).")
|
301 |
processed_data = self._process_issue_data(issue_data)
|
302 |
|
303 |
old_issue = self.issues.get(issue_number)
|
|
|
306 |
old_issue.get('title') != processed_data.get('title') or \
|
307 |
set(old_issue.get('labels', [])) != set(processed_data.get('labels', [])):
|
308 |
significant_change = True
|
309 |
+
logger.info(f"Significant change detected for issue {{issue_number}} (content/labels).")
|
310 |
# Invalidate ALL precomputed AI state on significant edit
|
311 |
self.precomputed_context.pop(issue_number, None)
|
312 |
self.precomputed_summaries.pop(issue_number, None)
|
|
|
318 |
old_issue.get('updated_at') != processed_data.get('updated_at') or \
|
319 |
old_issue.get('assignee') != processed_data.get('assignee') or \
|
320 |
set(old_issue.get('labels', [])) != set(processed_data.get('labels', [])):
|
321 |
+
logger.debug(f"State-related change detected for issue {{issue_number}} (update time, assignee, labels). Idle loop will re-evaluate.")
|
322 |
|
323 |
self.issues[issue_number] = processed_data
|
324 |
needs_ui_update = True
|
325 |
else:
|
326 |
+
logger.info(f"Ignoring webhook action '{{action}}' for issue {{issue_number}} (already filtered).")
|
327 |
|
328 |
# --- Track changes for idle processing ---
|
329 |
if needs_ui_update:
|
|
|
351 |
"body": issue_data.get('body', ''),
|
352 |
"state": issue_data.get('state', 'unknown'),
|
353 |
"labels": sorted([label['name'] for label in issue_data.get('labels', [])]),
|
354 |
+
"assignee": issue_data.get('assignee', {}).get('login') if issue_data.get('assignee') else None,
|
355 |
"url": issue_data.get('html_url', '#'),
|
356 |
"created_at": issue_data.get('created_at'),
|
357 |
"updated_at": issue_data.get('updated_at'),
|
|
|
417 |
remote_url = next((r.url for r in self.repo.remotes.origin.urls), None)
|
418 |
expected_urls = [self.repo_url, self.repo_url + ".git"]
|
419 |
if remote_url not in expected_urls:
|
420 |
+
logger.warning(f"Existing repo path {{self.repo_local_path}} has different remote URL ('{{remote_url}' vs '{{self.repo_url}'). Re-cloning.")
|
421 |
shutil.rmtree(self.repo_local_path)
|
422 |
self.repo = Repo.clone_from(self.repo_url, self.repo_local_path, progress=lambda op, cur, tot, msg: logger.debug(f"Clone progress: {{msg}}"))
|
423 |
else:
|
|
|
428 |
logger.info("Repository is shallow, unshallowing...")
|
429 |
self.repo.git.fetch('--unshallow')
|
430 |
else:
|
431 |
+
logger.warning(f"Existing repo at {{self.repo_local_path}} has no remotes defined. Re-cloning.")
|
432 |
shutil.rmtree(self.repo_local_path)
|
433 |
self.repo = Repo.clone_from(self.repo_url, self.repo_local_path, progress=lambda op, cur, tot, msg: logger.debug(f"Clone progress: {{msg}}"))
|
434 |
|
|
|
442 |
try: self.repo = Repo(self.repo_local_path)
|
443 |
except Exception: logger.error("Failed to even load existing repo after pull error.")
|
444 |
else:
|
445 |
+
logger.info(f"Cloning repository {{self.repo_url}} to {{self.repo_local_path}}")
|
446 |
self.repo = Repo.clone_from(self.repo_url, self.repo_local_path, progress=lambda op, cur, tot, msg: logger.debug(f"Clone progress: {{msg}}"))
|
447 |
|
448 |
logger.info("Repository clone/update process finished.")
|
|
|
482 |
issues_page_data = await response.json()
|
483 |
if not issues_page_data: break
|
484 |
|
485 |
+
logger.info(f"Fetched page {{page}} with {{len(issues_page_data)}} items.")
|
486 |
all_issues_data.extend(issues_page_data)
|
487 |
|
488 |
link_header = response.headers.get('Link')
|
|
|
499 |
if 'pull_request' not in issue_data
|
500 |
}
|
501 |
|
502 |
+
logger.info(f"Filtered out pull requests, {{len(self.issues)}} actual open issues remaining.")
|
503 |
|
504 |
empty_fig = go.Figure()
|
505 |
empty_fig.update_layout(title="Issue Severity Distribution", xaxis={{"visible": False}, yaxis={{"visible": False},
|
|
|
532 |
if 0 <= index < len(self.issue_list_for_clustering):
|
533 |
index_to_cluster_id[index] = cluster_id
|
534 |
else:
|
535 |
+
logger.warning(f"Clustering returned invalid index {{index}} for list of length {{len(self.issue_list_for_clustering)}}")
|
536 |
|
537 |
for i, issue in enumerate(self.issue_list_for_clustering):
|
538 |
severity = self._determine_severity(issue['labels'])
|
|
|
552 |
self.start_broadcast_loop()
|
553 |
self.start_idle_processing()
|
554 |
|
555 |
+
success_msg = f"Found {{len(self.issues)}} open issues. Clustered into {{len(self.issue_clusters)}} groups. Repo ready. Background analysis started."
|
556 |
logger.info(success_msg)
|
557 |
# Return both plots
|
558 |
return dataframe_data, stats_fig, success_msg, stats_fig # Mypy may complain about return type mismatch if not explicitly handled
|
559 |
|
560 |
except aiohttp.ClientResponseError as e:
|
561 |
logger.error(f"GitHub API request failed: Status={{e.status}, Message='{{e.message}', URL='{{e.request_info.url}'")
|
562 |
+
error_msg = f"Error fetching issues: {{e.status}} - {{e.message}}. Check token/URL."
|
563 |
if e.status == 404: error_msg = f"Error: Repository not found at {{self.repo_url}}."
|
564 |
elif e.status == 401: error_msg = "Error: Invalid GitHub token or insufficient permissions for this repository."
|
565 |
elif e.status == 403:
|
|
|
628 |
return
|
629 |
|
630 |
num_issues = len(self.issue_list_for_clustering)
|
631 |
+
logger.info(f"Generating embeddings for {{num_issues}} issues for clustering...")
|
632 |
try:
|
633 |
texts_to_embed = [
|
634 |
+
f"Title: {{i.get('title','')}} Body: {{i.get('body','')[:1500]}"
|
635 |
for i in self.issue_list_for_clustering
|
636 |
]
|
637 |
embeddings = await self._generate_embeddings(texts_to_embed)
|
638 |
|
639 |
if embeddings is None or not isinstance(embeddings, list) or len(embeddings) != num_issues:
|
640 |
+
logger.error(f"Failed to generate valid embeddings for clustering. Expected {{num_issues}, got {{type(embeddings)}} len {{len(embeddings) if embeddings else 'N/A'}.")
|
641 |
self.issue_clusters = {{}
|
642 |
return
|
643 |
|
644 |
+
logger.info(f"Generated {{len(embeddings)}} embeddings. Running HDBSCAN clustering...")
|
645 |
clusterer = HDBSCAN(min_cluster_size=2, metric='cosine', allow_single_cluster=True, gen_min_span_tree=True)
|
646 |
clusters = clusterer.fit_predict(embeddings)
|
647 |
|
|
|
new_issue_clusters[cluster_id_int].append(i)

self.issue_clusters = new_issue_clusters
+logger.info(f"Clustering complete. Found {len(self.issue_clusters)} clusters (min size 2) with {noise_count} noise points.")

# Reset the change counter and flag after successful clustering
self._webhook_change_count = 0

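# ---------------------------------------------------------------------------
# [Editor's note -- illustrative sketch, not part of this commit]
# HDBSCAN's fit_predict returns one integer label per input, with -1 meaning
# noise. One way to build the index->cluster grouping seen above:
#
#     from collections import defaultdict
#     new_issue_clusters = defaultdict(list)
#     noise_count = 0
#     for i, label in enumerate(clusters):
#         if label == -1:
#             noise_count += 1
#         else:
#             new_issue_clusters[int(label)].append(i)
# ---------------------------------------------------------------------------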
for i, issue in enumerate(self.issue_list_for_clustering):
    issue_id = issue.get('id')
    if issue_id is None:
+        logger.warning(f"Issue at index {i} in clustering list is missing an ID.")
        continue
    index_to_id[i] = issue_id
except Exception as e:

for issue_id in cluster_issue_ids:
    self.potential_duplicates[issue_id] = [other_id for other_id in cluster_issue_ids if other_id != issue_id]

+logger.info(f"Identified potential duplicates for {len(self.potential_duplicates)} issues based on clustering.")

async def _generate_embeddings(self, texts: List[str]):
    """Generates sentence embeddings using Hugging Face Inference API."""

headers = {"Authorization": f"Bearer {self.hf_token}"}
timeout = aiohttp.ClientTimeout(total=180)

+logger.info(f"Requesting embeddings from {api_url} for {len(texts)} texts.")
async with aiohttp.ClientSession(headers=headers, timeout=timeout) as session:
    try:
        payload = {"inputs": texts, "options": {"wait_for_model": True}}

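# ---------------------------------------------------------------------------
# [Editor's note -- illustrative sketch, not part of this commit]
# The payload above targets the HF Inference API's feature-extraction task;
# "wait_for_model" asks the API to block while a cold model loads rather than
# returning an error. The request itself is a plain POST, roughly:
#
#     async with session.post(api_url, json=payload) as response:
#         response.raise_for_status()
#         result = await response.json()  # a list of embedding vectors on success
# ---------------------------------------------------------------------------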
if isinstance(result, list) and all(isinstance(emb, list) and all(isinstance(f, float) for f in emb) for emb in result):
    if len(result) == len(texts):
+        logger.info(f"Successfully received {len(result)} embeddings of expected dimension.")
        return result
    else:
        logger.error(f"HF Embedding API returned wrong number of embeddings: Got {len(result)}, expected {len(texts)}.")

logger.error(f"HF Inference API embedding request failed: Status={e.status}, Message='{e.message}'. Body: {error_body[:500]}")
return None
except asyncio.TimeoutError:
+    logger.error(f"HF Inference API embedding request timed out after {timeout.total} seconds.")
    return None
except Exception as e:
    logger.exception(f"Unexpected error during embedding generation: {e}")

async def generate_code_patch(self, issue_number: int, model_key: str) -> dict:
    """Generates a code patch suggestion using a selected AI model."""
    if issue_number not in self.issues:
+        return {"error": f"Issue {issue_number} not found."}
    if not self.hf_token:
        return {"error": "Hugging Face token not set."}
    if model_key not in HF_MODELS:

issue = self.issues[issue_number]
model_id = HF_MODELS[model_key]
+logger.info(f"Generating patch for issue {issue_number} ('{issue.get('title', 'N/A')[:50]}...') using model {model_id}")

# --- Context Gathering ---
context_str = "Context gathering failed or not available."

elif context_data.get("content"):
    context_str = context_data["content"]
    num_files = len(context_data.get('files',[]))
+    context_source = f"Pre-computed ({num_files} files @ {timestamp_str})"
else:
    context_str = "Pre-computed context was empty or unavailable."
    context_source = f"Pre-computed (Empty @ {timestamp_str})"
+logger.info(f"Using pre-computed context for issue {issue_number} (Source: {context_source})")
else:
    logger.info(f"No pre-computed context found for issue {issue_number}, computing now.")
    context_source = "Computed On-Demand"

context_source += " (Error)"
else:
    context_str = context_result.get("content", "No specific context found.")
+    context_source += f" ({len(context_result.get('files',[]))} files)"
    self.precomputed_context[issue_number] = {
        "content": context_str,
        "files": context_result.get("files", []),

if result and isinstance(result, list) and 'generated_text' in result[0]:
    generated_text = result[0].get('generated_text', '').strip()
+    logger.info(f"Received patch suggestion from {model_id} ({len(generated_text)} chars).")

    diff_match = re.search(r"```diff\n(.*?)```", generated_text, re.DOTALL | re.IGNORECASE)
    explanation = generated_text.split("```diff")[0].strip() if diff_match else generated_text

if diff_match:
    patch_content = diff_match.group(1).strip()
    if not re.search(r'^(--- |\+\+\+ |@@ )', patch_content, re.MULTILINE):
+        logger.warning(f"Generated patch for issue {issue_number} might lack standard diff headers or spacing.")
    return {"explanation": explanation, "patch": patch_content, "model_used": model_id}
else:
    if re.search(r"(insufficient context|cannot generate|unable to create patch|context required)", explanation, re.IGNORECASE):

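# ---------------------------------------------------------------------------
# [Editor's note -- illustrative sketch, not part of this commit]
# The regex above extracts the body of a fenced ```diff block from the model's
# reply, e.g.:
#
#     text = "Fix below:\n```diff\n--- a/x.py\n+++ b/x.py\n@@ -1 +1 @@\n-a\n+b\n```"
#     m = re.search(r"```diff\n(.*?)```", text, re.DOTALL | re.IGNORECASE)
#     m.group(1)  # "--- a/x.py\n+++ b/x.py\n@@ -1 +1 @@\n-a\n+b\n"
#
# The header check that follows is why the fallback warning exists: models
# sometimes emit pseudo-diffs without ---/+++/@@ markers.
# ---------------------------------------------------------------------------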
logger.error(f"HF Inference API patch error for issue {issue_number}: {error_msg}" + (f" (Est: {estimated_time}s)" if estimated_time else ""))
return {"error": f"AI model error: {error_msg}"}
else:
+    logger.error(f"Unexpected patch response format from {model_id} for issue {issue_number}: {str(result)[:500]}")
    return {"error": "Unexpected response format from AI model."}
except aiohttp.ClientResponseError as e:
    error_body = await e.response.text()

if not potential_files:
    return {"content": "No file paths matching common patterns found in the issue title or body.", "files": [], "error": None}

+logger.info(f"Found {len(potential_files)} potential file references in issue {issue_id}: {potential_files}")
context_content = ""
max_context_length = 6000
files_included = []

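# ---------------------------------------------------------------------------
# [Editor's note -- illustrative sketch, not part of this commit]
# `potential_files` is built earlier from the issue title/body. The author's
# exact pattern is not shown here; a hypothetical extractor in that spirit:
#
#     PATH_RE = re.compile(r'\b[\w./-]+\.(?:py|js|ts|html|css|json|md|ya?ml)\b')
#     potential_files = sorted(set(PATH_RE.findall(f"{issue['title']}\n{issue['body']}")))
# ---------------------------------------------------------------------------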
context_content += content_snippet
files_included.append(str(relative_path))
else:
+    logger.warning(f"Skipping file {relative_path} for context in issue {issue_id} due to total length limit ({max_context_length} chars).")
    files_skipped_length.append(str(relative_path))
except OSError as e:
+    logger.warning(f"Could not read file {full_path} for issue {issue_id}: {e}")
    files_read_error.append(str(relative_path))
except Exception as e:
+    logger.warning(f"Unexpected error reading file {full_path} for issue {issue_id}: {e}")
    files_read_error.append(str(relative_path))
else:
    logger.info(f"Potential path '{relative_path}' (from '{file_path_str}') not found or not a file in local repo for issue {issue_id}.")

error_status = None
if files_included:
    final_content = context_content.strip()
+    logger.info(f"Included context from {len(files_included)} files for issue {issue_id}: {files_included}")
else:
    final_content = "No content could be retrieved from the potential file paths found."
+    logger.warning(f"Context generation for issue {issue_id} resulted in no included files.")
    if potential_files:  # If paths were found but none included
        error_status = "No readable or found files among potential paths."

model_id = HF_MODELS[model_key]
issue_id = issue.get('id','N/A')
+logger.info(f"Requesting resolution suggestion for issue {issue_id} ('{issue.get('title', 'N/A')[:50]}...') using {model_id}")

# --- Get Pre-computed Info ---
summary_text = self._get_precomputed_text(issue_id, self.precomputed_summaries, "summary", "Summary")

if result and isinstance(result, list) and 'generated_text' in result[0]:
    suggestion = result[0].get('generated_text', 'AI Error: No suggestion text generated.').strip()
+    logger.info(f"Received suggestion from {model_id} for issue {issue_id} ({len(suggestion)} chars).")
    return suggestion
elif isinstance(result, dict) and 'error' in result:
    error_msg = result['error']

logger.error(f"HF Inference API suggestion error for issue {issue_id}: {error_msg}" + (f" (Est: {estimated_time}s)" if estimated_time else ""))
return f"Error: AI model returned an error: {error_msg}"
else:
+    logger.error(f"Unexpected suggestion response format from {model_id} for issue {issue_id}: {str(result)[:500]}")
    return "Error: Received unexpected response format from AI model."
except aiohttp.ClientResponseError as e:
    error_body = await e.response.text()

    is_recent = time.time() - timestamp < self.idle_processing_interval * 2

    if entry.get("error"):
+        return f"{name} Error (at {datetime.fromtimestamp(timestamp).strftime('%H:%M:%S')}): {entry['error']}"
    elif entry.get(key) is not None:  # Check key exists and is not None
        return entry[key]
    else:  # No error, but key might be missing or None
        if is_recent:
+            return f"({name} computation pending...)"
        else:
+            return f"({name} not computed or result was empty)"
else:
+    return f"({name} not computed yet)"

def _get_duplicate_info_text(self, issue_id: int) -> str:
    """Formats duplicate info text."""

try:
    tasks.append(client.send(status_payload))
except (ConnectionClosed, ConnectionAbortedError, ConnectionResetError) as e:
+    logger.warning(f"Client {getattr(client, 'client_id', client.remote_address)} seems disconnected before send: {e}. Marking for removal.")
    disconnected_clients.append(client)
except Exception as e:
    logger.error(f"Unexpected error preparing send to client {getattr(client, 'client_id', client.remote_address)}: {e}. Marking for removal.")

if disconnected_clients:
    unique_disconnected = list(set(disconnected_clients))
+    logger.info(f"Removing {len(unique_disconnected)} disconnected clients after broadcast.")
    for client in unique_disconnected:
        self.remove_ws_client(client)

logger.warning(f"Received code update for non-existent editor instance for issue {issue_num}. Ignoring.")
return
if issue_num not in self.issues:
+    logger.warning(f"Received code update for non-existent issue {issue_num} in manager. Ignoring.")
    return

+logger.warning(f"Handling code editor update for issue {issue_num} from {sender_client_id}. "
               "WARNING: NO OT IMPLEMENTED - Last write wins / potential conflicts.")

try:
    delta_obj = json.loads(delta_str)
    self.code_editors[issue_num].apply_delta(delta_obj)
+    logger.info(f"Applied delta for issue {issue_num} from client {sender_client_id} (Placeholder OT Logic - Revision {self.code_editors[issue_num].revision})")

    update_payload = json.dumps({
        "type": "code_update",

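# ---------------------------------------------------------------------------
# [Editor's note -- illustrative sketch, not part of this commit]
# As the warning above says, apply_delta here is not operational transformation:
# concurrent edits are applied in arrival order and the last write wins. A
# hypothetical reading of the placeholder editor's semantics:
#
#     class OTCodeEditor:
#         def __init__(self, initial_value):
#             self.files = dict(initial_value)
#             self.revision = 0
#         def apply_delta(self, delta):
#             # No transform against concurrent edits; just record and bump.
#             self.revision += 1
#
# Real OT would transform `delta` against every operation committed since the
# sender's base revision before applying it.
# ---------------------------------------------------------------------------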
try:
    tasks.append(client.send(update_payload))
except (ConnectionClosed, ConnectionAbortedError, ConnectionResetError) as e:
+    logger.warning(f"Client {getattr(client, 'client_id', client.remote_address)} seems disconnected before send: {e}. Marking for removal.")
    disconnected_clients.append(client)
except Exception as e:
    logger.error(f"Unexpected error preparing send to client {getattr(client, 'client_id', client.remote_address)}: {e}. Marking for removal.")

if tasks:
+    logger.debug(f"Broadcasting code update for issue {issue_num} to {len(tasks)} other clients.")
    results = await asyncio.gather(*tasks, return_exceptions=True)
    for i, result in enumerate(results):
        if isinstance(result, Exception):

if disconnected_clients:
    unique_disconnected = list(set(disconnected_clients))
+    logger.info(f"Removing {len(unique_disconnected)} clients after code update broadcast failure.")
    for client in unique_disconnected:
        if client: self.remove_ws_client(client)

except json.JSONDecodeError:
+    logger.error(f"Received invalid JSON delta for issue {issue_num} from {sender_client_id}: {delta_str[:200]}")
except Exception as e:
+    logger.exception(f"Error handling code editor update for issue {issue_num} from {sender_client_id}: {e}")

async def broadcast_issue_update(self):
    """Notifies clients that the issue list/data has changed (e.g., due to webhook)."""

try:
    tasks.append(client.send(update_payload))
except (ConnectionClosed, ConnectionAbortedError, ConnectionResetError) as e:
+    logger.warning(f"Client {getattr(client, 'client_id', client.remote_address)} seems disconnected before send: {e}. Marking for removal.")
    disconnected_clients.append(client)
except Exception as e:
    logger.error(f"Unexpected error preparing send to client {getattr(client, 'client_id', client.remote_address)}: {e}. Marking for removal.")

if disconnected_clients:
    unique_disconnected = list(set(disconnected_clients))
+    logger.info(f"Removing {len(unique_disconnected)} clients after issue update broadcast.")
    for client in unique_disconnected:
        self.remove_ws_client(client)

"""Safely removes a client from the list and collaborator dict."""
client_id = getattr(client_to_remove, 'client_id', None)
client_addr = client_to_remove.remote_address
+client_desc = f"{client_id or 'Unknown ID'} ({client_addr})"
removed_from_list = False
removed_from_collab = False

try:
    self.ws_clients.remove(client_to_remove)
    removed_from_list = True
+    logger.info(f"Removed WebSocket client from list: {client_desc} (Remaining: {len(self.ws_clients)})")
except ValueError:
+    logger.debug(f"Client {client_desc} already removed from list or not found.")
    pass

if client_id and client_id in self.collaborators:

try:
    tasks.append(client.send(status_payload))
except (ConnectionClosed, ConnectionAbortedError, ConnectionResetError) as e:
+    logger.warning(f"Client {getattr(client, 'client_id', client.remote_address)} seems disconnected before send: {e}. Marking for removal.")
    disconnected_clients.append(client)
except Exception as e:
    logger.error(f"Unexpected error preparing send to client {getattr(client, 'client_id', client.remote_address)}: {e}. Marking for removal.")

if disconnected_clients:
    unique_disconnected = list(set(disconnected_clients))
+    logger.info(f"Removing {len(unique_disconnected)} clients found disconnected during single broadcast.")
    for client in unique_disconnected:
        self.remove_ws_client(client)

        self.stale_issues.append(issue_id)
    except (ValueError, TypeError) as e:
        logger.warning(f"Could not parse 'updated_at' ('{updated_at_str}') for issue {issue_id}: {e}")
+logger.info(f"Identified {len(self.stale_issues)} potentially stale issues (updated > {self.stale_issue_threshold_days} days ago).")

def _identify_high_priority_candidates(self):
    """Identifies high-priority issues (e.g., Critical/High severity)."""

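# ---------------------------------------------------------------------------
# [Editor's note -- illustrative sketch, not part of this commit]
# The staleness test feeding the try/except above is a date comparison on
# GitHub's ISO-8601 'updated_at' (e.g. "2024-01-01T00:00:00Z"); a sketch:
#
#     from datetime import datetime, timedelta, timezone
#     updated = datetime.fromisoformat(updated_at_str.replace('Z', '+00:00'))
#     cutoff = datetime.now(timezone.utc) - timedelta(days=self.stale_issue_threshold_days)
#     if updated < cutoff:
#         self.stale_issues.append(issue_id)
# ---------------------------------------------------------------------------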
    severity = self._determine_severity(issue_data.get('labels', []))
    if severity in ["Critical", "High"]:
        self.high_priority_candidates.append(issue_id)
+logger.info(f"Identified {len(self.high_priority_candidates)} high-priority candidates (Critical/High severity).")

async def _compute_and_store_summary(self, issue_id: int):
    """Generates and stores a summary for a given issue using an LLM (Idle Task)."""

try:
    issue = self.issues[issue_id]
    model_id = DEFAULT_IDLE_MODEL_ID  # Use designated idle model
+    logger.info(f"Idle Task: Generating summary for issue {issue_id} using {model_id}")
    start_time = time.time()
    prompt = f"""Concisely summarize the following GitHub issue in 1-2 sentences. Focus on the core problem or request reported by the user.
Issue Title: {issue.get('title', 'N/A')}

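# ---------------------------------------------------------------------------
# [Editor's note -- illustrative sketch, not part of this commit]
# Prompts like the one above are typically posted to the Inference API as a
# text-generation payload; parameter values here are illustrative, not the
# app's actual settings:
#
#     payload = {
#         "inputs": prompt,
#         "parameters": {"max_new_tokens": 100, "temperature": 0.3},
#         "options": {"wait_for_model": True},
#     }
# ---------------------------------------------------------------------------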
if result and isinstance(result, list) and 'generated_text' in result[0]:
    summary = result[0].get('generated_text', '').strip() or "(AI generated empty summary)"
    self.precomputed_summaries[issue_id] = {"summary": summary, "error": None, "timestamp": time.time()}
+    logger.info(f"Stored summary for issue {issue_id} (took {duration:.2f}s).")
elif isinstance(result, dict) and 'error' in result:
    raise ValueError(f"API Error: {result['error']}")
else:

try:
    issue = self.issues[issue_id]
    model_id = DEFAULT_IDLE_MODEL_ID  # Use cheap model
+    logger.info(f"Idle Task: Identifying missing info for issue {issue_id} using {model_id}")
    start_time = time.time()

    prompt = f"""Analyze the following GitHub issue description. Identify critical information potentially missing for effective debugging or resolution. List the missing items concisely (e.g., "Steps to reproduce", "Error logs", "Expected vs. Actual behavior", "Environment details"). If the description seems reasonably complete, respond with ONLY the word "None".

    if info_needed.lower() == "none" or not info_needed:
        info_needed = "None needed."
    self.precomputed_missing_info[issue_id] = {"info_needed": info_needed, "error": None, "timestamp": time.time()}
+    logger.info(f"Stored missing info analysis for issue {issue_id} (took {duration:.2f}s): '{info_needed[:50]}...'")
elif isinstance(result, dict) and 'error' in result:
    raise ValueError(f"API Error: {result['error']}")
else:

try:
    issue = self.issues[issue_id]
    model_id = DEFAULT_IDLE_MODEL_ID  # Use cheap model
+    logger.info(f"Idle Task: Generating preliminary analysis for issue {issue_id} using {model_id}")
    start_time = time.time()

    prompt = f"""Analyze the GitHub issue below. Provide a single, concise sentence hypothesizing the root cause OR the main goal. Start with "Hypothesis:". If unsure, respond ONLY with "Hypothesis: Further investigation needed.".

        hypothesis = "Hypothesis: (Analysis failed or too short)"

    self.precomputed_analysis[issue_id] = {"hypothesis": hypothesis, "error": None, "timestamp": time.time()}
+    logger.info(f"Stored preliminary analysis for issue {issue_id} (took {duration:.2f}s): '{hypothesis[:60]}...'")
elif isinstance(result, dict) and 'error' in result:
    raise ValueError(f"API Error: {result['error']}")
else:

        cycle_tasks.append(self._compute_and_store_context(issue_id))
        context_computed_count += 1
    else: break
+if context_computed_count > 0: logger.info(f"Scheduled {context_computed_count} context computations.")

# 4. Schedule Summary Generation (LLM - Medium Cost)
summary_computed_count = 0

        cycle_tasks.append(self._compute_and_store_summary(issue_id))
        summary_computed_count += 1
    else: break
+if summary_computed_count > 0: logger.info(f"Scheduled {summary_computed_count} summary computations.")

# 5. Schedule Missing Info Analysis (LLM - Low Cost)
missing_info_count = 0

        cycle_tasks.append(self._compute_and_store_missing_info(issue_id))
        missing_info_count += 1
    else: break
+if missing_info_count > 0: logger.info(f"Scheduled {missing_info_count} missing info analyses.")

# 6. Schedule Preliminary Analysis (LLM - Low Cost)
analysis_count = 0

        cycle_tasks.append(self._compute_and_store_preliminary_analysis(issue_id))
        analysis_count += 1
    else: break
+if analysis_count > 0: logger.info(f"Scheduled {analysis_count} preliminary analyses.")

# --- Execute Scheduled Async Tasks ---
if cycle_tasks:
+    logger.info(f"Executing {len(cycle_tasks)} async idle tasks for this cycle...")
    results = await asyncio.gather(*cycle_tasks, return_exceptions=True)
    num_errors = 0
    for i, result in enumerate(results):

            num_errors += 1
            logger.error(f"Error encountered in background idle task {i+1}/{len(cycle_tasks)}: {result}", exc_info=False)  # Keep log cleaner
    cycle_duration = time.time() - start_time_cycle
+    logger.info(f"Idle processing cycle finished in {cycle_duration:.2f} seconds. {len(results)} tasks processed ({num_errors} errors).")
else:
    logger.info("No async idle tasks to perform in this cycle.")
logger.info("--- Finished idle processing cycle ---")

}
self.precomputed_context[issue_id] = computed_data

+log_msg = f"Stored context result for issue {issue_id} (found {len(computed_data['files'])} files, took {duration:.2f}s)."
if computed_data['error']:
    log_msg += f" Error: {computed_data['error']}"
logger.info(log_msg)

except Exception as e:
+    logger.exception(f"Failed to compute context for issue {issue_id} in background task: {e}")
    self.precomputed_context[issue_id] = {"error": f"Unexpected computation error: {e}", "timestamp": time.time(), "content": None, "files": []}

async def _run_clustering_and_duplicates_async(self):

status_indicators = []
if issue_num in manager.stale_issues:
+    status_indicators.append(f"<span title='No updates in >{manager.stale_issue_threshold_days} days' style='color: #b91c1c; font-weight: bold; font-size: 0.9em; background-color: #fee2e2; padding: 1px 4px; border-radius: 3px;'>[Stale]</span>")
if issue_num in manager.high_priority_candidates:
    severity = manager._determine_severity(issue.get('labels', []))
    if severity == "Critical": color, bgcolor = "#ef4444", "#fee2e2"

if duplicate_text:
    dup_ids = manager.potential_duplicates.get(issue_num, [])
+    dup_links = ", ".join([f"<span style='cursor: help; color: #4338ca; text-decoration: underline dotted;' title='Issue #{dup_id} has similar content'>#{dup_id}</span>" for dup_id in dup_ids])
    ai_sections.append(f"""
    <div style="font-size: 0.9em; margin-top: 8px; background-color: #fffbeb; padding: 6px 10px; border-radius: 4px; border: 1px solid #fef3c7;">
        <strong style="color: #d97706;">⚠️ Potential Duplicates:</strong> {dup_links}

preview_html = f"""
<div style="border: 1px solid #e5e7eb; padding: 15px; border-radius: 8px; background-color: #ffffff; font-family: 'Inter', sans-serif; display: flex; flex-direction: column; max-height: 80vh;">
    <h4 style="margin-top: 0; margin-bottom: 10px; font-size: 1.1em; display: flex; justify-content: space-between; align-items: center;">
+        <a href='{issue.get('url', '#')}' target='_blank' style='color: #6d28d9; text-decoration: none; font-weight: 600;' title="Open issue #{issue['id']} on GitHub">
+            #{issue['id']} - {gr.Textbox.sanitize_html(issue.get('title', 'N/A'))}
        </a>
        <span style="margin-left: 10px; flex-shrink: 0;">{status_html}</span>
    </h4>

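# ---------------------------------------------------------------------------
# [Editor's note -- illustrative sketch, not part of this commit]
# `gr.Textbox.sanitize_html` is not a documented, stable Gradio API; if it is
# missing in a given Gradio version, the standard library covers this use:
#
#     import html
#     safe_title = html.escape(issue.get('title', 'N/A'))
# ---------------------------------------------------------------------------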
issue = manager.issues[issue_num]
issue_hash = manager._get_issue_hash(issue)
+logger.info(f"Requesting suggestion for issue {issue_num} (hash: {issue_hash}) using model {model_key}.")

try:
    progress(0.3, desc=f"Querying {model_key}...")

if not manager.repo:
    return "❌ Error: Repository not loaded. Please scan the repository first."

+logger.info(f"Requesting patch for issue {issue_num} using model {model_key}.")
progress(0.1, desc="Gathering code context (using cache if available)...")
try:
+    progress(0.4, desc=f"Querying {model_key} for patch...")
    result = await manager.generate_code_patch(issue_num, model_key)
    progress(1, desc="Patch result received.")

if patch_content:
    patch_content_sanitized = patch_content.replace('`', '\\`')
+    logger.info(f"Successfully generated patch for issue {issue_num} using {model_used}.")
    return f"""**🩹 Patch Suggestion from {model_used}:**
**Explanation:**
{explanation}

logger.info(f"Issue selected via Dataframe: ID {selected_id}")

if selected_id not in manager.issues:
+    logger.error(f"Selected issue ID {selected_id} not found in manager's issue list.")
    return {
        **default_response,
+        "issue_preview_html": gr.update(value=f"<p style='color: red; font-weight: bold;'>Error: Issue {selected_id} not found in the current list. Try re-scanning.</p>"),
    }

issue_data = manager.issues[selected_id]

context_source_msg = f"Pre-computed (Failed @ {timestamp_str})"
files_content["error_context.txt"] = f"# Error loading pre-computed context:\n# {context_data['error']}"
elif context_data.get("files"):
+    context_source_msg = f"Pre-computed ({len(context_data['files'])} files @ {timestamp_str})"
+    logger.info(f"Loading {len(context_data['files'])} files from pre-computed context for issue {selected_id}: {context_data['files']}")
    loaded_count = 0
    for file_path_str in context_data["files"]:
        full_path = manager.repo_local_path / file_path_str

files_content[file_path_str] = full_path.read_text(encoding='utf-8', errors='ignore')
loaded_count += 1
except Exception as e:
+    logger.warning(f"Error reading pre-computed file {full_path} for issue {selected_id}: {e}")
    files_content[file_path_str] = f"# Error reading file: {e}"
if loaded_count == 0 and context_data["files"]:
    files_content["error_reading_files.txt"] = "# Precomputed context found file references, but failed to read any file content."

context_source_msg += f" (Error: {context_result['error']})"
files_content["error_context.txt"] = f"# Error loading context on demand:\n# {context_result['error']}"
elif context_result.get("files"):
+    context_source_msg += f" ({len(context_result['files'])} files)"
+    logger.info(f"Loading {len(context_result['files'])} files computed on-demand for issue {selected_id}: {context_result['files']}")
    loaded_count = 0
    for file_path_str in context_result["files"]:
        full_path = manager.repo_local_path / file_path_str

files_content[file_path_str] = full_path.read_text(encoding='utf-8', errors='ignore')
loaded_count += 1
except Exception as e:
+    logger.warning(f"Error reading on-demand file {full_path} for issue {selected_id}: {e}")
    files_content[file_path_str] = f"# Error reading file: {e}"
if loaded_count == 0 and context_result["files"]:
    files_content["error_reading_files.txt"] = "# Context computation found file references, but failed to read any file content."

if not files_content:
    files_content["placeholder.txt"] = f"# No relevant files found or context failed to load for issue {selected_id}."
manager.code_editors[selected_id] = OTCodeEditor(initial_value=files_content)
+logger.info(f"Initialized/Updated OT editor state for issue {selected_id} with files: {list(files_content.keys())}")

updates = {
    "selected_issue_id_state": gr.update(value=selected_id),

}

# --- Gradio Blocks ---
+with gr.Blocks(theme=theme, title="AI Issue Resolver Pro", css="#collab-list .collab-item { margin-bottom: 4px; font-size: 0.9em; } .gradio-container { max-width: 1600px !important; }") as demo_app:
    gr.Markdown("""
    <div style="text-align: center; margin-bottom: 20px;">
        <h1 style="color: #6d28d9; font-weight: 800;">🚀 AI Issue Resolver Pro</h1>

if (hostname === 'localhost' || hostname === '127.0.0.1' || hostname.endsWith('.gradio.live')) {
    wsUrl = `${protocol}//${hostname}:${wsPort}`;
    console.log('Detected local/gradio.live environment, using direct WebSocket URL:', wsUrl);
+} else {
    const wsPath = '/ws';
    wsUrl = `${protocol}//${window.location.host}${wsPath}`;
    console.log('Detected non-local environment, assuming proxied WebSocket URL:', wsUrl);

        <span style="color: #555;">${info.status || 'Idle'}</span>
    </div>`)
    .join('');
+} else {
    collabListDiv.innerHTML = '<span style="color: #6b7280;">You are the only active user.</span>';
}
}

console.log(`WebSocket already ${(collabWs.readyState === WebSocket.OPEN) ? 'open' : 'connecting'}. State: ${collabWs.readyState}`);
return;
}
+console.log(`Attempting WebSocket connection to ${wsUrl} (Attempt ${reconnectAttempts + 1}/${maxReconnectAttempts})...`);
updateStatusBar(`Connecting collaboration service (Attempt ${reconnectAttempts + 1})...`);
try {
    collabWs = new WebSocket(wsUrl);
+} catch (e) {
    console.error("WebSocket constructor failed:", e);
    updateStatusBar("Collaboration connection failed (init error).", true);
    // Handle reconnection attempt here as well

const data = JSON.parse(event.data);
if (data.type === 'collaboration_status') {
    updateCollabList(data.collaborators);
+} else if (data.type === 'code_update') {
    const receivedIssueNum = parseInt(data.issue_num, 10);
    if (aceEditorInstance && receivedIssueNum === currentIssueId && data.senderId !== clientId) {
+        console.warn(`Applying remote delta for issue ${receivedIssueNum} from ${data.senderId}. WARNING: NO OT!`);
        try {
            const delta = JSON.parse(data.delta);
            // Add ignore flag for local change listener
            aceEditorInstance.getSession().getDocument().applyDeltas([{...delta, ignore: true}]);
+        } catch (e) { console.error('Failed to parse or apply remote delta:', e, data.delta); }
    }
+} else if (data.type === 'issues_updated') {
    console.log('Received notification: Issue list updated on server.');
    updateStatusBar('Issue list updated on server. Refreshing the page or re-scanning is recommended.');
    const crawlButton = document.getElementById('crawl_btn');

    crawlButton.style.backgroundColor = '#fef08a';
    setTimeout(() => { crawlButton.style.backgroundColor = '' }, 2000);
}
+} else {
    console.warn("Received unknown WebSocket message type:", data.type, data);
}
+} catch (e) {
    console.error('Failed to parse WebSocket message or update UI:', e, event.data);
}
};

editorChangeListenerAttached = false; // Allow re-attaching on reconnect
if (reconnectAttempts < maxReconnectAttempts) {
    const delay = Math.pow(2, reconnectAttempts) * 1500 + Math.random() * 1000;
+    console.log(`Attempting to reconnect WebSocket in approx. ${Math.round(delay / 1000)} seconds...`);
    setTimeout(connectWebSocket, delay);
    reconnectAttempts++;
+} else {
    console.error('Max WebSocket reconnection attempts reached.');
    updateStatusBar('Collaboration failed - Max reconnect attempts reached.', true);
}

if (collabWs && collabWs.readyState === WebSocket.OPEN) {
    try {
        collabWs.send(JSON.stringify(message));
+    } catch (e) {
        console.error("Failed to stringify or send WebSocket message:", e, message);
    }
+} else {
    console.warn('WebSocket not connected. Cannot send message:', message);
}
}

console.warn(`Ace library or editor element not found yet. Retrying editor setup (${editorSetupAttempts + 1}/${maxEditorSetupAttempts})...`);
editorSetupAttempts++;
setTimeout(setupCodeEditorListener, 1500 + Math.random() * 500);
+} else {
    console.error("Ace library or editor element not found after multiple attempts. Code editor collaboration disabled.");
    updateStatusBar("Code editor library or UI element failed to load.", true);
}

console.log("Attaching Ace editor 'change' listener...");
aceEditorInstance.getSession().on('change', function(delta) {
    // Check for the custom 'ignore' flag added when applying remote deltas
+    if (delta.ignore) { return; } // Ignore remote changes
    if (currentIssueId !== null) {
        const now = Date.now();
        // Simple debounce to avoid flooding server

}
editorSetupAttempts = 0; // Reset attempts on success
updateTrackedIssueId(); // Ensure correct issue ID is tracked after setup
+} catch (e) {
    console.error('Failed to initialize Ace editor instance or attach listeners:', e);
    if (editorElement) delete editorElement.dataset.collabInitialized; // Allow retry
    aceEditorInstance = null;

console.warn(`Retrying editor setup after error (${editorSetupAttempts + 1}/${maxEditorSetupAttempts})...`);
editorSetupAttempts++;
setTimeout(setupCodeEditorListener, 2000 + Math.random() * 500);
+} else {
    console.error("Max editor setup attempts failed after error. Collaboration disabled.");
    updateStatusBar("Code editor setup failed repeatedly.", true);
}

newIssueId = parseInt(value, 10);
// Found a potential ID, stop searching
break;
+} catch (e) {
    console.debug("Could not parse hidden input value as int:", value, e);
}
}
}
if (newIssueId !== currentIssueId) {
+    console.log(`Updating tracked issue ID: from ${currentIssueId} to ${newIssueId}`);
    currentIssueId = newIssueId;
    const status = currentIssueId !== null ? `Viewing Issue #${currentIssueId}` : 'Idle';
    sendWsMessage({ type: 'status_update', clientId: clientId, status: status});

if (observerTargetNode) {
    console.log("Starting MutationObserver to detect Gradio UI changes (incl. hidden state).");
    observer.observe(observerTargetNode, observerConfig);
+} else {
    console.error("Could not find observer target node (document.body).");
}
// Initial setup attempts

client_id = f"client_{hashlib.sha1(os.urandom(16)).hexdigest()[:8]}"
setattr(websocket, 'client_id', client_id)
remote_addr = websocket.remote_address
+logger.info(f"WebSocket client connected: {remote_addr} assigned ID {client_id}")

manager.ws_clients.append(websocket)
logger.info(f"Client list size: {len(manager.ws_clients)}")

msg_type = data.get("type")
sender_id = client_id

+logger.debug(f"Received WS message type '{msg_type}' from {sender_id} ({remote_addr})")

if msg_type == "join":
    client_name = data.get("name", f"User_{sender_id[:4]}")
    manager.collaborators[sender_id] = {"name": client_name, "status": "Connected"}
+    logger.info(f"Client {sender_id} ({client_name}) joined collaboration. Current collaborators: {list(manager.collaborators.keys())}")
    await manager.broadcast_collaboration_status_once()

elif msg_type == "code_update":

    manager.collaborators[sender_id]["status"] = status
    await manager.broadcast_collaboration_status_once()
else:
+    logger.warning(f"Received status update from client {sender_id} not in collaborator list. Adding/Updating.")
+    manager.collaborators[sender_id] = {"name": f"User_{sender_id[:4]} (Re-added)", "status": status}
    await manager.broadcast_collaboration_status_once()

else:
+    logger.warning(f"Unknown WebSocket message type '{msg_type}' received from {sender_id} ({remote_addr}). Message: {str(message)[:200]}")

except json.JSONDecodeError:
+    logger.error(f"Received invalid JSON over WebSocket from {sender_id} ({remote_addr}): {str(message)[:200]}...")
except Exception as e:
+    logger.exception(f"Error processing WebSocket message from {sender_id} ({remote_addr}): {e}")

# Catch standard socket exceptions for disconnects
except (ConnectionClosed, ConnectionClosedOK, ConnectionAbortedError, ConnectionResetError) as e:
+    logger.info(f"WebSocket client {client_id} ({remote_addr}) disconnected: Code={getattr(e, 'code', 'N/A')}, Reason='{getattr(e, 'reason', 'N/A')}'")
except Exception as e:
+    logger.exception(f"Unexpected error in WebSocket handler for {client_id} ({remote_addr}): {e}")
finally:
+    logger.info(f"Cleaning up connection for client {client_id} ({remote_addr})")
    manager.remove_ws_client(websocket)

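# ---------------------------------------------------------------------------
# [Editor's note -- illustrative summary, not part of this commit]
# Gathered from the branches above, the handler expects JSON messages shaped
# roughly like these (field names as used in the code; values illustrative):
#
#     {"type": "join", "name": "Alice"}
#     {"type": "code_update", "issue_num": 42, "delta": "<json-encoded Ace delta>"}
#     {"type": "status_update", "clientId": "client_ab12cd34", "status": "Viewing Issue #42"}
# ---------------------------------------------------------------------------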
except OSError as e:
    logger.error(f"Failed to start WebSocket server on port {port}: {e}. Is the port already in use?")
+    raise SystemExit(f"WebSocket Port {port} unavailable. Application cannot start.")
except asyncio.CancelledError:
    logger.info("WebSocket server task cancelled.")
except Exception as e:
|