from transformers import AutoModelForCausalLM, AutoTokenizer
import gradio as gr
import torch
import tempfile  # ✅ Import tempfile to create temp files

# ✅ Load a small, CPU-friendly model
model_name = "Salesforce/codegen-350M-mono"  # Lightweight code-generation model that runs acceptably on CPU
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name).to("cpu")  # Force CPU mode
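# CPU beam search is the slow path here; torch.set_num_threads() can cap how many cores
# the app uses if needed (left commented out; the right value depends on the host machine).
# torch.set_num_threads(4)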

def review_code(code_snippet):
    print("✅ Received Code:", code_snippet)  # Debugging log
    
    # ✅ Add a clear instruction to the model
    prompt = f"### Instruction: Review and fix the Python code below.\n### Input Code:\n{code_snippet}\n### Fixed Code:\n"

    # Process input
    inputs = tokenizer(prompt, return_tensors="pt").to("cpu")  # Move to CPU
    outputs = model.generate(
        **inputs,
        max_new_tokens=80,  # ✅ Token budget for the generated fix (the prompt no longer counts against it)
        do_sample=False,
        num_beams=5,  # ✅ Beam search for more reliable, deterministic output
        repetition_penalty=1.8  # ✅ Penalize repetition to avoid degenerate loops
    )

    # Check if the model generated output
    if outputs is None:
        print("❌ Model did not generate output!")  # Debugging log
        return "Error: Model did not generate output."

    # Decode only the newly generated tokens so the prompt text is not echoed back
    prompt_length = inputs["input_ids"].shape[1]
    reviewed_code = tokenizer.decode(outputs[0][prompt_length:], skip_special_tokens=True)
    print("✅ Generated Code:", reviewed_code)  # Debugging log

    # ✅ Write reviewed code to a temporary file for download
    temp_file_path = tempfile.NamedTemporaryFile(delete=False, suffix=".txt").name
    with open(temp_file_path, "w") as temp_file:
        temp_file.write(reviewed_code)

    return reviewed_code, temp_file_path  # ✅ Return reviewed code & file path

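# Example of calling the review function directly, bypassing the Gradio UI (useful for a quick local check):
#   reviewed, path = review_code("def add(a, b):\n    return a + b")
#   print(reviewed)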

# ✅ Handle user input and return reviewed code
def check_code(input_code):
    reviewed_code, file_path = review_code(input_code)
    return input_code, reviewed_code, file_path  # ✅ Correctly return file path

# ✅ Gradio UI with Side-by-Side Comparison & Fixed Download Option
interface = gr.Interface(
    fn=check_code,
    inputs=gr.Textbox(label="Enter Python Code"),
    outputs=[
        gr.Textbox(label="Original Code", interactive=False),  # Left side
        gr.Textbox(label="Reviewed Code", interactive=False),  # Right side
        gr.File(label="Download Reviewed Code")  # ✅ Fixed Download Button
    ],
    title="🚀 AI Code Reviewer",
    description="📌 Enter Python code and get a reviewed version. Download the reviewed code as a file.",
    allow_flagging="never"
)

# ✅ Launch app (Fixes font issues and removes `share=True`)
interface.launch(server_name="0.0.0.0", server_port=7860, show_error=True)