from transformers import AutoModelForCausalLM, AutoTokenizer
import gradio as gr
import torch

# ✅ Load a small, fast model on CPU
model_name = "EleutherAI/pythia-70m"  # Lightweight model chosen for quick code review
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name).to("cpu")  # Force CPU mode

# ✅ Function to review Python code with debug logs
def review_code(code_snippet):
    print("✅ Received Code:", code_snippet)  # Debugging log

    # Process input
    inputs = tokenizer(code_snippet, return_tensors="pt").to("cpu")  # Move to CPU
    outputs = model.generate(**inputs, max_length=80, do_sample=False, num_beams=3)

    # Check if the model generated output
    if outputs is None:
        print("❌ Model did not generate output!")  # Debugging log
        return "Error: Model did not generate output."

    reviewed_code = tokenizer.decode(outputs[0], skip_special_tokens=True)
    print("✅ Generated Code:", reviewed_code)  # Debugging log
    return reviewed_code

# ✅ Handle user input and return the reviewed code for display and download
def check_code(input_code):
    reviewed_code = review_code(input_code)
    # Write the review to a file so the gr.File output can serve it as a download
    with open("reviewed_code.py", "w") as f:
        f.write(reviewed_code)
    return input_code, reviewed_code, "reviewed_code.py"

# ✅ Gradio UI with side-by-side comparison & download option
interface = gr.Interface(
    fn=check_code,
    inputs=gr.Textbox(label="Enter Python Code"),
    outputs=[
        gr.Textbox(label="Original Code", interactive=False),  # Left side
        gr.Textbox(label="Reviewed Code", interactive=False),  # Right side
        gr.File(label="Download Reviewed Code")  # Download button
    ],
    title="🚀 AI Code Reviewer",
    description="📌 Enter Python code and get a reviewed version. Download the reviewed code as a file.",
    allow_flagging="never"
)

# ✅ Launch app (fixes font issues and removes `share=True`)
interface.launch(server_name="0.0.0.0", server_port=7860, show_error=True)
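
# Usage note: run with `python app.py` (assumes the gradio, transformers, and torch
# packages are installed); the UI is then served at http://localhost:7860, matching
# the server_port set above.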