import torch
from transformers import AutoModelForSequenceClassification, AutoTokenizer

# Load the pre-trained CodeBERT encoder with a 2-label classification head.
# Note: the head is randomly initialized until the model is fine-tuned on a
# labeled bug-detection dataset, so out-of-the-box predictions are not meaningful.
model = AutoModelForSequenceClassification.from_pretrained("microsoft/codebert-base", num_labels=2)
tokenizer = AutoTokenizer.from_pretrained("microsoft/codebert-base")

def detect_bug(code):
    """Classify a code snippet as 'buggy' or 'correct'."""
    inputs = tokenizer(code, return_tensors="pt", truncation=True, padding=True)
    with torch.no_grad():
        outputs = model(**inputs)
    probabilities = torch.nn.functional.softmax(outputs.logits, dim=-1)
    return "buggy" if probabilities[0][1] > probabilities[0][0] else "correct"

# Optional test
if __name__ == "__main__":
    sample = "def multiply(a, b): return a + b"  # buggy: adds instead of multiplying
    print(detect_bug(sample))
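Because the classification head on top of microsoft/codebert-base starts out randomly initialized, detect_bug only returns meaningful labels after the model has been fine-tuned on labeled examples. The sketch below shows one minimal way to do that; it reuses the model and tokenizer defined above, and the toy training pairs are hypothetical placeholders standing in for a real buggy/fixed code corpus.

from torch.utils.data import DataLoader

# Hypothetical toy training data: (code snippet, label) pairs, 1 = buggy, 0 = correct.
train_examples = [
    ("def multiply(a, b): return a + b", 1),
    ("def multiply(a, b): return a * b", 0),
    ("def is_even(n): return n % 2 == 1", 1),
    ("def is_even(n): return n % 2 == 0", 0),
]

def collate(batch):
    codes, labels = zip(*batch)
    enc = tokenizer(list(codes), return_tensors="pt", truncation=True, padding=True)
    enc["labels"] = torch.tensor(labels)
    return enc

loader = DataLoader(train_examples, batch_size=2, shuffle=True, collate_fn=collate)
optimizer = torch.optim.AdamW(model.parameters(), lr=2e-5)

model.train()
for epoch in range(3):                  # a few epochs is plenty for a toy dataset
    for batch in loader:
        optimizer.zero_grad()
        loss = model(**batch).loss      # cross-entropy loss is computed when labels are passed
        loss.backward()
        optimizer.step()

model.eval()                            # switch back to inference mode before calling detect_bug

In practice you would replace the in-memory list with a real dataset and hold out a validation split, but the training loop itself stays the same.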