# full_code_fixer.py
"""Repair buggy Python snippets with a pretrained CodeT5 seq2seq model."""

import torch
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM

model_name = "Salesforce/codet5-base"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForSeq2SeqLM.from_pretrained(model_name)
# Inference only: disable dropout etc. so generation is deterministic-mode.
model.eval()


def fix_code(code: str) -> str:
    """Return the model's attempted fix of *code*.

    Args:
        code: A (possibly buggy) Python source snippet.

    Returns:
        The decoded model output. NOTE(review): quality depends entirely on
        the checkpoint; the result is not guaranteed to be valid Python.
    """
    # The "fix Python:" task prefix follows the CodeT5 prompt convention
    # used by this checkpoint.
    input_text = f"fix Python: {code}"
    inputs = tokenizer(
        input_text,
        return_tensors="pt",
        max_length=512,
        truncation=True,  # inputs longer than 512 tokens are clipped
    )
    # inference_mode avoids building the autograd graph — lower memory,
    # faster generation, identical output.
    with torch.inference_mode():
        outputs = model.generate(
            **inputs,
            max_length=512,
            num_beams=4,
            early_stopping=True,
        )
    return tokenizer.decode(outputs[0], skip_special_tokens=True)


# Optional test
if __name__ == "__main__":
    buggy_code = "def add(a,b):\n    return a*b"  # wrong logic
    print(fix_code(buggy_code))