"""Gradio app: translate natural-language questions into SQL with a T5 model.

Loads a wikiSQL-finetuned T5 checkpoint once at import time and exposes a
single-textbox Gradio interface around `generate_sql`.
"""

import logging

import gradio as gr
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM

# Module-level logger (preferred over configuring/using the root logger).
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

# Load tokenizer and model once at startup; both are reused across requests.
model_name = "mrm8488/t5-base-finetuned-wikiSQL"  # Alternative model
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForSeq2SeqLM.from_pretrained(model_name)

# Task prefix required by this checkpoint (per its model card); without it
# the model was not trained to perform NL->SQL translation.
_TASK_PREFIX = "translate English to SQL: "


def preprocess_query(query):
    """Normalize a user query before tokenization.

    Currently this only lowercases the text; extend here for further cleanup.

    Args:
        query: Raw natural-language question from the user.

    Returns:
        The lowercased query string.
    """
    return query.lower()


def generate_sql(query):
    """Convert a natural-language question into a SQL query string.

    Args:
        query: Natural-language question entered in the Gradio textbox.

    Returns:
        The generated SQL string, or a short error message on failure.
        Exceptions are caught at this boundary so the UI never crashes.
    """
    # Guard against empty/whitespace-only input before hitting the model.
    if not query or not query.strip():
        return "Please enter a query."
    try:
        processed_query = _TASK_PREFIX + preprocess_query(query)
        # truncation=True keeps inputs within the model's max sequence length.
        inputs = tokenizer(
            processed_query, return_tensors="pt", padding=True, truncation=True
        )
        outputs = model.generate(**inputs, max_length=512)
        sql_query = tokenizer.decode(outputs[0], skip_special_tokens=True)
        return sql_query
    except Exception as e:
        # logger.exception records the full traceback, not just the message.
        logger.exception("Error generating SQL query: %s", e)
        return "Error generating SQL query"


# Create a Gradio interface
interface = gr.Interface(
    fn=generate_sql,
    inputs=gr.Textbox(lines=2, placeholder="Enter your natural language query here..."),
    outputs="text",
    title="NL to SQL with T5",
    description="This model converts natural language queries into SQL. Enter your query!",
)

# Launch the app
if __name__ == "__main__":
    interface.launch()