fkalpana committed on
Commit
4c23181
·
verified ·
1 Parent(s): 87e4241

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +18 -36
app.py CHANGED
@@ -1,47 +1,29 @@
1
  import gradio as gr
2
- from transformers import T5Tokenizer, T5ForConditionalGeneration
3
- from datasets import load_dataset
4
 
5
- # Load the tokenizer and model
6
- tokenizer = T5Tokenizer.from_pretrained('t5-small', legacy=False)
7
- model = T5ForConditionalGeneration.from_pretrained('t5-small')
8
 
9
- # dataset = load_dataset("b-mc2/sql-create-context")
10
- dataset = load_dataset("wikisql", split="train")
11
 
12
- examples = []
13
-
14
- for i in range(3): # Let's take the first 3 examples
15
- item = dataset[i]
16
- question = item['question']
17
- examples.append([question])
18
-
19
- def generate_sql(question):
20
- # Format the question for the model if needed. For example:
21
- input_text = f"translate English to SQL: {question}"
22
- # input_text = f"{question}" # Directly use the question if the model is fine-tuned for SQL generation
23
-
24
- # Tokenize the input text
25
- input_ids = tokenizer.encode(input_text, return_tensors="pt")
26
-
27
- # Generate the output sequence
28
- output_ids = model.generate(input_ids, max_length=512, num_beams=5)[0]
29
-
30
- # Decode the generated ids to get the SQL query
31
- sql_query = tokenizer.decode(output_ids, skip_special_tokens=True)
32
  return sql_query
33
 
34
-
35
- # Define the Gradio interface
36
- iface = gr.Interface(
37
  fn=generate_sql,
38
- inputs=gr.Textbox(lines=2, placeholder="Enter your question here..."),
39
- outputs=gr.Textbox(),
40
- title="Natural Language to SQL",
41
- description="This app uses a Seq2Seq model to generate SQL queries from natural language questions.",
42
- examples=examples
43
  )
44
 
45
  # Launch the app
46
  if __name__ == "__main__":
47
- iface.launch()
 
1
  import gradio as gr
2
+ from transformers import AutoTokenizer, AutoModelForSeq2SeqLM, pipeline
 
3
 
4
+ # Load tokenizer and model
5
+ tokenizer = AutoTokenizer.from_pretrained("hrshtsharma2012/NL2SQL-Picard-final")
6
+ model = AutoModelForSeq2SeqLM.from_pretrained("hrshtsharma2012/NL2SQL-Picard-final")
7
 
8
+ # Initialize the pipeline
9
+ nl2sql_pipeline = pipeline("text2text-generation", model=model, tokenizer=tokenizer)
10
 
11
def generate_sql(query):
    """Translate a natural-language question into a SQL query.

    Parameters
    ----------
    query : str
        The natural-language question to convert.

    Returns
    -------
    str
        Generated SQL text from the pipeline's top-ranked candidate.
    """
    # The text2text pipeline returns a list of candidate dicts, ordered by
    # likelihood; keep only the best candidate's generated text.
    candidates = nl2sql_pipeline(query)
    sql_query = candidates[0]['generated_text']
    return sql_query
17
 
18
# Create a Gradio interface.
# FIX: gr.inputs.Textbox is the legacy Gradio namespace — deprecated in 3.x
# and removed in 4.x (raises AttributeError there). Components are now
# constructed directly from the top-level gr namespace.
interface = gr.Interface(
    fn=generate_sql,
    inputs=gr.Textbox(lines=2, placeholder="Enter your natural language query here..."),
    outputs="text",
    title="NL to SQL with Picard",
    description="This model converts natural language queries into SQL. It's based on the Spider dataset. Enter a query to get started!",
)

# Launch the app only when run as a script, not when imported as a module.
if __name__ == "__main__":
    interface.launch()