comethrusws committed on
Commit 419f138 · verified · 1 Parent(s): c9a5f40

Create app.py

Files changed (1)
  app.py +29 -0
app.py ADDED
@@ -0,0 +1,29 @@
+ import gradio as gr
+ from transformers import AutoModelForSequenceClassification, AutoTokenizer
+ import torch
+
+ # Load the model and tokenizer from the Hugging Face Hub
+ model = AutoModelForSequenceClassification.from_pretrained("comethrusws/finlytic-compliance")
+ tokenizer = AutoTokenizer.from_pretrained("comethrusws/finlytic-compliance")
+
+ # Define a function to handle inference
+ def predict(input_data):
+     inputs = tokenizer(input_data, return_tensors="pt", truncation=True)
+     outputs = model(**inputs)
+
+     # Assumes the model returns classification logits (adjust for your model's architecture)
+     prediction = torch.argmax(outputs.logits, dim=-1).item()
+     return str(prediction)
+
+ # Create a Gradio interface
+ interface = gr.Interface(
+     fn=predict,
+     inputs=gr.Textbox(label="Input Data"),
+     outputs=gr.Textbox(label="Prediction"),
+     title="Finlytic Compliance Model",
+     description="Predict using the Finlytic compliance model",
+ )
+
+ # Launch the Gradio app
+ if __name__ == "__main__":
+     interface.launch()