abdelghanighpgmailcom committed
Commit d1e3a30 · verified · 1 Parent(s): 73464ae

Create app.py

Files changed (1)
  1. app.py +37 -0
app.py ADDED
@@ -0,0 +1,37 @@
+ from fastapi import FastAPI, Query
+ from fastapi.responses import StreamingResponse
+ from transformers import AutoModelForSequenceClassification, AutoTokenizer
+
+ import torch
+ import time
+
+ app = FastAPI()
+
+ # Load the pretrained BERT-Tiny checkpoint from the Hugging Face Hub
+ model_name = "prajjwal1/bert-tiny"
+ tokenizer = AutoTokenizer.from_pretrained(model_name)
+ model = AutoModelForSequenceClassification.from_pretrained(model_name)
+
+ # SSE generator: classify the text and emit the prediction as a single SSE event
+ def event_stream(text: str):
+     # Simulated latency; sync generators run in a threadpool, so this sleep does not block the event loop
+     time.sleep(1)
+
+     inputs = tokenizer(text, return_tensors="pt", truncation=True, padding=True)
+     with torch.no_grad():
+         outputs = model(**inputs)
+     probs = torch.nn.functional.softmax(outputs.logits, dim=1)
+     prediction = torch.argmax(probs, dim=1).item()
+
+     # SSE frame: "data: <payload>" terminated by a blank line
+     yield f"data: {prediction}\n\n"
+
+
+ @app.get("/chatstrm")
+ async def chat(query: str = Query(..., description="User's message")):
+     return StreamingResponse(event_stream(query), media_type="text/event-stream")
+
+ # Entry point
+ if __name__ == "__main__":
+     import uvicorn
+     uvicorn.run("app:app", host="0.0.0.0", port=7899)
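A minimal client sketch for exercising the new endpoint (not part of app.py; it assumes the server is running locally on port 7899 and that the requests package is installed):

import requests

# Open a streaming connection to the SSE endpoint and print each event as it arrives
with requests.get(
    "http://localhost:7899/chatstrm",
    params={"query": "hello world"},
    stream=True,
) as resp:
    for line in resp.iter_lines(decode_unicode=True):
        if line:  # skip the blank separator line between SSE frames
            print(line)  # e.g. "data: 1"

Note that the base prajjwal1/bert-tiny checkpoint ships without a fine-tuned classification head, so transformers initializes one randomly and the streamed label index is effectively arbitrary until the model is fine-tuned on a downstream task.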