text generation
Files changed:
- app.py +11 -0
- requirements.txt +6 -2
app.py
CHANGED
@@ -1,7 +1,18 @@
 from fastapi import FastAPI
 
+
+
+from transformers import pipeline
+
+pipe_flan = pipeline("text2text-generation", model="google/flan-t5-small")
+
 app = FastAPI()
 
+@app.get("/infer_t5")
+def t5(input):
+    output = pipe_flan(input)
+    return {"output": output[0]["generated_text"]}
+
 @app.get('/')
 def greet():
     return {'Hello': 'Moti'}
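
For reference, a minimal client sketch for the new /infer_t5 route (an illustration, not part of the commit): it assumes the app is already running and reachable at http://127.0.0.1:7860, the default Spaces port, and uses the requests dependency pinned below. Because the `input` argument of t5() is neither a path parameter nor a Pydantic model, FastAPI exposes it as a query parameter.

# client_example.py -- hypothetical snippet, not part of the commit.
# Calls GET /infer_t5?input=... and prints the JSON body returned by t5().
import requests

BASE_URL = "http://127.0.0.1:7860"  # assumption: where the app is being served

resp = requests.get(
    f"{BASE_URL}/infer_t5",
    params={"input": "Translate English to German: How old are you?"},
)
resp.raise_for_status()
print(resp.json())  # expected shape: {"output": "<text generated by flan-t5-small>"}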
requirements.txt
CHANGED
@@ -1,2 +1,6 @@
-
-
+fastapi==0.74.*
+requests==2.27.*
+sentencepiece==0.1.*
+torch==1.11.*
+transformers==4.*
+uvicorn[standard]==0.17.*
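
The pinned uvicorn[standard] package is the ASGI server typically used to run a FastAPI app like this one. A minimal local-run sketch, assuming the FastAPI instance is the `app` object in app.py and that port 7860 is free (both assumptions for illustration, not part of the commit):

# run_local.py -- hypothetical helper, not part of the commit.
import uvicorn

if __name__ == "__main__":
    # "app:app" means module app.py, variable `app` (the FastAPI() instance above).
    uvicorn.run("app:app", host="0.0.0.0", port=7860)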