ragul2607 committed on
Commit
ae7e219
·
verified ·
1 Parent(s): 0c5348e

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +10 -13
app.py CHANGED
@@ -1,25 +1,22 @@
1
- from fastapi import FastAPI, Request, HTTPException
2
  import requests
3
  import os
4
- from pydantic import BaseModel
5
 
6
  app = FastAPI()
7
 
8
-
9
  API_URL = "https://api-inference.huggingface.co/models/meta-llama/Meta-Llama-3-8B-Instruct"
10
- HF_TOKEN = os.getenv("HF_API_KEY") # Load from environment variable
11
  headers = {"Authorization": f"Bearer {HF_TOKEN}"}
12
 
13
 
14
- class PromptRequest(BaseModel):
15
- prompt: str
 
16
 
17
 
18
  @app.post("/generate")
19
- async def generate_text(data: PromptRequest):
20
- try:
21
- response = requests.post(API_URL, headers=headers, json={"inputs": data.prompt})
22
- response.raise_for_status()
23
- return response.json()
24
- except Exception as e:
25
- raise HTTPException(status_code=500, detail=str(e))
 
1
+ from fastapi import FastAPI, Request
2
  import requests
3
  import os
 
4
 
5
  app = FastAPI()
6
 
 
7
  API_URL = "https://api-inference.huggingface.co/models/meta-llama/Meta-Llama-3-8B-Instruct"
8
+ HF_TOKEN = os.getenv("HF_API_KEY")
9
  headers = {"Authorization": f"Bearer {HF_TOKEN}"}
10
 
11
 
12
+ @app.get("/")
13
+ async def root():
14
+ return {"message": "✅ QuickPrep is running!"}
15
 
16
 
17
  @app.post("/generate")
18
+ async def generate(request: Request):
19
+ data = await request.json()
20
+ prompt = data.get("prompt")
21
+ response = requests.post(API_URL, headers=headers, json={"inputs": prompt})
22
+ return response.json()