abdullahalioo committed
Commit 03991d8 · verified · 1 parent: b32075b

Create main.py

Files changed (1)
  1. main.py +50 -0
main.py ADDED
@@ -0,0 +1,50 @@
+ from fastapi import FastAPI, Request
+ from pydantic import BaseModel
+ from fastapi.middleware.cors import CORSMiddleware
+ from g4f.client import Client
+ from fastapi.responses import StreamingResponse
+
+ # Initialize the AI client
+ client = Client()
+
+ # FastAPI app
+ app = FastAPI()
+
+ # CORS middleware (so browser-side JS can access the API too)
+ app.add_middleware(
+     CORSMiddleware,
+     allow_origins=["*"],  # Change "*" to your frontend URL for better security
+     allow_credentials=True,
+     allow_methods=["*"],
+     allow_headers=["*"],
+ )
+
+ # Request body model
+ class Question(BaseModel):
+     question: str
+
+ async def generate_response_chunks(prompt: str):
+     try:
+         response = client.chat.completions.create(
+             model="gpt-4",  # Use a supported model
+             messages=[
+                 {"role": "user", "content": prompt},
+                 {"role": "system", "content": "You are a helpful AI assistant created by Abdullah Ali, who is very intelligent, is 13 years old, and lives in Lahore."}
+             ],
+             stream=True  # Enable streaming
+         )
+
+         for part in response:
+             content = part.choices[0].delta.content
+             if content:
+                 yield content
+
+     except Exception as e:
+         yield f"Error occurred: {e}"
+
+ @app.post("/ask")
+ async def ask(question: Question):
+     return StreamingResponse(
+         generate_response_chunks(question.question),
+         media_type="text/plain"
+     )
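
Not part of the commit, just a quick illustration: a minimal client-side sketch of how the streaming /ask endpoint could be consumed, assuming the app is served locally (for example with uvicorn main:app --reload on port 8000) and the httpx package is installed. The URL, port, and sample question are assumptions, not taken from the diff.

import httpx

# Stream the plain-text reply from the /ask endpoint chunk by chunk.
with httpx.stream(
    "POST",
    "http://localhost:8000/ask",            # assumed local dev URL, not from the diff
    json={"question": "What is FastAPI?"},  # matches the Question request model
    timeout=None,                           # keep the connection open while the model streams
) as response:
    for chunk in response.iter_text():
        print(chunk, end="", flush=True)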