snackshell committed on
Commit 191a48e · verified · 1 Parent(s): 16f7ae8

Update app.py

Files changed (1)
  1. app.py +31 -0
app.py CHANGED
@@ -3,6 +3,9 @@ from fastapi.responses import StreamingResponse
 from pydantic import BaseModel
 from typing import List
 from g4f import ChatCompletion
+from g4f.typing import Messages, AsyncResult
+import g4f.Provider
+from g4f.Provider import BackendApi
 
 app = FastAPI()
 
@@ -14,6 +17,34 @@ models = [
     "llama-3.1-405b"
 ]
 
+url = "https://ahe.hopto.org"
+headers = {"Authorization": "Basic Z2dnOmc0Zl8="}
+
+BackendApi.working = True
+BackendApi.ssl = False
+BackendApi.url = url
+BackendApi.headers = headers
+
+class BackendApi(BackendApi):
+    working = True
+    ssl = False
+    url = url
+    headers = headers
+    image_models = ["flux", "flux-pro"]
+    models = ["deepseek-r1", *g4f.Provider.OpenaiAccount.get_models(), "flux", "flux-pro"]
+
+    @classmethod
+    async def create_async_generator(
+        cls,
+        model: str,
+        messages: Messages,
+        **kwargs
+    ) -> AsyncResult:
+        if model in g4f.Provider.OpenaiAccount.get_models():
+            kwargs["provider"] = "OpenaiAccount"
+        async for chunk in super().create_async_generator(model, messages, **kwargs):
+            yield chunk
+
 # Request model
 class Message(BaseModel):
     role: str
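
For context, a minimal usage sketch of how the provider configured in this commit might be wired into a streaming chat route elsewhere in app.py. Only the imports, the models list, and Message.role are visible in the diff above; the ChatRequest model, the Message.content field, and the /v1/chat/completions route are assumptions introduced here for illustration, not part of the commit.

# Usage sketch (not part of this commit): feeding requests through g4f's
# ChatCompletion with the BackendApi provider configured above.
# ChatRequest, Message.content and the route path are hypothetical.
from typing import List

from fastapi import FastAPI
from fastapi.responses import StreamingResponse
from pydantic import BaseModel
from g4f import ChatCompletion
from g4f.Provider import BackendApi  # in app.py this is the subclass defined in the diff

app = FastAPI()

class Message(BaseModel):
    role: str
    content: str  # assumed; the diff only shows the role field

class ChatRequest(BaseModel):  # hypothetical request body
    model: str
    messages: List[Message]

@app.post("/v1/chat/completions")
async def chat_completions(request: ChatRequest):
    def stream():
        # With stream=True, ChatCompletion.create yields text chunks;
        # provider=BackendApi routes them through the configured backend.
        for chunk in ChatCompletion.create(
            model=request.model,
            messages=[m.dict() for m in request.messages],
            provider=BackendApi,
            stream=True,
        ):
            yield str(chunk)
    return StreamingResponse(stream(), media_type="text/plain")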