snackshell committed on
Commit d57368c · verified · 1 Parent(s): 6566a73

Update app.py

Files changed (1)
  1. app.py +0 -32
app.py CHANGED
@@ -1,12 +1,8 @@
-import g4f.api
-import g4f.Provider
 from fastapi import FastAPI, HTTPException, Depends, Header, Request
 from fastapi.responses import StreamingResponse
 from pydantic import BaseModel
 from typing import List
 from g4f import ChatCompletion
-from g4f.typing import Messages, AsyncResult
-from g4f.Provider import BackendApi
 
 app = FastAPI()
 
@@ -17,34 +13,6 @@ models = [
     "claude-3.7-sonnet", "o3-mini", "o1", "grok-3", "gemini-2.5-pro-exp-03-25", "claude-3.5",
     "llama-3.1-405b"
 ]
-
-url = "https://ahe.hopto.org"
-headers = {"Authorization": "Basic Z2dnOmc0Zl8="}
-
-BackendApi.working = True
-BackendApi.ssl = False
-BackendApi.url = url
-BackendApi.headers = headers
-
-class BackendApi(BackendApi):
-    working = True
-    ssl = False
-    url = url
-    headers = headers
-    image_models = ["flux", "flux-pro"]
-    models = ["deepseek-r1", *g4f.Provider.OpenaiAccount.get_models(), "flux", "flux-pro"]
-
-    @classmethod
-    async def create_async_generator(
-        cls,
-        model: str,
-        messages: Messages,
-        **kwargs
-    ) -> AsyncResult:
-        if model in g4f.Provider.OpenaiAccount.get_models():
-            kwargs["provider"] = "OpenaiAccount"
-        async for chunk in super().create_async_generator(model, messages, **kwargs):
-            yield chunk
 
 # Request model
 class Message(BaseModel):
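
In effect, the commit drops the custom BackendApi provider (the hard-coded https://ahe.hopto.org backend, its Basic-auth header, and the OpenaiAccount routing) together with the g4f imports that only it used, so the app presumably falls back to g4f's default provider selection for the models listed above. The snippet below is a minimal sketch of how the surviving file could continue; it is illustrative, not the committed code: the ChatRequest model, the /v1/chat/completions route, and the response shape are assumptions layered on the imports, models list, and truncated Message class visible in the diff.

# Illustrative sketch only -- not the committed app.py.
# Assumes g4f's blocking ChatCompletion.create(model=..., messages=...) API
# and the `models` list kept by this commit.
from typing import List

from fastapi import FastAPI, HTTPException
from pydantic import BaseModel
from g4f import ChatCompletion

app = FastAPI()

models = [
    # ...earlier entries not shown in the hunk...
    "claude-3.7-sonnet", "o3-mini", "o1", "grok-3", "gemini-2.5-pro-exp-03-25", "claude-3.5",
    "llama-3.1-405b",
]

# Request model (the diff cuts off right after this class header)
class Message(BaseModel):
    role: str      # "system" | "user" | "assistant"
    content: str

class ChatRequest(BaseModel):        # hypothetical name, not from the diff
    model: str
    messages: List[Message]

@app.post("/v1/chat/completions")    # hypothetical route, not from the diff
def chat_completions(req: ChatRequest):
    if req.model not in models:
        raise HTTPException(status_code=400, detail=f"Unknown model: {req.model}")
    # With the BackendApi override gone, g4f chooses a provider on its own.
    reply = ChatCompletion.create(
        model=req.model,
        messages=[{"role": m.role, "content": m.content} for m in req.messages],
    )
    return {"model": req.model, "choices": [{"message": {"role": "assistant", "content": reply}}]}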
 
 
 