Update app.py
app.py CHANGED
@@ -10,6 +10,7 @@ from models.fetch import FetchModel
 from auth.key import NimbusAuthKey
 from tools.googlesearch.main import search
 from tools.fetch import Tools
+import httpx
 
 app = FastAPI()
 
@@ -200,3 +201,59 @@ async def searchtool(request: Request):
     response = search(term=query, num_results=num_results, advanced=True, unique=False)
 
     return response
+
+OPENROUTER_HEADERS = {
+    'accept': 'application/json',
+    'accept-language': 'en-US,en;q=0.9,ja;q=0.8',
+    'authorization': 'Bearer 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',
+    'content-type': 'application/json',
+    'http-referer': 'https://lomni.io',
+    'origin': 'https://lomni.io',
+    'priority': 'u=1, i',
+    'referer': 'https://lomni.io/',
+    'sec-ch-ua': '"Google Chrome";v="137", "Chromium";v="137", "Not/A)Brand";v="24"',
+    'sec-ch-ua-mobile': '?0',
+    'sec-ch-ua-platform': '"macOS"',
+    'sec-fetch-dest': 'empty',
+    'sec-fetch-mode': 'cors',
+    'sec-fetch-site': 'cross-site',
+    'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/137.0.0.0 Safari/537.36',
+    'x-stainless-arch': 'unknown',
+    'x-stainless-lang': 'js',
+    'x-stainless-os': 'Unknown',
+    'x-stainless-package-version': '4.86.1',
+    'x-stainless-retry-count': '0',
+    'x-stainless-runtime': 'browser:chrome',
+    'x-stainless-runtime-version': '137.0.0',
+    'x-stainless-timeout': '600000',
+    'x-title': 'lomni',
+}
+
+@app.post('/api/stream')
+async def streamres(request: Request):
+    body = await request.json()
+    messages = body.get('messages', [])
+    model = body.get('model', 'anthropic/claude-sonnet-4')  # fallback default
+
+    data = {
+        'model': model,
+        'messages': messages,
+        'max_tokens': 1000000,
+        'stream': True
+    }
+
+    async def proxy_stream():
+        async with httpx.AsyncClient(timeout=None) as client:
+            async with client.stream(
+                "POST",
+                "https://openrouter.ai/api/v1/chat/completions",
+                headers=OPENROUTER_HEADERS,
+                json=data,
+            ) as response:
+                async for line in response.aiter_lines():
+                    if line:
+                        yield f"{line}\n"
+
+    return StreamingResponse(proxy_stream(), media_type='text/event-stream')
+
+
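For context, the new /api/stream route accepts a JSON body with "messages" and an optional "model", forwards it to OpenRouter's chat/completions endpoint with stream=True, and relays the response lines back as a text/event-stream. A minimal sketch of a client for this endpoint follows; it is an assumption for illustration only, and BASE_URL (a local dev server on port 8000) is not part of this commit.

# Hypothetical client for the /api/stream endpoint added in this commit.
# Assumes the FastAPI app is reachable at BASE_URL; adjust to your deployment.
import asyncio
import httpx

BASE_URL = "http://localhost:8000"  # assumption, not shown in the commit

async def main():
    payload = {
        "model": "anthropic/claude-sonnet-4",
        "messages": [{"role": "user", "content": "Hello!"}],
    }
    async with httpx.AsyncClient(timeout=None) as client:
        # Stream the proxied response instead of buffering it.
        async with client.stream("POST", f"{BASE_URL}/api/stream", json=payload) as response:
            # The route forwards OpenRouter's SSE lines (e.g. "data: {...}") as they arrive.
            async for line in response.aiter_lines():
                if line:
                    print(line)

asyncio.run(main())

Note on the design: the proxy forwards OpenRouter's stream line by line without parsing it, so the client is responsible for interpreting the "data:" payloads (and the terminating "data: [DONE]" marker) itself.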