# TextTutor — app.py
# (Originally published on Hugging Face Spaces by user "sushku";
#  last commit: "Update app.py", fdfb7a5, verified.)
import logging
import os

import uvicorn
from dotenv import load_dotenv
from fastapi import FastAPI, HTTPException
from fastapi.middleware.cors import CORSMiddleware
from langchain_google_genai import ChatGoogleGenerativeAI
from pydantic import BaseModel

load_dotenv()
app = FastAPI()

# Allow requests from the front-end's origins.
# NOTE: Starlette's CORSMiddleware does *exact* string matching on
# allow_origins, so the previous wildcard entry "chrome-extension://*"
# never matched any real extension origin. Pattern matching must go
# through allow_origin_regex instead.
app.add_middleware(
    CORSMiddleware,
    allow_origins=["http://localhost:3000"],
    allow_origin_regex=r"chrome-extension://.*",
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

# Secrets are loaded from environment variables (populated from .env by
# load_dotenv above). May be None if unset; get_meaning_from_llm guards this.
GOOGLE_API_KEY = os.getenv("GOOGLE_API_KEY")
# Define the request model that expects a JSON body with "text"
class MeaningRequest(BaseModel):
    """Request body for POST /get_meaning: the text to be explained."""
    # The raw text the client wants explained.
    text: str
# Define the response model that will return the meaning
class MeaningResponse(BaseModel):
    """Response body for POST /get_meaning: the generated explanation."""
    # Short (one-to-two line) explanation produced by the LLM.
    meaning: str
def get_meaning_from_llm(text: str) -> str:
    """Return a one-to-two line explanation of *text* via Google Gemini.

    Args:
        text: The text to explain.

    Returns:
        The model's explanation (``response.content``) as a string.

    Raises:
        RuntimeError: If GOOGLE_API_KEY is not configured.
    """
    # Fail fast with a clear message instead of an opaque auth error
    # raised deep inside the Google SDK.
    if not GOOGLE_API_KEY:
        raise RuntimeError(
            "GOOGLE_API_KEY is not set; cannot call the Gemini API."
        )

    # Prompt constrains the model to a one-to-two line answer.
    prompt = f"Explain the meaning of the following text in simple terms in only one or two lines not more than that: '{text}'"

    llm = ChatGoogleGenerativeAI(
        model="gemini-1.5-flash",
        temperature=0.1,       # near-deterministic output
        max_tokens=None,       # let the model/provider default apply
        timeout=None,
        max_retries=2,
        google_api_key=GOOGLE_API_KEY,
    )
    response = llm.invoke(prompt)
    # response.content holds the plain-text answer for chat models.
    return response.content
@app.post("/get_meaning", response_model=MeaningResponse)
async def get_meaning(request: MeaningRequest):
    """Return a short explanation of the text in the request body.

    Args:
        request: Parsed JSON body containing the ``text`` field.

    Returns:
        MeaningResponse carrying the generated explanation.

    Raises:
        HTTPException: 500 if the LLM call fails for any reason.
    """
    logger = logging.getLogger(__name__)
    try:
        logger.info("Received text: %s", request.text)
        # Generate the explanation via the LLM helper.
        meaning = get_meaning_from_llm(request.text)
        return MeaningResponse(meaning=meaning)
    except Exception:
        # Log the full traceback server-side, but do not leak internal
        # error details (e.g. API-key or SDK errors) to the client.
        logger.exception("Failed to generate meaning")
        raise HTTPException(status_code=500, detail="Failed to generate meaning")
if __name__ == "__main__":
    # Run the FastAPI app with Uvicorn. The import string must name this
    # module — the file is app.py, so it is "app:app"; the previous value
    # "main:app" pointed at a nonexistent module and crashed on startup.
    # (reload=True requires an import string rather than the app object.)
    uvicorn.run("app:app", host="0.0.0.0", port=8000, reload=True)