# Spaces: Running
# (Hugging Face Spaces status banner captured with the page — not program code)
import datetime
import os
from typing import Annotated, List

from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
from langchain_community.document_loaders import DirectoryLoader
from langchain_core.messages import AIMessage, HumanMessage
from langchain_core.output_parsers import StrOutputParser
from langchain_core.prompts import PromptTemplate
from langchain_google_genai import ChatGoogleGenerativeAI
from langchain_groq import ChatGroq
from langchain_text_splitters import CharacterTextSplitter
from langgraph.checkpoint.memory import MemorySaver
from langgraph.graph import END, START, StateGraph
from langgraph.graph.message import AnyMessage, add_messages
from pydantic import BaseModel
from typing_extensions import TypedDict
# HTTP application serving the chatbot.
app = FastAPI()

# Let browser front-ends on any origin call the API.
# NOTE(review): allow_origins=["*"] together with allow_credentials=True relies
# on the middleware echoing the request origin; confirm credentials are
# actually needed, otherwise set allow_credentials=False.
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)
class Request(BaseModel):
    """Incoming chat request payload, validated by pydantic.

    ``query`` is the user's question text; ``id`` identifies the conversation
    thread used to key the checkpointed memory.
    """

    query: str
    id: str
# LLM used to answer questions (Gemini 2.0 Flash, moderate sampling temperature).
llm = ChatGoogleGenerativeAI(model="gemini-2.0-flash", temperature=0.5)

# In-process checkpointer: keeps per-thread conversation state for the graph.
memory = MemorySaver()

# Load the markdown document(s) describing Ninon Roche; `cv` is the list of
# loaded documents injected into the prompt below.
glob_pattern = "./*.md"
directory_path = "./documents"
loader = DirectoryLoader(directory_path, glob=glob_pattern)
cv = loader.load()
# Prompt (in French): only answer questions about Ninon Roche, grounded on the
# loaded documents, the running conversation history and today's date.
template = """
Tu dois uniquement répondre aux questions posées à propos de Ninon Roche.
Pour t'aider dans tes réponses, voici un texte qui comporte des informations sur Ninon Roche :
{document}
Voici l'historique de la conversation :
{historical}
Et enfin, la question posée par l'utilisateur :
{question}
Voici la date du jour : {date}
"""
prompt = PromptTemplate.from_template(template)

# Prompt -> LLM -> plain string answer.
chain = prompt | llm | StrOutputParser()
def format_historical(hist):
    """Render past (user, assistant) exchanges as a plain-text transcript.

    ``hist`` is the full message list; its trailing element (the question
    currently being answered) is excluded from the transcript. Only the last
    10 lines — i.e. the 5 most recent exchanges — are kept.
    """
    # Pair even-indexed (user) with odd-indexed (assistant) messages,
    # dropping the final, still-unanswered message.
    user_msgs = hist[:-1:2]
    ai_msgs = hist[1:-1:2]
    lines = []
    for user_msg, ai_msg in zip(user_msgs, ai_msgs):
        lines.append("Utilisateur : " + user_msg.content)
        lines.append("Assistant : " + ai_msg.content)
    return "\n".join(lines[-10:])
class GraphState(TypedDict):
    """State carried through the LangGraph: the accumulated chat messages.

    The ``add_messages`` reducer appends/merges new messages into the list
    instead of replacing it on each node update.
    """

    messages: Annotated[list[AnyMessage], add_messages]
def chatbot(state: GraphState):
    """Graph node: answer the latest user message with the prompt/LLM chain.

    Feeds the chain the loaded documents, the formatted conversation history,
    the newest message's text and today's date, then appends the model's reply
    as an ``AIMessage`` to the graph state.
    """
    latest_question = state["messages"][-1].content
    payload = {
        "document": cv,
        "historical": format_historical(state["messages"]),
        "question": latest_question,
        "date": datetime.date.today(),
    }
    answer = chain.invoke(payload)
    return {"messages": [AIMessage(content=answer)]}
# Build the single-node conversation graph: START -> chatbot -> END.
workflow = StateGraph(GraphState)
workflow.add_node("chatbot", chatbot)
# Equivalent to add_edge(START, "chatbot") / add_edge("chatbot", END).
workflow.set_entry_point("chatbot")
workflow.set_finish_point("chatbot")

# Compile with the in-memory checkpointer so each thread_id keeps its history.
app_chatbot = workflow.compile(checkpointer=memory)
# Fix: the handler had no route decorator, so FastAPI never registered it and
# the service exposed no endpoint at all.
# NOTE(review): the path "/chat" is a guess — confirm the URL the frontend calls.
@app.post("/chat")
def request(req: Request):
    """Chat endpoint: run the user's query through the graph for ``req.id``.

    Each ``req.id`` maps to a LangGraph thread, so the MemorySaver checkpointer
    restores that conversation's message history before answering.

    Returns a JSON-serializable dict: {"response": <assistant answer>}.
    """
    config = {"configurable": {"thread_id": req.id}}
    result = app_chatbot.invoke(
        {"messages": [HumanMessage(content=req.query)]},
        config,
        stream_mode="values",
    )
    # The final state's last message is the assistant's reply.
    return {"response": result["messages"][-1].content}