# twimbit / query_data.py
# (scraped from the Hugging Face file viewer — commit ebb95ee, "Update query_data.py", 1.5 kB;
#  the viewer chrome is preserved here as a comment so the module parses)
from langchain.prompts.prompt import PromptTemplate
from langchain.llms import OpenAI, OpenAIChat
from langchain.chains import ChatVectorDBChain, ConversationalRetrievalChain
from langchain.chat_models import ChatOpenAI
# Prompt for the chain's question-condensing step: folds {chat_history} and
# the latest follow-up {question} into a single standalone question, so the
# retriever gets a self-contained query.
_template = """Given the following conversation and a follow up question, rephrase the follow up question to be a
standalone question.
Chat History:
{chat_history}
Follow Up Input: {question}
Standalone question:"""
# from_template infers the input variables (chat_history, question) from the braces.
CONDENSE_QUESTION_PROMPT = PromptTemplate.from_template(_template)
# Prompt for the answer-generation step: instructs the model to answer from
# the retrieved {context}, use a fixed apology when the context lacks the
# answer, and label any out-of-context answer (with sources) as not being
# from twimbit's data.
template = """Give an answer to the question based on the context below, and if you don't find the answer in
the context, then simply say "Sorry no information is present in twimbit's data", But do give an answer that fits
right, the answer can be outside of this context and at the end of the answer, do mention that this answer is not
from twimbit's data and also include sources like article links etc. And also don't use "According to the information
provided in the given links, " while giving an answer.
{context}
Question: {question}
Helpful Answer:"""
# Variables are declared explicitly here (vs. from_template above); both go
# through PromptTemplate either way.
QA_PROMPT = PromptTemplate(template=template, input_variables=["question", "context"])
def get_chain(vectorstore):
    """Build a ConversationalRetrievalChain over *vectorstore*.

    Parameters
    ----------
    vectorstore :
        A langchain vector store; its retriever supplies the top-8 context
        documents for each question.

    Returns
    -------
    ConversationalRetrievalChain
        Chain wired with the module-level prompts: CONDENSE_QUESTION_PROMPT
        rewrites a follow-up into a standalone question, and QA_PROMPT
        formats the final answer from the retrieved context.
    """
    # temperature=0 keeps answers deterministic for retrieval QA.
    llm = ChatOpenAI(temperature=0)
    qa_chain = ConversationalRetrievalChain.from_llm(
        llm,
        vectorstore.as_retriever(search_kwargs={"k": 8, "include_metadata": True}),
        # Fix: the custom prompts defined above were never passed in.
        # `from_llm` has no `qa_prompt` kwarg (presumably why the old call
        # was commented out); the QA prompt is delivered to the inner
        # combine-docs chain via combine_docs_chain_kwargs instead.
        condense_question_prompt=CONDENSE_QUESTION_PROMPT,
        combine_docs_chain_kwargs={"prompt": QA_PROMPT},
    )
    return qa_chain