from langchain.prompts.prompt import PromptTemplate
from langchain.chains import ConversationalRetrievalChain
from langchain.chat_models import ChatOpenAI

_template = """Given the following conversation and a follow up question, rephrase the follow up question to be a
standalone question.


Chat History:
{chat_history}
Follow Up Input: {question}
Standalone question:"""
CONDENSE_QUESTION_PROMPT = PromptTemplate.from_template(_template)

template = """You are a personal assistance for twimbit company for answering questions. You are given the following 
extracted parts of a long document and a question. Provide a brief answer. If you don't know the answer, just say " 
I'm not sure." Question: {question} ========= {context} ========= Answer in Markdown: """
QA_PROMPT = PromptTemplate(template=template, input_variables=["question", "context"])


def get_chain(vectorstore):
    """Build a conversational retrieval chain over the given vectorstore."""
    llm = ChatOpenAI(temperature=0)  # deterministic answers
    qa_chain = ConversationalRetrievalChain.from_llm(
        llm,
        vectorstore.as_retriever(search_kwargs={"k": 10}),  # retrieve the 10 most similar chunks
        # qa_prompt=QA_PROMPT,
        # condense_question_prompt=CONDENSE_QUESTION_PROMPT,
    )
    return qa_chain
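

# Minimal usage sketch, assuming a FAISS index saved locally at "vectorstore/"
# (built with OpenAIEmbeddings) and an OPENAI_API_KEY in the environment;
# the index path and question below are illustrative placeholders.
if __name__ == "__main__":
    from langchain.embeddings import OpenAIEmbeddings
    from langchain.vectorstores import FAISS

    store = FAISS.load_local("vectorstore", OpenAIEmbeddings())
    chain = get_chain(store)
    # The chain takes the new question plus the running chat history and
    # returns a dict containing the generated "answer".
    result = chain({"question": "What does twimbit do?", "chat_history": []})
    print(result["answer"])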