abhivsh committed · verified
Commit 2d13d0a · Parent(s): 4f64706

Update app.py

Files changed (1)
  1. app.py +4 -4
app.py CHANGED
@@ -41,7 +41,7 @@ fs_token = os.environ.get('fs_token')
 
 llm_name = "gpt-3.5-turbo-0301"
 
-vectordb, client = initialize.initialize()
+vectordb = initialize.initialize()
 
 chat_history = []
 
@@ -64,14 +64,14 @@ def chat_query_doc(question, history):
     If don't get the answer, feel free to reply from your own knowledge."""
 
 
-    query = f"""If the Question is related to Electrical Domain, Provide a detailed, accurate and point-wise reply to the query: {question} based on provided context only. \
-    Ensure that your reply addresses each aspect of the query thoroughly. """
+    query = f"""You'll be asked with a User Query. If the Query is related to Electrical Domain, Provide a precise and point-wise reply to the query: {question} \
+    based on provided context only. Ensure that your reply addresses each aspect of the query thoroughly. """
 
 
 
     #llm = ChatOpenAI(model = llm_name, temperature = 0.1, api_key = OPENAI_API_KEY)
     #llm = GoogleGenerativeAI(model = "gemini-pro", google_api_key = GEMINI_API_KEY)
-    llm = ChatGoogleGenerativeAI(model = "gemini-1.0-pro", google_api_key = GEMINI_API_KEY, temperature = 0, client_options = client)
+    llm = ChatGoogleGenerativeAI(model = "gemini-1.0-pro", google_api_key = GEMINI_API_KEY, temperature = 0)
 
     # Conversation Retrival Chain with Memory
     #memory = ConversationBufferMemory(memory_key="chat_history", return_messages=True)
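
For context, a minimal sketch of how the updated calls could fit together after this commit: initialize.initialize() is assumed to return a single LangChain vector store (no separate client), and the Gemini chat model is built without the removed client_options=client argument. Everything beyond the lines visible in the diff (imports, the retrieval-chain wiring, function and variable names) is illustrative, not the repository's actual code.

import os

from langchain_google_genai import ChatGoogleGenerativeAI
from langchain.chains import ConversationalRetrievalChain

import initialize  # the app's own module; assumed to return a LangChain vector store

GEMINI_API_KEY = os.environ.get('GEMINI_API_KEY')

vectordb = initialize.initialize()   # single return value after this commit
chat_history = []

def chat_query_doc(question, history):
    # LLM is constructed without the removed client_options=client argument
    llm = ChatGoogleGenerativeAI(model="gemini-1.0-pro",
                                 google_api_key=GEMINI_API_KEY,
                                 temperature=0)
    # Conversational retrieval chain over the vector store
    # (memory handling kept minimal here)
    qa = ConversationalRetrievalChain.from_llm(llm=llm,
                                               retriever=vectordb.as_retriever())
    result = qa({"question": question, "chat_history": chat_history})
    chat_history.append((question, result["answer"]))
    return result["answer"]

If the app does use the commented-out ConversationBufferMemory, the chain would instead be built with a memory object and called with only the question; the sketch above passes chat_history explicitly to stay close to the variables visible in the diff.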