import streamlit as st
from streamlit_chat import message # Ensure you have streamlit_chat installed
from langchain_openai import ChatOpenAI
from langchain.chains import ConversationChain
from langchain.chains.conversation.memory import ConversationSummaryMemory
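
# Assumed dependencies (not pinned in this file): streamlit, streamlit-chat,
# langchain and langchain-openai should be installed, e.g. via requirements.txt.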

# Initialize session state variables if they don't exist
if 'conversation' not in st.session_state:
    st.session_state['conversation'] = None
if 'messages' not in st.session_state:
    st.session_state['messages'] = []
if 'API_Key' not in st.session_state:
    st.session_state['API_Key'] = ''

# Set page configuration
st.set_page_config(page_title="ChatMate: Your AI Conversation Partner", page_icon=":robot_face:")
st.markdown("<h1 style='text-align: center; color: navy;'>ChatMate</h1>", unsafe_allow_html=True)
st.markdown("<h4 style='text-align: center;'>Engage with a cutting-edge language model.</h4>", unsafe_allow_html=True)

# Sidebar for API Key Input
st.sidebar.title("API Key 🔑")
st.session_state['API_Key'] = st.sidebar.text_input(
    "Enter your OpenAI API Key:",
    type="password",
    help="Your API key is used only to authenticate requests to OpenAI for this conversation."
)
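
# Optional guard (a minimal sketch, not part of the original app): warn the user
# when no key has been entered yet, since the model call below fails without one.
if not st.session_state['API_Key']:
    st.sidebar.warning("Please enter your OpenAI API Key to start chatting.")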

# Function to get a response from the model
def get_response(user_input, api_key):
    """Send the user's input to the conversation chain and return the reply.

    The chain is created lazily on the first call and cached in session state
    so its summary memory persists across Streamlit reruns.
    """
    if st.session_state['conversation'] is None:
        llm = ChatOpenAI(
            temperature=0,
            openai_api_key=api_key,
            model_name='gpt-3.5-turbo'
        )
        st.session_state['conversation'] = ConversationChain(
            llm=llm,
            verbose=True,
            memory=ConversationSummaryMemory(llm=llm)
        )
    response = st.session_state['conversation'].predict(input=user_input)
    return response

# Containers for the conversation history and the user input form
response_container = st.container()
container = st.container()
with container:
    with st.form(key='my_form', clear_on_submit=True):
        user_input = st.text_area("Ask me anything.", height=100)
        submit_button = st.form_submit_button(label='Send')

        if submit_button:
            st.session_state['messages'].append(user_input)
            model_response = get_response(user_input, st.session_state['API_Key'])
            st.session_state['messages'].append(model_response)

            with response_container:
                for i, msg in enumerate(st.session_state['messages']):
                    # Alternate messages between the user and the AI
                    is_user = (i % 2) == 0
                    message(msg, is_user=is_user, key=f"msg_{i}")

# Optionally, sidebar controls can be added to clear or summarize the conversation; a sketch follows below.
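# The controls below are a minimal sketch of that idea; the button labels and the
# use of ConversationSummaryMemory's running `buffer` are assumptions, not part
# of the original app.
if st.sidebar.button("Clear conversation"):
    st.session_state['messages'] = []
    st.session_state['conversation'] = None
if st.sidebar.button("Summarize conversation") and st.session_state['conversation'] is not None:
    # ConversationSummaryMemory keeps a rolling summary of the chat in `buffer`.
    st.sidebar.write(st.session_state['conversation'].memory.buffer)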