import openai
import os
import streamlit as st
import pandas as pd
from streamlit_chat import message as st_message  # Ensure streamlit_chat is installed

# Load data function remains the same
def load_data(path):
    return pd.read_csv(path)

# Assuming you've set your OpenAI API key in the environment variables
openai.api_key = os.getenv("OPENAI_API_KEY")

# File uploader and data loading logic can remain the same
uploaded_file = st.sidebar.file_uploader("Choose a CSV file", type="csv")
if uploaded_file is not None:
    st.session_state["df"] = pd.read_csv(uploaded_file)

# Function to generate a response from OpenAI's chat model
def ask_openai(prompt):
    try:
        response = openai.ChatCompletion.create(
            model="gpt-3.5-turbo",  # Adjust model as needed
            messages=[
                {"role": "system", "content": "You are a helpful assistant."},
                {"role": "user", "content": prompt},
            ],
        )
        return response.choices[0].message["content"]
    except Exception as e:
        st.error(f"Error in generating response: {e}")
        return "I encountered an error. Please try again."

# Input for new messages
user_input = st.text_input("Ask me anything:", key="chat_input")

# Process input on submission
if user_input:
    with st.chat_message("user"):
        st.write(user_input)

    # Generate and display response
    ai_response = ask_openai(user_input)
    with st.chat_message("assistant"):
        st.write(ai_response)
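
# Notes on running this script (not part of the original listing, only a hedged sketch
# of one way to try it out):
# - openai.ChatCompletion.create and response.choices[0].message["content"] follow the
#   pre-1.0 openai Python SDK; openai>=1.0 uses a different client interface, so pin an
#   older release (e.g. "openai<1.0") if you run the code exactly as written.
# - st.text_input keeps its value across Streamlit reruns, so the same prompt can be
#   re-sent when other widgets trigger a rerun; st.chat_input is a possible alternative
#   if you want the prompt cleared after each submission.
# - Assuming the file is saved as app.py (the name is arbitrary), a typical launch is:
#       pip install streamlit streamlit-chat pandas "openai<1.0"
#       export OPENAI_API_KEY="sk-..."
#       streamlit run app.py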