File size: 1,678 Bytes
6ca0fd8
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
import logging
import os

import openai
import pandas as pd
import streamlit as st
from streamlit_chat import message as st_message  # Ensure streamlit_chat is installed

# Load data function remains the same
def load_data(path):
    """Load CSV data into a pandas DataFrame.

    Parameters
    ----------
    path : str or file-like
        Location of the CSV data (filesystem path, URL, or an open
        file-like object such as a Streamlit upload buffer).

    Returns
    -------
    pandas.DataFrame
        The parsed tabular data.
    """
    frame = pd.read_csv(path)
    return frame

# Configure the OpenAI client from the environment. Note: os.getenv returns
# None when OPENAI_API_KEY is unset; API calls will then fail at request time.
openai.api_key = os.getenv("OPENAI_API_KEY")


# Sidebar CSV uploader. The parsed DataFrame is stored in session state so it
# survives Streamlit's top-to-bottom script reruns on each interaction.
uploaded_file = st.sidebar.file_uploader("Choose a CSV file", type="csv")
if uploaded_file is not None:
    # Reuse the load_data helper rather than duplicating pd.read_csv inline.
    st.session_state["df"] = load_data(uploaded_file)

def ask_openai(prompt):
    """Send *prompt* to OpenAI's chat-completion API and return the reply text.

    Parameters
    ----------
    prompt : str
        The user's question, forwarded verbatim as the user message.

    Returns
    -------
    str
        The assistant's reply, or a canned apology string if the API call
        fails for any reason (so the UI never crashes on transient errors).
    """
    try:
        response = openai.ChatCompletion.create(
            model="gpt-3.5-turbo",  # Adjust model as needed
            messages=[
                {"role": "system", "content": "You are a helpful assistant."},
                {"role": "user", "content": prompt},
            ],
        )
        return response.choices[0].message["content"]
    except Exception:
        # print() only reaches the server console and loses the traceback;
        # logging.exception records the full stack for diagnosis.
        logging.getLogger(__name__).exception("Error in generating response")
        return "Sorry, I couldn't generate a response. Please try again."

# --- Chat interaction ---
# Conversation transcript persisted across Streamlit reruns.
if "chat_history" not in st.session_state:
    st.session_state["chat_history"] = []

if prompt := st.text_input("Ask me anything about the data:"):
    history = st.session_state["chat_history"]
    # Streamlit re-executes the whole script on every interaction, and
    # st.text_input keeps its value across reruns — without this guard the
    # same prompt would be re-appended and re-sent to OpenAI on each rerun.
    last_user_prompt = next(
        (m["content"] for m in reversed(history) if m["role"] == "user"), None
    )
    if prompt != last_user_prompt:
        history.append({"role": "user", "content": prompt})
        response = ask_openai(prompt)
        history.append({"role": "assistant", "content": response})

# Render the transcript. A unique key per message prevents Streamlit's
# DuplicateWidgetID error when two messages share identical text.
for i, chat in enumerate(st.session_state["chat_history"]):
    st_message(chat["content"], is_user=(chat["role"] == "user"), key=f"chat_{i}")