import os

import openai
import pandas as pd
import streamlit as st
from streamlit_chat import message as st_message


def load_data(path):
    """Load a CSV file into a pandas DataFrame."""
    return pd.read_csv(path)

# Read the OpenAI API key from the environment rather than hard-coding it.
openai.api_key = os.getenv("OPENAI_API_KEY")

# Let the user upload a CSV from the sidebar and keep it in session state
# so it survives Streamlit reruns.
uploaded_file = st.sidebar.file_uploader("Choose a CSV file", type="csv")
if uploaded_file is not None:
    st.session_state["df"] = load_data(uploaded_file)


def ask_openai(prompt):
    """Send the prompt to the chat model and return the assistant's reply."""
    try:
        # Uses the legacy openai<1.0 ChatCompletion interface.
        response = openai.ChatCompletion.create(
            model="gpt-3.5-turbo",
            messages=[
                {"role": "system", "content": "You are a helpful assistant."},
                {"role": "user", "content": prompt},
            ],
        )
        return response.choices[0].message["content"]
    except Exception as e:
        print(f"Error in generating response: {e}")
        return "Sorry, I couldn't generate a response. Please try again."

# Keep the conversation across Streamlit reruns.
if "chat_history" not in st.session_state:
    st.session_state["chat_history"] = []

# Record the user's question and the model's answer in the history.
if prompt := st.text_input("Ask me anything about the data:"):
    st.session_state["chat_history"].append({"role": "user", "content": prompt})
    response = ask_openai(prompt)
    st.session_state["chat_history"].append({"role": "assistant", "content": response})

# Render the conversation as chat bubbles.
for chat in st.session_state["chat_history"]:
    st_message(chat["content"], is_user=(chat["role"] == "user"))