MLDeveloper committed
Commit ff8aeca · verified · 1 Parent(s): b1cc69a

Upload 4 files

gemini-pro-streamlit-chatbot-main/.env ADDED
@@ -0,0 +1,4 @@
+ GOOGLE_API_KEY=AIzaSyB8XqUjANeAvV1Z8O24wKzD90gly3jZmTA
+
+
+
gemini-pro-streamlit-chatbot-main/README.md ADDED
@@ -0,0 +1,2 @@
+ # gemini-pro-streamlit-chatbot
+ This repository builds a chatbot using Google's Gemini-Pro and Streamlit.
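
The README stops at a one-line description. As a rough sketch of how the files added in this commit fit together (an assumption about intended usage, not something stated in the repository): .env supplies GOOGLE_API_KEY, python-dotenv loads it, and google-generativeai is configured with it before the app is launched with streamlit run app.py. A minimal, hypothetical standalone check along those lines:

# check_setup.py (hypothetical helper, not part of this commit): confirm that the
# key in .env loads and that the Gemini-Pro client answers a one-off request.
import os

from dotenv import load_dotenv
import google.generativeai as gen_ai

load_dotenv()  # reads GOOGLE_API_KEY from the adjacent .env file
api_key = os.getenv("GOOGLE_API_KEY")
if not api_key:
    raise SystemExit("GOOGLE_API_KEY is not set; add it to .env first.")

gen_ai.configure(api_key=api_key)
model = gen_ai.GenerativeModel("gemini-pro")
print(model.generate_content("Say hello in one sentence.").text)

If this prints a reply, streamlit run app.py should come up with a working chat.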
gemini-pro-streamlit-chatbot-main/app.py ADDED
@@ -0,0 +1,68 @@
+ import os
+ import streamlit as st
+ from dotenv import load_dotenv
+ import google.generativeai as gen_ai
+
+ # Load environment variables
+ load_dotenv()
+
+ # Configure Streamlit page settings
+ st.set_page_config(
+     page_title="ML Galaxy!",
+     page_icon=":brain:",  # Favicon emoji
+     layout="centered",  # Page layout option
+ )
+
+ # Retrieve the Google API key from the environment
+ GOOGLE_API_KEY = os.getenv("GOOGLE_API_KEY")
+
+ # Check if the API key is loaded
+ if not GOOGLE_API_KEY:
+     st.error("API key not found! Please set the GOOGLE_API_KEY in your .env file.")
+     st.stop()
+
+ # Configure the Generative AI model
+ try:
+     gen_ai.configure(api_key=GOOGLE_API_KEY)
+     model = gen_ai.GenerativeModel("gemini-pro")
+ except Exception as e:
+     st.error(f"Error initializing the Gemini-Pro model: {e}")
+     st.stop()
+
+ # Function to translate roles between Gemini-Pro and Streamlit terminology
+ def translate_role_for_streamlit(user_role):
+     return "assistant" if user_role == "model" else user_role
+
+ # Initialize the chat session if not already present in session state
+ if "chat_session" not in st.session_state:
+     try:
+         st.session_state.chat_session = model.start_chat(history=[])
+     except Exception as e:
+         st.error(f"Error initializing chat session: {e}")
+         st.stop()
+
+ # Display the chatbot's title
+ st.title("🤖 ML Galaxy")
+
+ # Display the chat history
+ try:
+     for message in st.session_state.chat_session.history:
+         with st.chat_message(translate_role_for_streamlit(message.role)):
+             st.markdown(message.parts[0].text)
+ except Exception as e:
+     st.error(f"Error displaying chat history: {e}")
+
+ # Input field for user's message
+ user_prompt = st.chat_input("Ask Gemini-Pro...")
+ if user_prompt:
+     # Add the user's message to the chat and display it
+     st.chat_message("user").markdown(user_prompt)
+
+     # Send the user's message to Gemini-Pro and get the response
+     try:
+         gemini_response = st.session_state.chat_session.send_message(user_prompt)
+         # Display Gemini-Pro's response
+         with st.chat_message("assistant"):
+             st.markdown(gemini_response.text)
+     except Exception as e:
+         st.error(f"Error processing your message: {e}")
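
app.py leans on the start_chat / send_message / history interface of google-generativeai. A console-only sketch of the same loop (an illustration assuming the key is already set in the environment, not part of this commit) makes the role mapping behind translate_role_for_streamlit explicit:

# Console sketch of the chat loop that app.py wraps in Streamlit (hypothetical,
# not part of this commit).
import os

from dotenv import load_dotenv
import google.generativeai as gen_ai

load_dotenv()
gen_ai.configure(api_key=os.getenv("GOOGLE_API_KEY"))
chat = gen_ai.GenerativeModel("gemini-pro").start_chat(history=[])

while True:
    prompt = input("You: ").strip()
    if not prompt:
        break
    print("Gemini:", chat.send_message(prompt).text)

# chat.history accumulates entries whose role is "user" or "model"; app.py maps
# "model" to "assistant" so st.chat_message() renders the reply on the correct side.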
gemini-pro-streamlit-chatbot-main/requirements.txt ADDED
@@ -0,0 +1,5 @@
+ python-dotenv==1.0.1
+ google-generativeai==0.3.2
+ streamlit==1.30.0
+ deep-translator
+