"""KASOTI - a 20-questions style guessing game built with Streamlit.

The player thinks of a famous person, place, or object; a small Hugging Face
text-generation model (distilgpt2) proposes the next yes/no question based on
the answers given so far.
"""

import os

import streamlit as st
from transformers import pipeline

# Disable tokenizer parallelism to avoid fork-related warnings
os.environ["TOKENIZERS_PARALLELISM"] = "false"


# Initialize the game state on first run
def init_game():
    if 'questions_asked' not in st.session_state:
        st.session_state.questions_asked = 0
        st.session_state.answers = []
        st.session_state.game_over = False
        st.session_state.current_question = "Is it a living thing?"


# Load a small model once and cache it across reruns
@st.cache_resource(show_spinner="Loading game engine...")
def load_model():
    try:
        return pipeline(
            "text-generation",
            model="distilgpt2",   # Smaller than GPT-2
            device=-1,            # Force CPU
            framework="pt",       # Explicitly use PyTorch
            torch_dtype="auto",
        )
    except Exception as e:
        st.error(f"Failed to load model: {str(e)}")
        return None


def generate_question(model, previous_answers):
    if not previous_answers:
        return "Is it a living thing?"

    # Build a prompt from the Q&A history; None means the player answered "I don't know"
    prompt = "We're playing a guessing game. Here are the previous Q&A:\n"
    for i, (q, a) in enumerate(previous_answers, 1):
        answer_text = "Yes" if a is True else ("No" if a is False else "I don't know")
        prompt += f"{i}. Q: {q} A: {answer_text}\n"
    prompt += "\nWhat should be the next yes/no question to narrow down the possible options?\nQ:"

    try:
        response = model(
            prompt,
            max_new_tokens=30,  # Keep the generated question short
            do_sample=True,
            temperature=0.7,
            top_p=0.9,
        )
        # Keep only the text after the final "Q:" marker
        question = response[0]['generated_text'].split("Q:")[-1].strip()
        # Trim to the first line and cut at the first question mark
        question = question.split("\n")[0]
        question = question.split("?")[0] + "?"
        return question
    except Exception as e:
        st.error(f"Error generating question: {str(e)}")
        return "Is it something you can hold in your hand?"


def main():
    st.title("KASOTI - The Guessing Game")

    # Initialize with a loading state
    with st.spinner("Setting up the game..."):
        model = load_model()
    if model is None:
        st.error("Failed to initialize the game. Please refresh the page.")
        return

    init_game()

    st.header("Think of a famous person, place, or object")
    st.write(f"Questions asked: {st.session_state.questions_asked}/20")

    if not st.session_state.game_over:
        st.subheader(st.session_state.current_question)

        col1, col2, col3 = st.columns(3)
        with col1:
            if st.button("Yes"):
                st.session_state.answers.append((st.session_state.current_question, True))
                st.session_state.questions_asked += 1
                st.session_state.current_question = generate_question(model, st.session_state.answers)
                st.rerun()
        with col2:
            if st.button("No"):
                st.session_state.answers.append((st.session_state.current_question, False))
                st.session_state.questions_asked += 1
                st.session_state.current_question = generate_question(model, st.session_state.answers)
                st.rerun()
        with col3:
            if st.button("I don't know"):
                st.session_state.answers.append((st.session_state.current_question, None))
                st.session_state.questions_asked += 1
                st.session_state.current_question = generate_question(model, st.session_state.answers)
                st.rerun()

        # The game ends after 20 questions
        if st.session_state.questions_asked >= 20:
            st.session_state.game_over = True

    if st.session_state.game_over:
        st.subheader("Game Over!")
        st.write("I've run out of questions. What were you thinking of?")
        user_input = st.text_input("Enter what you were thinking of:")
        if user_input:
            st.write(f"Ah! I was thinking of {user_input}. Let's play again!")
Let's play again!") st.session_state.clear() st.rerun() if st.button("Play Again"): st.session_state.clear() st.rerun() if __name__ == "__main__": main() # Initialize the game def init_game(): if 'questions_asked' not in st.session_state: st.session_state.questions_asked = 0 st.session_state.answers = [] st.session_state.game_over = False st.session_state.current_question = "Is it a living thing?" # Load the LLM model @st.cache_resource def load_model(): return pipeline("text-generation", model="gpt2") def generate_question(model, previous_answers): if not previous_answers: return "Is it a living thing?" # Create a prompt for the LLM based on previous answers prompt = "We're playing a guessing game. Here are the previous Q&A:\n" for i, (q, a) in enumerate(previous_answers, 1): prompt += f"{i}. Q: {q} A: {'Yes' if a else 'No'}\n" prompt += "What should be the next yes/no question to narrow down the possible options?\nQ:" # Generate the next question response = model(prompt, max_length=100, num_return_sequences=1) question = response[0]['generated_text'].split("Q:")[-1].strip() # Clean up the question (remove anything after newlines or multiple questions) question = question.split("\n")[0].split("?")[0] + "?" if "?" not in question else question.split("?")[0] + "?" return question def main(): st.title("KASOTI - The Guessing Game") # Initialize the model and game state model = load_model() init_game() # Display game header st.header("Think of a famous person, place, or object") st.write(f"Questions asked: {st.session_state.questions_asked}/20") # Display the current question if not st.session_state.game_over: st.subheader(st.session_state.current_question) # Answer buttons col1, col2, col3 = st.columns(3) with col1: if st.button("Yes"): st.session_state.answers.append((st.session_state.current_question, True)) st.session_state.questions_asked += 1 st.session_state.current_question = generate_question(model, st.session_state.answers) st.rerun() with col2: if st.button("No"): st.session_state.answers.append((st.session_state.current_question, False)) st.session_state.questions_asked += 1 st.session_state.current_question = generate_question(model, st.session_state.answers) st.rerun() with col3: if st.button("I don't know"): st.session_state.answers.append((st.session_state.current_question, None)) st.session_state.questions_asked += 1 st.session_state.current_question = generate_question(model, st.session_state.answers) st.rerun() # Check if game is over if st.session_state.questions_asked >= 20: st.session_state.game_over = True # Game over state if st.session_state.game_over: st.subheader("Game Over!") st.write("I've run out of questions. What were you thinking of?") user_input = st.text_input("Enter what you were thinking of:") if user_input: st.write(f"Ah! I was thinking of {user_input}. Let's play again!") st.session_state.clear() st.rerun() if st.button("Play Again"): st.session_state.clear() st.rerun() if __name__ == "__main__": main()