from bertopic import BERTopic
import streamlit as st
import streamlit.components.v1 as components
from datasets import load_dataset
import pandas as pd
from sentence_transformers import SentenceTransformer
from umap import UMAP
from hdbscan import HDBSCAN
from sklearn.feature_extraction.text import CountVectorizer

st.set_page_config(
    page_title='eRupt Topic Trendy (e-Commerce x Social Media)',
    page_icon=None,
    layout='centered',
    initial_sidebar_state='auto',
)

# Page title (rendered as raw HTML).
st.markdown("<h1 style='text-align: center;'>Topic Trendy</h1>", unsafe_allow_html=True)

# Earlier experiment: load a saved model, or build one from custom embedding,
# UMAP, HDBSCAN and vectorizer components. Kept here for reference.
#BerTopic_model = BERTopic.load("my_topics_model")
#sentence_model = SentenceTransformer("all-MiniLM-L6-v2")
#umap_model = UMAP(n_neighbors=15, n_components=2, min_dist=0.1, metric="cosine")
#hdbscan_model = HDBSCAN(min_cluster_size=5, min_samples=3, metric="euclidean", prediction_data=True)
#vectorizer_model = CountVectorizer(lowercase=True, ngram_range=(1, 3), analyzer="word", max_df=1.0, min_df=0.5, stop_words="english")
#kw_model = BERTopic(embedding_model=sentence_model, umap_model=umap_model, hdbscan_model=hdbscan_model, vectorizer_model=vectorizer_model, nr_topics="auto", calculate_probabilities=True)
#BerTopic_model = kw_model

input_text = st.text_area("Enter product topic here")

# Load the TikTok dataset and pull out timestamps and document texts.
topic = pd.read_csv('./Data/tiktok_utf8.csv')
timestamps = topic.date.to_list()
tiktok = topic.text.to_list()

vectorizer_model = CountVectorizer(stop_words="english")
topic_model = BERTopic(verbose=True, vectorizer_model=vectorizer_model)

# Wait until a search term has been entered before fitting and querying.
if not input_text:
    st.stop()

# The model must be fitted before find_topics can be called.
topics, probs = topic_model.fit_transform(tiktok)

# Find the 20 topics most similar to the entered search term.
similar_topics, similarity = topic_model.find_topics(input_text, top_n=20)
most_similar = similar_topics[0]
print(similar_topics[0])
print("Most Similar Topic Info: \n{}".format(topic_model.get_topic(most_similar)))
print("Similarity Score: {}".format(similarity[0]))

# get_topic returns a list of (word, score) tuples; convert it to a string
# so it can be shown in the text area.
answer_as_string = str(topic_model.get_topic(most_similar))
st.text_area("Most Similar Topic List is Here", answer_as_string, key="topic_list")

st.image('https://freepngimg.com/download/keyboard/6-2-keyboard-png-file.png', use_column_width=True)
# Footer credit (rendered as raw HTML).
st.markdown("<p style='text-align: center;'>Created By LiHE</p>", unsafe_allow_html=True)