File size: 2,075 Bytes
3fc90ac
 
 
 
 
 
 
 
 
307ef18
3fc90ac
307ef18
 
3fc90ac
 
 
 
 
 
419dda8
 
bd6543e
3fc90ac
4ff2380
3fc90ac
dda9f86
38b0f6e
 
307ef18
 
3fc90ac
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
dda9f86
3fc90ac
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67

# Point Streamlit's home at a writable location and silence its
# logging/telemetry — needed on read-only hosts (e.g. Hugging Face
# Spaces), where only /tmp is guaranteed writable.
import os

os.environ.update({
    "STREAMLIT_HOME": "/tmp",  # safe writable dir on Hugging Face
    "STREAMLIT_DISABLE_LOGGING": "1",
    "STREAMLIT_TELEMETRY_ENABLED": "0",
})


import streamlit as st
from huggingface_hub import InferenceClient


# Hugging Face Inference client routed through the Cohere provider.
# The API token is read from the environment (a Spaces secret named
# "hf_token") — NOTE(review): assumes that env var is set at deploy time.
hf_token = os.getenv("hf_token")
client = InferenceClient(provider="cohere", api_key=hf_token)

# Streamlit page chrome (browser-tab title and icon).
st.set_page_config(page_title="تمّام – مساعدك الذكي", page_icon="🤖")
st.title("مرحبًا!")

# System prompt prepended to every model request. The text is runtime data
# sent to the model, so it is kept exactly as written (mixed Arabic/English;
# presumably describes two brothers, Anas and Ahmed, and asks the model to
# play games with them — confirm intent with the author before editing).
SYSTEM_PROMPT = """
   
عمر انس 10mو احمد 12 الغب معهم العاب   , اليك هذه المعلومات عن انس و احمد و هما اخوين  
         Play with them truth or dare and all games when they want 
"""

# Conversation history lives in session state so it survives Streamlit reruns.
if "messages" not in st.session_state:
    st.session_state.messages = []

# Re-render the stored conversation on every rerun.
for message in st.session_state.messages:
    role = message["role"]
    content = message["content"]
    with st.chat_message(role):
        st.markdown(content)

# Chat input box pinned to the bottom of the page.
prompt = st.chat_input("اكتب رسالتك هنا...")

if prompt:
    # Record and echo the user's message.
    st.session_state.messages.append({"role": "user", "content": prompt})
    with st.chat_message("user"):
        st.markdown(prompt)

    # Build the request: system prompt first, then the full history.
    # BUG FIX: was `S YSTEM_PROMPT` (stray space inside the name) — a
    # SyntaxError that prevented the script from running at all.
    messages = [{"role": "system", "content": SYSTEM_PROMPT}]
    messages += st.session_state.messages

    # Query the model; surface any failure as a chat reply rather than
    # crashing the Streamlit script mid-rerun.
    try:
        completion = client.chat.completions.create(
            model="CohereLabs/c4ai-command-r7b-arabic-02-2025",
            messages=messages,
            max_tokens=512,
        )
        model_reply = completion.choices[0].message.content
    except Exception as e:
        model_reply = f"❌ فشل الاتصال بالنموذج: {e}"

    # Persist and render the assistant's reply.
    st.session_state.messages.append({"role": "assistant", "content": model_reply})
    with st.chat_message("assistant"):
        st.markdown(model_reply)