import os
import gradio as gr
from openai import OpenAI

# Load API key securely from Hugging Face secrets or environment
api_key = os.getenv("NV_API_KEY")
if not api_key:
    raise ValueError("Please set the NV_API_KEY environment variable in your Hugging Face Space.")
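# (On Hugging Face Spaces, NV_API_KEY can be added as a secret in the Space's settings.)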

# NVIDIA-compatible OpenAI client
client = OpenAI(
    base_url="https://integrate.api.nvidia.com/v1",
    api_key=api_key
)

# System message
system_prompt = {
    "role": "system",
    "content": "You are a helpful assistant to answer user queries."
}

# Main chat function; conversation memory comes from the history Gradio passes in
def get_text_response(user_message, history):
    # Convert Gradio history to OpenAI format
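    # Note: this assumes Gradio's tuple-style history ([[user_msg, assistant_msg], ...]);
    # if the ChatInterface were created with type="messages", history would already be a
    # list of role/content dicts and this conversion would not be needed.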
    formatted_history = [{"role": "user" if i % 2 == 0 else "assistant", "content": msg}
                         for i, msg in enumerate(sum(history, []))]

    # Combine system prompt, history, and current user input
    messages = [system_prompt] + formatted_history + [{"role": "user", "content": user_message}]

    # Stream the completion from the API, accumulating the chunks into one reply
    response = ""
    completion = client.chat.completions.create(
        model="nvidia/llama-3.1-nemotron-70b-instruct",
        messages=messages,
        temperature=0.5,
        top_p=1,
        max_tokens=100,
        stream=True
    )

    for chunk in completion:
        delta = chunk.choices[0].delta
        if delta and delta.content:
            response += delta.content
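
    # (To stream tokens to the UI as they arrive, this function could instead `yield`
    # the growing `response` inside the loop above; gr.ChatInterface accepts generator functions.)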

    return response

# Gradio Chat Interface
demo = gr.ChatInterface(
    fn=get_text_response,
    theme="soft",
    textbox=gr.Textbox(placeholder="Ask me anything...", container=False),
    examples=["How are you doing?", "What are your interests?", "Which places do you like to visit?"]
)

if __name__ == "__main__":
    demo.queue().launch(share=True)
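
# Example usage (a sketch; assumes this file is saved as app.py and that the
# gradio and openai packages are installed):
#   export NV_API_KEY=<your-nvidia-api-key>
#   python app.py
# Note: share=True creates a temporary public link when run locally; on Hugging Face
# Spaces it is not needed and is ignored.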