import gradio as gr
from huggingface_hub import InferenceClient
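
# Remote client for the hosted Llama 3 8B Instruct model; inference runs on
# the Hugging Face Inference API, so no model weights are loaded locally.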
client = InferenceClient(
    "meta-llama/Meta-Llama-3-8B-Instruct",
)
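
# chat_mem rebuilds the full message list (system prompt + previous turns +
# the new user message) on every call, queries the model, and returns the
# updated tuple-style history that gr.Chatbot renders.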
def chat_mem(message, chat_history):
    print(len(chat_history))
    chat_history_role = [{"role": "system", "content": "You are a medical psychologist called LASO AI that revolutionizes with the warmth of traditional psychology. Use cognitive behavioral therapy. Be mindful, and concise to your responses"},]
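    # Replay earlier turns: each Gradio history entry is a (user, assistant)
    # tuple that becomes two role-tagged messages.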
    if chat_history != []:
        for i in range(len(chat_history)):
            chat_history_role.append({"role": "user", "content": chat_history[i][0]})
            chat_history_role.append({"role": "assistant", "content": chat_history[i][1]})
    chat_history_role.append({"role": "user", "content": message})
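    # Request the next assistant reply (non-streaming, capped at 500 tokens).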
    chat_completion = client.chat_completion(
        messages=chat_history_role,
        max_tokens=500,
        # stream=True
    )
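    # Append the reply, then flatten the role messages back into the
    # (user, assistant) tuples gr.Chatbot expects; index 0 of the flattened
    # list is the system prompt, so pairs start at index 1. Returning "" as
    # the first output clears the input textbox.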
    chat_history_role.append({"role": "assistant", "content": chat_completion.choices[0].message.content})
    print(chat_history_role)
    modified = map(lambda x: x["content"], chat_history_role)
    a = list(modified)
    chat_history = [(a[i * 2 + 1], a[i * 2 + 2]) for i in range(len(a) // 2)]
    return "", chat_history
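
# Gradio Blocks UI: chatbot panel, input textbox, and clear/send buttons.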
with gr.Blocks() as demo:
    with gr.Row():
        with gr.Column():
            chatbot = gr.Chatbot(label='Laso AI Psychologist')
            msg = gr.Textbox(interactive=True, label='Input')
            with gr.Row():
                clear = gr.ClearButton([msg, chatbot], icon="🧹")
                send_btn = gr.Button("Send 📨", variant='primary')
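
    # Pressing Enter in the textbox and clicking Send both call chat_mem,
    # which clears the textbox and refreshes the chatbot history.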
    msg.submit(fn=chat_mem, inputs=[msg, chatbot], outputs=[msg, chatbot])
    send_btn.click(fn=chat_mem, inputs=[msg, chatbot], outputs=[msg, chatbot])

if __name__ == "__main__":
    demo.launch()