import gradio as gr
from transformers import pipeline

# Load the DeepSeek-VL2-Small model via the image-text-to-text pipeline
# (trust_remote_code=True lets it run the model's custom code from the Hub).
pipe = pipeline("image-text-to-text", model="deepseek-ai/deepseek-vl2-small", trust_remote_code=True)
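
# Since the pipeline task is "image-text-to-text", it can also take images.
# A minimal sketch (assuming the transformers chat-message format; the URL is
# hypothetical and this is not wired into the UI below):
#
#   messages = [{"role": "user", "content": [
#       {"type": "image", "url": "https://example.com/sample.jpg"},
#       {"type": "text", "text": "Describe this image."},
#   ]}]
#   outputs = pipe(text=messages, max_new_tokens=128)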

def chatbot(user_input):
    """
    Process the user's text input with DeepSeek-VL2-Small and return the reply.
    """
    # Chat-style input; content is a list of typed parts, as image-text-to-text
    # pipelines expect (text-only here, no image attached).
    messages = [{"role": "user", "content": [{"type": "text", "text": user_input}]}]
    response = pipe(text=messages)
    if isinstance(response, list) and len(response) > 0:
        generated = response[0]["generated_text"]
        # With chat input, generated_text may be the whole conversation
        # (a list of messages); in that case return the last (assistant) turn.
        if isinstance(generated, list):
            return generated[-1]["content"]
        return generated
    return "No response received."

# Create a Gradio interface around the chatbot function
demo = gr.Interface(
    fn=chatbot,
    inputs=gr.Textbox(lines=2, placeholder="Ask something..."),
    outputs="text",
    title="DeepSeek-VL2 Chatbot",
    description="Ask questions and get AI-generated responses using DeepSeek-VL2-Small.",
)

# Launch the Gradio app
if __name__ == "__main__":
    demo.launch()