Deepseek-Model / app.py
import gradio as gr
from transformers import AutoTokenizer, AutoModelForCausalLM
import torch

# Model checkpoint pulled from the Hugging Face Hub
model_id = "deepseek-ai/deepseek-coder-6.7b-base"

tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(
    model_id,
    torch_dtype=torch.float16,   # half precision to reduce memory use
    device_map="auto",           # place weights on GPU if available, otherwise CPU
    trust_remote_code=True,
)
def chat(prompt):
    # Tokenize the prompt and move the tensors to the model's device
    inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
    # Sample up to 256 new tokens, then decode the full sequence back to text
    outputs = model.generate(**inputs, max_new_tokens=256, do_sample=True)
    return tokenizer.decode(outputs[0], skip_special_tokens=True)
gr.Interface(
    fn=chat,
    inputs="text",
    outputs="text",
    title="DeepSeek Coder 6.7B",
    description="Free coding assistant running on Hugging Face CPU",
).launch()
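
Once the Space is up, the Interface above can also be queried from another Python process instead of through the web UI. The snippet below is a minimal sketch using the gradio_client package; the Space id "Balaramkm/Deepseek-Model" is an assumption based on this repository's path and may differ from the actual deployment. Run it as a separate script, not as part of app.py.

# client_example.py: minimal sketch of calling the deployed Gradio app programmatically.
# Assumes the Space is public and named "Balaramkm/Deepseek-Model" (hypothetical id).
from gradio_client import Client

client = Client("Balaramkm/Deepseek-Model")  # hypothetical Space id
result = client.predict(
    "Write a Python function that reverses a string.",  # maps to the `prompt` argument of chat()
    api_name="/predict",  # default endpoint name for a single gr.Interface
)
print(result)

Because the server calls model.generate with do_sample=True, repeated requests with the same prompt will return different completions.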