Spaces: Runtime error
import torch
from transformers import AutoTokenizer, AutoModelForCausalLM
from peft import PeftModel
import gradio as gr
import os
import zipfile
if os.path.exists("dorna-diabetes-finetuned-20250514T183411Z-1-001.zip") and not os.path.exists("dorna-diabetes-finetuned.zip"): | |
os.rename("dorna-diabetes-finetuned-20250514T183411Z-1-001.zip", "dorna-diabetes-finetuned.zip") | |
print("✅ اسم فایل تغییر کرد.") | |
if not os.path.exists("dorna-diabetes-finetuned"): | |
with zipfile.ZipFile("dorna-diabetes-finetuned.zip", "r") as zip_ref: | |
zip_ref.extractall(".") | |
print("✅ فایل ZIP اکسترکت شد.") | |
BASE_MODEL = "PartAI/Dorna-Llama3-8B-Instruct" | |
LORA_PATH = "./dorna-diabetes-finetuned" # این پوشه رو آپلود میکنی توی اسپیس | |
tokenizer = AutoTokenizer.from_pretrained(BASE_MODEL)

# Load the base model in half precision, spread across available devices.
base_model = AutoModelForCausalLM.from_pretrained(
    BASE_MODEL,
    device_map="auto",
    torch_dtype=torch.float16,
)
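# Note: an 8B model in float16 needs on the order of 16 GB of memory, which can
# itself be the source of a Space "Runtime error" on small hardware. A common
# alternative is a 4-bit quantized load (sketch, assuming a GPU and the
# bitsandbytes package are available in the Space):
#
#     from transformers import BitsAndBytesConfig
#     base_model = AutoModelForCausalLM.from_pretrained(
#         BASE_MODEL,
#         device_map="auto",
#         quantization_config=BitsAndBytesConfig(load_in_4bit=True),
#     )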
# Attach the fine-tuned LoRA adapter on top of the base weights.
model = PeftModel.from_pretrained(base_model, LORA_PATH)
def generate_response(prompt):
    input_ids = tokenizer(prompt, return_tensors="pt").input_ids.to(model.device)
    with torch.no_grad():
        output = model.generate(
            input_ids=input_ids,
            max_new_tokens=200,
            do_sample=True,
            temperature=0.7,
            top_p=0.9,
        )
    return tokenizer.decode(output[0], skip_special_tokens=True)
gr.Interface(fn=generate_response, inputs="text", outputs="text", title="Dorna-Llama3 LoRA").launch()
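One more thing worth noting: Dorna-Llama3-8B-Instruct is an instruction-tuned, Llama-3-based model, and generate_response feeds it the raw textbox string. A minimal sketch of routing the prompt through the tokenizer's chat template instead (assuming the Dorna tokenizer ships a Llama-3-style chat template, as *-Instruct checkpoints usually do):

def generate_chat_response(prompt):
    # Wrap the user text in the chat format the model was fine-tuned on.
    messages = [{"role": "user", "content": prompt}]
    input_ids = tokenizer.apply_chat_template(
        messages, add_generation_prompt=True, return_tensors="pt"
    ).to(model.device)
    with torch.no_grad():
        output = model.generate(input_ids=input_ids, max_new_tokens=200,
                                do_sample=True, temperature=0.7, top_p=0.9)
    # Decode only the newly generated tokens, not the templated prompt.
    return tokenizer.decode(output[0][input_ids.shape[-1]:], skip_special_tokens=True)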