# app.py
import os
from transformers import AutoTokenizer, AutoModelForCausalLM, pipeline
from fastapi import FastAPI
from pydantic import BaseModel

hf_token = os.getenv("HF_TOKEN")  # access token read from the environment (e.g. a Space secret)
model_path = "./capybara-finetuned"  # local model directory or a Hugging Face Hub repo id
tokenizer = AutoTokenizer.from_pretrained(
    model_path,
    token=hf_token,
    trust_remote_code=True,
    use_fast=False,
)
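
# device_map="auto" and torch_dtype="auto" let transformers choose device placement and dtype
# automatically (device_map="auto" relies on the accelerate package being installed).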
model = AutoModelForCausalLM.from_pretrained(
    model_path,
    token=hf_token,
    device_map="auto",
    torch_dtype="auto",
    trust_remote_code=True,
)
pipe = pipeline("text-generation", model=model, tokenizer=tokenizer)
app = FastAPI()
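
# Request body: a single "text" field containing the question to classify.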
class Input(BaseModel):
    text: str

@app.post("/classify")
async def classify(input: Input):
    # Wrap the incoming text in the prompt format used during fine-tuning.
    prompt = f"### Вопрос:\n{input.text}\n\n### Класс:"
    output = pipe(prompt, max_new_tokens=10, do_sample=False)[0]["generated_text"]
    # Keep the first whitespace-separated token after the "### Класс:" marker as the label.
    label = output.split("### Класс:")[-1].strip().split()[0].lower()
    return {"label": label}
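
Once the app is running (for example locally via uvicorn app:app, or inside the Space), the classifier can be queried with a plain HTTP POST. Below is a minimal client sketch using the requests library; the base URL and the example text are placeholders and should be replaced with wherever the app is actually served and a real question.

# client.py (illustrative)
import requests

# Replace the base URL with the address of the running app or deployed Space.
resp = requests.post(
    "http://localhost:8000/classify",
    json={"text": "question text to classify"},
)
print(resp.json())  # e.g. {"label": "..."}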