```python
import gradio as gr
from transformers import pipeline

# This is the key line: it loads your private model from your *other*
# repository. The Space needs permission to read it, e.g. via an
# HF_TOKEN secret, which huggingface_hub picks up from the environment.
pipe = pipeline(
    "text-classification",
    model="kxshrx/infrnce-bert-classifier"
)

def classify_log(log_text):
    # Run the classifier and ask for the scores of every label.
    results = pipe(log_text, top_k=None)
    # Depending on the transformers version, a single input returns either
    # a flat list of dicts or a nested list; normalize before formatting.
    scores = results[0] if isinstance(results[0], list) else results
    # Format the result as {label: score}, which gr.Label expects.
    return {item["label"]: item["score"] for item in scores}

# This creates a simple web UI for testing and, more importantly,
# an API endpoint that we can call.
gr.Interface(
    fn=classify_log,
    inputs=gr.Textbox(lines=5, label="Log Entry"),
    outputs=gr.Label(num_top_classes=6),
    title="Infrnce Private Log Classifier API"
).launch()
```
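Once the Space is running, that API endpoint can be called programmatically. Below is a minimal sketch using the `gradio_client` package; the Space id `kxshrx/infrnce-log-classifier` and the token value are placeholders for your own, and `/predict` is the default endpoint name Gradio assigns to a `gr.Interface`.

```python
from gradio_client import Client

# Placeholder Space id and token: substitute your own Space repo,
# and pass a read-scoped access token if the Space itself is private.
client = Client("kxshrx/infrnce-log-classifier", hf_token="hf_xxx")

# Call the same function the UI uses, via the auto-generated endpoint.
result = client.predict(
    "ERROR: connection timed out while contacting the auth service",
    api_name="/predict",
)
print(result)
```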