# Hugging Face Space: fastai text-classifier demo served through Gradio.
# Loads pretrained weights from the Hub and answers questions about "rahul7star".
import os

import pandas as pd
import torch
from fastai.text.all import *
from huggingface_hub import hf_hub_download
# Step 1: Recreate the DataLoaders used during training
def create_dataloaders():
    """Rebuild the tiny Q/A DataLoaders used during training.

    Each canned answer string is treated as a distinct category, so the
    task is text classification: question in, answer-label out.

    Returns:
        DataLoaders: fastai DataLoaders with an 80/20 random split (seed=42).
    """
    data = [
        {"input": "Who is rahul7star?", "output": "Rahul7star is a software developer living in NSW."},
        {"input": "What is Rahul7star's profession?", "output": "Rahul7star is an AI creator and software developer."},
        {"input": "What does Rahul7star do?", "output": "Rahul7star works as a developer and enjoys solving complex coding problems."},
        {"input": "Tell me about Rahul7star", "output": "Rahul7star is a talented developer, AI creator, and a fan of traveling."},
        {"input": "What is Rahul7star known for?", "output": "Rahul7star is known for his work in AI, software development, and his love for coding."},
    ]
    # TextBlock.from_df expects a DataFrame, not a list of dicts.
    df = pd.DataFrame(data)

    dblock = DataBlock(
        blocks=(TextBlock.from_df(text_cols='input'), CategoryBlock),
        # TextBlock.from_df writes the tokenized text into a 'text' column;
        # ColReader (unlike a lambda) also keeps the DataBlock picklable.
        get_x=ColReader('text'),
        get_y=ColReader('output'),
        # RandomSplitter handles the train/valid split, so no manual slicing.
        splitter=RandomSplitter(valid_pct=0.2, seed=42),
    )
    # Small batch size for quick experimentation on a tiny dataset.
    return dblock.dataloaders(df, bs=2)
# Step 2: Load the model weights
def load_model_weights():
    """Download the trained weights from the Hub and load them into a learner.

    Returns:
        Learner: a text classifier (AWD_LSTM) with the downloaded weights.
    """
    model_path = hf_hub_download(
        repo_id="rahul7star/fastai-rahul-text-model-v02",
        filename="rahul9star.pth",
    )
    # Recreate the same architecture used during training before loading.
    learn = text_classifier_learner(create_dataloaders(), AWD_LSTM, metrics=accuracy)
    # Learner.load appends '.pth' to the name it is given; hf_hub_download
    # already returns a path ending in '.pth', so strip the suffix to avoid
    # looking for 'rahul9star.pth.pth'.
    learn.load(os.path.splitext(model_path)[0])
    print("Model loaded successfully from Hugging Face.")
    return learn
# Step 3: load the model once at startup so the Gradio handler can reuse it.
learn = load_model_weights()

# Defensive guard: fail fast if loading somehow produced nothing.
if learn is None:
    raise ValueError("Failed to load the model")
# Step 4: Define the Gradio Interface
def predict(input_text):
    """Classify `input_text` with the global learner and format the result.

    Returns a human-readable string: either the predicted label with its
    confidence, or an error message if inference fails.
    """
    try:
        label, _, probabilities = learn.predict(input_text)
        return f"Prediction: {label}, Confidence: {probabilities.max():.2f}"
    except Exception as exc:
        # Surface the failure in the UI instead of crashing the app.
        return f"Error during prediction: {exc}"
# Step 5: Create Gradio Interface
import gradio as gr


def main():
    """Wire the prediction function into a simple text-in/text-out UI."""
    gr.Interface(fn=predict, inputs="text", outputs="text").launch()


# Guard the launch so importing this module doesn't start a server;
# Spaces execute the script directly, so runtime behavior is unchanged.
if __name__ == "__main__":
    main()