import re

import joblib
import nltk
from fastapi import FastAPI
from nltk.corpus import stopwords
from nltk.stem import PorterStemmer
from pydantic import BaseModel

# Download the NLTK stop-word list used by preprocess_text().
nltk.download('stopwords')

app = FastAPI()

# Load the model pipeline
pipeline = joblib.load('spam_classifier_pipeline.joblib')
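
# The joblib artifact is assumed to bundle vectorization and classification in a
# single scikit-learn Pipeline. A minimal sketch of how such an artifact might be
# produced (the vectorizer, classifier, and training data below are assumptions,
# not taken from this repo):
#
#     from sklearn.pipeline import Pipeline
#     from sklearn.feature_extraction.text import TfidfVectorizer
#     from sklearn.naive_bayes import MultinomialNB
#
#     train_pipeline = Pipeline([
#         ("tfidf", TfidfVectorizer()),   # text -> sparse feature vectors
#         ("clf", MultinomialNB()),       # Naive Bayes text classifier
#     ])
#     train_pipeline.fit(train_texts, train_labels)  # hypothetical training data
#     joblib.dump(train_pipeline, "spam_classifier_pipeline.joblib")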

class EmailRequest(BaseModel):
    subject: str
    body: str

def preprocess_text(text):
    # Lowercase, strip non-letter characters, drop English stop words, and stem.
    text = text.lower()
    text = re.sub(r'[^a-zA-Z\s]', '', text)
    words = text.split()
    stop_words = set(stopwords.words('english'))
    words = [word for word in words if word not in stop_words]
    stemmer = PorterStemmer()
    words = [stemmer.stem(word) for word in words]
    return ' '.join(words)
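
# Illustrative example (exact stems depend on the Porter stemmer):
#     preprocess_text("Claim your FREE prize now!!!")
#     # lowercased, non-letters stripped, stop words ("your", "now") dropped,
#     # remaining words stemmed -> something like "claim free prize"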

@app.post("/predict")
async def predict(email: EmailRequest):
    # Preprocess the combined subject and body, then classify with the pipeline.
    processed_text = preprocess_text(email.subject + ' ' + email.body)
    prediction = pipeline.predict([processed_text])[0]
    # Map the predicted class index to a human-readable label.
    return {'prediction': ['ham', 'not_spam', 'spam'][prediction]}

@app.get("/")
async def root():
    return {"message": "Spam Classification API"}