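# BlogChecker AI: a small Gradio demo that (1) pulls the text of a blog post from a
# URL, (2) asks a FLAN-T5 model for suggested revisions, and (3) returns the approved
# version. Assumed dependencies (not pinned in the original): gradio, newspaper3k,
# transformers, and a backend such as torch.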
import gradio as gr
from newspaper import Article
from transformers import pipeline

# Load a small instruction-tuned text-to-text model to generate review suggestions
reviewer = pipeline("text2text-generation", model="google/flan-t5-base")
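# Note: flan-t5-base has a limited input window (about 512 tokens), so very long
# posts will be truncated before review.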

# Step 1: Extract the main article text from the blog URL
def extract_text_from_url(url):
    try:
        article = Article(url)
        article.download()
        article.parse()
        return article.text
    except Exception as e:
        # Surface download/parse failures in the UI instead of raising
        return f"Error: {e}"

# Step 2: Ask the model to review the content and suggest revisions
def review_blog(text):
    prompt = f"""Please review the following blog content. Fix grammar, remove crude or offensive language, and flag policy-violating parts. Suggest replacements:

{text}"""
    # truncation=True keeps over-long inputs within the model's context window
    result = reviewer(prompt, max_new_tokens=300, truncation=True)[0]['generated_text']
    return result

# Step 3: Finalize after approval
def finalize_text(original_text, reviewed_suggestions):
    # For simplicity, we'll just return reviewed version for now
    # Later you can implement side-by-side diff + selective replacement
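    # A possible sketch for that later step (not wired in yet), using difflib:
    #   import difflib
    #   diff = difflib.unified_diff(original_text.splitlines(),
    #                               reviewed_suggestions.splitlines(), lineterm="")
    #   return "\n".join(diff)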
    return reviewed_suggestions

# Gradio UI
with gr.Blocks() as app:
    gr.Markdown("## 🧠 BlogChecker AI")
    
    url = gr.Textbox(label="Enter Blog URL")
    blog_content = gr.Textbox(label="Extracted Blog Text", lines=10)
    reviewed = gr.Textbox(label="Suggested Revisions", lines=10)
    final_output = gr.Textbox(label="Final Clean Blog", lines=10)

    extract_btn = gr.Button("1️⃣ Extract Blog Text")
    review_btn = gr.Button("2️⃣ Review Content")
    approve_btn = gr.Button("3️⃣ Approve and Finalize")

    extract_btn.click(fn=extract_text_from_url, inputs=url, outputs=blog_content)
    review_btn.click(fn=review_blog, inputs=blog_content, outputs=reviewed)
    approve_btn.click(fn=finalize_text, inputs=[blog_content, reviewed], outputs=final_output)

app.launch()
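
# launch() serves the app locally (Gradio defaults to http://127.0.0.1:7860);
# pass share=True for a temporary public link.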