import gradio as gr
from transformers import pipeline

# Load the fine-tuned model from the Hugging Face Hub
model = pipeline("text2text-generation", model="unica/CLiMA")  # swap in your own model repo name if needed


# Define the prompt template (adjust if your model expects a different format)
def format_prompt(user_input):
    return f"Identify causal relations in the following clinical narrative:\n\n{user_input}\n\nCausal relations:"

# Define prediction function
def generate_relations(text):
    prompt = format_prompt(text)
    # Greedy decoding (do_sample=False); output length capped at 512 tokens
    result = model(prompt, max_length=512, do_sample=False)
    return result[0]["generated_text"]

# Gradio interface
demo = gr.Interface(
    fn=generate_relations,
    inputs=gr.Textbox(lines=10, label="Clinical Note or Drug Review Text"),
    outputs=gr.Textbox(label="Extracted Causal Relations"),
    title="Causal Relation Extractor with MedLlama",
    description="Paste your clinical note or drug review. This AI agent extracts drug-condition or symptom causal relations using a fine-tuned LLM.",
    examples=[
        ["Patient reported severe headaches after starting amitriptyline."],
        ["Lisinopril helped reduce the patient's blood pressure but caused persistent cough."],
        ["After using Metformin, the patient experienced gastrointestinal discomfort."]
    ]
)
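
# Optional smoke test (a sketch): runs one example sentence straight through
# generate_relations; uncomment to try the model locally before launching the UI.
# print(generate_relations("The patient developed a rash after starting penicillin."))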

# Launch the app
demo.launch()