"""Gradio demo: summarize radiology reports with a BART model from the HF Hub.

Downloads model weights and tokenizer data from the Hugging Face Hub at import
time, then serves a text-to-text Gradio interface.
"""
import json

import gradio as gr
import tensorflow as tf
from huggingface_hub import hf_hub_download

# Download the weights file once (hf_hub_download caches locally) and load it.
# NOTE(review): tf_model.h5 holds HF-Transformers BART weights, not a saved
# Keras model; tf.keras.models.load_model() is unlikely to reconstruct it, and
# the resulting object would have no .generate() method. The intended loader is
# transformers.TFAutoModelForSeq2SeqLM.from_pretrained("Mbilal755/Radiology_Bart")
# — confirm and switch once the `transformers` dependency is available.
model_path = hf_hub_download(repo_id="Mbilal755/Radiology_Bart", filename="tf_model.h5")
model = tf.keras.models.load_model(model_path)

# Download and parse the tokenizer definition shipped with the model.
tokenizer_path = hf_hub_download(repo_id="Mbilal755/Radiology_Bart", filename="tokenizer.json")
with open(tokenizer_path, encoding="utf-8") as f:
    tokenizer_data = json.load(f)

# BUG(review): json.load() returns a plain dict; a HF tokenizer.json has no
# top-level "tokenizer" key, and a dict has no .encode()/.decode(), so
# summarize() below will fail at runtime. The intended object is
# transformers.AutoTokenizer.from_pretrained("Mbilal755/Radiology_Bart") (or
# tokenizers.Tokenizer.from_file(tokenizer_path)) — fix when the dependency
# can be added.
tokenizer = tokenizer_data["tokenizer"]


def summarize(text):
    """Return a model-generated summary of *text* (a radiology report)."""
    # Encode the input to token ids; see BUG note above — `tokenizer` is
    # currently a dict, so this call is expected to raise AttributeError.
    inputs = tokenizer.encode(text)
    # Run seq2seq generation. NOTE(review): .generate() typically expects a
    # batched tensor (shape (1, seq_len)), not a flat id list — confirm once
    # the model loads via transformers.
    summary_ids = model.generate(inputs)
    summary = tokenizer.decode(summary_ids)
    return summary


# Plain text-in / text-out UI around summarize().
iface = gr.Interface(fn=summarize, inputs="text", outputs="text")

if __name__ == "__main__":
    iface.launch(share=True)