import os
from dotenv import load_dotenv
import requests
import json
import gradio as gr
# Load environment variables
load_dotenv()
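# A minimal sketch of the .env file expected next to this script; the token
# value below is a hypothetical placeholder, not a real key:
#   HF_API_KEY=hf_xxxxxxxxxxxxxxxxxxxx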
# Access the Hugging Face API key and endpoint URL
hf_api_key = os.getenv('HF_API_KEY')
MODEL_NAME = "sshleifer/distilbart-cnn-12-6"
ENDPOINT_URL = f"https://api-inference.huggingface.co/models/{MODEL_NAME}"
def get_completion(inputs, parameters=None):
    """Send text to the Inference API endpoint and return the parsed JSON response."""
    headers = {
        "Authorization": f"Bearer {hf_api_key}",
        "Content-Type": "application/json"
    }
    data = {
        "inputs": inputs
    }
    if parameters is not None:
        data.update({"parameters": parameters})
    try:
        response = requests.post(ENDPOINT_URL, headers=headers, json=data)
        response.raise_for_status()
        return response.json()
    except requests.exceptions.RequestException as e:
        print(f"Request failed: {e}")
        return {"error": f"Request failed: {str(e)}"}
def summarize(input_text):
    """Summarize the input text and unwrap the model's summary_text field."""
    try:
        output = get_completion(input_text)
        if isinstance(output, list) and len(output) > 0 and 'summary_text' in output[0]:
            return output[0]['summary_text']
        else:
            return f"Unexpected response format: {output}"
    except Exception as e:
        return f"An error occurred: {str(e)}"
# Simple Gradio UI: a text box in, the summary text out
demo = gr.Interface(fn=summarize, inputs="text", outputs="text")
demo.launch()
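# A minimal sketch of calling the helper directly, bypassing the Gradio UI.
# The sample text and the max_length/min_length values are illustrative
# assumptions, not part of the app; the Inference API for this summarization
# model typically returns a list like [{"summary_text": "..."}], which
# summarize() unwraps above.
#
#   result = get_completion(
#       "The tower is 324 metres tall, about the same height as an "
#       "81-storey building, and the tallest structure in Paris.",
#       parameters={"max_length": 60, "min_length": 10},
#   )
#   print(result[0]["summary_text"])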