Update app.py
app.py
CHANGED
@@ -2,25 +2,25 @@ import gradio as gr
 from huggingface_hub import InferenceClient
 import os
 
-# API
+# Load API key from environment variables
 HF_API_TOKEN = os.getenv("HUG_TOKEN_READ")
 
 # Hugging Face Inference API Client
 client = InferenceClient(model="mistralai/Mistral-7B-Instruct-v0.1", token=HF_API_TOKEN)
 
-#
+# Function to translate text into emojis
 def text_to_emoji(text):
-    prompt = f"Convert this sentence into an
+    prompt = f"Convert this sentence into an emoji-sequence of the same meaning and return only the emojis, no explanation:\n\n\"{text}\""
     response = client.text_generation(prompt, max_new_tokens=50)
     return response
 
 # Gradio UI
 iface = gr.Interface(
     fn=text_to_emoji,
-    inputs=gr.Textbox(lines=2, placeholder="
+    inputs=gr.Textbox(lines=2, placeholder="Enter a sentence..."),
     outputs="text",
-    title="
-    description="
+    title="AI-Powered Emoji Translator",
+    description="Enter a sentence, and the AI will transform it into an emoji-version!"
 )
 
 iface.launch()
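For a quick local check of the updated function without launching the Gradio UI, a minimal sketch along these lines can be used. It assumes `HUG_TOKEN_READ` is exported in the shell and simply reuses the client, model, and prompt from the updated app.py; the `__main__` guard and the example sentence are illustrative additions, not part of the commit.

```python
import os
from huggingface_hub import InferenceClient

# Same token and model as in app.py (assumes HUG_TOKEN_READ is set in the environment)
HF_API_TOKEN = os.getenv("HUG_TOKEN_READ")
client = InferenceClient(model="mistralai/Mistral-7B-Instruct-v0.1", token=HF_API_TOKEN)

def text_to_emoji(text):
    prompt = (
        "Convert this sentence into an emoji-sequence of the same meaning "
        f"and return only the emojis, no explanation:\n\n\"{text}\""
    )
    # With default arguments, text_generation() returns the generated string directly
    return client.text_generation(prompt, max_new_tokens=50)

if __name__ == "__main__":
    # Illustrative test sentence
    print(text_to_emoji("I love pizza and sunshine"))
```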