Upload 3 files
- app.py +87 -0
- index.html +500 -0
- requirements.txt +76 -0
app.py
ADDED
@@ -0,0 +1,87 @@
from flask import Flask, request, jsonify
from flask_cors import CORS
from transformers import pipeline
import os  # Needed to read environment variables

app = Flask(__name__)
CORS(app)  # Enable CORS so the frontend can call this API

# Hugging Face model name
MODEL_ID = "sarvamai/sarvam-m"

# Holds the text-generation pipeline once it is loaded
pipe = None

# Load the model.
# Called at startup so the model is ready before the first request.
def load_model():
    global pipe
    try:
        print(f"Cargando modelo '{MODEL_ID}'. Esto puede tardar unos minutos...")
        # The Hugging Face token is picked up automatically from the HF_TOKEN
        # environment variable, which you configure as a "Secret" in Hugging Face Spaces,
        # so it does not need to be passed explicitly here.
        # device=-1 uses the CPU; device=0 uses the GPU if the hardware provides one.
        pipe = pipeline("text-generation", model=MODEL_ID, device=-1)
        print(f"Modelo '{MODEL_ID}' cargado con éxito.")
    except Exception as e:
        print(f"Error al cargar el modelo '{MODEL_ID}': {e}")
        pipe = None
        print("El servidor no podrá responder a las solicitudes de IA.")

# Load the model when the Flask application starts.
# Flask 2.3+ removed app.before_first_request (and requirements.txt pins Flask 3.x),
# so instead of registering an init hook the model is simply loaded once
# at import time.
load_model()

@app.route('/chat', methods=['POST'])
def chat():
    if pipe is None:
        return jsonify({"error": "El modelo de IA no está cargado. Por favor, revisa el log del servidor."}), 503

    data = request.json
    messages = data.get('messages')  # The frontend sends the full message history

    if not messages:
        return jsonify({"error": "No se proporcionaron mensajes."}), 400

    try:
        # Build a simple prompt for the text-generation model by
        # concatenating the message history.
        # This is a simplification; chat models usually expect a specific template.
        prompt = ""
        for msg in messages:
            if msg['role'] == 'user':
                prompt += f"Usuario: {msg['content']}\n"
            elif msg['role'] == 'assistant':
                prompt += f"Asistente: {msg['content']}\n"
        prompt += "Asistente:"  # Signal that the assistant's reply comes next

        # Tune max_new_tokens, temperature and do_sample as needed
        response = pipe(prompt, max_new_tokens=250, temperature=0.7, do_sample=True, clean_up_tokenization_spaces=True)

        # The pipeline returns a list; take the first element.
        generated_text = response[0]['generated_text']

        # Extract the AI's part of the reply. This can be tricky with text-generation
        # because the model echoes the prompt, so we take everything
        # after the last "Asistente:".
        ai_response_content = generated_text.rsplit("Asistente:", 1)[-1].strip()

        # Sometimes the model generates empty text or only whitespace.
        if not ai_response_content:
            ai_response_content = "Lo siento, no pude generar una respuesta clara. ¿Puedes intentarlo de nuevo?"

        return jsonify({"response": ai_response_content})

    except Exception as e:
        print(f"Error en la inferencia de la IA: {e}")
        return jsonify({"error": f"Error interno del servidor: {str(e)}"}), 500

if __name__ == '__main__':
    # Read the port from the environment variable provided by Hugging Face Spaces
    # (the default on Spaces is 7860).
    port = int(os.environ.get('PORT', 7860))
    # NEVER use debug=True in production! Development only.
    app.run(debug=False, host='0.0.0.0', port=port)
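The comment in chat() notes that concatenating "Usuario:"/"Asistente:" lines is a simplification. A hedged sketch of the alternative (not part of the uploaded code) would build the prompt with the model's own chat template through the pipeline's tokenizer; it assumes pipe is the loaded pipeline and messages is the list of {"role", "content"} dicts this endpoint already receives:

    # Sketch only: use the model's chat template instead of manual concatenation.
    def build_prompt_with_template(pipe, messages):
        prompt = pipe.tokenizer.apply_chat_template(
            messages,                    # [{"role": "user"/"assistant", "content": ...}, ...]
            tokenize=False,              # return a string, not token ids
            add_generation_prompt=True,  # append the marker for the assistant's turn
        )
        return prompt

    # With such a prompt, the reply is whatever the model generates after it:
    # output = pipe(prompt, max_new_tokens=250, do_sample=True, temperature=0.7)
    # reply = output[0]["generated_text"][len(prompt):].strip()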
index.html
ADDED
@@ -0,0 +1,500 @@
<!DOCTYPE html>
<html lang="es">
<head>
    <meta charset="UTF-8">
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
    <title>Amside AI - Interfaz con IA Real</title>
    <style>
        :root {
            --primary-color: #22272b; /* Dark background */
            --secondary-color: #37404a; /* Darker grey */
            --accent-color: #00bcd4; /* Cyan */
            --text-color-primary: #eee;
            --text-color-secondary: #ccc;
            --error-color: #f44336; /* Red accent */
            --border-radius: 8px;
            --box-shadow: 0 8px 24px rgba(0, 0, 0, 0.2);
            --transition: all 0.3s ease-in-out;
            --sidebar-width: 300px;
        }

        body {
            font-family: 'Roboto Mono', monospace;
            background-color: var(--primary-color);
            color: var(--text-color-primary);
            margin: 0;
            display: flex;
            min-height: 100vh;
            padding: 0;
        }

        .sidebar {
            background-color: var(--secondary-color);
            width: var(--sidebar-width);
            border-right: 1px solid #444d57;
            padding: 25px;
            display: flex;
            flex-direction: column;
            align-items: flex-start;
            flex-shrink: 0;
        }

        .sidebar-header {
            color: var(--text-color-primary);
            margin-bottom: 30px;
            text-align: left;
            width: 100%;
        }

        .sidebar-header h2 {
            margin: 0;
            font-size: 1.8em;
            font-weight: 400;
        }

        .new-chat-button {
            background-color: var(--accent-color);
            color: var(--text-color-primary);
            border: none;
            padding: 12px 18px;
            border-radius: var(--border-radius);
            cursor: pointer;
            font-size: 1em;
            margin-bottom: 20px;
            transition: var(--transition);
            width: 100%;
            text-align: left;
            display: flex;
            align-items: center;
            justify-content: flex-start;
        }

        .new-chat-button svg {
            margin-right: 10px;
        }

        .chat-list-title {
            font-size: 0.9em;
            color: var(--text-color-secondary);
            margin-bottom: 10px;
            text-transform: uppercase;
            letter-spacing: 0.8px;
        }

        .chat-list {
            list-style: none;
            padding: 0;
            margin: 0;
            width: 100%;
        }

        .chat-list li {
            padding: 10px 15px;
            border-radius: var(--border-radius);
            margin-bottom: 6px;
            cursor: pointer;
            background-color: #2c343c;
            color: var(--text-color-secondary);
            transition: var(--transition);
            overflow: hidden;
            text-overflow: ellipsis;
            white-space: nowrap;
        }

        .chat-list li:hover {
            background-color: var(--accent-color);
            color: var(--text-color-primary);
        }

        .chat-container {
            background-color: var(--primary-color);
            border-radius: var(--border-radius);
            box-shadow: var(--box-shadow);
            display: flex;
            flex-direction: column;
            overflow: hidden;
            flex-grow: 1;
            margin: 20px;
            border: 1px solid #333a40;
        }

        .chat-header {
            background-color: var(--secondary-color);
            color: var(--text-color-primary);
            padding: 15px 20px;
            text-align: left;
            border-bottom: 1px solid #444d57;
            border-top-left-radius: var(--border-radius);
            border-top-right-radius: var(--border-radius);
            display: flex;
            align-items: center;
        }

        .chat-header button {
            background: none;
            border: none;
            color: var(--text-color-primary);
            font-size: 1em;
            cursor: pointer;
            margin-right: 20px;
            padding: 8px 12px;
            border-radius: var(--border-radius);
            transition: var(--transition);
        }

        .chat-header button:hover {
            background-color: #444d57;
        }

        .chat-header h2 {
            margin: 0;
            font-size: 1.4em;
            font-weight: 400;
        }

        .chat-body {
            padding: 20px;
            overflow-y: auto;
            flex-grow: 1;
            display: flex;
            flex-direction: column;
            gap: 12px;
        }

        .message {
            background-color: #2c343c;
            color: var(--text-color-secondary);
            border-radius: var(--border-radius);
            padding: 10px 14px;
            word-break: break-word;
            align-self: flex-start;
            max-width: 80%;
            box-shadow: 0 1px 3px rgba(0, 0, 0, 0.1);
            font-size: 0.95em;
        }

        .error-message {
            background-color: #422b2b;
            color: var(--error-color);
            border-radius: var(--border-radius);
            padding: 10px 14px;
            word-break: break-word;
            align-self: flex-start;
            max-width: 80%;
            box-shadow: 0 1px 3px rgba(0, 0, 0, 0.1);
            font-size: 0.95em;
            border: 1px solid #5a3232;
        }

        .user-message-container {
            display: flex;
            flex-direction: column;
            align-items: flex-end;
            max-width: 80%;
            align-self: flex-end;
        }

        .user-message {
            background-color: var(--accent-color);
            color: var(--text-color-primary);
            border-radius: var(--border-radius);
            padding: 10px 14px;
            word-break: break-word;
            box-shadow: 0 1px 3px rgba(0, 0, 0, 0.1);
            font-size: 0.95em;
            margin-bottom: 5px;
        }

        .loading-indicator {
            display: flex;
            align-self: flex-end;
            margin-top: 5px;
            align-items: center;
            font-size: 0.8em;
            color: var(--text-color-secondary);
        }

        .loading-spinner {
            border: 2px solid rgba(255, 255, 255, 0.1);
            border-top: 2px solid var(--accent-color);
            border-radius: 50%;
            width: 14px;
            height: 14px;
            animation: spin 1s linear infinite;
            margin-left: 6px;
        }

        @keyframes spin {
            0% { transform: rotate(0deg); }
            100% { transform: rotate(360deg); }
        }

        .input-area {
            padding: 15px;
            border-top: 1px solid #444d57;
            display: flex;
            align-items: center;
            background-color: var(--secondary-color);
            border-bottom-left-radius: var(--border-radius);
            border-bottom-right-radius: var(--border-radius);
        }

        .input-field {
            flex-grow: 1;
            padding: 10px 12px;
            border: 1px solid #555e68;
            border-radius: var(--border-radius);
            margin-right: 10px;
            font-size: 0.95em;
            color: var(--text-color-primary);
            background-color: #2c343c;
        }

        .input-field:focus {
            outline: none;
            border-color: var(--accent-color);
            box-shadow: 0 1px 3px rgba(0, 188, 212, 0.2); /* accent color */
        }

        .send-button {
            background-color: var(--accent-color);
            color: var(--text-color-primary);
            border: none;
            padding: 10px 15px;
            border-radius: var(--border-radius);
            cursor: pointer;
            font-size: 0.95em;
            transition: var(--transition);
        }

        .send-button:hover {
            background-color: #00838f; /* Cyan darken-1 */
        }
    </style>
    <link href="https://fonts.googleapis.com/css2?family=Roboto+Mono:wght@400;500&display=swap" rel="stylesheet">
</head>
<body>
    <div class="sidebar">
        <div class="sidebar-header">
            <h2>Amside AI</h2>
        </div>
        <button class="new-chat-button" id="new-chat-button">
            <svg xmlns="http://www.w3.org/2000/svg" width="16" height="16" fill="currentColor" class="bi bi-plus-square-fill" viewBox="0 0 16 16">
                <path d="M2 0a2 2 0 0 0-2 2v12a2 2 0 0 0 2 2h12a2 2 0 0 0 2-2V2a2 2 0 0 0-2-2H2zm6.5 4.5v3h3a.5.5 0 0 1 0 1h-3v3a.5.5 0 0 1-1 0v-3h-3a.5.5 0 0 1 0-1h3v-3a.5.5 0 0 1 1 0z"/>
            </svg>
            Nueva Conversación
        </button>
        <h3 class="chat-list-title">Historial</h3>
        <ul class="chat-list" id="chat-list">
        </ul>
    </div>
    <div class="chat-container">
        <div class="chat-header">
            <button id="new-conversation-header-button">
                <svg xmlns="http://www.w3.org/2000/svg" width="16" height="16" fill="currentColor" class="bi bi-plus-square-fill" viewBox="0 0 16 16">
                    <path d="M2 0a2 2 0 0 0-2 2v12a2 2 0 0 0 2 2h12a2 2 0 0 0 2-2V2a2 2 0 0 0-2-2H2zm6.5 4.5v3h3a.5.5 0 0 1 0 1h-3v3a.5.5 0 0 1-1 0v-3h-3a.5.5 0 0 1 0-1h3v-3a.5.5 0 0 1 1 0z"/>
                </svg>
                Nuevo
            </button>
            <h2>Chat</h2>
        </div>
        <div class="chat-body" id="chat-messages">
        </div>
        <div class="input-area">
            <input type="text" class="input-field" placeholder="Escribe tu mensaje...">
            <button class="send-button" id="send-button">Enviar</button>
        </div>
    </div>

    <script>
        const chatMessages = document.getElementById('chat-messages');
        const inputField = document.querySelector('.input-field');
        const sendButton = document.getElementById('send-button');
        const chatList = document.getElementById('chat-list');
        const newChatButtonSidebar = document.getElementById('new-chat-button');
        const newChatButtonHeader = document.getElementById('new-conversation-header-button');

        let chatHistory = [];
        let currentChat = [];

        // --- URL of the backend server ---
        // Make sure this matches the address and port where app.py is running
        // (app.py defaults to port 7860 on Hugging Face Spaces).
        const BACKEND_URL = 'http://127.0.0.1:5000/chat';

        // Append a single message to the chat
        function addMessageToChat(msg) {
            const messageContainer = document.createElement('div');
            if (msg.sender === 'user') {
                messageContainer.classList.add('user-message-container');
                const userMessageDiv = document.createElement('div');
                userMessageDiv.classList.add('user-message');
                userMessageDiv.textContent = msg.text;
                messageContainer.appendChild(userMessageDiv);
            } else if (msg.sender === 'ai') {
                const aiMessageDiv = document.createElement('div');
                aiMessageDiv.classList.add('message');
                aiMessageDiv.textContent = msg.text;
                messageContainer.appendChild(aiMessageDiv);
            } else if (msg.sender === 'error') {
                const errorMessageDiv = document.createElement('div');
                errorMessageDiv.classList.add('error-message');
                errorMessageDiv.textContent = msg.text;
                messageContainer.appendChild(errorMessageDiv);
            }
            chatMessages.appendChild(messageContainer);
            scrollToBottom();
        }

        // Load a full history (e.g. when a chat is selected from the list)
        function displayChat(messages) {
            chatMessages.innerHTML = ''; // Clear everything before loading the history
            messages.forEach(msg => {
                const messageContainer = document.createElement('div');
                if (msg.sender === 'user') {
                    messageContainer.classList.add('user-message-container');
                    const userMessageDiv = document.createElement('div');
                    userMessageDiv.classList.add('user-message');
                    userMessageDiv.textContent = msg.text;
                    messageContainer.appendChild(userMessageDiv);
                } else if (msg.sender === 'ai') {
                    const aiMessageDiv = document.createElement('div');
                    aiMessageDiv.classList.add('message');
                    aiMessageDiv.textContent = msg.text;
                    messageContainer.appendChild(aiMessageDiv);
                } else if (msg.sender === 'error') {
                    const errorMessageDiv = document.createElement('div');
                    errorMessageDiv.classList.add('error-message');
                    errorMessageDiv.textContent = msg.text;
                    messageContainer.appendChild(errorMessageDiv);
                }
                chatMessages.appendChild(messageContainer);
            });
            scrollToBottom();
        }

        function updateChatList() {
            chatList.innerHTML = '';
            chatHistory.forEach((chat, index) => {
                const listItem = document.createElement('li');
                listItem.textContent = chat.name.substring(0, 22) + (chat.name.length > 22 ? '...' : '');
                listItem.addEventListener('click', () => {
                    currentChat = chat.messages;
                    displayChat(currentChat);
                });
                chatList.appendChild(listItem);
            });
        }

        function saveChat(firstUserMessage) {
            if (currentChat.length > 0 && firstUserMessage) {
                const chatName = firstUserMessage.substring(0, 25);
                if (!chatHistory.some(chat => chat.messages === currentChat) && currentChat.length > 1) {
                    chatHistory.unshift({ name: chatName, messages: [...currentChat] });
                    updateChatList();
                }
            }
        }

        function startNewChat() {
            if (currentChat.length > 1 && currentChat[0].sender === 'ai' && currentChat[1].sender === 'user') {
                saveChat(currentChat[1].text);
            }
            currentChat = [{ sender: 'ai', text: 'Bienvenido a Amside AI. Estoy aquí para ayudarte. ¿Qué te gustaría saber?' }];
            displayChat(currentChat);
        }

        sendButton.addEventListener('click', async () => {
            const messageText = inputField.value.trim();
            if (messageText === '') {
                return;
            }

            const isFirstUserMessageInChat = currentChat.length === 1 && currentChat[0].sender === 'ai';

            const userMessage = { sender: 'user', text: messageText };
            currentChat.push(userMessage);

            const userMessageContainer = document.createElement('div');
            userMessageContainer.classList.add('user-message-container');
            const userMessageDiv = document.createElement('div');
            userMessageDiv.classList.add('user-message');
            userMessageDiv.textContent = userMessage.text;
            userMessageContainer.appendChild(userMessageDiv);

            const loadingIndicator = document.createElement('div');
            loadingIndicator.classList.add('loading-indicator');
            loadingIndicator.innerHTML = '<div class="loading-spinner"></div> Procesando...';
            userMessageContainer.appendChild(loadingIndicator);

            chatMessages.appendChild(userMessageContainer);
            scrollToBottom();

            inputField.value = '';

            try {
                // Convert the messages to the format the Python backend expects
                // (which in turn formats them for the Hugging Face model)
                const messagesForBackend = currentChat.map(msg => ({
                    role: msg.sender === 'user' ? 'user' : 'assistant',
                    content: msg.text
                }));

                const response = await fetch(BACKEND_URL, {
                    method: 'POST',
                    headers: {
                        'Content-Type': 'application/json',
                    },
                    body: JSON.stringify({ messages: messagesForBackend }),
                });

                if (!response.ok) {
                    const errorData = await response.json();
                    throw new Error(errorData.error || `HTTP error! status: ${response.status}`);
                }

                const data = await response.json();
                const aiResponseText = data.response; // The AI's reply arrives in 'response'

                const aiResponse = { sender: 'ai', text: aiResponseText };
                currentChat.push(aiResponse);

                if (loadingIndicator && userMessageContainer.contains(loadingIndicator)) {
                    loadingIndicator.remove();
                }

                addMessageToChat(aiResponse);

                if (isFirstUserMessageInChat) {
                    saveChat(messageText);
                }

            } catch (error) {
                console.error("Error al obtener respuesta de la IA (backend):", error);
                if (loadingIndicator && userMessageContainer.contains(loadingIndicator)) {
                    loadingIndicator.remove();
                }
                const errorMessage = { sender: 'error', text: `Lo siento, hubo un error al conectar con la IA. Asegúrate de que el servidor está corriendo. Detalle: ${error.message}` };
                currentChat.push(errorMessage);
                addMessageToChat(errorMessage);
            }
        });

        inputField.addEventListener('keypress', (event) => {
            if (event.key === 'Enter') {
                sendButton.click();
            }
        });

        newChatButtonSidebar.addEventListener('click', startNewChat);
        newChatButtonHeader.addEventListener('click', startNewChat);

        function scrollToBottom() {
            chatMessages.scrollTop = chatMessages.scrollHeight;
        }

        // Initialize on page load
        startNewChat();
        updateChatList();
    </script>
</body>
</html>
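For reference, the request/response contract used by this page's fetch call can be exercised directly against the backend. A minimal sketch with the requests library (already pinned in requirements.txt), assuming app.py is listening locally on its default port 7860:

    # Sketch: call the /chat endpoint the same way index.html does.
    # Adjust the URL/port to wherever app.py is actually running.
    import requests

    payload = {
        "messages": [
            {"role": "user", "content": "Hola, ¿qué puedes hacer?"},
        ]
    }
    resp = requests.post("http://127.0.0.1:7860/chat", json=payload, timeout=120)
    resp.raise_for_status()
    print(resp.json()["response"])  # on success the backend returns {"response": "..."}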
requirements.txt
ADDED
@@ -0,0 +1,76 @@
absl-py==2.2.2
annotated-types==0.7.0
astunparse==1.6.3
attrs==25.3.0
blinker==1.9.0
certifi==2025.1.31
charset-normalizer==3.4.1
click==8.1.8
colorama==0.4.6
distro==1.9.0
filelock==3.18.0
Flask==3.1.0
flask-cors==5.0.1
flatbuffers==25.2.10
fsspec==2025.3.2
gast==0.6.0
google-pasta==0.2.0
grpcio==1.71.0
h11==0.14.0
h5py==3.13.0
huggingface-hub==0.30.2
idna==3.10
itsdangerous==2.2.0
Jinja2==3.1.6
jiter==0.9.0
keras==3.9.2
libclang==18.1.1
lxml==5.3.2
Markdown==3.8
markdown-it-py==3.0.0
MarkupSafe==3.0.2
mdurl==0.1.2
ml_dtypes==0.5.1
mpmath==1.3.0
namex==0.0.8
networkx==3.4.2
numpy==2.1.3
opt_einsum==3.4.0
optree==0.15.0
packaging==25.0
pillow==11.2.1
prompt_toolkit==3.0.51
protobuf==5.29.4
pycryptodomex==3.22.0
Pygments==2.19.1
python-dotenv==1.1.0
pytz==2025.2
PyYAML==6.0.2
regex==2024.11.6
requests==2.32.3
rich==14.0.0
rpds-py==0.24.0
safetensors==0.5.3
scipy==1.15.2
setuptools==79.0.0
six==1.17.0
sniffio==1.3.1
sympy==1.13.1
tensorboard==2.19.0
tensorboard-data-server==0.7.2
tensorflow==2.19.0
termcolor==3.0.1
tf_keras==2.19.0
tiktoken==0.9.0
tokenizers==0.21.1
torch==2.6.0
tqdm==4.67.1
transformers==4.52.3
typing-inspection==0.4.0
typing_extensions==4.13.2
tzdata==2025.2
urllib3==2.4.0
wcwidth==0.2.13
Werkzeug==3.1.3
wheel==0.45.1
wrapt==1.17.2
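Note that several of the pinned packages (tensorflow, keras, tf_keras, tensorboard, etc.) are never imported by app.py, which only needs Flask, flask-cors, transformers and the torch backend used by the pipeline. A guessed minimal subset, untested and kept at the same pinned versions, would be:

    Flask==3.1.0
    flask-cors==5.0.1
    transformers==4.52.3
    torch==2.6.0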