Futuresony committed on
Commit
1490ce9
·
verified ·
1 Parent(s): b78b527

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +9 -5
app.py CHANGED
@@ -4,21 +4,25 @@ from huggingface_hub import InferenceClient
4
  client = InferenceClient("Futuresony/future_ai_12_10_2024.gguf")
5
 
6
  def respond(message, history, max_tokens, temperature, top_p):
7
- formatted_prompt = message.strip() # Ensure clean input
 
8
 
9
  response = client.text_generation(
10
- formatted_prompt,
11
- max_new_tokens=int(max_tokens), # ✅ Ensure integer
12
  temperature=float(temperature),
13
  top_p=float(top_p),
14
  )
15
 
16
- yield response.strip() # Clean output
 
 
 
17
 
18
  demo = gr.ChatInterface(
19
  respond,
20
  additional_inputs=[
21
- gr.Slider(minimum=1, maximum=250, value=128, step=1, label="Max new tokens"), # ✅ Ensure integer input
22
  gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature"),
23
  gr.Slider(minimum=0.1, maximum=1.0, value=0.95, step=0.05, label="Top-p"),
24
  ],
 
4
  client = InferenceClient("Futuresony/future_ai_12_10_2024.gguf")
5
 
6
def respond(message, history, max_tokens, temperature, top_p):
    """Generate a chat reply from the hosted text-generation model.

    Args:
        message: The latest user message from the chat box.
        history: Prior (user, assistant) turns supplied by gr.ChatInterface.
        max_tokens: Maximum number of new tokens to generate (slider value).
        temperature: Sampling temperature (slider value).
        top_p: Nucleus-sampling probability mass (slider value).

    Returns:
        The model's reply with prompt-format markers stripped.
    """
    message = message.strip()  # remove stray whitespace from the UI input

    # Keep only the most recent turns to bound the prompt size.
    # BUG FIX: `history[-5:]` was previously computed but never used, so the
    # model answered every turn with no conversational context at all.
    recent_turns = history[-5:]

    # NOTE(review): the "Instruction:" / "Response:" markers are inferred from
    # the cleanup step below, which strips exactly those tokens from the
    # output — confirm they match the model's actual training format.
    parts = []
    for user_turn, bot_turn in recent_turns:
        parts.append(f"Instruction: {user_turn}\nResponse: {bot_turn}")
    parts.append(f"Instruction: {message}\nResponse:")
    prompt = "\n".join(parts)

    response = client.text_generation(
        prompt,
        max_new_tokens=int(max_tokens),  # sliders may deliver floats
        temperature=float(temperature),
        top_p=float(top_p),
    )

    # Strip any prompt-format markers the model echoes back.
    response = response.replace("Instruction:", "").replace("Response:", "").strip()

    return response
21
 
22
  demo = gr.ChatInterface(
23
  respond,
24
  additional_inputs=[
25
+ gr.Slider(minimum=1, maximum=250, value=128, step=1, label="Max new tokens"),
26
  gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature"),
27
  gr.Slider(minimum=0.1, maximum=1.0, value=0.95, step=0.05, label="Top-p"),
28
  ],