akhaliq (HF Staff) committed
Commit 71f1c5d · Parent: 7a20003

use groq for kimi

Files changed (1): app.py (+12, -5)

app.py CHANGED
@@ -213,11 +213,15 @@ DEMO_LIST = [
 
 # HF Inference Client
 HF_TOKEN = os.getenv('HF_TOKEN')
-client = InferenceClient(
-    provider="auto",
-    api_key=HF_TOKEN,
-    bill_to="huggingface"
-)
+
+def get_inference_client(model_id):
+    """Return an InferenceClient with provider based on model_id."""
+    provider = "groq" if model_id == "moonshotai/Kimi-K2-Instruct" else "auto"
+    return InferenceClient(
+        provider=provider,
+        api_key=HF_TOKEN,
+        bill_to="huggingface"
+    )
 
 # Type definitions
 History = List[Tuple[str, str]]
@@ -977,6 +981,9 @@ This will help me create a better design for you."""
     # Enhance query with search if enabled
     enhanced_query = enhance_query_with_search(query, enable_search)
 
+    # Use dynamic client based on selected model
+    client = get_inference_client(_current_model["id"])
+
     if image is not None:
         messages.append(create_multimodal_message(enhanced_query, image))
     else:
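For reference, a minimal standalone sketch of how the new helper is intended to be used after this change. It assumes huggingface_hub's InferenceClient and a _current_model dict with an "id" key as used elsewhere in app.py; the example model registry entry below is hypothetical.

import os
from huggingface_hub import InferenceClient

HF_TOKEN = os.getenv('HF_TOKEN')

def get_inference_client(model_id):
    """Return an InferenceClient whose provider depends on the selected model."""
    # Kimi-K2-Instruct is routed through Groq; all other models keep automatic provider selection.
    provider = "groq" if model_id == "moonshotai/Kimi-K2-Instruct" else "auto"
    return InferenceClient(
        provider=provider,
        api_key=HF_TOKEN,
        bill_to="huggingface",
    )

# Hypothetical call site mirroring the second hunk: _current_model stands in for the
# app's currently selected model entry.
_current_model = {"id": "moonshotai/Kimi-K2-Instruct"}
client = get_inference_client(_current_model["id"])

The change keeps a single billing target ("huggingface") and only swaps the provider per request, so the rest of the generation code never needs to know which backend serves a given model.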