sujalrajpoot committed on
Commit
daafb93
·
verified ·
1 Parent(s): 27b65c2

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +14 -8
app.py CHANGED
@@ -1,33 +1,41 @@
1
from flask import Flask, request, jsonify
from webscout.Provider.Deepinfra import DeepInfra
import os

# Initialize Flask app
app = Flask(__name__)

# System prompt is injected via the environment; None when unset.
SYSTEM_PROMPT = os.getenv("SYSTEM_PROMPT")
# NOTE: do not print SYSTEM_PROMPT — env-configured prompts may contain
# sensitive instructions and would end up in server logs.

# Instantiate the DeepInfra model once at startup
# (is_conversation=False: each request is stateless).
BASE_MODEL = DeepInfra(is_conversation=False, update_file=False, system_prompt=SYSTEM_PROMPT)
14
 
15
@app.route("/")
def index():
    """Health-check endpoint: reports that the API is alive."""
    status_message = "🚀 Hugging Face Space API is running!"
    return status_message
18
 
19
@app.route("/chat", methods=["POST"])
def chat():
    """Generate a model response for a JSON payload {"prompt": "..."}.

    Returns:
        200 with {"prompt", "response"} on success,
        400 when the body is missing/invalid JSON or has no prompt,
        500 with {"error": ...} if the model call fails.
    """
    try:
        # silent=True returns None (instead of raising) for a missing or
        # non-JSON body, so a bad request becomes a 400, not a 500.
        data = request.get_json(silent=True) or {}
        prompt = data.get("prompt", "")

        if not prompt:
            return jsonify({"error": "Missing prompt"}), 400

        # Generate response using the DeepInfra wrapper
        response = BASE_MODEL.chat(prompt=prompt)

        return jsonify({
            "prompt": prompt,
            "response": response
        })
    except Exception as e:
        return jsonify({"error": str(e)}), 500
38
 
39
# Run locally (only useful for dev)
if __name__ == "__main__":
    # Never enable the Werkzeug debugger by default: combined with
    # host 0.0.0.0 it is network-reachable and permits arbitrary code
    # execution. Opt in explicitly with FLASK_DEBUG=1 for local dev.
    app.run(debug=os.getenv("FLASK_DEBUG") == "1", host="0.0.0.0", port=7860)
42
-
 
1
# Standard library imports first, third-party after (PEP 8 grouping).
import json
import os

from flask import Flask, request, jsonify, Response
from webscout.Provider.Deepinfra import DeepInfra

# Flask application instance.
app = Flask(__name__)

# System prompt is supplied through the environment; None when unset.
SYSTEM_PROMPT = os.getenv("SYSTEM_PROMPT")

# Single shared DeepInfra client, configured once at startup.
BASE_MODEL = DeepInfra(is_conversation=False, update_file=False, system_prompt=SYSTEM_PROMPT)
14
 
15
@app.route("/")
def index():
    """Root endpoint: simple liveness message for the service."""
    banner = "🚀 TrueSyncAI API is running!"
    return banner
18
 
19
@app.route("/chat", methods=["POST"])
def chat():
    """Chat endpoint.

    Expects JSON: {"prompt": str, "stream": bool (optional, default False)}.
    When stream is true, responds with Server-Sent Events; otherwise returns
    a single JSON object. 400 on missing prompt, 500 on model errors.
    """
    try:
        # silent=True returns None (instead of raising) for a missing or
        # non-JSON body, so a bad request becomes a 400, not a 500.
        data = request.get_json(silent=True) or {}
        prompt = data.get("prompt", "")
        stream = data.get("stream", False)

        if not prompt:
            return jsonify({"error": "Missing prompt"}), 400

        # If streaming is enabled, return a streaming response
        if stream:
            def generate():
                # NOTE(review): exceptions raised while the client consumes
                # this generator occur after headers are sent and are NOT
                # converted to the JSON 500 below — they abort the stream.
                for chunk in BASE_MODEL.chat(prompt=prompt, stream=True):
                    yield f"data: {json.dumps({'response': chunk})}\n\n"

            return Response(generate(), mimetype="text/event-stream")

        # Non-streaming response
        response = BASE_MODEL.chat(prompt=prompt, stream=False)
        return jsonify({
            "prompt": prompt,
            "response": response
        })
    except Exception as e:
        return jsonify({"error": str(e)}), 500
46
 
 
47
if __name__ == "__main__":
    # Bind on all interfaces; port 7860 is the Hugging Face Spaces default.
    app.run(host="0.0.0.0", port=7860)