abdibrahem committed
Commit eea1422 · 1 Parent(s): 3483e5e

Add some updates

Files changed (2)
  1. Dockerfile +31 -3
  2. final.py +15 -15
Dockerfile CHANGED
@@ -50,19 +50,47 @@ COPY . .
 # Set proper ownership of all files
 RUN chown -R appuser:appuser /app
 
-# Create a startup script
+# Create a startup script with better logging and error handling
 RUN echo '#!/bin/bash\n\
+set -e\n\
+\n\
+echo "Starting Healthcare AI Assistant..."\n\
+\n\
+# Function to check if Ollama is running\n\
+check_ollama() {\n\
+    curl -s http://localhost:11434/api/tags > /dev/null\n\
+    return $?\n\
+}\n\
+\n\
 # Start Ollama in the background\n\
+echo "Starting Ollama server..."\n\
 ollama serve &\n\
+OLLAMA_PID=$!\n\
 \n\
 # Wait for Ollama to start\n\
-sleep 10\n\
+echo "Waiting for Ollama to start..."\n\
+for i in {1..30}; do\n\
+    if check_ollama; then\n\
+        echo "Ollama server is running"\n\
+        break\n\
+    fi\n\
+    if [ $i -eq 30 ]; then\n\
+        echo "Error: Ollama server failed to start"\n\
+        exit 1\n\
+    fi\n\
+    sleep 1\n\
+done\n\
 \n\
 # Pull Mistral model\n\
+echo "Pulling Mistral model..."\n\
 ollama pull mistral\n\
 \n\
 # Start FastAPI application\n\
-cd /app && uvicorn main:app --host 0.0.0.0 --port 8000\n\
+echo "Starting FastAPI application..."\n\
+cd /app\n\
+\n\
+# Keep the container running and show logs\n\
+exec uvicorn main:app --host 0.0.0.0 --port 8000 --log-level info\n\
 ' > /app/start.sh && chmod +x /app/start.sh
 
 # Expose ports
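Note: the rewritten startup script replaces the fixed sleep 10 with a poll of Ollama's /api/tags endpoint before pulling the model. The same readiness check can be reproduced outside the container for debugging; the following is a minimal Python sketch (standard library only; the URL, the 30-attempt limit, and the 1-second delay simply mirror the script above and are assumptions, not part of this commit):

import time
import urllib.request

OLLAMA_URL = "http://localhost:11434/api/tags"  # same endpoint the start.sh check uses

def wait_for_ollama(retries: int = 30, delay: float = 1.0) -> bool:
    """Poll the Ollama tags endpoint until it answers, mirroring check_ollama()."""
    for attempt in range(1, retries + 1):
        try:
            with urllib.request.urlopen(OLLAMA_URL, timeout=2) as resp:
                if resp.status == 200:
                    print(f"Ollama server is running (attempt {attempt})")
                    return True
        except OSError:
            pass  # server not up yet; retry after a short delay
        time.sleep(delay)
    print("Error: Ollama server failed to start")
    return False

if __name__ == "__main__":
    wait_for_ollama()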
final.py CHANGED
@@ -689,20 +689,20 @@ class AIAgent:
 
 
 # Test the agent directly
-if __name__ == "__main__":
-    agent = AIAgent()
+# if __name__ == "__main__":
+#     agent = AIAgent()
 
-    # Test with English query
-    # print("\n---Testing English Query---")
-    # english_response = agent.process_user_query("I need to book an appointment with Dr. Smith tomorrow at 8 PM")
-    # print("\nEnglish response:")
-    # print(english_response["user_friendly_response"])
+#     # Test with English query
+#     # print("\n---Testing English Query---")
+#     # english_response = agent.process_user_query("I need to book an appointment with Dr. Smith tomorrow at 8 PM")
+#     # print("\nEnglish response:")
+#     # print(english_response["user_friendly_response"])
 
-    # Test with Arabic query
-    print("\n---Testing Arabic Query---")
-    # arabic_response = agent.process_user_query(" اريد الغاء الحجز مع الدكتور Smith")
-    arabic_response = agent.process_user_query("اريد حجز ميعاد غدا في الساعه الثامنه مساء مع الدكتور Smith")
-    # arabic_response = agent.process_user_query("متى يفتح المستشفى؟")
-    # arabic_response = agent.process_user_query("اريد معرفه كل الحجوزات الخاصه بي")
-    print("\nArabic response:")
-    print(arabic_response["user_friendly_response"])
+#     # Test with Arabic query
+#     print("\n---Testing Arabic Query---")
+#     # arabic_response = agent.process_user_query(" اريد الغاء الحجز مع الدكتور Smith")
+#     arabic_response = agent.process_user_query("اريد حجز ميعاد غدا في الساعه الثامنه مساء مع الدكتور Smith")
+#     # arabic_response = agent.process_user_query("متى يفتح المستشفى؟")
+#     # arabic_response = agent.process_user_query("اريد معرفه كل الحجوزات الخاصه بي")
+#     print("\nArabic response:")
+#     print(arabic_response["user_friendly_response"])
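Note: with the __main__ test harness commented out, final.py no longer fires a test query when executed directly; the agent is presumably exercised through the FastAPI application launched by start.sh. For ad-hoc testing, an equivalent driver can live in a separate script. This is a sketch under the assumption that final.py is importable as final and that AIAgent() needs no constructor arguments beyond what this commit shows:

# Ad-hoc test driver kept outside final.py, replacing the commented-out __main__ block.
from final import AIAgent

agent = AIAgent()

# Same Arabic booking query the old __main__ block exercised:
# "I want to book an appointment tomorrow at eight in the evening with Dr. Smith"
arabic_response = agent.process_user_query(
    "اريد حجز ميعاد غدا في الساعه الثامنه مساء مع الدكتور Smith"
)
print(arabic_response["user_friendly_response"])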