Update app.py
app.py CHANGED
@@ -89,8 +89,8 @@ MODEL_NAME = "gpt-3.5-turbo-16k"
 
 #HuggingFace--------------------------------
 #repo_id = "meta-llama/Llama-2-13b-chat-hf"
-
-repo_id = "TheBloke/Yi-34B-Chat-GGUF"
+repo_id = "HuggingFaceH4/zephyr-7b-alpha" # this model is really good!!! From MIT
+#repo_id = "TheBloke/Yi-34B-Chat-GGUF"
 #repo_id = "meta-llama/Llama-2-70b-chat-hf"
 #repo_id = "tiiuae/falcon-40b"
 #repo_id = "Vicuna-33b"
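A note on the swap in this hunk: the GGUF repo being replaced ships quantized weights intended for local llama.cpp-style runtimes, whereas HuggingFaceH4/zephyr-7b-alpha can be queried through the hosted Inference API. The snippet below is a minimal sketch of how such a repo_id is typically used, assuming the app talks to the hosted Inference API and reads a token from an HF_TOKEN environment variable; the client setup is illustrative and is not taken from app.py.

import os
from huggingface_hub import InferenceClient

repo_id = "HuggingFaceH4/zephyr-7b-alpha"

# Illustrative only: query the hosted Inference API for text generation.
client = InferenceClient(model=repo_id, token=os.environ.get("HF_TOKEN"))
answer = client.text_generation(
    "<|user|>\nWhat is retrieval-augmented generation?</s>\n<|assistant|>\n",
    max_new_tokens=256,
    temperature=0.5,
)
print(answer)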
@@ -288,10 +288,10 @@ def invoke (prompt, history, rag_option, model_option, openai_api_key, temperat
     global splittet
     print(splittet)
     #append the prompt to the history and turn it into one text
-    history_text_und_prompt = generate_prompt_with_history(prompt, history)
+    #history_text_und_prompt = generate_prompt_with_history(prompt, history)
 
     #format the history for HuggingFace models
-
+    history_text_und_prompt = generate_prompt_with_history_hf(prompt, history)
 
     #format the history for OpenAI
     #history_text_und_prompt = generate_prompt_with_history_openai(prompt, history)
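generate_prompt_with_history_hf itself is not part of this diff, so its real implementation is not shown here. The sketch below only illustrates the kind of flattening such a helper usually performs for a HuggingFace chat model, using the <|system|>/<|user|>/<|assistant|> turn markers that zephyr-7b-alpha was trained with; the signature and template are assumptions, not the app's actual code.

def generate_prompt_with_history_hf(text, history, system_prompt=""):
    # Illustrative sketch, not the helper from app.py: flatten the
    # (user, assistant) pairs plus the new prompt into one string
    # using zephyr-style chat markup.
    prompt = f"<|system|>\n{system_prompt}</s>\n"
    for user_msg, bot_msg in history:
        prompt += f"<|user|>\n{user_msg}</s>\n<|assistant|>\n{bot_msg}</s>\n"
    # Leave the assistant turn open so the model continues from here.
    prompt += f"<|user|>\n{text}</s>\n<|assistant|>\n"
    return prompt

A string shaped like this can then be passed straight to a text-generation call such as the one sketched after the first hunk.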