app.py
CHANGED
@@ -29,18 +29,18 @@ TEXT_GENERATION_MODELS = [
         "model_id": "meta-llama/Llama-2-7b-chat-hf"
     },
     {
-        "name": "
-        "description": "
+        "name": "TinyLlama-1.1B-Chat-v1.0",
+        "description": "TinyLlama-1.1B-Chat-v1.0",
         "chat_model": True,
         "type": INFERENCE_API,
         "model_id": "tinyllama/TinyLlama-1.1B-Chat-v1.0"
     },
     {
-        "name": "TinyLlama_v1.
-        "description": "TinyLlama_v1.
+        "name": "TinyLlama_v1.1_chinese",
+        "description": "TinyLlama_v1.1_chinese",
         "chat_model": False,
         "type": LOCAL,
-        "model_path": "TinyLlama/TinyLlama_v1.
+        "model_path": "TinyLlama/TinyLlama_v1.1_chinese"
     }
 ]
 
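For reference, a minimal sketch of how the two edited entries of TEXT_GENERATION_MODELS read once this change is applied. INFERENCE_API and LOCAL are defined earlier in the real app.py; the placeholder string values and the standalone list below are assumptions so the snippet runs on its own, and the other entries in the list are omitted.

# Sketch of the updated TEXT_GENERATION_MODELS entries from app.py.
# INFERENCE_API and LOCAL exist earlier in the actual file; the values
# here are placeholders (assumption) to keep this snippet self-contained.
INFERENCE_API = "inference_api"
LOCAL = "local"

TEXT_GENERATION_MODELS = [
    # ... earlier entries, e.g. the meta-llama/Llama-2-7b-chat-hf entry ...
    {
        "name": "TinyLlama-1.1B-Chat-v1.0",
        "description": "TinyLlama-1.1B-Chat-v1.0",
        "chat_model": True,          # chat-style model, addressed by model_id
        "type": INFERENCE_API,
        "model_id": "tinyllama/TinyLlama-1.1B-Chat-v1.0",
    },
    {
        "name": "TinyLlama_v1.1_chinese",
        "description": "TinyLlama_v1.1_chinese",
        "chat_model": False,         # plain text-generation model, addressed by model_path
        "type": LOCAL,
        "model_path": "TinyLlama/TinyLlama_v1.1_chinese",
    },
]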