Update alternative.py
alternative.py  (CHANGED, +15 -8)
@@ -34,8 +34,8 @@ from dotenv import load_dotenv
 from scrapegraphai.graphs import SmartScraperGraph
 from scrapegraphai.utils import prettify_exec_info

-from langchain_community.embeddings import HuggingFaceInferenceAPIEmbeddings
-from
+from langchain_community.embeddings.huggingface import HuggingFaceInferenceAPIEmbeddings
+from langchain_huggingface.llms.huggingface_endpoint import HuggingFaceEndpoint
 from langchain_community.document_loaders import (AsyncHtmlLoader,
                                                    NewsURLLoader, PubMedLoader,
                                                    PlaywrightURLLoader,
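The LLM import moves because recent LangChain releases ship HuggingFaceEndpoint in the langchain-huggingface partner package rather than langchain_community; the embeddings class stays in langchain_community, now addressed through its fully qualified module path. A minimal sketch of the dependency this implies for the Space, using the shorter top-level import that the partner package also exposes (the install line is an assumption, it is not shown in this diff):

# Sketch: the partner package exposes the same class at its top level.
# Assumes the Space installs it, e.g.: pip install langchain-huggingface
from langchain_huggingface import HuggingFaceEndpoint  # same class as the fully qualified import above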
@@ -53,19 +53,26 @@ from langchain_community.document_loaders import (AsyncHtmlLoader,
 load_dotenv()
 HUGGINGFACEHUB_API_TOKEN = os.getenv('HUGGINGFACEHUB_API_TOKEN')

-# Foudational Model HF repo ID
-
+# Foudational Model and Embeeding Model HF repo ID
+FM_REPO_ID = "mistralai/Mistral-7B-Instruct-v0.3"
+EM_REPO_ID="sentence-transformers/all-MiniLM-l6-v2"
+

 # Initialize the model instances
 llm_model_instance = HuggingFaceEndpoint(
-    repo_id=
+    repo_id=FM_REPO_ID,
+    max_new_tokens=8192,
+    top_k=10,
+    top_p=0.95,
+    typical_p=0.95,
+    temperature=0.1,
+    repetition_penalty=1.03,
     huggingfacehub_api_token = HUGGINGFACEHUB_API_TOKEN,
-    task = "conversational",
-    **model_kwargs={"temperature": 0.5, "max_length": 8192, "timeout": 6000},
 )

 embedder_model_instance = HuggingFaceInferenceAPIEmbeddings(
-    api_key=HUGGINGFACEHUB_API_TOKEN,
+    api_key=HUGGINGFACEHUB_API_TOKEN,
+    model_name=EM_REPO_ID
 )

 graph_config = {
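Besides pinning the two repo IDs, the rewrite fixes a call that could not run: the removed **model_kwargs={...} line is a syntax error inside a function call, and repo_id was left without a value. The new block instead passes the sampling parameters (max_new_tokens, top_k, top_p, typical_p, temperature, repetition_penalty) directly to HuggingFaceEndpoint as keyword arguments. A hedged smoke test of the two configured instances outside the graph (the prompts below are illustrative, not from the file):

# Sketch: quick standalone check of the configured instances (not part of the commit).
answer = llm_model_instance.invoke("Reply with one word: ready")   # text-generation call against the HF endpoint
vector = embedder_model_instance.embed_query("web scraping with LLMs")
print(answer)
print(len(vector))  # all-MiniLM-L6-v2 produces 384-dimensional embeddings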
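The hunk ends where graph_config opens, so the wiring into ScrapeGraphAI is not visible in this diff. A sketch of the usual pattern from ScrapeGraphAI's model_instance examples, which is presumably what follows in alternative.py (the config keys, prompt, and source URL here are assumptions, not taken from the commit):

# Sketch (assumed continuation, not shown in the diff): pass the instances to the graph.
graph_config = {
    "llm": {"model_instance": llm_model_instance},
    "embeddings": {"model_instance": embedder_model_instance},
}

smart_scraper_graph = SmartScraperGraph(
    prompt="List the article titles on the page",   # illustrative prompt
    source="https://example.com/news",              # illustrative source URL
    config=graph_config,
)

result = smart_scraper_graph.run()
print(result)
print(prettify_exec_info(smart_scraper_graph.get_execution_info()))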