Update alternative.py

alternative.py  (changed: +12, -5)
@@ -28,13 +28,14 @@ import subprocess
 from typing import Tuple
 
 import gradio as gr
+from bs4 import BeautifulSoup as Soup
 from dotenv import load_dotenv
-
-from langchain_community.llms import HuggingFaceEndpoint
+
 from scrapegraphai.graphs import SmartScraperGraph
 from scrapegraphai.utils import prettify_exec_info
 
-from
+from langchain_community.embeddings import HuggingFaceInferenceAPIEmbeddings
+from langchain_community.llms.huggingface_endpoint import HuggingFaceEndpoint
 from langchain_community.document_loaders import (AsyncHtmlLoader,
                                                    NewsURLLoader, PubMedLoader,
                                                    PlaywrightURLLoader,
@@ -49,12 +50,18 @@ from langchain_community.document_loaders import (AsyncHtmlLoader,
 # ------------------------------------------------------------------------------
 
 # Load environment variables
+load_dotenv()
 HUGGINGFACEHUB_API_TOKEN = os.getenv('HUGGINGFACEHUB_API_TOKEN')
 
+# Foudational Model HF repo ID
+REPO_ID = "mistralai/Mistral-7B-Instruct-v0.3"
+
 # Initialize the model instances
-repo_id = "mistralai/Mistral-7B-Instruct-v0.2"
 llm_model_instance = HuggingFaceEndpoint(
-    repo_id=repo_id,
+    repo_id=repo_id,
+    huggingfacehub_api_token = HUGGINGFACEHUB_API_TOKEN,
+    task = "conversational",
+    **model_kwargs={"temperature": 0.5, "max_length": 8192, "timeout": 6000},
 )
 
 embedder_model_instance = HuggingFaceInferenceAPIEmbeddings(
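As committed, the new HuggingFaceEndpoint block cannot run: repo_id=repo_id still references the lowercase name that this same commit removes (the constant is now REPO_ID), and **model_kwargs={...} is a Python syntax error, since ** unpacking cannot be combined with an assignment. The sketch below is one way the block could be written so that it parses and its names resolve; the repo ID, token handling, task string, and generation settings are taken from the diff, while routing temperature and timeout as top-level arguments (leaving only max_length in model_kwargs) is an assumption based on the parameters HuggingFaceEndpoint exposes and may need adjusting across langchain_community versions.

import os

from dotenv import load_dotenv
from langchain_community.llms.huggingface_endpoint import HuggingFaceEndpoint

load_dotenv()
HUGGINGFACEHUB_API_TOKEN = os.getenv('HUGGINGFACEHUB_API_TOKEN')

# Foundational model HF repo ID (value as committed)
REPO_ID = "mistralai/Mistral-7B-Instruct-v0.3"

# Initialize the LLM instance; mirrors the commit with the two fixes noted above
llm_model_instance = HuggingFaceEndpoint(
    repo_id=REPO_ID,                                    # the uppercase constant defined above
    huggingfacehub_api_token=HUGGINGFACEHUB_API_TOKEN,
    task="conversational",                              # task string as given in the commit
    temperature=0.5,                                    # assumed top-level rather than in model_kwargs
    timeout=6000,                                       # likewise assumed top-level
    model_kwargs={"max_length": 8192},                  # plain dict, no ** unpacking
)

Passing temperature and timeout directly keeps model_kwargs free of keys that HuggingFaceEndpoint already defines as its own fields; the embedder_model_instance block that follows in the file is cut off in this hunk and is left out of the sketch.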