df187fd6ef46-3
logger.error("Unable to write chat history messages to cassandra") raise error [docs] def clear(self) -> None: """Clear session memory from Cassandra""" from cassandra import OperationTimedOut, Unavailable try: self.session.execute( f"DELETE FROM {self.table_name} WHERE session_id = '{self.session_id}';" ) except (Unavailable, OperationTimedOut) as error: logger.error("Unable to clear chat history messages from cassandra") raise error def __del__(self) -> None: if self.session: self.session.shutdown() if self.cluster: self.cluster.shutdown()
https://api.python.langchain.com/en/stable/_modules/langchain/memory/chat_message_histories/cassandra.html
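Only the tail of the Cassandra-backed history class appears in this chunk, so the sketch below assumes an already constructed instance (its __init__ and even the class name are not shown above) and exercises just the clear() path::

    # `history` is assumed to be a fully constructed Cassandra-backed chat
    # history instance; its constructor is not part of this excerpt.
    def wipe_session(history) -> None:
        # Runs DELETE FROM <table_name> WHERE session_id = '<session_id>';
        # and re-raises cassandra Unavailable / OperationTimedOut errors.
        history.clear()

    # When the instance is garbage collected, __del__ shuts down both the
    # Cassandra session and the cluster, as shown in the excerpt above.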
6295b158960f-0
Source code for langchain.memory.chat_message_histories.cosmos_db """Azure CosmosDB Memory History.""" from __future__ import annotations import logging from types import TracebackType from typing import TYPE_CHECKING, Any, List, Optional, Type from langchain.schema import ( BaseChatMessageHistory, BaseMessage, messages_from_dict, messages_to_dict, ) logger = logging.getLogger(__name__) if TYPE_CHECKING: from azure.cosmos import ContainerProxy [docs]class CosmosDBChatMessageHistory(BaseChatMessageHistory): """Chat history backed by Azure CosmosDB.""" def __init__( self, cosmos_endpoint: str, cosmos_database: str, cosmos_container: str, session_id: str, user_id: str, credential: Any = None, connection_string: Optional[str] = None, ttl: Optional[int] = None, cosmos_client_kwargs: Optional[dict] = None, ): """ Initializes a new instance of the CosmosDBChatMessageHistory class. Make sure to call prepare_cosmos or use the context manager to make sure your database is ready. Either a credential or a connection string must be provided. :param cosmos_endpoint: The connection endpoint for the Azure Cosmos DB account. :param cosmos_database: The name of the database to use. :param cosmos_container: The name of the container to use. :param session_id: The session ID to use, can be overwritten while loading. :param user_id: The user ID to use, can be overwritten while loading. :param credential: The credential to use to authenticate to Azure Cosmos DB. :param connection_string: The connection string to use to authenticate.
https://api.python.langchain.com/en/stable/_modules/langchain/memory/chat_message_histories/cosmos_db.html
6295b158960f-1
:param connection_string: The connection string to use to authenticate. :param ttl: The time to live (in seconds) to use for documents in the container. :param cosmos_client_kwargs: Additional kwargs to pass to the CosmosClient. """ self.cosmos_endpoint = cosmos_endpoint self.cosmos_database = cosmos_database self.cosmos_container = cosmos_container self.credential = credential self.conn_string = connection_string self.session_id = session_id self.user_id = user_id self.ttl = ttl self.messages: List[BaseMessage] = [] try: from azure.cosmos import ( # pylint: disable=import-outside-toplevel # noqa: E501 CosmosClient, ) except ImportError as exc: raise ImportError( "You must install the azure-cosmos package to use the CosmosDBChatMessageHistory." # noqa: E501 ) from exc if self.credential: self._client = CosmosClient( url=self.cosmos_endpoint, credential=self.credential, **cosmos_client_kwargs or {}, ) elif self.conn_string: self._client = CosmosClient.from_connection_string( conn_str=self.conn_string, **cosmos_client_kwargs or {}, ) else: raise ValueError("Either a connection string or a credential must be set.") self._container: Optional[ContainerProxy] = None [docs] def prepare_cosmos(self) -> None: """Prepare the CosmosDB client. Use this function or the context manager to make sure your database is ready. """ try: from azure.cosmos import ( # pylint: disable=import-outside-toplevel # noqa: E501
https://api.python.langchain.com/en/stable/_modules/langchain/memory/chat_message_histories/cosmos_db.html
6295b158960f-2
PartitionKey, ) except ImportError as exc: raise ImportError( "You must install the azure-cosmos package to use the CosmosDBChatMessageHistory." # noqa: E501 ) from exc database = self._client.create_database_if_not_exists(self.cosmos_database) self._container = database.create_container_if_not_exists( self.cosmos_container, partition_key=PartitionKey("/user_id"), default_ttl=self.ttl, ) self.load_messages() def __enter__(self) -> "CosmosDBChatMessageHistory": """Context manager entry point.""" self._client.__enter__() self.prepare_cosmos() return self def __exit__( self, exc_type: Optional[Type[BaseException]], exc_val: Optional[BaseException], traceback: Optional[TracebackType], ) -> None: """Context manager exit""" self.upsert_messages() self._client.__exit__(exc_type, exc_val, traceback) [docs] def load_messages(self) -> None: """Retrieve the messages from Cosmos""" if not self._container: raise ValueError("Container not initialized") try: from azure.cosmos.exceptions import ( # pylint: disable=import-outside-toplevel # noqa: E501 CosmosHttpResponseError, ) except ImportError as exc: raise ImportError( "You must install the azure-cosmos package to use the CosmosDBChatMessageHistory." # noqa: E501 ) from exc try: item = self._container.read_item( item=self.session_id, partition_key=self.user_id ) except CosmosHttpResponseError:
https://api.python.langchain.com/en/stable/_modules/langchain/memory/chat_message_histories/cosmos_db.html
6295b158960f-3
) except CosmosHttpResponseError: logger.info("no session found") return if "messages" in item and len(item["messages"]) > 0: self.messages = messages_from_dict(item["messages"]) [docs] def add_message(self, message: BaseMessage) -> None: """Add a self-created message to the store""" self.messages.append(message) self.upsert_messages() [docs] def upsert_messages(self) -> None: """Update the cosmosdb item.""" if not self._container: raise ValueError("Container not initialized") self._container.upsert_item( body={ "id": self.session_id, "user_id": self.user_id, "messages": messages_to_dict(self.messages), } ) [docs] def clear(self) -> None: """Clear session memory from this memory and cosmos.""" self.messages = [] if self._container: self._container.delete_item( item=self.session_id, partition_key=self.user_id )
https://api.python.langchain.com/en/stable/_modules/langchain/memory/chat_message_histories/cosmos_db.html
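A hedged usage sketch for CosmosDBChatMessageHistory above, using the context manager so prepare_cosmos() runs on entry and upsert_messages() on exit; the endpoint, database, container, and connection string are placeholders::

    from langchain.memory.chat_message_histories.cosmos_db import (
        CosmosDBChatMessageHistory,
    )
    from langchain.schema import AIMessage, HumanMessage

    history = CosmosDBChatMessageHistory(
        cosmos_endpoint="https://<account>.documents.azure.com:443/",  # placeholder
        cosmos_database="chat_db",          # placeholder
        cosmos_container="chat_container",  # placeholder
        session_id="session-001",
        user_id="user-001",
        connection_string="AccountEndpoint=...;AccountKey=...;",  # placeholder
        ttl=3600,  # documents expire an hour after their last update
    )

    # __enter__ creates the database/container if needed and loads any
    # existing messages; __exit__ upserts the accumulated message list.
    with history:
        history.add_message(HumanMessage(content="Hi there"))
        history.add_message(AIMessage(content="Hello! How can I help?"))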
f02b0b96b3da-0
Source code for langchain.memory.chat_message_histories.mongodb import json import logging from typing import List from langchain.schema import ( BaseChatMessageHistory, BaseMessage, _message_to_dict, messages_from_dict, ) logger = logging.getLogger(__name__) DEFAULT_DBNAME = "chat_history" DEFAULT_COLLECTION_NAME = "message_store" [docs]class MongoDBChatMessageHistory(BaseChatMessageHistory): """Chat message history that stores history in MongoDB. Args: connection_string: connection string to connect to MongoDB session_id: arbitrary key that is used to store the messages of a single chat session. database_name: name of the database to use collection_name: name of the collection to use """ def __init__( self, connection_string: str, session_id: str, database_name: str = DEFAULT_DBNAME, collection_name: str = DEFAULT_COLLECTION_NAME, ): from pymongo import MongoClient, errors self.connection_string = connection_string self.session_id = session_id self.database_name = database_name self.collection_name = collection_name try: self.client: MongoClient = MongoClient(connection_string) except errors.ConnectionFailure as error: logger.error(error) self.db = self.client[database_name] self.collection = self.db[collection_name] self.collection.create_index("SessionId") @property def messages(self) -> List[BaseMessage]: # type: ignore """Retrieve the messages from MongoDB""" from pymongo import errors try: cursor = self.collection.find({"SessionId": self.session_id}) except errors.OperationFailure as error: logger.error(error) if cursor:
https://api.python.langchain.com/en/stable/_modules/langchain/memory/chat_message_histories/mongodb.html
f02b0b96b3da-1
except errors.OperationFailure as error: logger.error(error) if cursor: items = [json.loads(document["History"]) for document in cursor] else: items = [] messages = messages_from_dict(items) return messages [docs] def add_message(self, message: BaseMessage) -> None: """Append the message to the record in MongoDB""" from pymongo import errors try: self.collection.insert_one( { "SessionId": self.session_id, "History": json.dumps(_message_to_dict(message)), } ) except errors.WriteError as err: logger.error(err) [docs] def clear(self) -> None: """Clear session memory from MongoDB""" from pymongo import errors try: self.collection.delete_many({"SessionId": self.session_id}) except errors.WriteError as err: logger.error(err)
https://api.python.langchain.com/en/stable/_modules/langchain/memory/chat_message_histories/mongodb.html
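A minimal sketch for MongoDBChatMessageHistory above, assuming a local MongoDB instance; the connection string is a placeholder::

    from langchain.memory.chat_message_histories.mongodb import (
        MongoDBChatMessageHistory,
    )
    from langchain.schema import HumanMessage

    history = MongoDBChatMessageHistory(
        connection_string="mongodb://localhost:27017/",  # placeholder
        session_id="session-001",
        # database_name / collection_name default to "chat_history" / "message_store"
    )
    history.add_message(HumanMessage(content="What is LangChain?"))
    print(history.messages)  # re-reads all documents stored under this SessionId
    history.clear()          # delete_many on this SessionId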
e021669d7bd8-0
Source code for langchain.memory.chat_message_histories.sql import json import logging from typing import List from sqlalchemy import Column, Integer, Text, create_engine try: from sqlalchemy.orm import declarative_base except ImportError: from sqlalchemy.ext.declarative import declarative_base from sqlalchemy.orm import sessionmaker from langchain.schema import ( BaseChatMessageHistory, BaseMessage, _message_to_dict, messages_from_dict, ) logger = logging.getLogger(__name__) def create_message_model(table_name, DynamicBase): # type: ignore """ Create a message model for a given table name. Args: table_name: The name of the table to use. DynamicBase: The base class to use for the model. Returns: The model class. """ # Model decleared inside a function to have a dynamic table name class Message(DynamicBase): __tablename__ = table_name id = Column(Integer, primary_key=True) session_id = Column(Text) message = Column(Text) return Message [docs]class SQLChatMessageHistory(BaseChatMessageHistory): """Chat message history stored in an SQL database.""" def __init__( self, session_id: str, connection_string: str, table_name: str = "message_store", ): self.table_name = table_name self.connection_string = connection_string self.engine = create_engine(connection_string, echo=False) self._create_table_if_not_exists() self.session_id = session_id self.Session = sessionmaker(self.engine) def _create_table_if_not_exists(self) -> None: DynamicBase = declarative_base()
https://api.python.langchain.com/en/stable/_modules/langchain/memory/chat_message_histories/sql.html
e021669d7bd8-1
DynamicBase = declarative_base() self.Message = create_message_model(self.table_name, DynamicBase) # Create all does the check for us in case the table exists. DynamicBase.metadata.create_all(self.engine) @property def messages(self) -> List[BaseMessage]: # type: ignore """Retrieve all messages from db""" with self.Session() as session: result = session.query(self.Message).where( self.Message.session_id == self.session_id ) items = [json.loads(record.message) for record in result] messages = messages_from_dict(items) return messages [docs] def add_message(self, message: BaseMessage) -> None: """Append the message to the record in db""" with self.Session() as session: jsonstr = json.dumps(_message_to_dict(message)) session.add(self.Message(session_id=self.session_id, message=jsonstr)) session.commit() [docs] def clear(self) -> None: """Clear session memory from db""" with self.Session() as session: session.query(self.Message).filter( self.Message.session_id == self.session_id ).delete() session.commit()
https://api.python.langchain.com/en/stable/_modules/langchain/memory/chat_message_histories/sql.html
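A minimal sketch for SQLChatMessageHistory above; any SQLAlchemy URL should work, and SQLite is used here only to keep the example self-contained::

    from langchain.memory.chat_message_histories.sql import SQLChatMessageHistory
    from langchain.schema import AIMessage, HumanMessage

    history = SQLChatMessageHistory(
        session_id="session-001",
        connection_string="sqlite:///chat_history.db",
        table_name="message_store",
    )
    history.add_message(HumanMessage(content="Remember that my name is Ada."))
    history.add_message(AIMessage(content="Noted, Ada."))
    print(history.messages)  # rows are deserialized back into BaseMessage objects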
dfe8b980e8fd-0
Source code for langchain.memory.chat_message_histories.momento from __future__ import annotations import json from datetime import timedelta from typing import TYPE_CHECKING, Any, Optional from langchain.schema import ( BaseChatMessageHistory, BaseMessage, _message_to_dict, messages_from_dict, ) from langchain.utils import get_from_env if TYPE_CHECKING: import momento def _ensure_cache_exists(cache_client: momento.CacheClient, cache_name: str) -> None: """Create cache if it doesn't exist. Raises: SdkException: Momento service or network error Exception: Unexpected response """ from momento.responses import CreateCache create_cache_response = cache_client.create_cache(cache_name) if isinstance(create_cache_response, CreateCache.Success) or isinstance( create_cache_response, CreateCache.CacheAlreadyExists ): return None elif isinstance(create_cache_response, CreateCache.Error): raise create_cache_response.inner_exception else: raise Exception(f"Unexpected response cache creation: {create_cache_response}") [docs]class MomentoChatMessageHistory(BaseChatMessageHistory): """Chat message history cache that uses Momento as a backend. See https://gomomento.com/""" def __init__( self, session_id: str, cache_client: momento.CacheClient, cache_name: str, *, key_prefix: str = "message_store:", ttl: Optional[timedelta] = None, ensure_cache_exists: bool = True, ): """Instantiate a chat message history cache that uses Momento as a backend. Note: to instantiate the cache client passed to MomentoChatMessageHistory,
https://api.python.langchain.com/en/stable/_modules/langchain/memory/chat_message_histories/momento.html
dfe8b980e8fd-1
Note: to instantiate the cache client passed to MomentoChatMessageHistory, you must have a Momento account at https://gomomento.com/. Args: session_id (str): The session ID to use for this chat session. cache_client (CacheClient): The Momento cache client. cache_name (str): The name of the cache to use to store the messages. key_prefix (str, optional): The prefix to apply to the cache key. Defaults to "message_store:". ttl (Optional[timedelta], optional): The TTL to use for the messages. Defaults to None, ie the default TTL of the cache will be used. ensure_cache_exists (bool, optional): Create the cache if it doesn't exist. Defaults to True. Raises: ImportError: Momento python package is not installed. TypeError: cache_client is not of type momento.CacheClientObject """ try: from momento import CacheClient from momento.requests import CollectionTtl except ImportError: raise ImportError( "Could not import momento python package. " "Please install it with `pip install momento`." ) if not isinstance(cache_client, CacheClient): raise TypeError("cache_client must be a momento.CacheClient object.") if ensure_cache_exists: _ensure_cache_exists(cache_client, cache_name) self.key = key_prefix + session_id self.cache_client = cache_client self.cache_name = cache_name if ttl is not None: self.ttl = CollectionTtl.of(ttl) else: self.ttl = CollectionTtl.from_cache_ttl() [docs] @classmethod def from_client_params( cls, session_id: str,
https://api.python.langchain.com/en/stable/_modules/langchain/memory/chat_message_histories/momento.html
dfe8b980e8fd-2
def from_client_params( cls, session_id: str, cache_name: str, ttl: timedelta, *, configuration: Optional[momento.config.Configuration] = None, auth_token: Optional[str] = None, **kwargs: Any, ) -> MomentoChatMessageHistory: """Construct cache from CacheClient parameters.""" try: from momento import CacheClient, Configurations, CredentialProvider except ImportError: raise ImportError( "Could not import momento python package. " "Please install it with `pip install momento`." ) if configuration is None: configuration = Configurations.Laptop.v1() auth_token = auth_token or get_from_env("auth_token", "MOMENTO_AUTH_TOKEN") credentials = CredentialProvider.from_string(auth_token) cache_client = CacheClient(configuration, credentials, default_ttl=ttl) return cls(session_id, cache_client, cache_name, ttl=ttl, **kwargs) @property def messages(self) -> list[BaseMessage]: # type: ignore[override] """Retrieve the messages from Momento. Raises: SdkException: Momento service or network error Exception: Unexpected response Returns: list[BaseMessage]: List of cached messages """ from momento.responses import CacheListFetch fetch_response = self.cache_client.list_fetch(self.cache_name, self.key) if isinstance(fetch_response, CacheListFetch.Hit): items = [json.loads(m) for m in fetch_response.value_list_string] return messages_from_dict(items) elif isinstance(fetch_response, CacheListFetch.Miss): return [] elif isinstance(fetch_response, CacheListFetch.Error):
https://api.python.langchain.com/en/stable/_modules/langchain/memory/chat_message_histories/momento.html
dfe8b980e8fd-3
return [] elif isinstance(fetch_response, CacheListFetch.Error): raise fetch_response.inner_exception else: raise Exception(f"Unexpected response: {fetch_response}") [docs] def add_message(self, message: BaseMessage) -> None: """Store a message in the cache. Args: message (BaseMessage): The message object to store. Raises: SdkException: Momento service or network error. Exception: Unexpected response. """ from momento.responses import CacheListPushBack item = json.dumps(_message_to_dict(message)) push_response = self.cache_client.list_push_back( self.cache_name, self.key, item, ttl=self.ttl ) if isinstance(push_response, CacheListPushBack.Success): return None elif isinstance(push_response, CacheListPushBack.Error): raise push_response.inner_exception else: raise Exception(f"Unexpected response: {push_response}") [docs] def clear(self) -> None: """Remove the session's messages from the cache. Raises: SdkException: Momento service or network error. Exception: Unexpected response. """ from momento.responses import CacheDelete delete_response = self.cache_client.delete(self.cache_name, self.key) if isinstance(delete_response, CacheDelete.Success): return None elif isinstance(delete_response, CacheDelete.Error): raise delete_response.inner_exception else: raise Exception(f"Unexpected response: {delete_response}")
https://api.python.langchain.com/en/stable/_modules/langchain/memory/chat_message_histories/momento.html
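A hedged sketch for MomentoChatMessageHistory above via from_client_params; it assumes MOMENTO_AUTH_TOKEN is set in the environment (per get_from_env in the source), and the cache name is a placeholder::

    from datetime import timedelta

    from langchain.memory.chat_message_histories.momento import (
        MomentoChatMessageHistory,
    )
    from langchain.schema import HumanMessage

    history = MomentoChatMessageHistory.from_client_params(
        "session-001",
        "chat-history-cache",   # created automatically since ensure_cache_exists=True
        ttl=timedelta(days=1),
    )
    history.add_message(HumanMessage(content="Hello, Momento"))
    print(history.messages)  # list_fetch returns the cached messages oldest-first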
335a0c8065b9-0
Source code for langchain.memory.chat_message_histories.postgres import json import logging from typing import List from langchain.schema import ( BaseChatMessageHistory, BaseMessage, _message_to_dict, messages_from_dict, ) logger = logging.getLogger(__name__) DEFAULT_CONNECTION_STRING = "postgresql://postgres:mypassword@localhost/chat_history" [docs]class PostgresChatMessageHistory(BaseChatMessageHistory): """Chat message history stored in a Postgres database.""" def __init__( self, session_id: str, connection_string: str = DEFAULT_CONNECTION_STRING, table_name: str = "message_store", ): import psycopg from psycopg.rows import dict_row try: self.connection = psycopg.connect(connection_string) self.cursor = self.connection.cursor(row_factory=dict_row) except psycopg.OperationalError as error: logger.error(error) self.session_id = session_id self.table_name = table_name self._create_table_if_not_exists() def _create_table_if_not_exists(self) -> None: create_table_query = f"""CREATE TABLE IF NOT EXISTS {self.table_name} ( id SERIAL PRIMARY KEY, session_id TEXT NOT NULL, message JSONB NOT NULL );""" self.cursor.execute(create_table_query) self.connection.commit() @property def messages(self) -> List[BaseMessage]: # type: ignore """Retrieve the messages from PostgreSQL""" query = f"SELECT message FROM {self.table_name} WHERE session_id = %s;" self.cursor.execute(query, (self.session_id,)) items = [record["message"] for record in self.cursor.fetchall()] messages = messages_from_dict(items)
https://api.python.langchain.com/en/stable/_modules/langchain/memory/chat_message_histories/postgres.html
335a0c8065b9-1
messages = messages_from_dict(items) return messages [docs] def add_message(self, message: BaseMessage) -> None: """Append the message to the record in PostgreSQL""" from psycopg import sql query = sql.SQL("INSERT INTO {} (session_id, message) VALUES (%s, %s);").format( sql.Identifier(self.table_name) ) self.cursor.execute( query, (self.session_id, json.dumps(_message_to_dict(message))) ) self.connection.commit() [docs] def clear(self) -> None: """Clear session memory from PostgreSQL""" query = f"DELETE FROM {self.table_name} WHERE session_id = %s;" self.cursor.execute(query, (self.session_id,)) self.connection.commit() def __del__(self) -> None: if self.cursor: self.cursor.close() if self.connection: self.connection.close()
https://api.python.langchain.com/en/stable/_modules/langchain/memory/chat_message_histories/postgres.html
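A minimal sketch for PostgresChatMessageHistory above; it requires psycopg (v3), and the connection string below is a placeholder matching the module's default::

    from langchain.memory.chat_message_histories.postgres import (
        PostgresChatMessageHistory,
    )
    from langchain.schema import HumanMessage

    history = PostgresChatMessageHistory(
        session_id="session-001",
        connection_string="postgresql://postgres:mypassword@localhost/chat_history",  # placeholder
    )
    history.add_message(HumanMessage(content="Store me in Postgres"))
    print(history.messages)   # SELECT message ... WHERE session_id = %s
    history.clear()           # DELETE ... WHERE session_id = %s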
84575f8f66ce-0
Source code for langchain.memory.chat_message_histories.zep from __future__ import annotations import logging from typing import TYPE_CHECKING, Dict, List, Optional from langchain.schema import ( AIMessage, BaseChatMessageHistory, BaseMessage, HumanMessage, ) if TYPE_CHECKING: from zep_python import Memory, MemorySearchResult, Message, NotFoundError logger = logging.getLogger(__name__) [docs]class ZepChatMessageHistory(BaseChatMessageHistory): """A ChatMessageHistory implementation that uses Zep as a backend. Recommended usage:: # Set up Zep Chat History zep_chat_history = ZepChatMessageHistory( session_id=session_id, url=ZEP_API_URL, ) # Use a standard ConversationBufferMemory to encapsulate the Zep chat history memory = ConversationBufferMemory( memory_key="chat_history", chat_memory=zep_chat_history ) Zep provides long-term conversation storage for LLM apps. The server stores, summarizes, embeds, indexes, and enriches conversational AI chat histories, and exposes them via simple, low-latency APIs. For server installation instructions and more, see: https://getzep.github.io/ This class is a thin wrapper around the zep-python package. Additional Zep functionality is exposed via the `zep_summary` and `zep_messages` properties. For more information on the zep-python package, see: https://github.com/getzep/zep-python """ def __init__( self, session_id: str, url: str = "http://localhost:8000", ) -> None: try:
https://api.python.langchain.com/en/stable/_modules/langchain/memory/chat_message_histories/zep.html
84575f8f66ce-1
) -> None: try: from zep_python import ZepClient except ImportError: raise ValueError( "Could not import zep-python package. " "Please install it with `pip install zep-python`." ) self.zep_client = ZepClient(base_url=url) self.session_id = session_id @property def messages(self) -> List[BaseMessage]: # type: ignore """Retrieve messages from Zep memory""" zep_memory: Optional[Memory] = self._get_memory() if not zep_memory: return [] messages: List[BaseMessage] = [] # Extract summary, if present, and messages if zep_memory.summary: if len(zep_memory.summary.content) > 0: messages.append(HumanMessage(content=zep_memory.summary.content)) if zep_memory.messages: msg: Message for msg in zep_memory.messages: if msg.role == "ai": messages.append(AIMessage(content=msg.content)) else: messages.append(HumanMessage(content=msg.content)) return messages @property def zep_messages(self) -> List[Message]: """Retrieve summary from Zep memory""" zep_memory: Optional[Memory] = self._get_memory() if not zep_memory: return [] return zep_memory.messages @property def zep_summary(self) -> Optional[str]: """Retrieve summary from Zep memory""" zep_memory: Optional[Memory] = self._get_memory() if not zep_memory or not zep_memory.summary: return None return zep_memory.summary.content
https://api.python.langchain.com/en/stable/_modules/langchain/memory/chat_message_histories/zep.html
84575f8f66ce-2
return None return zep_memory.summary.content def _get_memory(self) -> Optional[Memory]: """Retrieve memory from Zep""" from zep_python import NotFoundError try: zep_memory: Memory = self.zep_client.get_memory(self.session_id) except NotFoundError: logger.warning( f"Session {self.session_id} not found in Zep. Returning None" ) return None return zep_memory [docs] def add_message(self, message: BaseMessage) -> None: """Append the message to the Zep memory history""" from zep_python import Memory, Message zep_message: Message if isinstance(message, HumanMessage): zep_message = Message(content=message.content, role="human") else: zep_message = Message(content=message.content, role="ai") zep_memory = Memory(messages=[zep_message]) self.zep_client.add_memory(self.session_id, zep_memory) [docs] def search( self, query: str, metadata: Optional[Dict] = None, limit: Optional[int] = None ) -> List[MemorySearchResult]: """Search Zep memory for messages matching the query""" from zep_python import MemorySearchPayload payload: MemorySearchPayload = MemorySearchPayload( text=query, metadata=metadata ) return self.zep_client.search_memory(self.session_id, payload, limit=limit) [docs] def clear(self) -> None: """Clear session memory from Zep. Note that Zep is long-term storage for memory and this is not advised unless you have specific data retention requirements. """ try:
https://api.python.langchain.com/en/stable/_modules/langchain/memory/chat_message_histories/zep.html
84575f8f66ce-3
""" try: self.zep_client.delete_memory(self.session_id) except NotFoundError: logger.warning( f"Session {self.session_id} not found in Zep. Skipping delete." )
https://api.python.langchain.com/en/stable/_modules/langchain/memory/chat_message_histories/zep.html
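The class docstring above already shows the ConversationBufferMemory pattern, so this sketch instead exercises the Zep-specific extras (search and zep_summary); a running Zep server is assumed at the given URL::

    from langchain.memory.chat_message_histories.zep import ZepChatMessageHistory

    zep_history = ZepChatMessageHistory(
        session_id="session-001",
        url="http://localhost:8000",  # a running Zep server is assumed here
    )

    # Semantic search over this session's long-term memory; results are
    # zep-python MemorySearchResult objects.
    for result in zep_history.search("What did we decide about the launch date?", limit=3):
        print(result)

    # Zep-generated conversation summary, or None if one doesn't exist yet.
    print(zep_history.zep_summary)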
cafbb4658a62-0
Source code for langchain.memory.chat_message_histories.dynamodb import logging from typing import List, Optional from langchain.schema import ( BaseChatMessageHistory, BaseMessage, _message_to_dict, messages_from_dict, messages_to_dict, ) logger = logging.getLogger(__name__) [docs]class DynamoDBChatMessageHistory(BaseChatMessageHistory): """Chat message history that stores history in AWS DynamoDB. This class expects that a DynamoDB table with name `table_name` and a partition Key of `SessionId` is present. Args: table_name: name of the DynamoDB table session_id: arbitrary key that is used to store the messages of a single chat session. endpoint_url: URL of the AWS endpoint to connect to. This argument is optional and useful for test purposes, like using Localstack. If you plan to use AWS cloud service, you normally don't have to worry about setting the endpoint_url. """ def __init__( self, table_name: str, session_id: str, endpoint_url: Optional[str] = None ): import boto3 if endpoint_url: client = boto3.resource("dynamodb", endpoint_url=endpoint_url) else: client = boto3.resource("dynamodb") self.table = client.Table(table_name) self.session_id = session_id @property def messages(self) -> List[BaseMessage]: # type: ignore """Retrieve the messages from DynamoDB""" from botocore.exceptions import ClientError response = None try: response = self.table.get_item(Key={"SessionId": self.session_id}) except ClientError as error:
https://api.python.langchain.com/en/stable/_modules/langchain/memory/chat_message_histories/dynamodb.html
cafbb4658a62-1
except ClientError as error: if error.response["Error"]["Code"] == "ResourceNotFoundException": logger.warning("No record found with session id: %s", self.session_id) else: logger.error(error) if response and "Item" in response: items = response["Item"]["History"] else: items = [] messages = messages_from_dict(items) return messages [docs] def add_message(self, message: BaseMessage) -> None: """Append the message to the record in DynamoDB""" from botocore.exceptions import ClientError messages = messages_to_dict(self.messages) _message = _message_to_dict(message) messages.append(_message) try: self.table.put_item( Item={"SessionId": self.session_id, "History": messages} ) except ClientError as err: logger.error(err) [docs] def clear(self) -> None: """Clear session memory from DynamoDB""" from botocore.exceptions import ClientError try: self.table.delete_item(Key={"SessionId": self.session_id}) except ClientError as err: logger.error(err)
https://api.python.langchain.com/en/stable/_modules/langchain/memory/chat_message_histories/dynamodb.html
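The class above expects a table with a SessionId partition key to already exist; a hedged sketch that creates such a table with plain boto3 and then writes through the history (table name and AWS credentials/region configuration are assumed)::

    import boto3

    from langchain.memory.chat_message_histories.dynamodb import (
        DynamoDBChatMessageHistory,
    )
    from langchain.schema import HumanMessage

    # One-time setup: create the table the class expects.
    dynamodb = boto3.resource("dynamodb")
    table = dynamodb.create_table(
        TableName="ChatHistory",  # placeholder
        KeySchema=[{"AttributeName": "SessionId", "KeyType": "HASH"}],
        AttributeDefinitions=[{"AttributeName": "SessionId", "AttributeType": "S"}],
        BillingMode="PAY_PER_REQUEST",
    )
    table.wait_until_exists()

    history = DynamoDBChatMessageHistory(table_name="ChatHistory", session_id="session-001")
    history.add_message(HumanMessage(content="Hello, DynamoDB"))
    print(history.messages)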
f15a12d81b16-0
Source code for langchain.memory.chat_message_histories.in_memory from typing import List from pydantic import BaseModel from langchain.schema import ( BaseChatMessageHistory, BaseMessage, ) [docs]class ChatMessageHistory(BaseChatMessageHistory, BaseModel): messages: List[BaseMessage] = [] [docs] def add_message(self, message: BaseMessage) -> None: """Add a self-created message to the store""" self.messages.append(message) [docs] def clear(self) -> None: self.messages = []
https://api.python.langchain.com/en/stable/_modules/langchain/memory/chat_message_histories/in_memory.html
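The in-memory variant above has no external dependencies, so a complete sketch fits in a few lines::

    from langchain.memory.chat_message_histories.in_memory import ChatMessageHistory
    from langchain.schema import AIMessage, HumanMessage

    history = ChatMessageHistory()
    history.add_message(HumanMessage(content="Hi"))
    history.add_message(AIMessage(content="Hello!"))
    assert len(history.messages) == 2
    history.clear()
    assert history.messages == []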
50d047f5e5c6-0
Source code for langchain.memory.chat_message_histories.redis import json import logging from typing import List, Optional from langchain.schema import ( BaseChatMessageHistory, BaseMessage, _message_to_dict, messages_from_dict, ) logger = logging.getLogger(__name__) [docs]class RedisChatMessageHistory(BaseChatMessageHistory): """Chat message history stored in a Redis database.""" def __init__( self, session_id: str, url: str = "redis://localhost:6379/0", key_prefix: str = "message_store:", ttl: Optional[int] = None, ): try: import redis except ImportError: raise ImportError( "Could not import redis python package. " "Please install it with `pip install redis`." ) try: self.redis_client = redis.Redis.from_url(url=url) except redis.exceptions.ConnectionError as error: logger.error(error) self.session_id = session_id self.key_prefix = key_prefix self.ttl = ttl @property def key(self) -> str: """Construct the record key to use""" return self.key_prefix + self.session_id @property def messages(self) -> List[BaseMessage]: # type: ignore """Retrieve the messages from Redis""" _items = self.redis_client.lrange(self.key, 0, -1) items = [json.loads(m.decode("utf-8")) for m in _items[::-1]] messages = messages_from_dict(items) return messages [docs] def add_message(self, message: BaseMessage) -> None: """Append the message to the record in Redis"""
https://api.python.langchain.com/en/stable/_modules/langchain/memory/chat_message_histories/redis.html
50d047f5e5c6-1
"""Append the message to the record in Redis""" self.redis_client.lpush(self.key, json.dumps(_message_to_dict(message))) if self.ttl: self.redis_client.expire(self.key, self.ttl) [docs] def clear(self) -> None: """Clear session memory from Redis""" self.redis_client.delete(self.key)
https://api.python.langchain.com/en/stable/_modules/langchain/memory/chat_message_histories/redis.html
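A minimal sketch for RedisChatMessageHistory above, assuming a Redis server on the default local URL::

    from langchain.memory.chat_message_histories.redis import RedisChatMessageHistory
    from langchain.schema import HumanMessage

    history = RedisChatMessageHistory(
        session_id="session-001",
        url="redis://localhost:6379/0",
        ttl=600,  # the whole key expires ten minutes after the last write
    )
    history.add_message(HumanMessage(content="Hello, Redis"))
    # Messages are LPUSHed and read back with LRANGE then reversed,
    # so .messages returns them oldest-first.
    print(history.messages)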
0d347772b03e-0
Source code for langchain.tools.plugin from __future__ import annotations import json from typing import Optional, Type import requests import yaml from pydantic import BaseModel from langchain.callbacks.manager import ( AsyncCallbackManagerForToolRun, CallbackManagerForToolRun, ) from langchain.tools.base import BaseTool class ApiConfig(BaseModel): type: str url: str has_user_authentication: Optional[bool] = False class AIPlugin(BaseModel): """AI Plugin Definition.""" schema_version: str name_for_model: str name_for_human: str description_for_model: str description_for_human: str auth: Optional[dict] = None api: ApiConfig logo_url: Optional[str] contact_email: Optional[str] legal_info_url: Optional[str] @classmethod def from_url(cls, url: str) -> AIPlugin: """Instantiate AIPlugin from a URL.""" response = requests.get(url).json() return cls(**response) def marshal_spec(txt: str) -> dict: """Convert the yaml or json serialized spec to a dict. Args: txt: The yaml or json serialized spec. Returns: dict: The spec as a dict. """ try: return json.loads(txt) except json.JSONDecodeError: return yaml.safe_load(txt) class AIPluginToolSchema(BaseModel): """AIPLuginToolSchema.""" tool_input: Optional[str] = "" [docs]class AIPluginTool(BaseTool): plugin: AIPlugin api_spec: str args_schema: Type[AIPluginToolSchema] = AIPluginToolSchema [docs] @classmethod
https://api.python.langchain.com/en/stable/_modules/langchain/tools/plugin.html
0d347772b03e-1
[docs] @classmethod def from_plugin_url(cls, url: str) -> AIPluginTool: plugin = AIPlugin.from_url(url) description = ( f"Call this tool to get the OpenAPI spec (and usage guide) " f"for interacting with the {plugin.name_for_human} API. " f"You should only call this ONCE! What is the " f"{plugin.name_for_human} API useful for? " ) + plugin.description_for_human open_api_spec_str = requests.get(plugin.api.url).text open_api_spec = marshal_spec(open_api_spec_str) api_spec = ( f"Usage Guide: {plugin.description_for_model}\n\n" f"OpenAPI Spec: {open_api_spec}" ) return cls( name=plugin.name_for_model, description=description, plugin=plugin, api_spec=api_spec, ) def _run( self, tool_input: Optional[str] = "", run_manager: Optional[CallbackManagerForToolRun] = None, ) -> str: """Use the tool.""" return self.api_spec async def _arun( self, tool_input: Optional[str] = None, run_manager: Optional[AsyncCallbackManagerForToolRun] = None, ) -> str: """Use the tool asynchronously.""" return self.api_spec
https://api.python.langchain.com/en/stable/_modules/langchain/tools/plugin.html
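A hedged sketch for AIPluginTool above; the manifest URL is a placeholder for any service exposing a ChatGPT-style ai-plugin.json, and both the manifest and its OpenAPI spec are fetched over the network at construction time::

    from langchain.tools.plugin import AIPluginTool

    tool = AIPluginTool.from_plugin_url(
        "https://example.com/.well-known/ai-plugin.json"  # placeholder manifest URL
    )
    print(tool.name)         # name_for_model from the manifest
    print(tool.description)  # the generated "Call this tool to get the OpenAPI spec ..." text
    print(tool.run(""))      # returns the usage guide plus the OpenAPI spec string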
092c2d47647e-0
Source code for langchain.tools.ifttt """From https://github.com/SidU/teams-langchain-js/wiki/Connecting-IFTTT-Services. # Creating a webhook - Go to https://ifttt.com/create # Configuring the "If This" - Click on the "If This" button in the IFTTT interface. - Search for "Webhooks" in the search bar. - Choose the first option for "Receive a web request with a JSON payload." - Choose an Event Name that is specific to the service you plan to connect to. This will make it easier for you to manage the webhook URL. For example, if you're connecting to Spotify, you could use "Spotify" as your Event Name. - Click the "Create Trigger" button to save your settings and create your webhook. # Configuring the "Then That" - Tap on the "Then That" button in the IFTTT interface. - Search for the service you want to connect, such as Spotify. - Choose an action from the service, such as "Add track to a playlist". - Configure the action by specifying the necessary details, such as the playlist name, e.g., "Songs from AI". - Reference the JSON Payload received by the Webhook in your action. For the Spotify scenario, choose "{{JsonPayload}}" as your search query. - Tap the "Create Action" button to save your action settings. - Once you have finished configuring your action, click the "Finish" button to complete the setup. - Congratulations! You have successfully connected the Webhook to the desired service, and you're ready to start receiving data and triggering actions 🎉 # Finishing up - To get your webhook URL go to https://ifttt.com/maker_webhooks/settings
https://api.python.langchain.com/en/stable/_modules/langchain/tools/ifttt.html
092c2d47647e-1
- To get your webhook URL go to https://ifttt.com/maker_webhooks/settings - Copy the IFTTT key value from there. The URL is of the form https://maker.ifttt.com/use/YOUR_IFTTT_KEY. Grab the YOUR_IFTTT_KEY value. """ from typing import Optional import requests from langchain.callbacks.manager import ( AsyncCallbackManagerForToolRun, CallbackManagerForToolRun, ) from langchain.tools.base import BaseTool [docs]class IFTTTWebhook(BaseTool): """IFTTT Webhook. Args: name: name of the tool description: description of the tool url: url to hit with the json event. """ url: str def _run( self, tool_input: str, run_manager: Optional[CallbackManagerForToolRun] = None, ) -> str: body = {"this": tool_input} response = requests.post(self.url, data=body) return response.text async def _arun( self, tool_input: str, run_manager: Optional[AsyncCallbackManagerForToolRun] = None, ) -> str: raise NotImplementedError("Not implemented.")
https://api.python.langchain.com/en/stable/_modules/langchain/tools/ifttt.html
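A hedged sketch for IFTTTWebhook above; the key and event name are placeholders, and the trigger URL format is taken from IFTTT's webhook documentation rather than from the excerpt itself::

    from langchain.tools.ifttt import IFTTTWebhook

    key = "YOUR_IFTTT_KEY"  # placeholder, from https://ifttt.com/maker_webhooks/settings
    event = "spotify"       # placeholder event name chosen when creating the trigger

    tool = IFTTTWebhook(
        name="Spotify",
        description="Add a track to a Spotify playlist via IFTTT.",
        url=f"https://maker.ifttt.com/trigger/{event}/json/with/key/{key}",  # assumed format
    )
    print(tool.run("taylor swift"))  # POSTs {"this": "taylor swift"} to the webhook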
8f07691fd4aa-0
Source code for langchain.tools.convert_to_openai from typing import TypedDict from langchain.tools import BaseTool, StructuredTool class FunctionDescription(TypedDict): """Representation of a callable function to the OpenAI API.""" name: str """The name of the function.""" description: str """A description of the function.""" parameters: dict """The parameters of the function.""" [docs]def format_tool_to_openai_function(tool: BaseTool) -> FunctionDescription: """Format tool into the OpenAI function API.""" if isinstance(tool, StructuredTool): schema_ = tool.args_schema.schema() # Bug with required missing for structured tools. required = sorted(schema_["properties"]) # BUG WORKAROUND return { "name": tool.name, "description": tool.description, "parameters": { "type": "object", "properties": schema_["properties"], "required": required, }, } else: if tool.args_schema: parameters = tool.args_schema.schema() else: parameters = { # This is a hack to get around the fact that some tools # do not expose an args_schema, and expect an argument # which is a string. # And Open AI does not support an array type for the # parameters. "properties": { "__arg1": {"title": "__arg1", "type": "string"}, }, "required": ["__arg1"], "type": "object", } return { "name": tool.name, "description": tool.description, "parameters": parameters, }
https://api.python.langchain.com/en/stable/_modules/langchain/tools/convert_to_openai.html
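A minimal sketch for format_tool_to_openai_function above, building the tool with StructuredTool.from_function (defined in langchain.tools.base, also excerpted in this document)::

    import json

    from langchain.tools import StructuredTool
    from langchain.tools.convert_to_openai import format_tool_to_openai_function

    def multiply(a: int, b: int) -> int:
        """Multiply two integers."""
        return a * b

    tool = StructuredTool.from_function(multiply)
    function_description = format_tool_to_openai_function(tool)
    # Yields a dict with "name", "description", and a JSON-schema "parameters"
    # block, ready to pass to the OpenAI functions API.
    print(json.dumps(function_description, indent=2))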
9531d858c62f-0
Source code for langchain.tools.base """Base implementation for tools or skills.""" from __future__ import annotations import warnings from abc import ABC, abstractmethod from inspect import signature from typing import Any, Awaitable, Callable, Dict, Optional, Tuple, Type, Union from pydantic import ( BaseModel, Extra, Field, create_model, root_validator, validate_arguments, ) from pydantic.main import ModelMetaclass from langchain.callbacks.base import BaseCallbackManager from langchain.callbacks.manager import ( AsyncCallbackManager, AsyncCallbackManagerForToolRun, CallbackManager, CallbackManagerForToolRun, Callbacks, ) class SchemaAnnotationError(TypeError): """Raised when 'args_schema' is missing or has an incorrect type annotation.""" class ToolMetaclass(ModelMetaclass): """Metaclass for BaseTool to ensure the provided args_schema doesn't silently ignored.""" def __new__( cls: Type[ToolMetaclass], name: str, bases: Tuple[Type, ...], dct: dict ) -> ToolMetaclass: """Create the definition of the new tool class.""" schema_type: Optional[Type[BaseModel]] = dct.get("args_schema") if schema_type is not None: schema_annotations = dct.get("__annotations__", {}) args_schema_type = schema_annotations.get("args_schema", None) if args_schema_type is None or args_schema_type == BaseModel: # Throw errors for common mis-annotations. # TODO: Use get_args / get_origin and fully # specify valid annotations. typehint_mandate = """ class ChildTool(BaseTool): ... args_schema: Type[BaseModel] = SchemaClass ..."""
https://api.python.langchain.com/en/stable/_modules/langchain/tools/base.html
9531d858c62f-1
... args_schema: Type[BaseModel] = SchemaClass ...""" raise SchemaAnnotationError( f"Tool definition for {name} must include valid type annotations" f" for argument 'args_schema' to behave as expected.\n" f"Expected annotation of 'Type[BaseModel]'" f" but got '{args_schema_type}'.\n" f"Expected class looks like:\n" f"{typehint_mandate}" ) # Pass through to Pydantic's metaclass return super().__new__(cls, name, bases, dct) def _create_subset_model( name: str, model: BaseModel, field_names: list ) -> Type[BaseModel]: """Create a pydantic model with only a subset of model's fields.""" fields = {} for field_name in field_names: field = model.__fields__[field_name] fields[field_name] = (field.type_, field.field_info) return create_model(name, **fields) # type: ignore def _get_filtered_args( inferred_model: Type[BaseModel], func: Callable, ) -> dict: """Get the arguments from a function's signature.""" schema = inferred_model.schema()["properties"] valid_keys = signature(func).parameters return {k: schema[k] for k in valid_keys if k not in ("run_manager", "callbacks")} class _SchemaConfig: """Configuration for the pydantic model.""" extra = Extra.forbid arbitrary_types_allowed = True def create_schema_from_function( model_name: str, func: Callable, ) -> Type[BaseModel]: """Create a pydantic schema from a function's signature.
https://api.python.langchain.com/en/stable/_modules/langchain/tools/base.html
9531d858c62f-2
"""Create a pydantic schema from a function's signature. Args: model_name: Name to assign to the generated pydandic schema func: Function to generate the schema from Returns: A pydantic model with the same arguments as the function """ # https://docs.pydantic.dev/latest/usage/validation_decorator/ validated = validate_arguments(func, config=_SchemaConfig) # type: ignore inferred_model = validated.model # type: ignore if "run_manager" in inferred_model.__fields__: del inferred_model.__fields__["run_manager"] if "callbacks" in inferred_model.__fields__: del inferred_model.__fields__["callbacks"] # Pydantic adds placeholder virtual fields we need to strip valid_properties = _get_filtered_args(inferred_model, func) return _create_subset_model( f"{model_name}Schema", inferred_model, list(valid_properties) ) class ToolException(Exception): """An optional exception that tool throws when execution error occurs. When this exception is thrown, the agent will not stop working, but will handle the exception according to the handle_tool_error variable of the tool, and the processing result will be returned to the agent as observation, and printed in red on the console. """ pass [docs]class BaseTool(ABC, BaseModel, metaclass=ToolMetaclass): """Interface LangChain tools must implement.""" name: str """The unique name of the tool that clearly communicates its purpose.""" description: str """Used to tell the model how/when/why to use the tool. You can provide few-shot examples as a part of the description. """
https://api.python.langchain.com/en/stable/_modules/langchain/tools/base.html
9531d858c62f-3
You can provide few-shot examples as a part of the description. """ args_schema: Optional[Type[BaseModel]] = None """Pydantic model class to validate and parse the tool's input arguments.""" return_direct: bool = False """Whether to return the tool's output directly. Setting this to True means that after the tool is called, the AgentExecutor will stop looping. """ verbose: bool = False """Whether to log the tool's progress.""" callbacks: Callbacks = Field(default=None, exclude=True) """Callbacks to be called during tool execution.""" callback_manager: Optional[BaseCallbackManager] = Field(default=None, exclude=True) """Deprecated. Please use callbacks instead.""" handle_tool_error: Optional[ Union[bool, str, Callable[[ToolException], str]] ] = False """Handle the content of the ToolException thrown.""" class Config: """Configuration for this pydantic object.""" extra = Extra.forbid arbitrary_types_allowed = True @property def is_single_input(self) -> bool: """Whether the tool only accepts a single input.""" keys = {k for k in self.args if k != "kwargs"} return len(keys) == 1 @property def args(self) -> dict: if self.args_schema is not None: return self.args_schema.schema()["properties"] else: schema = create_schema_from_function(self.name, self._run) return schema.schema()["properties"] def _parse_input( self, tool_input: Union[str, Dict], ) -> Union[str, Dict[str, Any]]: """Convert tool input to pydantic model."""
https://api.python.langchain.com/en/stable/_modules/langchain/tools/base.html
9531d858c62f-4
"""Convert tool input to pydantic model.""" input_args = self.args_schema if isinstance(tool_input, str): if input_args is not None: key_ = next(iter(input_args.__fields__.keys())) input_args.validate({key_: tool_input}) return tool_input else: if input_args is not None: result = input_args.parse_obj(tool_input) return {k: v for k, v in result.dict().items() if k in tool_input} return tool_input @root_validator() def raise_deprecation(cls, values: Dict) -> Dict: """Raise deprecation warning if callback_manager is used.""" if values.get("callback_manager") is not None: warnings.warn( "callback_manager is deprecated. Please use callbacks instead.", DeprecationWarning, ) values["callbacks"] = values.pop("callback_manager", None) return values @abstractmethod def _run( self, *args: Any, **kwargs: Any, ) -> Any: """Use the tool. Add run_manager: Optional[CallbackManagerForToolRun] = None to child implementations to enable tracing, """ @abstractmethod async def _arun( self, *args: Any, **kwargs: Any, ) -> Any: """Use the tool asynchronously. Add run_manager: Optional[AsyncCallbackManagerForToolRun] = None to child implementations to enable tracing, """ def _to_args_and_kwargs(self, tool_input: Union[str, Dict]) -> Tuple[Tuple, Dict]: # For backwards compatibility, if run_input is a string,
https://api.python.langchain.com/en/stable/_modules/langchain/tools/base.html
9531d858c62f-5
# For backwards compatibility, if run_input is a string, # pass as a positional argument. if isinstance(tool_input, str): return (tool_input,), {} else: return (), tool_input [docs] def run( self, tool_input: Union[str, Dict], verbose: Optional[bool] = None, start_color: Optional[str] = "green", color: Optional[str] = "green", callbacks: Callbacks = None, **kwargs: Any, ) -> Any: """Run the tool.""" parsed_input = self._parse_input(tool_input) if not self.verbose and verbose is not None: verbose_ = verbose else: verbose_ = self.verbose callback_manager = CallbackManager.configure( callbacks, self.callbacks, verbose=verbose_ ) # TODO: maybe also pass through run_manager is _run supports kwargs new_arg_supported = signature(self._run).parameters.get("run_manager") run_manager = callback_manager.on_tool_start( {"name": self.name, "description": self.description}, tool_input if isinstance(tool_input, str) else str(tool_input), color=start_color, **kwargs, ) try: tool_args, tool_kwargs = self._to_args_and_kwargs(parsed_input) observation = ( self._run(*tool_args, run_manager=run_manager, **tool_kwargs) if new_arg_supported else self._run(*tool_args, **tool_kwargs) ) except ToolException as e: if not self.handle_tool_error: run_manager.on_tool_error(e) raise e elif isinstance(self.handle_tool_error, bool):
https://api.python.langchain.com/en/stable/_modules/langchain/tools/base.html
9531d858c62f-6
raise e elif isinstance(self.handle_tool_error, bool): if e.args: observation = e.args[0] else: observation = "Tool execution error" elif isinstance(self.handle_tool_error, str): observation = self.handle_tool_error elif callable(self.handle_tool_error): observation = self.handle_tool_error(e) else: raise ValueError( f"Got unexpected type of `handle_tool_error`. Expected bool, str " f"or callable. Received: {self.handle_tool_error}" ) run_manager.on_tool_end( str(observation), color="red", name=self.name, **kwargs ) return observation except (Exception, KeyboardInterrupt) as e: run_manager.on_tool_error(e) raise e else: run_manager.on_tool_end( str(observation), color=color, name=self.name, **kwargs ) return observation [docs] async def arun( self, tool_input: Union[str, Dict], verbose: Optional[bool] = None, start_color: Optional[str] = "green", color: Optional[str] = "green", callbacks: Callbacks = None, **kwargs: Any, ) -> Any: """Run the tool asynchronously.""" parsed_input = self._parse_input(tool_input) if not self.verbose and verbose is not None: verbose_ = verbose else: verbose_ = self.verbose callback_manager = AsyncCallbackManager.configure( callbacks, self.callbacks, verbose=verbose_ ) new_arg_supported = signature(self._arun).parameters.get("run_manager") run_manager = await callback_manager.on_tool_start(
https://api.python.langchain.com/en/stable/_modules/langchain/tools/base.html
9531d858c62f-7
run_manager = await callback_manager.on_tool_start( {"name": self.name, "description": self.description}, tool_input if isinstance(tool_input, str) else str(tool_input), color=start_color, **kwargs, ) try: # We then call the tool on the tool input to get an observation tool_args, tool_kwargs = self._to_args_and_kwargs(parsed_input) observation = ( await self._arun(*tool_args, run_manager=run_manager, **tool_kwargs) if new_arg_supported else await self._arun(*tool_args, **tool_kwargs) ) except ToolException as e: if not self.handle_tool_error: await run_manager.on_tool_error(e) raise e elif isinstance(self.handle_tool_error, bool): if e.args: observation = e.args[0] else: observation = "Tool execution error" elif isinstance(self.handle_tool_error, str): observation = self.handle_tool_error elif callable(self.handle_tool_error): observation = self.handle_tool_error(e) else: raise ValueError( f"Got unexpected type of `handle_tool_error`. Expected bool, str " f"or callable. Received: {self.handle_tool_error}" ) await run_manager.on_tool_end( str(observation), color="red", name=self.name, **kwargs ) return observation except (Exception, KeyboardInterrupt) as e: await run_manager.on_tool_error(e) raise e else: await run_manager.on_tool_end( str(observation), color=color, name=self.name, **kwargs ) return observation
https://api.python.langchain.com/en/stable/_modules/langchain/tools/base.html
9531d858c62f-8
) return observation def __call__(self, tool_input: str, callbacks: Callbacks = None) -> str: """Make tool callable.""" return self.run(tool_input, callbacks=callbacks) [docs]class Tool(BaseTool): """Tool that takes in function or coroutine directly.""" description: str = "" func: Callable[..., str] """The function to run when the tool is called.""" coroutine: Optional[Callable[..., Awaitable[str]]] = None """The asynchronous version of the function.""" @property def args(self) -> dict: """The tool's input arguments.""" if self.args_schema is not None: return self.args_schema.schema()["properties"] # For backwards compatibility, if the function signature is ambiguous, # assume it takes a single string input. return {"tool_input": {"type": "string"}} def _to_args_and_kwargs(self, tool_input: Union[str, Dict]) -> Tuple[Tuple, Dict]: """Convert tool input to pydantic model.""" args, kwargs = super()._to_args_and_kwargs(tool_input) # For backwards compatibility. The tool must be run with a single input all_args = list(args) + list(kwargs.values()) if len(all_args) != 1: raise ToolException( f"Too many arguments to single-input tool {self.name}." f" Args: {all_args}" ) return tuple(all_args), {} def _run( self, *args: Any, run_manager: Optional[CallbackManagerForToolRun] = None, **kwargs: Any, ) -> Any: """Use the tool."""
https://api.python.langchain.com/en/stable/_modules/langchain/tools/base.html
9531d858c62f-9
**kwargs: Any, ) -> Any: """Use the tool.""" new_argument_supported = signature(self.func).parameters.get("callbacks") return ( self.func( *args, callbacks=run_manager.get_child() if run_manager else None, **kwargs, ) if new_argument_supported else self.func(*args, **kwargs) ) async def _arun( self, *args: Any, run_manager: Optional[AsyncCallbackManagerForToolRun] = None, **kwargs: Any, ) -> Any: """Use the tool asynchronously.""" if self.coroutine: new_argument_supported = signature(self.coroutine).parameters.get( "callbacks" ) return ( await self.coroutine( *args, callbacks=run_manager.get_child() if run_manager else None, **kwargs, ) if new_argument_supported else await self.coroutine(*args, **kwargs) ) raise NotImplementedError("Tool does not support async") # TODO: this is for backwards compatibility, remove in future def __init__( self, name: str, func: Callable, description: str, **kwargs: Any ) -> None: """Initialize tool.""" super(Tool, self).__init__( name=name, func=func, description=description, **kwargs ) [docs] @classmethod def from_function( cls, func: Callable, name: str, # We keep these required to support backwards compatibility description: str, return_direct: bool = False, args_schema: Optional[Type[BaseModel]] = None,
https://api.python.langchain.com/en/stable/_modules/langchain/tools/base.html
9531d858c62f-10
args_schema: Optional[Type[BaseModel]] = None, **kwargs: Any, ) -> Tool: """Initialize tool from a function.""" return cls( name=name, func=func, description=description, return_direct=return_direct, args_schema=args_schema, **kwargs, ) [docs]class StructuredTool(BaseTool): """Tool that can operate on any number of inputs.""" description: str = "" args_schema: Type[BaseModel] = Field(..., description="The tool schema.") """The input arguments' schema.""" func: Callable[..., Any] """The function to run when the tool is called.""" coroutine: Optional[Callable[..., Awaitable[Any]]] = None """The asynchronous version of the function.""" @property def args(self) -> dict: """The tool's input arguments.""" return self.args_schema.schema()["properties"] def _run( self, *args: Any, run_manager: Optional[CallbackManagerForToolRun] = None, **kwargs: Any, ) -> Any: """Use the tool.""" new_argument_supported = signature(self.func).parameters.get("callbacks") return ( self.func( *args, callbacks=run_manager.get_child() if run_manager else None, **kwargs, ) if new_argument_supported else self.func(*args, **kwargs) ) async def _arun( self, *args: Any, run_manager: Optional[AsyncCallbackManagerForToolRun] = None, **kwargs: Any, ) -> str:
https://api.python.langchain.com/en/stable/_modules/langchain/tools/base.html
9531d858c62f-11
**kwargs: Any, ) -> str: """Use the tool asynchronously.""" if self.coroutine: new_argument_supported = signature(self.coroutine).parameters.get( "callbacks" ) return ( await self.coroutine( *args, callbacks=run_manager.get_child() if run_manager else None, **kwargs, ) if new_argument_supported else await self.coroutine(*args, **kwargs) ) raise NotImplementedError("Tool does not support async") [docs] @classmethod def from_function( cls, func: Callable, name: Optional[str] = None, description: Optional[str] = None, return_direct: bool = False, args_schema: Optional[Type[BaseModel]] = None, infer_schema: bool = True, **kwargs: Any, ) -> StructuredTool: """Create tool from a given function. A classmethod that helps to create a tool from a function. Args: func: The function from which to create a tool name: The name of the tool. Defaults to the function name description: The description of the tool. Defaults to the function docstring return_direct: Whether to return the result directly or as a callback args_schema: The schema of the tool's input arguments infer_schema: Whether to infer the schema from the function's signature **kwargs: Additional arguments to pass to the tool Returns: The tool Examples: ... code-block:: python def add(a: int, b: int) -> int: \"\"\"Add two numbers\"\"\" return a + b
https://api.python.langchain.com/en/stable/_modules/langchain/tools/base.html
9531d858c62f-12
\"\"\"Add two numbers\"\"\" return a + b tool = StructuredTool.from_function(add) tool.run(1, 2) # 3 """ name = name or func.__name__ description = description or func.__doc__ assert ( description is not None ), "Function must have a docstring if description not provided." # Description example: # search_api(query: str) - Searches the API for the query. description = f"{name}{signature(func)} - {description.strip()}" _args_schema = args_schema if _args_schema is None and infer_schema: _args_schema = create_schema_from_function(f"{name}Schema", func) return cls( name=name, func=func, args_schema=_args_schema, description=description, return_direct=return_direct, **kwargs, ) [docs]def tool( *args: Union[str, Callable], return_direct: bool = False, args_schema: Optional[Type[BaseModel]] = None, infer_schema: bool = True, ) -> Callable: """Make tools out of functions, can be used with or without arguments. Args: *args: The arguments to the tool. return_direct: Whether to return directly from the tool rather than continuing the agent loop. args_schema: optional argument schema for user to specify infer_schema: Whether to infer the schema of the arguments from the function's signature. This also makes the resultant tool accept a dictionary input to its `run()` function. Requires: - Function must be of type (str) -> str - Function must have a docstring
https://api.python.langchain.com/en/stable/_modules/langchain/tools/base.html
9531d858c62f-13
- Function must have a docstring Examples: .. code-block:: python @tool def search_api(query: str) -> str: # Searches the API for the query. return @tool("search", return_direct=True) def search_api(query: str) -> str: # Searches the API for the query. return """ def _make_with_name(tool_name: str) -> Callable: def _make_tool(func: Callable) -> BaseTool: if infer_schema or args_schema is not None: return StructuredTool.from_function( func, name=tool_name, return_direct=return_direct, args_schema=args_schema, infer_schema=infer_schema, ) # If someone doesn't want a schema applied, we must treat it as # a simple string->string function assert func.__doc__ is not None, "Function must have a docstring" return Tool( name=tool_name, func=func, description=f"{tool_name} tool", return_direct=return_direct, ) return _make_tool if len(args) == 1 and isinstance(args[0], str): # if the argument is a string, then we use the string as the tool name # Example usage: @tool("search", return_direct=True) return _make_with_name(args[0]) elif len(args) == 1 and callable(args[0]): # if the argument is a function, then we use the function name as the tool name # Example usage: @tool return _make_with_name(args[0].__name__)(args[0]) elif len(args) == 0:
https://api.python.langchain.com/en/stable/_modules/langchain/tools/base.html
9531d858c62f-14
elif len(args) == 0: # if there are no arguments, then we use the function name as the tool name # Example usage: @tool(return_direct=True) def _partial(func: Callable[[str], str]) -> BaseTool: return _make_with_name(func.__name__)(func) return _partial else: raise ValueError("Too many arguments for tool decorator")
https://api.python.langchain.com/en/stable/_modules/langchain/tools/base.html
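A minimal usage sketch for the two entry points above, assuming only that langchain is installed; the functions and their bodies are illustrative placeholders. The @tool decorator infers a schema from the wrapped function's signature, while StructuredTool.from_function builds a multi-argument tool that takes a dict input to run():

from langchain.tools.base import StructuredTool, tool

@tool
def search_api(query: str) -> str:
    """Search a hypothetical API for the query."""
    return f"Results for {query!r}"

def multiply(a: int, b: int) -> int:
    """Multiply two numbers."""
    return a * b

calculator = StructuredTool.from_function(multiply)

print(search_api.name)                   # "search_api"
print(search_api.run("langchain"))       # single-argument tools accept a plain string
print(calculator.run({"a": 6, "b": 7}))  # structured tools take a dict of arguments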
b30576d02baa-0
Source code for langchain.tools.playwright.extract_hyperlinks from __future__ import annotations import json from typing import TYPE_CHECKING, Any, Optional, Type from pydantic import BaseModel, Field, root_validator from langchain.callbacks.manager import ( AsyncCallbackManagerForToolRun, CallbackManagerForToolRun, ) from langchain.tools.playwright.base import BaseBrowserTool from langchain.tools.playwright.utils import aget_current_page, get_current_page if TYPE_CHECKING: pass class ExtractHyperlinksToolInput(BaseModel): """Input for ExtractHyperlinksTool.""" absolute_urls: bool = Field( default=False, description="Return absolute URLs instead of relative URLs", ) [docs]class ExtractHyperlinksTool(BaseBrowserTool): """Extract all hyperlinks on the page.""" name: str = "extract_hyperlinks" description: str = "Extract all hyperlinks on the current webpage" args_schema: Type[BaseModel] = ExtractHyperlinksToolInput @root_validator def check_bs_import(cls, values: dict) -> dict: """Check that the arguments are valid.""" try: from bs4 import BeautifulSoup # noqa: F401 except ImportError: raise ValueError( "The 'beautifulsoup4' package is required to use this tool." " Please install it with 'pip install beautifulsoup4'." ) return values [docs] @staticmethod def scrape_page(page: Any, html_content: str, absolute_urls: bool) -> str: from urllib.parse import urljoin from bs4 import BeautifulSoup # Parse the HTML content with BeautifulSoup soup = BeautifulSoup(html_content, "lxml") # Find all the anchor elements and extract their href attributes
https://api.python.langchain.com/en/stable/_modules/langchain/tools/playwright/extract_hyperlinks.html
b30576d02baa-1
# Find all the anchor elements and extract their href attributes anchors = soup.find_all("a") if absolute_urls: base_url = page.url links = [urljoin(base_url, anchor.get("href", "")) for anchor in anchors] else: links = [anchor.get("href", "") for anchor in anchors] # Return the list of links as a JSON string return json.dumps(links) def _run( self, absolute_urls: bool = False, run_manager: Optional[CallbackManagerForToolRun] = None, ) -> str: """Use the tool.""" if self.sync_browser is None: raise ValueError(f"Synchronous browser not provided to {self.name}") page = get_current_page(self.sync_browser) html_content = page.content() return self.scrape_page(page, html_content, absolute_urls) async def _arun( self, absolute_urls: bool = False, run_manager: Optional[AsyncCallbackManagerForToolRun] = None, ) -> str: """Use the tool asynchronously.""" if self.async_browser is None: raise ValueError(f"Asynchronous browser not provided to {self.name}") page = await aget_current_page(self.async_browser) html_content = await page.content() return self.scrape_page(page, html_content, absolute_urls)
https://api.python.langchain.com/en/stable/_modules/langchain/tools/playwright/extract_hyperlinks.html
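A short sketch of how this tool is usually wired up, assuming playwright (with browsers installed via `playwright install`) and beautifulsoup4 are available; the target URL is illustrative:

from langchain.tools.playwright.extract_hyperlinks import ExtractHyperlinksTool
from langchain.tools.playwright.navigate import NavigateTool
from langchain.tools.playwright.utils import create_sync_playwright_browser

browser = create_sync_playwright_browser()  # launches a headless Chromium instance
navigate = NavigateTool.from_browser(sync_browser=browser)
extract_links = ExtractHyperlinksTool.from_browser(sync_browser=browser)

navigate.run({"url": "https://example.com"})
print(extract_links.run({"absolute_urls": True}))  # JSON string of the page's links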
8bbf2e91c875-0
Source code for langchain.tools.playwright.current_page from __future__ import annotations from typing import Optional, Type from pydantic import BaseModel from langchain.callbacks.manager import ( AsyncCallbackManagerForToolRun, CallbackManagerForToolRun, ) from langchain.tools.playwright.base import BaseBrowserTool from langchain.tools.playwright.utils import aget_current_page, get_current_page [docs]class CurrentWebPageTool(BaseBrowserTool): name: str = "current_webpage" description: str = "Returns the URL of the current page" args_schema: Type[BaseModel] = BaseModel def _run( self, run_manager: Optional[CallbackManagerForToolRun] = None, ) -> str: """Use the tool.""" if self.sync_browser is None: raise ValueError(f"Synchronous browser not provided to {self.name}") page = get_current_page(self.sync_browser) return str(page.url) async def _arun( self, run_manager: Optional[AsyncCallbackManagerForToolRun] = None, ) -> str: """Use the tool.""" if self.async_browser is None: raise ValueError(f"Asynchronous browser not provided to {self.name}") page = await aget_current_page(self.async_browser) return str(page.url)
https://api.python.langchain.com/en/stable/_modules/langchain/tools/playwright/current_page.html
19028a9a9a34-0
Source code for langchain.tools.playwright.extract_text from __future__ import annotations from typing import Optional, Type from pydantic import BaseModel, root_validator from langchain.callbacks.manager import ( AsyncCallbackManagerForToolRun, CallbackManagerForToolRun, ) from langchain.tools.playwright.base import BaseBrowserTool from langchain.tools.playwright.utils import aget_current_page, get_current_page [docs]class ExtractTextTool(BaseBrowserTool): name: str = "extract_text" description: str = "Extract all the text on the current webpage" args_schema: Type[BaseModel] = BaseModel @root_validator def check_bs_import(cls, values: dict) -> dict: """Check that the arguments are valid.""" try: from bs4 import BeautifulSoup # noqa: F401 except ImportError: raise ValueError( "The 'beautifulsoup4' package is required to use this tool." " Please install it with 'pip install beautifulsoup4'." ) return values def _run(self, run_manager: Optional[CallbackManagerForToolRun] = None) -> str: """Use the tool.""" # Use Beautiful Soup since it's faster than looping through the elements from bs4 import BeautifulSoup if self.sync_browser is None: raise ValueError(f"Synchronous browser not provided to {self.name}") page = get_current_page(self.sync_browser) html_content = page.content() # Parse the HTML content with BeautifulSoup soup = BeautifulSoup(html_content, "lxml") return " ".join(text for text in soup.stripped_strings) async def _arun( self, run_manager: Optional[AsyncCallbackManagerForToolRun] = None
https://api.python.langchain.com/en/stable/_modules/langchain/tools/playwright/extract_text.html
19028a9a9a34-1
self, run_manager: Optional[AsyncCallbackManagerForToolRun] = None ) -> str: """Use the tool.""" if self.async_browser is None: raise ValueError(f"Asynchronous browser not provided to {self.name}") # Use Beautiful Soup since it's faster than looping through the elements from bs4 import BeautifulSoup page = await aget_current_page(self.async_browser) html_content = await page.content() # Parse the HTML content with BeautifulSoup soup = BeautifulSoup(html_content, "lxml") return " ".join(text for text in soup.stripped_strings)
https://api.python.langchain.com/en/stable/_modules/langchain/tools/playwright/extract_text.html
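A sketch under the same assumptions as the other Playwright tools (playwright, beautifulsoup4, and lxml installed); it navigates to an illustrative page and returns its visible text:

from langchain.tools.playwright.extract_text import ExtractTextTool
from langchain.tools.playwright.navigate import NavigateTool
from langchain.tools.playwright.utils import create_sync_playwright_browser

browser = create_sync_playwright_browser()
NavigateTool.from_browser(sync_browser=browser).run({"url": "https://example.com"})
text = ExtractTextTool.from_browser(sync_browser=browser).run({})
print(text[:200])  # first part of the page's stripped text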
685dc7114cce-0
Source code for langchain.tools.playwright.get_elements from __future__ import annotations import json from typing import TYPE_CHECKING, List, Optional, Sequence, Type from pydantic import BaseModel, Field from langchain.callbacks.manager import ( AsyncCallbackManagerForToolRun, CallbackManagerForToolRun, ) from langchain.tools.playwright.base import BaseBrowserTool from langchain.tools.playwright.utils import aget_current_page, get_current_page if TYPE_CHECKING: from playwright.async_api import Page as AsyncPage from playwright.sync_api import Page as SyncPage class GetElementsToolInput(BaseModel): """Input for GetElementsTool.""" selector: str = Field( ..., description="CSS selector, such as '*', 'div', 'p', 'a', #id, .classname", ) attributes: List[str] = Field( default_factory=lambda: ["innerText"], description="Set of attributes to retrieve for each element", ) async def _aget_elements( page: AsyncPage, selector: str, attributes: Sequence[str] ) -> List[dict]: """Get elements matching the given CSS selector.""" elements = await page.query_selector_all(selector) results = [] for element in elements: result = {} for attribute in attributes: if attribute == "innerText": val: Optional[str] = await element.inner_text() else: val = await element.get_attribute(attribute) if val is not None and val.strip() != "": result[attribute] = val if result: results.append(result) return results def _get_elements( page: SyncPage, selector: str, attributes: Sequence[str] ) -> List[dict]:
https://api.python.langchain.com/en/stable/_modules/langchain/tools/playwright/get_elements.html
685dc7114cce-1
) -> List[dict]: """Get elements matching the given CSS selector.""" elements = page.query_selector_all(selector) results = [] for element in elements: result = {} for attribute in attributes: if attribute == "innerText": val: Optional[str] = element.inner_text() else: val = element.get_attribute(attribute) if val is not None and val.strip() != "": result[attribute] = val if result: results.append(result) return results [docs]class GetElementsTool(BaseBrowserTool): name: str = "get_elements" description: str = ( "Retrieve elements in the current web page matching the given CSS selector" ) args_schema: Type[BaseModel] = GetElementsToolInput def _run( self, selector: str, attributes: Sequence[str] = ["innerText"], run_manager: Optional[CallbackManagerForToolRun] = None, ) -> str: """Use the tool.""" if self.sync_browser is None: raise ValueError(f"Synchronous browser not provided to {self.name}") page = get_current_page(self.sync_browser) # Navigate to the desired webpage before using this tool results = _get_elements(page, selector, attributes) return json.dumps(results, ensure_ascii=False) async def _arun( self, selector: str, attributes: Sequence[str] = ["innerText"], run_manager: Optional[AsyncCallbackManagerForToolRun] = None, ) -> str: """Use the tool.""" if self.async_browser is None: raise ValueError(f"Asynchronous browser not provided to {self.name}")
https://api.python.langchain.com/en/stable/_modules/langchain/tools/playwright/get_elements.html
685dc7114cce-2
raise ValueError(f"Asynchronous browser not provided to {self.name}") page = await aget_current_page(self.async_browser) # Navigate to the desired webpage before using this tool results = await _aget_elements(page, selector, attributes) return json.dumps(results, ensure_ascii=False)
https://api.python.langchain.com/en/stable/_modules/langchain/tools/playwright/get_elements.html
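A usage sketch; the selector and attribute list are illustrative and depend entirely on the page being scraped:

from langchain.tools.playwright.get_elements import GetElementsTool
from langchain.tools.playwright.navigate import NavigateTool
from langchain.tools.playwright.utils import create_sync_playwright_browser

browser = create_sync_playwright_browser()
NavigateTool.from_browser(sync_browser=browser).run({"url": "https://example.com"})
get_elements = GetElementsTool.from_browser(sync_browser=browser)
# Returns a JSON array such as [{"innerText": "...", "href": "..."}, ...]
print(get_elements.run({"selector": "a", "attributes": ["innerText", "href"]}))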
4475478e82a3-0
Source code for langchain.tools.playwright.navigate_back from __future__ import annotations from typing import Optional, Type from pydantic import BaseModel from langchain.callbacks.manager import ( AsyncCallbackManagerForToolRun, CallbackManagerForToolRun, ) from langchain.tools.playwright.base import BaseBrowserTool from langchain.tools.playwright.utils import ( aget_current_page, get_current_page, ) [docs]class NavigateBackTool(BaseBrowserTool): """Navigate back to the previous page in the browser history.""" name: str = "previous_webpage" description: str = "Navigate back to the previous page in the browser history" args_schema: Type[BaseModel] = BaseModel def _run(self, run_manager: Optional[CallbackManagerForToolRun] = None) -> str: """Use the tool.""" if self.sync_browser is None: raise ValueError(f"Synchronous browser not provided to {self.name}") page = get_current_page(self.sync_browser) response = page.go_back() if response: return ( f"Navigated back to the previous page with URL '{response.url}'." f" Status code {response.status}" ) else: return "Unable to navigate back; no previous page in the history" async def _arun( self, run_manager: Optional[AsyncCallbackManagerForToolRun] = None, ) -> str: """Use the tool.""" if self.async_browser is None: raise ValueError(f"Asynchronous browser not provided to {self.name}") page = await aget_current_page(self.async_browser) response = await page.go_back() if response: return (
https://api.python.langchain.com/en/stable/_modules/langchain/tools/playwright/navigate_back.html
4475478e82a3-1
response = await page.go_back() if response: return ( f"Navigated back to the previous page with URL '{response.url}'." f" Status code {response.status}" ) else: return "Unable to navigate back; no previous page in the history"
https://api.python.langchain.com/en/stable/_modules/langchain/tools/playwright/navigate_back.html
2ead3afa7a36-0
Source code for langchain.tools.playwright.click from __future__ import annotations from typing import Optional, Type from pydantic import BaseModel, Field from langchain.callbacks.manager import ( AsyncCallbackManagerForToolRun, CallbackManagerForToolRun, ) from langchain.tools.playwright.base import BaseBrowserTool from langchain.tools.playwright.utils import ( aget_current_page, get_current_page, ) class ClickToolInput(BaseModel): """Input for ClickTool.""" selector: str = Field(..., description="CSS selector for the element to click") [docs]class ClickTool(BaseBrowserTool): name: str = "click_element" description: str = "Click on an element with the given CSS selector" args_schema: Type[BaseModel] = ClickToolInput visible_only: bool = True """Whether to consider only visible elements.""" playwright_strict: bool = False """Whether to employ Playwright's strict mode when clicking on elements.""" playwright_timeout: float = 1_000 """Timeout (in ms) for Playwright to wait for element to be ready.""" def _selector_effective(self, selector: str) -> str: if not self.visible_only: return selector return f"{selector} >> visible=1" def _run( self, selector: str, run_manager: Optional[CallbackManagerForToolRun] = None, ) -> str: """Use the tool.""" if self.sync_browser is None: raise ValueError(f"Synchronous browser not provided to {self.name}") page = get_current_page(self.sync_browser) # Navigate to the desired webpage before using this tool
https://api.python.langchain.com/en/stable/_modules/langchain/tools/playwright/click.html
2ead3afa7a36-1
# Navigate to the desired webpage before using this tool selector_effective = self._selector_effective(selector=selector) from playwright.sync_api import TimeoutError as PlaywrightTimeoutError try: page.click( selector_effective, strict=self.playwright_strict, timeout=self.playwright_timeout, ) except PlaywrightTimeoutError: return f"Unable to click on element '{selector}'" return f"Clicked element '{selector}'" async def _arun( self, selector: str, run_manager: Optional[AsyncCallbackManagerForToolRun] = None, ) -> str: """Use the tool.""" if self.async_browser is None: raise ValueError(f"Asynchronous browser not provided to {self.name}") page = await aget_current_page(self.async_browser) # Navigate to the desired webpage before using this tool selector_effective = self._selector_effective(selector=selector) from playwright.async_api import TimeoutError as PlaywrightTimeoutError try: await page.click( selector_effective, strict=self.playwright_strict, timeout=self.playwright_timeout, ) except PlaywrightTimeoutError: return f"Unable to click on element '{selector}'" return f"Clicked element '{selector}'"
https://api.python.langchain.com/en/stable/_modules/langchain/tools/playwright/click.html
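A sketch with an illustrative selector; note that on a timeout the tool returns an error string instead of raising, which keeps an agent loop from crashing:

from langchain.tools.playwright.click import ClickTool
from langchain.tools.playwright.navigate import NavigateTool
from langchain.tools.playwright.utils import create_sync_playwright_browser

browser = create_sync_playwright_browser()
NavigateTool.from_browser(sync_browser=browser).run({"url": "https://example.com"})
click = ClickTool.from_browser(sync_browser=browser)
print(click.run({"selector": "a"}))  # "Clicked element 'a'" or an "Unable to click ..." message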
5689eb9aaec8-0
Source code for langchain.tools.playwright.navigate from __future__ import annotations from typing import Optional, Type from pydantic import BaseModel, Field from langchain.callbacks.manager import ( AsyncCallbackManagerForToolRun, CallbackManagerForToolRun, ) from langchain.tools.playwright.base import BaseBrowserTool from langchain.tools.playwright.utils import ( aget_current_page, get_current_page, ) class NavigateToolInput(BaseModel): """Input for NavigateToolInput.""" url: str = Field(..., description="url to navigate to") [docs]class NavigateTool(BaseBrowserTool): name: str = "navigate_browser" description: str = "Navigate a browser to the specified URL" args_schema: Type[BaseModel] = NavigateToolInput def _run( self, url: str, run_manager: Optional[CallbackManagerForToolRun] = None, ) -> str: """Use the tool.""" if self.sync_browser is None: raise ValueError(f"Synchronous browser not provided to {self.name}") page = get_current_page(self.sync_browser) response = page.goto(url) status = response.status if response else "unknown" return f"Navigating to {url} returned status code {status}" async def _arun( self, url: str, run_manager: Optional[AsyncCallbackManagerForToolRun] = None, ) -> str: """Use the tool.""" if self.async_browser is None: raise ValueError(f"Asynchronous browser not provided to {self.name}") page = await aget_current_page(self.async_browser) response = await page.goto(url)
https://api.python.langchain.com/en/stable/_modules/langchain/tools/playwright/navigate.html
5689eb9aaec8-1
response = await page.goto(url) status = response.status if response else "unknown" return f"Navigating to {url} returned status code {status}"
https://api.python.langchain.com/en/stable/_modules/langchain/tools/playwright/navigate.html
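In practice the browser tools above are rarely constructed one by one; they are usually obtained together from the Playwright browser toolkit. A sketch, assuming playwright is installed and browsers have been downloaded with `playwright install`; the URLs are illustrative:

from langchain.agents.agent_toolkits import PlayWrightBrowserToolkit
from langchain.tools.playwright.utils import create_sync_playwright_browser

browser = create_sync_playwright_browser()
toolkit = PlayWrightBrowserToolkit.from_browser(sync_browser=browser)
tools = {t.name: t for t in toolkit.get_tools()}

print(tools["navigate_browser"].run({"url": "https://example.com"}))
print(tools["navigate_browser"].run({"url": "https://www.iana.org/domains/reserved"}))
print(tools["current_webpage"].run({}))   # URL of the active page
print(tools["previous_webpage"].run({}))  # back to the previous page via the browser history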
1fc56f53e8c1-0
Source code for langchain.tools.requests.tool # flake8: noqa """Tools for making requests to an API endpoint.""" import json from typing import Any, Dict, Optional from pydantic import BaseModel from langchain.callbacks.manager import ( AsyncCallbackManagerForToolRun, CallbackManagerForToolRun, ) from langchain.requests import TextRequestsWrapper from langchain.tools.base import BaseTool def _parse_input(text: str) -> Dict[str, Any]: """Parse the json string into a dict.""" return json.loads(text) def _clean_url(url: str) -> str: """Strips quotes from the url.""" return url.strip("\"'") [docs]class BaseRequestsTool(BaseModel): """Base class for requests tools.""" requests_wrapper: TextRequestsWrapper [docs]class RequestsGetTool(BaseRequestsTool, BaseTool): """Tool for making a GET request to an API endpoint.""" name = "requests_get" description = "A portal to the internet. Use this when you need to get specific content from a website. Input should be a url (i.e. https://www.google.com). The output will be the text response of the GET request." def _run( self, url: str, run_manager: Optional[CallbackManagerForToolRun] = None ) -> str: """Run the tool.""" return self.requests_wrapper.get(_clean_url(url)) async def _arun( self, url: str, run_manager: Optional[AsyncCallbackManagerForToolRun] = None, ) -> str: """Run the tool asynchronously.""" return await self.requests_wrapper.aget(_clean_url(url)) [docs]class RequestsPostTool(BaseRequestsTool, BaseTool):
https://api.python.langchain.com/en/stable/_modules/langchain/tools/requests/tool.html
1fc56f53e8c1-1
[docs]class RequestsPostTool(BaseRequestsTool, BaseTool): """Tool for making a POST request to an API endpoint.""" name = "requests_post" description = """Use this when you want to POST to a website. Input should be a json string with two keys: "url" and "data". The value of "url" should be a string, and the value of "data" should be a dictionary of key-value pairs you want to POST to the url. Be careful to always use double quotes for strings in the json string The output will be the text response of the POST request. """ def _run( self, text: str, run_manager: Optional[CallbackManagerForToolRun] = None ) -> str: """Run the tool.""" try: data = _parse_input(text) return self.requests_wrapper.post(_clean_url(data["url"]), data["data"]) except Exception as e: return repr(e) async def _arun( self, text: str, run_manager: Optional[AsyncCallbackManagerForToolRun] = None, ) -> str: """Run the tool asynchronously.""" try: data = _parse_input(text) return await self.requests_wrapper.apost( _clean_url(data["url"]), data["data"] ) except Exception as e: return repr(e) [docs]class RequestsPatchTool(BaseRequestsTool, BaseTool): """Tool for making a PATCH request to an API endpoint.""" name = "requests_patch" description = """Use this when you want to PATCH to a website. Input should be a json string with two keys: "url" and "data".
https://api.python.langchain.com/en/stable/_modules/langchain/tools/requests/tool.html
1fc56f53e8c1-2
Input should be a json string with two keys: "url" and "data". The value of "url" should be a string, and the value of "data" should be a dictionary of key-value pairs you want to PATCH to the url. Be careful to always use double quotes for strings in the json string The output will be the text response of the PATCH request. """ def _run( self, text: str, run_manager: Optional[CallbackManagerForToolRun] = None ) -> str: """Run the tool.""" try: data = _parse_input(text) return self.requests_wrapper.patch(_clean_url(data["url"]), data["data"]) except Exception as e: return repr(e) async def _arun( self, text: str, run_manager: Optional[AsyncCallbackManagerForToolRun] = None, ) -> str: """Run the tool asynchronously.""" try: data = _parse_input(text) return await self.requests_wrapper.apatch( _clean_url(data["url"]), data["data"] ) except Exception as e: return repr(e) [docs]class RequestsPutTool(BaseRequestsTool, BaseTool): """Tool for making a PUT request to an API endpoint.""" name = "requests_put" description = """Use this when you want to PUT to a website. Input should be a json string with two keys: "url" and "data". The value of "url" should be a string, and the value of "data" should be a dictionary of key-value pairs you want to PUT to the url.
https://api.python.langchain.com/en/stable/_modules/langchain/tools/requests/tool.html
1fc56f53e8c1-3
key-value pairs you want to PUT to the url. Be careful to always use double quotes for strings in the json string. The output will be the text response of the PUT request. """ def _run( self, text: str, run_manager: Optional[CallbackManagerForToolRun] = None ) -> str: """Run the tool.""" try: data = _parse_input(text) return self.requests_wrapper.put(_clean_url(data["url"]), data["data"]) except Exception as e: return repr(e) async def _arun( self, text: str, run_manager: Optional[AsyncCallbackManagerForToolRun] = None, ) -> str: """Run the tool asynchronously.""" try: data = _parse_input(text) return await self.requests_wrapper.aput( _clean_url(data["url"]), data["data"] ) except Exception as e: return repr(e) [docs]class RequestsDeleteTool(BaseRequestsTool, BaseTool): """Tool for making a DELETE request to an API endpoint.""" name = "requests_delete" description = "A portal to the internet. Use this when you need to make a DELETE request to a URL. Input should be a specific url, and the output will be the text response of the DELETE request." def _run( self, url: str, run_manager: Optional[CallbackManagerForToolRun] = None, ) -> str: """Run the tool.""" return self.requests_wrapper.delete(_clean_url(url)) async def _arun( self, url: str,
https://api.python.langchain.com/en/stable/_modules/langchain/tools/requests/tool.html
1fc56f53e8c1-4
async def _arun( self, url: str, run_manager: Optional[AsyncCallbackManagerForToolRun] = None, ) -> str: """Run the tool asynchronously.""" return await self.requests_wrapper.adelete(_clean_url(url))
https://api.python.langchain.com/en/stable/_modules/langchain/tools/requests/tool.html
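A sketch of driving these tools by hand; the URLs are illustrative. GET and DELETE tools take a bare URL string, while POST, PATCH, and PUT tools take a JSON string with "url" and "data" keys:

from langchain.requests import TextRequestsWrapper
from langchain.tools.requests.tool import RequestsGetTool, RequestsPostTool

wrapper = TextRequestsWrapper()  # thin wrapper over the `requests` library
get_tool = RequestsGetTool(requests_wrapper=wrapper)
post_tool = RequestsPostTool(requests_wrapper=wrapper)

print(get_tool.run("https://httpbin.org/get")[:200])
print(post_tool.run('{"url": "https://httpbin.org/post", "data": {"q": "hello"}}')[:200])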
17b995ace12c-0
Source code for langchain.tools.steamship_image_generation.tool """This tool allows agents to generate images using Steamship. Steamship offers access to different third party image generation APIs using a single API key. Today the following models are supported: - Dall-E - Stable Diffusion To use this tool, you must first set as environment variables: STEAMSHIP_API_KEY ``` """ from __future__ import annotations from enum import Enum from typing import TYPE_CHECKING, Dict, Optional from pydantic import root_validator from langchain.callbacks.manager import ( AsyncCallbackManagerForToolRun, CallbackManagerForToolRun, ) from langchain.tools import BaseTool from langchain.tools.steamship_image_generation.utils import make_image_public from langchain.utils import get_from_dict_or_env if TYPE_CHECKING: pass class ModelName(str, Enum): """Supported Image Models for generation.""" DALL_E = "dall-e" STABLE_DIFFUSION = "stable-diffusion" SUPPORTED_IMAGE_SIZES = { ModelName.DALL_E: ("256x256", "512x512", "1024x1024"), ModelName.STABLE_DIFFUSION: ("512x512", "768x768"), } [docs]class SteamshipImageGenerationTool(BaseTool): try: from steamship import Steamship except ImportError: pass """Tool used to generate images from a text-prompt.""" model_name: ModelName size: Optional[str] = "512x512" steamship: Steamship return_urls: Optional[bool] = False name = "GenerateImage" description = ( "Useful for when you need to generate an image."
https://api.python.langchain.com/en/stable/_modules/langchain/tools/steamship_image_generation/tool.html
17b995ace12c-1
description = ( "Useful for when you need to generate an image." "Input: A detailed text-2-image prompt describing an image" "Output: the UUID of a generated image" ) @root_validator(pre=True) def validate_size(cls, values: Dict) -> Dict: if "size" in values: size = values["size"] model_name = values["model_name"] if size not in SUPPORTED_IMAGE_SIZES[model_name]: raise RuntimeError(f"size {size} is not supported by {model_name}") return values @root_validator(pre=True) def validate_environment(cls, values: Dict) -> Dict: """Validate that api key and python package exists in environment.""" steamship_api_key = get_from_dict_or_env( values, "steamship_api_key", "STEAMSHIP_API_KEY" ) try: from steamship import Steamship except ImportError: raise ImportError( "steamship is not installed. " "Please install it with `pip install steamship`" ) steamship = Steamship( api_key=steamship_api_key, ) values["steamship"] = steamship if "steamship_api_key" in values: del values["steamship_api_key"] return values def _run( self, query: str, run_manager: Optional[CallbackManagerForToolRun] = None, ) -> str: """Use the tool.""" image_generator = self.steamship.use_plugin( plugin_handle=self.model_name.value, config={"n": 1, "size": self.size} )
https://api.python.langchain.com/en/stable/_modules/langchain/tools/steamship_image_generation/tool.html
17b995ace12c-2
) task = image_generator.generate(text=query, append_output_to_file=True) task.wait() blocks = task.output.blocks if len(blocks) > 0: if self.return_urls: return make_image_public(self.steamship, blocks[0]) else: return blocks[0].id raise RuntimeError(f"[{self.name}] Tool unable to generate image!") async def _arun( self, query: str, run_manager: Optional[AsyncCallbackManagerForToolRun] = None, ) -> str: """Use the tool asynchronously.""" raise NotImplementedError("GenerateImageTool does not support async")
https://api.python.langchain.com/en/stable/_modules/langchain/tools/steamship_image_generation/tool.html
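A hypothetical invocation, assuming the steamship package is installed and a valid Steamship API key is available; the key value and prompt below are placeholders:

import os

from langchain.tools.steamship_image_generation.tool import SteamshipImageGenerationTool

os.environ["STEAMSHIP_API_KEY"] = "<your-steamship-api-key>"  # placeholder
image_tool = SteamshipImageGenerationTool(model_name="dall-e", return_urls=True)
print(image_tool.run("An oil painting of a lighthouse at dusk"))  # public URL of the image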
cc89c4b6a0c1-0
Source code for langchain.tools.ddg_search.tool """Tool for the DuckDuckGo search API.""" import warnings from typing import Any, Optional from pydantic import Field from langchain.callbacks.manager import ( AsyncCallbackManagerForToolRun, CallbackManagerForToolRun, ) from langchain.tools.base import BaseTool from langchain.utilities.duckduckgo_search import DuckDuckGoSearchAPIWrapper [docs]class DuckDuckGoSearchRun(BaseTool): """Tool that adds the capability to query the DuckDuckGo search API.""" name = "duckduckgo_search" description = ( "A wrapper around DuckDuckGo Search. " "Useful for when you need to answer questions about current events. " "Input should be a search query." ) api_wrapper: DuckDuckGoSearchAPIWrapper = Field( default_factory=DuckDuckGoSearchAPIWrapper ) def _run( self, query: str, run_manager: Optional[CallbackManagerForToolRun] = None, ) -> str: """Use the tool.""" return self.api_wrapper.run(query) async def _arun( self, query: str, run_manager: Optional[AsyncCallbackManagerForToolRun] = None, ) -> str: """Use the tool asynchronously.""" raise NotImplementedError("DuckDuckGoSearch does not support async") [docs]class DuckDuckGoSearchResults(BaseTool): """Tool that queries the Duck Duck Go Search API and get back json.""" name = "DuckDuckGo Results JSON" description = ( "A wrapper around Duck Duck Go Search. "
https://api.python.langchain.com/en/stable/_modules/langchain/tools/ddg_search/tool.html
cc89c4b6a0c1-1
description = ( "A wrapper around Duck Duck Go Search. " "Useful for when you need to answer questions about current events. " "Input should be a search query. Output is a JSON array of the query results" ) num_results: int = 4 api_wrapper: DuckDuckGoSearchAPIWrapper = Field( default_factory=DuckDuckGoSearchAPIWrapper ) def _run( self, query: str, run_manager: Optional[CallbackManagerForToolRun] = None, ) -> str: """Use the tool.""" return str(self.api_wrapper.results(query, self.num_results)) async def _arun( self, query: str, run_manager: Optional[AsyncCallbackManagerForToolRun] = None, ) -> str: """Use the tool asynchronously.""" raise NotImplementedError("DuckDuckGoSearchResults does not support async") def DuckDuckGoSearchTool(*args: Any, **kwargs: Any) -> DuckDuckGoSearchRun: """ Deprecated. Use DuckDuckGoSearchRun instead. Args: *args: **kwargs: Returns: DuckDuckGoSearchRun """ warnings.warn( "DuckDuckGoSearchTool will be deprecated in the future. " "Please use DuckDuckGoSearchRun instead.", DeprecationWarning, ) return DuckDuckGoSearchRun(*args, **kwargs)
https://api.python.langchain.com/en/stable/_modules/langchain/tools/ddg_search/tool.html
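A sketch, assuming the duckduckgo-search package is installed; no API key is required:

from langchain.tools.ddg_search.tool import DuckDuckGoSearchResults, DuckDuckGoSearchRun

search = DuckDuckGoSearchRun()
print(search.run("LangChain agents"))   # concatenated result snippets

results = DuckDuckGoSearchResults(num_results=3)
print(results.run("LangChain agents"))  # stringified list of result dicts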
840595601380-0
Source code for langchain.tools.openweathermap.tool """Tool for the OpenWeatherMap API.""" from typing import Optional from pydantic import Field from langchain.callbacks.manager import ( AsyncCallbackManagerForToolRun, CallbackManagerForToolRun, ) from langchain.tools.base import BaseTool from langchain.utilities import OpenWeatherMapAPIWrapper [docs]class OpenWeatherMapQueryRun(BaseTool): """Tool that adds the capability to query using the OpenWeatherMap API.""" api_wrapper: OpenWeatherMapAPIWrapper = Field( default_factory=OpenWeatherMapAPIWrapper ) name = "OpenWeatherMap" description = ( "A wrapper around OpenWeatherMap API. " "Useful for fetching current weather information for a specified location. " "Input should be a location string (e.g. London,GB)." ) def _run( self, location: str, run_manager: Optional[CallbackManagerForToolRun] = None ) -> str: """Use the OpenWeatherMap tool.""" return self.api_wrapper.run(location) async def _arun( self, location: str, run_manager: Optional[AsyncCallbackManagerForToolRun] = None, ) -> str: """Use the OpenWeatherMap tool asynchronously.""" raise NotImplementedError("OpenWeatherMapQueryRun does not support async")
https://api.python.langchain.com/en/stable/_modules/langchain/tools/openweathermap/tool.html
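A sketch, assuming the pyowm package is installed; the API key below is a placeholder read by the wrapper's default factory:

import os

from langchain.tools.openweathermap.tool import OpenWeatherMapQueryRun

os.environ["OPENWEATHERMAP_API_KEY"] = "<your-openweathermap-api-key>"  # placeholder
weather = OpenWeatherMapQueryRun()
print(weather.run("London,GB"))  # current temperature, humidity, wind, etc. as text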
1d9e40bc5407-0
Source code for langchain.tools.graphql.tool import json from typing import Optional from langchain.callbacks.manager import ( AsyncCallbackManagerForToolRun, CallbackManagerForToolRun, ) from langchain.tools.base import BaseTool from langchain.utilities.graphql import GraphQLAPIWrapper [docs]class BaseGraphQLTool(BaseTool): """Base tool for querying a GraphQL API.""" graphql_wrapper: GraphQLAPIWrapper name = "query_graphql" description = """\ Input to this tool is a detailed and correct GraphQL query, output is a result from the API. If the query is not correct, an error message will be returned. If an error is returned with 'Bad request' in it, rewrite the query and try again. If an error is returned with 'Unauthorized' in it, do not try again, but tell the user to change their authentication. Example Input: query {{ allUsers {{ id, name, email }} }}\ """ # noqa: E501 class Config: """Configuration for this pydantic object.""" arbitrary_types_allowed = True def _run( self, tool_input: str, run_manager: Optional[CallbackManagerForToolRun] = None, ) -> str: result = self.graphql_wrapper.run(tool_input) return json.dumps(result, indent=2) async def _arun( self, tool_input: str, run_manager: Optional[AsyncCallbackManagerForToolRun] = None, ) -> str: """Use the Graphql tool asynchronously.""" raise NotImplementedError("GraphQLAPIWrapper does not support async")
https://api.python.langchain.com/en/stable/_modules/langchain/tools/graphql/tool.html
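A sketch against a public GraphQL endpoint; the endpoint is illustrative, and the wrapper is assumed to need the gql client (`pip install gql requests_toolbelt`):

from langchain.tools.graphql.tool import BaseGraphQLTool
from langchain.utilities.graphql import GraphQLAPIWrapper

wrapper = GraphQLAPIWrapper(graphql_endpoint="https://countries.trevorblades.com/")
graphql_tool = BaseGraphQLTool(graphql_wrapper=wrapper)
print(graphql_tool.run("query { countries { code name } }"))  # JSON-formatted result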
f87a918fa056-0
Source code for langchain.tools.arxiv.tool """Tool for the Arxiv API.""" from typing import Optional from pydantic import Field from langchain.callbacks.manager import ( AsyncCallbackManagerForToolRun, CallbackManagerForToolRun, ) from langchain.tools.base import BaseTool from langchain.utilities.arxiv import ArxivAPIWrapper [docs]class ArxivQueryRun(BaseTool): """Tool that adds the capability to search using the Arxiv API.""" name = "arxiv" description = ( "A wrapper around Arxiv.org " "Useful for when you need to answer questions about Physics, Mathematics, " "Computer Science, Quantitative Biology, Quantitative Finance, Statistics, " "Electrical Engineering, and Economics " "from scientific articles on arxiv.org. " "Input should be a search query." ) api_wrapper: ArxivAPIWrapper = Field(default_factory=ArxivAPIWrapper) def _run( self, query: str, run_manager: Optional[CallbackManagerForToolRun] = None, ) -> str: """Use the Arxiv tool.""" return self.api_wrapper.run(query) async def _arun( self, query: str, run_manager: Optional[AsyncCallbackManagerForToolRun] = None, ) -> str: """Use the Arxiv tool asynchronously.""" raise NotImplementedError("ArxivAPIWrapper does not support async")
https://api.python.langchain.com/en/stable/_modules/langchain/tools/arxiv/tool.html
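A sketch, assuming the arxiv package is installed:

from langchain.tools.arxiv.tool import ArxivQueryRun

arxiv_tool = ArxivQueryRun()
# Returns publication dates, titles, authors, and abstracts of the top matches.
print(arxiv_tool.run("quantum error correction surface codes"))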
ce3a8c00ae22-0
Source code for langchain.tools.wikipedia.tool """Tool for the Wikipedia API.""" from typing import Optional from langchain.callbacks.manager import ( AsyncCallbackManagerForToolRun, CallbackManagerForToolRun, ) from langchain.tools.base import BaseTool from langchain.utilities.wikipedia import WikipediaAPIWrapper [docs]class WikipediaQueryRun(BaseTool): """Tool that adds the capability to search using the Wikipedia API.""" name = "Wikipedia" description = ( "A wrapper around Wikipedia. " "Useful for when you need to answer general questions about " "people, places, companies, facts, historical events, or other subjects. " "Input should be a search query." ) api_wrapper: WikipediaAPIWrapper def _run( self, query: str, run_manager: Optional[CallbackManagerForToolRun] = None, ) -> str: """Use the Wikipedia tool.""" return self.api_wrapper.run(query) async def _arun( self, query: str, run_manager: Optional[AsyncCallbackManagerForToolRun] = None, ) -> str: """Use the Wikipedia tool asynchronously.""" raise NotImplementedError("WikipediaQueryRun does not support async")
https://api.python.langchain.com/en/stable/_modules/langchain/tools/wikipedia/tool.html
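A sketch, assuming the wikipedia package is installed; unlike most of the other query tools, the wrapper has no default factory here and must be passed explicitly:

from langchain.tools.wikipedia.tool import WikipediaQueryRun
from langchain.utilities.wikipedia import WikipediaAPIWrapper

wiki_tool = WikipediaQueryRun(api_wrapper=WikipediaAPIWrapper())
print(wiki_tool.run("Ada Lovelace"))  # summaries of the best-matching pages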
ce256fa05b4e-0
Source code for langchain.tools.openapi.utils.api_models """Pydantic models for parsing an OpenAPI spec.""" import logging from enum import Enum from typing import Any, Dict, List, Optional, Sequence, Tuple, Type, Union from openapi_schema_pydantic import MediaType, Parameter, Reference, RequestBody, Schema from pydantic import BaseModel, Field from langchain.tools.openapi.utils.openapi_utils import HTTPVerb, OpenAPISpec logger = logging.getLogger(__name__) PRIMITIVE_TYPES = { "integer": int, "number": float, "string": str, "boolean": bool, "array": List, "object": Dict, "null": None, } # See https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.1.0.md#parameterIn # for more info. class APIPropertyLocation(Enum): """The location of the property.""" QUERY = "query" PATH = "path" HEADER = "header" COOKIE = "cookie" # Not yet supported @classmethod def from_str(cls, location: str) -> "APIPropertyLocation": """Parse an APIPropertyLocation.""" try: return cls(location) except ValueError: raise ValueError( f"Invalid APIPropertyLocation. Valid values are {cls.__members__}" ) _SUPPORTED_MEDIA_TYPES = ("application/json",) SUPPORTED_LOCATIONS = { APIPropertyLocation.QUERY, APIPropertyLocation.PATH, } INVALID_LOCATION_TEMPL = ( 'Unsupported APIPropertyLocation "{location}"' " for parameter {name}. " + f"Valid values are {[loc.value for loc in SUPPORTED_LOCATIONS]}" )
https://api.python.langchain.com/en/stable/_modules/langchain/tools/openapi/utils/api_models.html
ce256fa05b4e-1
) SCHEMA_TYPE = Union[str, Type, tuple, None, Enum] class APIPropertyBase(BaseModel): """Base model for an API property.""" # The name of the parameter is required and is case-sensitive. # If "in" is "path", the "name" field must correspond to a template expression # within the path field in the Paths Object. # If "in" is "header" and the "name" field is "Accept", "Content-Type", # or "Authorization", the parameter definition is ignored. # For all other cases, the "name" corresponds to the parameter # name used by the "in" property. name: str = Field(alias="name") """The name of the property.""" required: bool = Field(alias="required") """Whether the property is required.""" type: SCHEMA_TYPE = Field(alias="type") """The type of the property. Either a primitive type, a component/parameter type, or an array or 'object' (dict) of the above.""" default: Optional[Any] = Field(alias="default", default=None) """The default value of the property.""" description: Optional[str] = Field(alias="description", default=None) """The description of the property.""" class APIProperty(APIPropertyBase): """A model for a property in the query, path, header, or cookie params.""" location: APIPropertyLocation = Field(alias="location") """The path/how it's being passed to the endpoint.""" @staticmethod def _cast_schema_list_type(schema: Schema) -> Optional[Union[str, Tuple[str, ...]]]: type_ = schema.type if not isinstance(type_, list): return type_
https://api.python.langchain.com/en/stable/_modules/langchain/tools/openapi/utils/api_models.html
ce256fa05b4e-2
if not isinstance(type_, list): return type_ else: return tuple(type_) @staticmethod def _get_schema_type_for_enum(parameter: Parameter, schema: Schema) -> Enum: """Get the schema type when the parameter is an enum.""" param_name = f"{parameter.name}Enum" return Enum(param_name, {str(v): v for v in schema.enum}) @staticmethod def _get_schema_type_for_array( schema: Schema, ) -> Optional[Union[str, Tuple[str, ...]]]: items = schema.items if isinstance(items, Schema): schema_type = APIProperty._cast_schema_list_type(items) elif isinstance(items, Reference): ref_name = items.ref.split("/")[-1] schema_type = ref_name # TODO: Add ref definitions to make his valid else: raise ValueError(f"Unsupported array items: {items}") if isinstance(schema_type, str): # TODO: recurse schema_type = (schema_type,) return schema_type @staticmethod def _get_schema_type(parameter: Parameter, schema: Optional[Schema]) -> SCHEMA_TYPE: if schema is None: return None schema_type: SCHEMA_TYPE = APIProperty._cast_schema_list_type(schema) if schema_type == "array": schema_type = APIProperty._get_schema_type_for_array(schema) elif schema_type == "object": # TODO: Resolve array and object types to components. raise NotImplementedError("Objects not yet supported") elif schema_type in PRIMITIVE_TYPES: if schema.enum: schema_type = APIProperty._get_schema_type_for_enum(parameter, schema) else: # Directly use the primitive type pass
https://api.python.langchain.com/en/stable/_modules/langchain/tools/openapi/utils/api_models.html
ce256fa05b4e-3
else: # Directly use the primitive type pass else: raise NotImplementedError(f"Unsupported type: {schema_type}") return schema_type @staticmethod def _validate_location(location: APIPropertyLocation, name: str) -> None: if location not in SUPPORTED_LOCATIONS: raise NotImplementedError( INVALID_LOCATION_TEMPL.format(location=location, name=name) ) @staticmethod def _validate_content(content: Optional[Dict[str, MediaType]]) -> None: if content: raise ValueError( "API Properties with media content not supported. " "Media content only supported within APIRequestBodyProperty's" ) @staticmethod def _get_schema(parameter: Parameter, spec: OpenAPISpec) -> Optional[Schema]: schema = parameter.param_schema if isinstance(schema, Reference): schema = spec.get_referenced_schema(schema) elif schema is None: return None elif not isinstance(schema, Schema): raise ValueError(f"Error dereferencing schema: {schema}") return schema @staticmethod def is_supported_location(location: str) -> bool: """Return whether the provided location is supported.""" try: return APIPropertyLocation.from_str(location) in SUPPORTED_LOCATIONS except ValueError: return False @classmethod def from_parameter(cls, parameter: Parameter, spec: OpenAPISpec) -> "APIProperty": """Instantiate from an OpenAPI Parameter.""" location = APIPropertyLocation.from_str(parameter.param_in) cls._validate_location( location, parameter.name, ) cls._validate_content(parameter.content) schema = cls._get_schema(parameter, spec)
https://api.python.langchain.com/en/stable/_modules/langchain/tools/openapi/utils/api_models.html
ce256fa05b4e-4
schema = cls._get_schema(parameter, spec) schema_type = cls._get_schema_type(parameter, schema) default_val = schema.default if schema is not None else None return cls( name=parameter.name, location=location, default=default_val, description=parameter.description, required=parameter.required, type=schema_type, ) class APIRequestBodyProperty(APIPropertyBase): """A model for a request body property.""" properties: List["APIRequestBodyProperty"] = Field(alias="properties") """The sub-properties of the property.""" # This is useful for handling nested property cycles. # We can define separate types in that case. references_used: List[str] = Field(alias="references_used") """The references used by the property.""" @classmethod def _process_object_schema( cls, schema: Schema, spec: OpenAPISpec, references_used: List[str] ) -> Tuple[Union[str, List[str], None], List["APIRequestBodyProperty"]]: properties = [] required_props = schema.required or [] if schema.properties is None: raise ValueError( f"No properties found when processing object schema: {schema}" ) for prop_name, prop_schema in schema.properties.items(): if isinstance(prop_schema, Reference): ref_name = prop_schema.ref.split("/")[-1] if ref_name not in references_used: references_used.append(ref_name) prop_schema = spec.get_referenced_schema(prop_schema) else: continue properties.append( cls.from_schema( schema=prop_schema, name=prop_name, required=prop_name in required_props, spec=spec,
https://api.python.langchain.com/en/stable/_modules/langchain/tools/openapi/utils/api_models.html
ce256fa05b4e-5
required=prop_name in required_props, spec=spec, references_used=references_used, ) ) return schema.type, properties @classmethod def _process_array_schema( cls, schema: Schema, name: str, spec: OpenAPISpec, references_used: List[str] ) -> str: items = schema.items if items is not None: if isinstance(items, Reference): ref_name = items.ref.split("/")[-1] if ref_name not in references_used: references_used.append(ref_name) items = spec.get_referenced_schema(items) else: pass return f"Array<{ref_name}>" else: pass if isinstance(items, Schema): array_type = cls.from_schema( schema=items, name=f"{name}Item", required=True, # TODO: Add required spec=spec, references_used=references_used, ) return f"Array<{array_type.type}>" return "array" @classmethod def from_schema( cls, schema: Schema, name: str, required: bool, spec: OpenAPISpec, references_used: Optional[List[str]] = None, ) -> "APIRequestBodyProperty": """Recursively populate from an OpenAPI Schema.""" if references_used is None: references_used = [] schema_type = schema.type properties: List[APIRequestBodyProperty] = [] if schema_type == "object" and schema.properties: schema_type, properties = cls._process_object_schema( schema, spec, references_used ) elif schema_type == "array":
https://api.python.langchain.com/en/stable/_modules/langchain/tools/openapi/utils/api_models.html
ce256fa05b4e-6
schema, spec, references_used ) elif schema_type == "array": schema_type = cls._process_array_schema(schema, name, spec, references_used) elif schema_type in PRIMITIVE_TYPES: # Use the primitive type directly pass elif schema_type is None: # No typing specified/parsed. WIll map to 'any' pass else: raise ValueError(f"Unsupported type: {schema_type}") return cls( name=name, required=required, type=schema_type, default=schema.default, description=schema.description, properties=properties, references_used=references_used, ) class APIRequestBody(BaseModel): """A model for a request body.""" description: Optional[str] = Field(alias="description") """The description of the request body.""" properties: List[APIRequestBodyProperty] = Field(alias="properties") # E.g., application/json - we only support JSON at the moment. media_type: str = Field(alias="media_type") """The media type of the request body.""" @classmethod def _process_supported_media_type( cls, media_type_obj: MediaType, spec: OpenAPISpec, ) -> List[APIRequestBodyProperty]: """Process the media type of the request body.""" references_used = [] schema = media_type_obj.media_type_schema if isinstance(schema, Reference): references_used.append(schema.ref.split("/")[-1]) schema = spec.get_referenced_schema(schema) if schema is None: raise ValueError( f"Could not resolve schema for media type: {media_type_obj}" ) api_request_body_properties = []
https://api.python.langchain.com/en/stable/_modules/langchain/tools/openapi/utils/api_models.html
ce256fa05b4e-7
) api_request_body_properties = [] required_properties = schema.required or [] if schema.type == "object" and schema.properties: for prop_name, prop_schema in schema.properties.items(): if isinstance(prop_schema, Reference): prop_schema = spec.get_referenced_schema(prop_schema) api_request_body_properties.append( APIRequestBodyProperty.from_schema( schema=prop_schema, name=prop_name, required=prop_name in required_properties, spec=spec, ) ) else: api_request_body_properties.append( APIRequestBodyProperty( name="body", required=True, type=schema.type, default=schema.default, description=schema.description, properties=[], references_used=references_used, ) ) return api_request_body_properties @classmethod def from_request_body( cls, request_body: RequestBody, spec: OpenAPISpec ) -> "APIRequestBody": """Instantiate from an OpenAPI RequestBody.""" properties = [] for media_type, media_type_obj in request_body.content.items(): if media_type not in _SUPPORTED_MEDIA_TYPES: continue api_request_body_properties = cls._process_supported_media_type( media_type_obj, spec, ) properties.extend(api_request_body_properties) return cls( description=request_body.description, properties=properties, media_type=media_type, ) [docs]class APIOperation(BaseModel): """A model for a single API operation.""" operation_id: str = Field(alias="operation_id") """The unique identifier of the operation.""" description: Optional[str] = Field(alias="description")
https://api.python.langchain.com/en/stable/_modules/langchain/tools/openapi/utils/api_models.html
ce256fa05b4e-8
description: Optional[str] = Field(alias="description") """The description of the operation.""" base_url: str = Field(alias="base_url") """The base URL of the operation.""" path: str = Field(alias="path") """The path of the operation.""" method: HTTPVerb = Field(alias="method") """The HTTP method of the operation.""" properties: Sequence[APIProperty] = Field(alias="properties") # TODO: Add parse in used components to be able to specify what type of # referenced object it is. # """The properties of the operation.""" # components: Dict[str, BaseModel] = Field(alias="components") request_body: Optional[APIRequestBody] = Field(alias="request_body") """The request body of the operation.""" @staticmethod def _get_properties_from_parameters( parameters: List[Parameter], spec: OpenAPISpec ) -> List[APIProperty]: """Get the properties of the operation.""" properties = [] for param in parameters: if APIProperty.is_supported_location(param.param_in): properties.append(APIProperty.from_parameter(param, spec)) elif param.required: raise ValueError( INVALID_LOCATION_TEMPL.format( location=param.param_in, name=param.name ) ) else: logger.warning( INVALID_LOCATION_TEMPL.format( location=param.param_in, name=param.name ) + " Ignoring optional parameter" ) pass return properties [docs] @classmethod def from_openapi_url( cls, spec_url: str, path: str, method: str, ) -> "APIOperation":
https://api.python.langchain.com/en/stable/_modules/langchain/tools/openapi/utils/api_models.html
ce256fa05b4e-9
path: str, method: str, ) -> "APIOperation": """Create an APIOperation from an OpenAPI URL.""" spec = OpenAPISpec.from_url(spec_url) return cls.from_openapi_spec(spec, path, method) [docs] @classmethod def from_openapi_spec( cls, spec: OpenAPISpec, path: str, method: str, ) -> "APIOperation": """Create an APIOperation from an OpenAPI spec.""" operation = spec.get_operation(path, method) parameters = spec.get_parameters_for_operation(operation) properties = cls._get_properties_from_parameters(parameters, spec) operation_id = OpenAPISpec.get_cleaned_operation_id(operation, path, method) request_body = spec.get_request_body_for_operation(operation) api_request_body = ( APIRequestBody.from_request_body(request_body, spec) if request_body is not None else None ) description = operation.description or operation.summary if not description and spec.paths is not None: description = spec.paths[path].description or spec.paths[path].summary return cls( operation_id=operation_id, description=description, base_url=spec.base_url, path=path, method=method, properties=properties, request_body=api_request_body, ) [docs] @staticmethod def ts_type_from_python(type_: SCHEMA_TYPE) -> str: if type_ is None: # TODO: Handle Nones better. These often result when # parsing specs that are < v3 return "any" elif isinstance(type_, str): return { "str": "string",
https://api.python.langchain.com/en/stable/_modules/langchain/tools/openapi/utils/api_models.html
ce256fa05b4e-10
elif isinstance(type_, str): return { "str": "string", "integer": "number", "float": "number", "date-time": "string", }.get(type_, type_) elif isinstance(type_, tuple): return f"Array<{APIOperation.ts_type_from_python(type_[0])}>" elif isinstance(type_, type) and issubclass(type_, Enum): return " | ".join([f"'{e.value}'" for e in type_]) else: return str(type_) def _format_nested_properties( self, properties: List[APIRequestBodyProperty], indent: int = 2 ) -> str: """Format nested properties.""" formatted_props = [] for prop in properties: prop_name = prop.name prop_type = self.ts_type_from_python(prop.type) prop_required = "" if prop.required else "?" prop_desc = f"/* {prop.description} */" if prop.description else "" if prop.properties: nested_props = self._format_nested_properties( prop.properties, indent + 2 ) prop_type = f"{{\n{nested_props}\n{' ' * indent}}}" formatted_props.append( f"{prop_desc}\n{' ' * indent}{prop_name}{prop_required}: {prop_type}," ) return "\n".join(formatted_props) [docs] def to_typescript(self) -> str: """Get typescript string representation of the operation.""" operation_name = self.operation_id params = [] if self.request_body: formatted_request_body_props = self._format_nested_properties( self.request_body.properties ) params.append(formatted_request_body_props)
https://api.python.langchain.com/en/stable/_modules/langchain/tools/openapi/utils/api_models.html
ce256fa05b4e-11
self.request_body.properties ) params.append(formatted_request_body_props) for prop in self.properties: prop_name = prop.name prop_type = self.ts_type_from_python(prop.type) prop_required = "" if prop.required else "?" prop_desc = f"/* {prop.description} */" if prop.description else "" params.append(f"{prop_desc}\n\t\t{prop_name}{prop_required}: {prop_type},") formatted_params = "\n".join(params).strip() description_str = f"/* {self.description} */" if self.description else "" typescript_definition = f""" {description_str} type {operation_name} = (_: {{ {formatted_params} }}) => any; """ return typescript_definition.strip() @property def query_params(self) -> List[str]: return [ property.name for property in self.properties if property.location == APIPropertyLocation.QUERY ] @property def path_params(self) -> List[str]: return [ property.name for property in self.properties if property.location == APIPropertyLocation.PATH ] @property def body_params(self) -> List[str]: if self.request_body is None: return [] return [prop.name for prop in self.request_body.properties]
https://api.python.langchain.com/en/stable/_modules/langchain/tools/openapi/utils/api_models.html
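A sketch of how these models turn a single OpenAPI operation into the compact TypeScript-style signature used for prompting. The spec URL, path, and method are illustrative; any OpenAPI 3.x spec whose parameters fall within the supported locations and media types should work:

from langchain.tools.openapi.utils.api_models import APIOperation

operation = APIOperation.from_openapi_url(
    "https://petstore3.swagger.io/api/v3/openapi.json",  # illustrative spec URL
    "/pet/{petId}",
    "get",
)
print(operation.operation_id)     # cleaned operation id, e.g. "getPetById"
print(operation.path_params)      # ["petId"]
print(operation.to_typescript())  # type getPetById = (_: { ... }) => any;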
6a5d00eb4830-0
Source code for langchain.tools.pubmed.tool """Tool for the Pubmed API.""" from typing import Optional from pydantic import Field from langchain.callbacks.manager import ( AsyncCallbackManagerForToolRun, CallbackManagerForToolRun, ) from langchain.tools.base import BaseTool from langchain.utilities.pupmed import PubMedAPIWrapper [docs]class PubmedQueryRun(BaseTool): """Tool that adds the capability to search using the PubMed API.""" name = "PubMed" description = ( "A wrapper around PubMed.org " "Useful for when you need to answer questions about medicine, health, " "and biomedical topics " "from biomedical literature on PubMed.org. " "Input should be a search query." ) api_wrapper: PubMedAPIWrapper = Field(default_factory=PubMedAPIWrapper) def _run( self, query: str, run_manager: Optional[CallbackManagerForToolRun] = None, ) -> str: """Use the PubMed tool.""" return self.api_wrapper.run(query) async def _arun( self, query: str, run_manager: Optional[AsyncCallbackManagerForToolRun] = None, ) -> str: """Use the PubMed tool asynchronously.""" raise NotImplementedError("PubMedAPIWrapper does not support async")
https://api.python.langchain.com/en/stable/_modules/langchain/tools/pubmed/tool.html
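A sketch; the wrapper calls the public Entrez endpoints, so no API key is needed for light use:

from langchain.tools.pubmed.tool import PubmedQueryRun

pubmed_tool = PubmedQueryRun()
print(pubmed_tool.run("CRISPR gene editing clinical trials"))  # title and abstract summaries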
bd03486c720e-0
Source code for langchain.tools.metaphor_search.tool """Tool for the Metaphor search API.""" from typing import Dict, List, Optional, Union from langchain.callbacks.manager import ( AsyncCallbackManagerForToolRun, CallbackManagerForToolRun, ) from langchain.tools.base import BaseTool from langchain.utilities.metaphor_search import MetaphorSearchAPIWrapper [docs]class MetaphorSearchResults(BaseTool): """Tool that has capability to query the Metaphor Search API and get back json.""" name = "metaphor_search_results_json" description = ( "A wrapper around Metaphor Search. " "Input should be a Metaphor-optimized query. " "Output is a JSON array of the query results" ) api_wrapper: MetaphorSearchAPIWrapper def _run( self, query: str, num_results: int, include_domains: Optional[List[str]] = None, exclude_domains: Optional[List[str]] = None, start_crawl_date: Optional[str] = None, end_crawl_date: Optional[str] = None, start_published_date: Optional[str] = None, end_published_date: Optional[str] = None, run_manager: Optional[CallbackManagerForToolRun] = None, ) -> Union[List[Dict], str]: """Use the tool.""" try: return self.api_wrapper.results( query, num_results, include_domains, exclude_domains, start_crawl_date, end_crawl_date, start_published_date, end_published_date, ) except Exception as e: return repr(e) async def _arun(
https://api.python.langchain.com/en/stable/_modules/langchain/tools/metaphor_search/tool.html
bd03486c720e-1
return repr(e) async def _arun( self, query: str, num_results: int, include_domains: Optional[List[str]] = None, exclude_domains: Optional[List[str]] = None, start_crawl_date: Optional[str] = None, end_crawl_date: Optional[str] = None, start_published_date: Optional[str] = None, end_published_date: Optional[str] = None, run_manager: Optional[AsyncCallbackManagerForToolRun] = None, ) -> Union[List[Dict], str]: """Use the tool asynchronously.""" try: return await self.api_wrapper.results_async( query, num_results, include_domains, exclude_domains, start_crawl_date, end_crawl_date, start_published_date, end_published_date, ) except Exception as e: return repr(e)
https://api.python.langchain.com/en/stable/_modules/langchain/tools/metaphor_search/tool.html
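A sketch assuming a Metaphor API key (placeholder below). Because the tool defines no args_schema and num_results has no default, it is called with a dict input:

import os

from langchain.tools.metaphor_search.tool import MetaphorSearchResults
from langchain.utilities.metaphor_search import MetaphorSearchAPIWrapper

os.environ["METAPHOR_API_KEY"] = "<your-metaphor-api-key>"  # placeholder
metaphor_tool = MetaphorSearchResults(api_wrapper=MetaphorSearchAPIWrapper())
print(metaphor_tool.run({"query": "recent advances in retrieval augmented generation", "num_results": 5}))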
d3119af48072-0
Source code for langchain.tools.interaction.tool """Tools for interacting with the user.""" import warnings from typing import Any from langchain.tools.human.tool import HumanInputRun [docs]def StdInInquireTool(*args: Any, **kwargs: Any) -> HumanInputRun: """Tool for asking the user for input.""" warnings.warn( "StdInInquireTool will be deprecated in the future. " "Please use HumanInputRun instead.", DeprecationWarning, ) return HumanInputRun(*args, **kwargs)
https://api.python.langchain.com/en/stable/_modules/langchain/tools/interaction/tool.html
847e91f24f34-0
Source code for langchain.tools.python.tool """A tool for running python code in a REPL.""" import ast import re import sys from contextlib import redirect_stdout from io import StringIO from typing import Any, Dict, Optional from pydantic import Field, root_validator from langchain.callbacks.manager import ( AsyncCallbackManagerForToolRun, CallbackManagerForToolRun, ) from langchain.tools.base import BaseTool from langchain.utilities import PythonREPL def _get_default_python_repl() -> PythonREPL: return PythonREPL(_globals=globals(), _locals=None) def sanitize_input(query: str) -> str: """Sanitize input to the python REPL. Remove whitespace, backtick & python (if llm mistakes python console as terminal) Args: query: The query to sanitize Returns: str: The sanitized query """ # Removes `, whitespace & python from start query = re.sub(r"^(\s|`)*(?i:python)?\s*", "", query) # Removes whitespace & ` from end query = re.sub(r"(\s|`)*$", "", query) return query [docs]class PythonREPLTool(BaseTool): """A tool for running python code in a REPL.""" name = "Python_REPL" description = ( "A Python shell. Use this to execute python commands. " "Input should be a valid python command. " "If you want to see the output of a value, you should print it out " "with `print(...)`." ) python_repl: PythonREPL = Field(default_factory=_get_default_python_repl) sanitize_input: bool = True def _run( self,
https://api.python.langchain.com/en/stable/_modules/langchain/tools/python/tool.html
847e91f24f34-1
    sanitize_input: bool = True
    def _run(
        self,
        query: str,
        run_manager: Optional[CallbackManagerForToolRun] = None,
    ) -> Any:
        """Use the tool."""
        if self.sanitize_input:
            query = sanitize_input(query)
        return self.python_repl.run(query)
    async def _arun(
        self,
        query: str,
        run_manager: Optional[AsyncCallbackManagerForToolRun] = None,
    ) -> Any:
        """Use the tool asynchronously."""
        raise NotImplementedError("PythonReplTool does not support async")
[docs]class PythonAstREPLTool(BaseTool):
    """A tool for running python code in a REPL."""
    name = "python_repl_ast"
    description = (
        "A Python shell. Use this to execute python commands. "
        "Input should be a valid python command. "
        "When using this tool, sometimes output is abbreviated - "
        "make sure it does not look abbreviated before using it in your answer."
    )
    globals: Optional[Dict] = Field(default_factory=dict)
    locals: Optional[Dict] = Field(default_factory=dict)
    sanitize_input: bool = True
    @root_validator(pre=True)
    def validate_python_version(cls, values: Dict) -> Dict:
        """Validate valid python version."""
        if sys.version_info < (3, 9):
            raise ValueError(
                "This tool relies on Python 3.9 or higher "
                "(as it uses new functionality in the `ast` module, "
                f"you have Python version: {sys.version}"
            )
        return values
    def _run(
        self,
        query: str,
https://api.python.langchain.com/en/stable/_modules/langchain/tools/python/tool.html
847e91f24f34-2
        return values
    def _run(
        self,
        query: str,
        run_manager: Optional[CallbackManagerForToolRun] = None,
    ) -> str:
        """Use the tool."""
        try:
            if self.sanitize_input:
                query = sanitize_input(query)
            tree = ast.parse(query)
            module = ast.Module(tree.body[:-1], type_ignores=[])
            exec(ast.unparse(module), self.globals, self.locals)  # type: ignore
            module_end = ast.Module(tree.body[-1:], type_ignores=[])
            module_end_str = ast.unparse(module_end)  # type: ignore
            io_buffer = StringIO()
            try:
                with redirect_stdout(io_buffer):
                    ret = eval(module_end_str, self.globals, self.locals)
                    if ret is None:
                        return io_buffer.getvalue()
                    else:
                        return ret
            except Exception:
                with redirect_stdout(io_buffer):
                    exec(module_end_str, self.globals, self.locals)
                return io_buffer.getvalue()
        except Exception as e:
            return "{}: {}".format(type(e).__name__, str(e))
    async def _arun(
        self,
        query: str,
        run_manager: Optional[AsyncCallbackManagerForToolRun] = None,
    ) -> str:
        """Use the tool asynchronously."""
        raise NotImplementedError("PythonReplTool does not support async")
https://api.python.langchain.com/en/stable/_modules/langchain/tools/python/tool.html
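To make the sanitization and last-expression semantics concrete, a hedged sketch follows; the query strings and the expected value in the comments are illustrative assumptions, not library documentation.
# Hedged sketch of PythonAstREPLTool behavior; inputs and expected outputs are assumptions.
from langchain.tools.python.tool import PythonAstREPLTool, sanitize_input

# Leading backticks plus a "python" tag and trailing backticks are stripped before parsing.
print(sanitize_input("```python\nx = 2 ** 10\nx\n```"))
tool = PythonAstREPLTool()
# Every statement except the last runs through exec(); the final expression is eval'd
# and its value (or captured stdout, when the value is None) is returned.
print(tool.run("x = 2 ** 10\nx"))  # expected: 1024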
33fbb3a947aa-0
Source code for langchain.tools.searx_search.tool
"""Tool for the SearxNG search API."""
from typing import Optional
from pydantic import Extra
from langchain.callbacks.manager import (
    AsyncCallbackManagerForToolRun,
    CallbackManagerForToolRun,
)
from langchain.tools.base import BaseTool, Field
from langchain.utilities.searx_search import SearxSearchWrapper
[docs]class SearxSearchRun(BaseTool):
    """Tool that adds the capability to query a Searx instance."""
    name = "searx_search"
    description = (
        "A meta search engine."
        "Useful for when you need to answer questions about current events."
        "Input should be a search query."
    )
    wrapper: SearxSearchWrapper
    kwargs: dict = Field(default_factory=dict)
    def _run(
        self,
        query: str,
        run_manager: Optional[CallbackManagerForToolRun] = None,
    ) -> str:
        """Use the tool."""
        return self.wrapper.run(query, **self.kwargs)
    async def _arun(
        self,
        query: str,
        run_manager: Optional[AsyncCallbackManagerForToolRun] = None,
    ) -> str:
        """Use the tool asynchronously."""
        return await self.wrapper.arun(query, **self.kwargs)
[docs]class SearxSearchResults(BaseTool):
    """Tool that has the capability to query a Searx instance and get back json."""
    name = "Searx Search Results"
    description = (
        "A meta search engine."
        "Useful for when you need to answer questions about current events."
https://api.python.langchain.com/en/stable/_modules/langchain/tools/searx_search/tool.html
33fbb3a947aa-1
        "Useful for when you need to answer questions about current events."
        "Input should be a search query. Output is a JSON array of the query results"
    )
    wrapper: SearxSearchWrapper
    num_results: int = 4
    kwargs: dict = Field(default_factory=dict)
    class Config:
        """Pydantic config."""
        extra = Extra.allow
    def _run(
        self,
        query: str,
        run_manager: Optional[CallbackManagerForToolRun] = None,
    ) -> str:
        """Use the tool."""
        return str(self.wrapper.results(query, self.num_results, **self.kwargs))
    async def _arun(
        self,
        query: str,
        run_manager: Optional[AsyncCallbackManagerForToolRun] = None,
    ) -> str:
        """Use the tool asynchronously."""
        return (
            await self.wrapper.aresults(query, self.num_results, **self.kwargs)
        ).__str__()
https://api.python.langchain.com/en/stable/_modules/langchain/tools/searx_search/tool.html
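A hedged wiring example for the two Searx tools; the SearxNG instance URL and the queries are placeholders.
# Hedged wiring example; the host below is a placeholder, not a real endpoint.
from langchain.tools.searx_search.tool import SearxSearchResults, SearxSearchRun
from langchain.utilities.searx_search import SearxSearchWrapper

wrapper = SearxSearchWrapper(searx_host="https://searx.example.org")
run_tool = SearxSearchRun(wrapper=wrapper)
results_tool = SearxSearchResults(wrapper=wrapper, num_results=3)
print(run_tool.run("latest python release"))      # summarized answer text
print(results_tool.run("latest python release"))  # stringified list of result dicts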
db7e943ff331-0
Source code for langchain.tools.scenexplain.tool
"""Tool for the SceneXplain API."""
from typing import Optional
from pydantic import BaseModel, Field
from langchain.callbacks.manager import (
    AsyncCallbackManagerForToolRun,
    CallbackManagerForToolRun,
)
from langchain.tools.base import BaseTool
from langchain.utilities.scenexplain import SceneXplainAPIWrapper
class SceneXplainInput(BaseModel):
    """Input for SceneXplain."""
    query: str = Field(..., description="The link to the image to explain")
[docs]class SceneXplainTool(BaseTool):
    """Tool that adds the capability to explain images."""
    name = "image_explainer"
    description = (
        "An Image Captioning Tool: Use this tool to generate a detailed caption "
        "for an image. The input can be an image file of any format, and "
        "the output will be a text description that covers every detail of the image."
    )
    api_wrapper: SceneXplainAPIWrapper = Field(default_factory=SceneXplainAPIWrapper)
    def _run(
        self, query: str, run_manager: Optional[CallbackManagerForToolRun] = None
    ) -> str:
        """Use the tool."""
        return self.api_wrapper.run(query)
    async def _arun(
        self, query: str, run_manager: Optional[AsyncCallbackManagerForToolRun] = None
    ) -> str:
        """Use the tool asynchronously."""
        raise NotImplementedError("SceneXplainTool does not support async")
https://api.python.langchain.com/en/stable/_modules/langchain/tools/scenexplain/tool.html
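An illustrative sketch, assuming the default wrapper can locate its SceneXplain API key from the environment; the image URL is a placeholder.
# Illustrative sketch; API-key discovery from the environment is an assumption,
# and the image URL is a placeholder.
from langchain.tools.scenexplain.tool import SceneXplainTool

tool = SceneXplainTool()
caption = tool.run("https://example.com/images/harbor-at-sunset.jpg")
print(caption)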
49328d06313d-0
Source code for langchain.tools.bing_search.tool
"""Tool for the Bing search API."""
from typing import Optional
from langchain.callbacks.manager import (
    AsyncCallbackManagerForToolRun,
    CallbackManagerForToolRun,
)
from langchain.tools.base import BaseTool
from langchain.utilities.bing_search import BingSearchAPIWrapper
[docs]class BingSearchRun(BaseTool):
    """Tool that adds the capability to query the Bing search API."""
    name = "bing_search"
    description = (
        "A wrapper around Bing Search. "
        "Useful for when you need to answer questions about current events. "
        "Input should be a search query."
    )
    api_wrapper: BingSearchAPIWrapper
    def _run(
        self,
        query: str,
        run_manager: Optional[CallbackManagerForToolRun] = None,
    ) -> str:
        """Use the tool."""
        return self.api_wrapper.run(query)
    async def _arun(
        self,
        query: str,
        run_manager: Optional[AsyncCallbackManagerForToolRun] = None,
    ) -> str:
        """Use the tool asynchronously."""
        raise NotImplementedError("BingSearchRun does not support async")
[docs]class BingSearchResults(BaseTool):
    """Tool that has capability to query the Bing Search API and get back json."""
    name = "Bing Search Results JSON"
    description = (
        "A wrapper around Bing Search. "
        "Useful for when you need to answer questions about current events. "
        "Input should be a search query. Output is a JSON array of the query results"
    )
    num_results: int = 4
    api_wrapper: BingSearchAPIWrapper
    def _run(
        self,
https://api.python.langchain.com/en/stable/_modules/langchain/tools/bing_search/tool.html
49328d06313d-1
    api_wrapper: BingSearchAPIWrapper
    def _run(
        self,
        query: str,
        run_manager: Optional[CallbackManagerForToolRun] = None,
    ) -> str:
        """Use the tool."""
        return str(self.api_wrapper.results(query, self.num_results))
    async def _arun(
        self,
        query: str,
        run_manager: Optional[AsyncCallbackManagerForToolRun] = None,
    ) -> str:
        """Use the tool asynchronously."""
        raise NotImplementedError("BingSearchResults does not support async")
https://api.python.langchain.com/en/stable/_modules/langchain/tools/bing_search/tool.html
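A hedged usage sketch for the two Bing tools; the subscription key and endpoint values are placeholders.
# Hedged sketch; the subscription key and search endpoint are placeholders.
from langchain.tools.bing_search.tool import BingSearchResults, BingSearchRun
from langchain.utilities.bing_search import BingSearchAPIWrapper

api_wrapper = BingSearchAPIWrapper(
    bing_subscription_key="<your-subscription-key>",
    bing_search_url="https://api.bing.microsoft.com/v7.0/search",
)
print(BingSearchRun(api_wrapper=api_wrapper).run("langchain"))  # snippets as text
print(BingSearchResults(api_wrapper=api_wrapper, num_results=2).run("langchain"))  # stringified result list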
d92c01dedab2-0
Source code for langchain.tools.sql_database.tool
# flake8: noqa
"""Tools for interacting with a SQL database."""
from typing import Any, Dict, Optional
from pydantic import BaseModel, Extra, Field, root_validator
from langchain.base_language import BaseLanguageModel
from langchain.callbacks.manager import (
    AsyncCallbackManagerForToolRun,
    CallbackManagerForToolRun,
)
from langchain.chains.llm import LLMChain
from langchain.prompts import PromptTemplate
from langchain.sql_database import SQLDatabase
from langchain.tools.base import BaseTool
from langchain.tools.sql_database.prompt import QUERY_CHECKER
[docs]class BaseSQLDatabaseTool(BaseModel):
    """Base tool for interacting with a SQL database."""
    db: SQLDatabase = Field(exclude=True)
    # Override BaseTool.Config to appease mypy
    # See https://github.com/pydantic/pydantic/issues/4173
    class Config(BaseTool.Config):
        """Configuration for this pydantic object."""
        arbitrary_types_allowed = True
        extra = Extra.forbid
[docs]class QuerySQLDataBaseTool(BaseSQLDatabaseTool, BaseTool):
    """Tool for querying a SQL database."""
    name = "sql_db_query"
    description = """
    Input to this tool is a detailed and correct SQL query, output is a result from the database.
    If the query is not correct, an error message will be returned.
    If an error is returned, rewrite the query, check the query, and try again.
    """
    def _run(
        self,
        query: str,
        run_manager: Optional[CallbackManagerForToolRun] = None,
    ) -> str:
        """Execute the query, return the results or an error message."""
        return self.db.run_no_throw(query)
https://api.python.langchain.com/en/stable/_modules/langchain/tools/sql_database/tool.html
d92c01dedab2-1
        return self.db.run_no_throw(query)
    async def _arun(
        self,
        query: str,
        run_manager: Optional[AsyncCallbackManagerForToolRun] = None,
    ) -> str:
        raise NotImplementedError("QuerySqlDbTool does not support async")
[docs]class InfoSQLDatabaseTool(BaseSQLDatabaseTool, BaseTool):
    """Tool for getting metadata about a SQL database."""
    name = "sql_db_schema"
    description = """
    Input to this tool is a comma-separated list of tables, output is the schema and sample rows for those tables.
    Example Input: "table1, table2, table3"
    """
    def _run(
        self,
        table_names: str,
        run_manager: Optional[CallbackManagerForToolRun] = None,
    ) -> str:
        """Get the schema for tables in a comma-separated list."""
        return self.db.get_table_info_no_throw(table_names.split(", "))
    async def _arun(
        self,
        table_name: str,
        run_manager: Optional[AsyncCallbackManagerForToolRun] = None,
    ) -> str:
        raise NotImplementedError("SchemaSqlDbTool does not support async")
[docs]class ListSQLDatabaseTool(BaseSQLDatabaseTool, BaseTool):
    """Tool for getting tables names."""
    name = "sql_db_list_tables"
    description = "Input is an empty string, output is a comma separated list of tables in the database."
    def _run(
        self,
        tool_input: str = "",
        run_manager: Optional[CallbackManagerForToolRun] = None,
    ) -> str:
        """Get the schema for a specific table."""
        return ", ".join(self.db.get_usable_table_names())
https://api.python.langchain.com/en/stable/_modules/langchain/tools/sql_database/tool.html
d92c01dedab2-2
        return ", ".join(self.db.get_usable_table_names())
    async def _arun(
        self,
        tool_input: str = "",
        run_manager: Optional[AsyncCallbackManagerForToolRun] = None,
    ) -> str:
        raise NotImplementedError("ListTablesSqlDbTool does not support async")
[docs]class QuerySQLCheckerTool(BaseSQLDatabaseTool, BaseTool):
    """Use an LLM to check if a query is correct.
    Adapted from https://www.patterns.app/blog/2023/01/18/crunchbot-sql-analyst-gpt/"""
    template: str = QUERY_CHECKER
    llm: BaseLanguageModel
    llm_chain: LLMChain = Field(init=False)
    name = "sql_db_query_checker"
    description = """
    Use this tool to double check if your query is correct before executing it.
    Always use this tool before executing a query with query_sql_db!
    """
    @root_validator(pre=True)
    def initialize_llm_chain(cls, values: Dict[str, Any]) -> Dict[str, Any]:
        if "llm_chain" not in values:
            values["llm_chain"] = LLMChain(
                llm=values.get("llm"),
                prompt=PromptTemplate(
                    template=QUERY_CHECKER, input_variables=["query", "dialect"]
                ),
            )
        if values["llm_chain"].prompt.input_variables != ["query", "dialect"]:
            raise ValueError(
                "LLM chain for QueryCheckerTool must have input variables ['query', 'dialect']"
            )
        return values
    def _run(
        self,
        query: str,
        run_manager: Optional[CallbackManagerForToolRun] = None,
https://api.python.langchain.com/en/stable/_modules/langchain/tools/sql_database/tool.html
d92c01dedab2-3
        run_manager: Optional[CallbackManagerForToolRun] = None,
    ) -> str:
        """Use the LLM to check the query."""
        return self.llm_chain.predict(query=query, dialect=self.db.dialect)
    async def _arun(
        self,
        query: str,
        run_manager: Optional[AsyncCallbackManagerForToolRun] = None,
    ) -> str:
        return await self.llm_chain.apredict(query=query, dialect=self.db.dialect)
https://api.python.langchain.com/en/stable/_modules/langchain/tools/sql_database/tool.html
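A hedged end-to-end sketch showing how these four tools might be wired against a local database; the SQLite URI, the "users" table, and the choice of ChatOpenAI as the LLM are assumptions.
# Hedged sketch; database URI, table name, and LLM choice are assumptions.
from langchain.chat_models import ChatOpenAI
from langchain.sql_database import SQLDatabase
from langchain.tools.sql_database.tool import (
    InfoSQLDatabaseTool,
    ListSQLDatabaseTool,
    QuerySQLCheckerTool,
    QuerySQLDataBaseTool,
)

db = SQLDatabase.from_uri("sqlite:///example.db")
print(ListSQLDatabaseTool(db=db).run(""))                         # comma-separated table names
print(InfoSQLDatabaseTool(db=db).run("users"))                    # schema + sample rows, if the table exists
print(QuerySQLDataBaseTool(db=db).run("SELECT COUNT(*) FROM users"))
checker = QuerySQLCheckerTool(db=db, llm=ChatOpenAI(temperature=0))
print(checker.run("SELECT COUNT(*) FROM users"))                  # LLM-reviewed version of the query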