from typing import Any, Dict, List, Optional, Union
from aiohttp import ClientSession
from langchain_core.callbacks import (
AsyncCallbackManagerForLLMRun,
CallbackManagerForLLMRun,
)
from langchain_core.language_models.chat_models import (
BaseChatModel,
)
from langchain_core.messages import (
AIMessage,
BaseMessage,
)
from langchain_core.outputs import ChatGeneration, ChatResult
from langchain_core.pydantic_v1 import Extra, Field, SecretStr, root_validator
from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env
from langchain_community.utilities.requests import Requests
def _format_dappier_messages(
messages: List[BaseMessage],
) -> List[Dict[str, Union[str, List[Union[str, Dict[Any, Any]]]]]]:
formatted_messages = []
for message in messages:
if message.type == "human":
formatted_messages.append({"role": "user", "content": message.content})
elif message.type == "system":
formatted_messages.append({"role": "system", "content": message.content})
return formatted_messages
class ChatDappierAI(BaseChatModel):
    """`Dappier` chat large language models.

    `Dappier` is a platform providing access to a wide range of real-time
    data models. Enhance your AI applications with Dappier's pre-trained,
    LLM-ready data models and ensure accurate, up-to-date responses with
    reduced inaccuracies.

    To use one of the Dappier AI data models you will need an API key.
    Visit the Dappier platform (https://platform.dappier.com/) to sign in
    and create an API key in your profile.

    Example:
        .. code-block:: python

            from langchain_community.chat_models import ChatDappierAI
            from langchain_core.messages import HumanMessage

            # Initialize `ChatDappierAI` with the desired configuration
            chat = ChatDappierAI(
                dappier_endpoint="https://api.dappier.com/app/datamodel/dm_01hpsxyfm2fwdt2zet9cg6fdxt",
                dappier_api_key="<YOUR_KEY>")

            # Create a list of messages to interact with the model
            messages = [HumanMessage(content="hello")]

            # Invoke the model with the provided messages
            chat.invoke(messages)

    You can find more details here: https://docs.dappier.com/introduction
    """

    # URL of the Dappier conversation endpoint.
    dappier_endpoint: str = "https://api.dappier.com/app/datamodelconversation"

    # Identifier of the Dappier data model to query.
    dappier_model: str = "dm_01hpsxyfm2fwdt2zet9cg6fdxt"

    dappier_api_key: Optional[SecretStr] = Field(None, description="Dappier API Token")

    class Config:
        """Configuration for this pydantic object."""

        extra = Extra.forbid

    @root_validator()
    def validate_environment(cls, values: Dict) -> Dict:
        """Validate that an API key is provided or present in the environment."""
        values["dappier_api_key"] = convert_to_secret_str(
            get_from_dict_or_env(values, "dappier_api_key", "DAPPIER_API_KEY")
        )
        return values

    @staticmethod
    def get_user_agent() -> str:
        """Return the User-Agent header value identifying this client."""
        # Imported lazily to avoid a circular import at module load time.
        from langchain_community import __version__

        return f"langchain/{__version__}"

    @property
    def _llm_type(self) -> str:
        """Return type of chat model."""
        return "dappier-realtimesearch-chat"

    @property
    def _api_key(self) -> str:
        """Return the plain-text API key, or an empty string if unset."""
        if self.dappier_api_key:
            return self.dappier_api_key.get_secret_value()
        return ""

    def _generate(
        self,
        messages: List[BaseMessage],
        stop: Optional[List[str]] = None,
        run_manager: Optional[CallbackManagerForLLMRun] = None,
        **kwargs: Any,
    ) -> ChatResult:
        """Call the Dappier API synchronously and wrap the reply in a ChatResult.

        Raises:
            requests.HTTPError: If the API responds with an error status.
        """
        url = self.dappier_endpoint
        headers = {
            "Authorization": f"Bearer {self._api_key}",
            "User-Agent": self.get_user_agent(),
        }

        user_query = _format_dappier_messages(messages=messages)
        payload: Dict[str, Any] = {
            "model": self.dappier_model,
            "conversation": user_query,
        }

        request = Requests(headers=headers)
        response = request.post(url=url, data=payload)
        response.raise_for_status()

        data = response.json()
        message_response = data["message"]

        return ChatResult(
            generations=[ChatGeneration(message=AIMessage(content=message_response))]
        )

    async def _agenerate(
        self,
        messages: List[BaseMessage],
        stop: Optional[List[str]] = None,
        run_manager: Optional[AsyncCallbackManagerForLLMRun] = None,
        **kwargs: Any,
    ) -> ChatResult:
        """Call the Dappier API asynchronously and wrap the reply in a ChatResult.

        Raises:
            aiohttp.ClientResponseError: If the API responds with an error status.
        """
        url = self.dappier_endpoint
        headers = {
            "Authorization": f"Bearer {self._api_key}",
            "User-Agent": self.get_user_agent(),
        }

        user_query = _format_dappier_messages(messages=messages)
        payload: Dict[str, Any] = {
            "model": self.dappier_model,
            "conversation": user_query,
        }

        async with ClientSession() as session:
            async with session.post(url, json=payload, headers=headers) as response:
                response.raise_for_status()
                data = await response.json()

                message_response = data["message"]

                return ChatResult(
                    generations=[
                        ChatGeneration(message=AIMessage(content=message_response))
                    ]
                )