# Source code for langchain_community.chat_models.azure_openai

"""Azure OpenAI聊天封装。"""
from __future__ import annotations

import logging
import os
import warnings
from typing import Any, Callable, Dict, List, Union

from langchain_core._api.deprecation import deprecated
from langchain_core.outputs import ChatResult
from langchain_core.pydantic_v1 import BaseModel, Field, root_validator
from langchain_core.utils import get_from_dict_or_env

from langchain_community.chat_models.openai import ChatOpenAI
from langchain_community.utils.openai import is_openai_v1

logger = logging.getLogger(__name__)


@deprecated(
    since="0.0.10",
    removal="0.3.0",
    alternative_import="langchain_openai.AzureChatOpenAI",
)
class AzureChatOpenAI(ChatOpenAI):
    """`Azure OpenAI` Chat Completion API.

    To use this class you must have a deployed model on Azure OpenAI. Use
    `deployment_name` in the constructor to refer to the "Model deployment name"
    in the Azure portal.

    In addition, you should have the ``openai`` python package installed, and
    the following environment variables set or passed in the constructor in
    lower case:
    - ``AZURE_OPENAI_API_KEY``
    - ``AZURE_OPENAI_ENDPOINT``
    - ``AZURE_OPENAI_AD_TOKEN``
    - ``OPENAI_API_VERSION``
    - ``OPENAI_PROXY``

    For example, if you have `gpt-35-turbo` deployed, with the deployment name
    `35-turbo-dev`, the constructor should look like:

    .. code-block:: python

        AzureChatOpenAI(
            azure_deployment="35-turbo-dev",
            openai_api_version="2023-05-15",
        )

    Be aware the API version may change.

    You can also specify the version of the model using the ``model_version``
    constructor parameter, as Azure OpenAI doesn't return the model version
    with the response.

    Default is empty. When you specify a version, it will be appended to the
    model name in the response. Setting the correct version will help you to
    calculate the cost properly. The model version is not validated, so make
    sure you set it correctly to get the correct cost.

    Any parameters that are valid to be passed to the ``openai.create`` call
    can be passed in, even if not explicitly saved on this class.
    """

    azure_endpoint: Union[str, None] = None
    """Your Azure endpoint, including the resource.

        Automatically inferred from env var `AZURE_OPENAI_ENDPOINT` if not provided.

        Example: `https://example-resource.azure.openai.com/`
    """
    deployment_name: Union[str, None] = Field(default=None, alias="azure_deployment")
    """A model deployment.

        If given, sets the base client URL to include
        `/deployments/{azure_deployment}`.
        Note: this means you won't be able to use non-deployment endpoints.
    """
    openai_api_version: str = Field(default="", alias="api_version")
    """Automatically inferred from env var `OPENAI_API_VERSION` if not provided."""
    openai_api_key: Union[str, None] = Field(default=None, alias="api_key")
    """Automatically inferred from env var `AZURE_OPENAI_API_KEY` if not provided."""
    azure_ad_token: Union[str, None] = None
    """Your Azure Active Directory token.

        Automatically inferred from env var `AZURE_OPENAI_AD_TOKEN` if not provided.

        For more:
        https://www.microsoft.com/en-us/security/business/identity-access/microsoft-entra-id.
    """  # noqa: E501
    azure_ad_token_provider: Union[Callable[[], str], None] = None
    """A function that returns an Azure Active Directory token.

        Will be invoked on every request.
    """
    model_version: str = ""
    """Legacy, for openai<1.0.0 support."""
    openai_api_type: str = ""
    """Legacy, for openai<1.0.0 support."""
    validate_base_url: bool = True
    """For backwards compatibility. If the legacy value openai_api_base is
        passed in, try to infer whether it is a base_url or an azure_endpoint
        and update accordingly.
    """

    @classmethod
    def get_lc_namespace(cls) -> List[str]:
        """Get the namespace of the langchain object."""
        return ["langchain", "chat_models", "azure_openai"]

    @root_validator()
    def validate_environment(cls, values: Dict) -> Dict:
        """Validate that the API key and python package exist in the environment."""
        if values["n"] < 1:
            raise ValueError("n must be at least 1.")
        if values["n"] > 1 and values["streaming"]:
            raise ValueError("n must be 1 when streaming.")

        # Check OPENAI_KEY for backwards compatibility.
        # TODO: Remove OPENAI_API_KEY support to avoid possible conflict when using
        # other forms of azure credentials.
        values["openai_api_key"] = (
            values["openai_api_key"]
            or os.getenv("AZURE_OPENAI_API_KEY")
            or os.getenv("OPENAI_API_KEY")
        )
        values["openai_api_base"] = values["openai_api_base"] or os.getenv(
            "OPENAI_API_BASE"
        )
        values["openai_api_version"] = values["openai_api_version"] or os.getenv(
            "OPENAI_API_VERSION"
        )
        # Check OPENAI_ORGANIZATION for backwards compatibility.
        values["openai_organization"] = (
            values["openai_organization"]
            or os.getenv("OPENAI_ORG_ID")
            or os.getenv("OPENAI_ORGANIZATION")
        )
        values["azure_endpoint"] = values["azure_endpoint"] or os.getenv(
            "AZURE_OPENAI_ENDPOINT"
        )
        values["azure_ad_token"] = values["azure_ad_token"] or os.getenv(
            "AZURE_OPENAI_AD_TOKEN"
        )
        values["openai_api_type"] = get_from_dict_or_env(
            values, "openai_api_type", "OPENAI_API_TYPE", default="azure"
        )
        values["openai_proxy"] = get_from_dict_or_env(
            values, "openai_proxy", "OPENAI_PROXY", default=""
        )
        try:
            import openai
        except ImportError:
            raise ImportError(
                "Could not import openai python package. "
                "Please install it with `pip install openai`."
            )
        if is_openai_v1():
            # For backwards compatibility. Before openai v1, no distinction was made
            # between azure_endpoint and base_url (openai_api_base).
            openai_api_base = values["openai_api_base"]
            if openai_api_base and values["validate_base_url"]:
                if "/openai" not in openai_api_base:
                    values["openai_api_base"] = (
                        values["openai_api_base"].rstrip("/") + "/openai"
                    )
                    warnings.warn(
                        "As of openai>=1.0.0, Azure endpoints should be specified via "
                        f"the `azure_endpoint` param not `openai_api_base` "
                        f"(or alias `base_url`). Updating `openai_api_base` from "
                        f"{openai_api_base} to {values['openai_api_base']}."
                    )
                if values["deployment_name"]:
                    warnings.warn(
                        "As of openai>=1.0.0, if `deployment_name` (or alias "
                        "`azure_deployment`) is specified then "
                        "`openai_api_base` (or alias `base_url`) should not be. "
                        "Instead use `deployment_name` (or alias `azure_deployment`) "
                        "and `azure_endpoint`."
                    )
                    if values["deployment_name"] not in values["openai_api_base"]:
                        warnings.warn(
                            "As of openai>=1.0.0, if `openai_api_base` "
                            "(or alias `base_url`) is specified it is expected to be "
                            "of the form "
                            "https://example-resource.azure.openai.com/openai/deployments/example-deployment. "  # noqa: E501
                            f"Updating {openai_api_base} to "
                            f"{values['openai_api_base']}."
                        )
                        values["openai_api_base"] += (
                            "/deployments/" + values["deployment_name"]
                        )
                    values["deployment_name"] = None
            client_params = {
                "api_version": values["openai_api_version"],
                "azure_endpoint": values["azure_endpoint"],
                "azure_deployment": values["deployment_name"],
                "api_key": values["openai_api_key"],
                "azure_ad_token": values["azure_ad_token"],
                "azure_ad_token_provider": values["azure_ad_token_provider"],
                "organization": values["openai_organization"],
                "base_url": values["openai_api_base"],
                "timeout": values["request_timeout"],
                "max_retries": values["max_retries"],
                "default_headers": values["default_headers"],
                "default_query": values["default_query"],
                "http_client": values["http_client"],
            }
            values["client"] = openai.AzureOpenAI(**client_params).chat.completions
            values["async_client"] = openai.AsyncAzureOpenAI(
                **client_params
            ).chat.completions
        else:
            values["client"] = openai.ChatCompletion
        return values

    @property
    def _default_params(self) -> Dict[str, Any]:
        """Get the default parameters for calling the OpenAI API."""
        if is_openai_v1():
            return super()._default_params
        else:
            # Pre-v1 Azure routing uses the `engine` kwarg for the deployment.
            return {
                **super()._default_params,
                "engine": self.deployment_name,
            }

    @property
    def _identifying_params(self) -> Dict[str, Any]:
        """Get the identifying parameters."""
        return {**self._default_params}

    @property
    def _client_params(self) -> Dict[str, Any]:
        """Get the config params used for the openai client."""
        if is_openai_v1():
            return super()._client_params
        else:
            return {
                **super()._client_params,
                "api_type": self.openai_api_type,
                "api_version": self.openai_api_version,
            }

    @property
    def _llm_type(self) -> str:
        return "azure-openai-chat"

    @property
    def lc_attributes(self) -> Dict[str, Any]:
        return {
            "openai_api_type": self.openai_api_type,
            "openai_api_version": self.openai_api_version,
        }

    def _create_chat_result(self, response: Union[dict, BaseModel]) -> ChatResult:
        """Build a ChatResult, raising if Azure's content filter suppressed output.

        Also appends ``model_version`` (when set) to the reported model name so
        downstream cost accounting can distinguish model versions.
        """
        if not isinstance(response, dict):
            response = response.dict()
        for res in response["choices"]:
            if res.get("finish_reason", None) == "content_filter":
                raise ValueError(
                    "Azure has not provided the response due to a content filter "
                    "being triggered"
                )
        chat_result = super()._create_chat_result(response)

        if "model" in response:
            model = response["model"]
            if self.model_version:
                model = f"{model}-{self.model_version}"

            if chat_result.llm_output is not None and isinstance(
                chat_result.llm_output, dict
            ):
                chat_result.llm_output["model_name"] = model
        return chat_result