Source code for langchain_community.llms.cerebriumai

import logging
from typing import Any, Dict, List, Mapping, Optional, cast

import requests
from langchain_core.callbacks import CallbackManagerForLLMRun
from langchain_core.language_models.llms import LLM
from langchain_core.pydantic_v1 import Extra, Field, SecretStr, root_validator
from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env

from langchain_community.llms.utils import enforce_stop_tokens

logger = logging.getLogger(__name__)


class CerebriumAI(LLM):
    """CerebriumAI large language models.

    To use, you should have the ``cerebrium`` python package installed.
    You should also have the environment variable ``CEREBRIUMAI_API_KEY``
    set with your API key, or pass it as a named parameter to the constructor.

    Any parameters that are valid to be passed to the call can be passed
    in, even if not explicitly saved on this class.

    Example:
        .. code-block:: python

            from langchain_community.llms import CerebriumAI
            cerebrium = CerebriumAI(endpoint_url="", cerebriumai_api_key="my-api-key")
    """

    endpoint_url: str = ""
    """Model endpoint to use."""

    model_kwargs: Dict[str, Any] = Field(default_factory=dict)
    """Holds any model parameters valid for the `create` call
    not explicitly specified."""

    cerebriumai_api_key: Optional[SecretStr] = None

    class Config:
        """Configuration for this pydantic object."""

        extra = Extra.forbid

    @root_validator(pre=True)
    def build_extra(cls, values: Dict[str, Any]) -> Dict[str, Any]:
        """Build extra kwargs from additional params that were passed in."""
        all_required_field_names = {field.alias for field in cls.__fields__.values()}

        extra = values.get("model_kwargs", {})
        for field_name in list(values):
            if field_name not in all_required_field_names:
                if field_name in extra:
                    raise ValueError(f"Found {field_name} supplied twice.")
                logger.warning(
                    f"""{field_name} was transferred to model_kwargs.
                    Please confirm that {field_name} is what you intended."""
                )
                extra[field_name] = values.pop(field_name)
        values["model_kwargs"] = extra
        return values

    @root_validator()
    def validate_environment(cls, values: Dict) -> Dict:
        """Validate that api key and python package exist in environment."""
        cerebriumai_api_key = convert_to_secret_str(
            get_from_dict_or_env(values, "cerebriumai_api_key", "CEREBRIUMAI_API_KEY")
        )
        values["cerebriumai_api_key"] = cerebriumai_api_key
        return values

    @property
    def _identifying_params(self) -> Mapping[str, Any]:
        """Get the identifying parameters."""
        return {
            **{"endpoint_url": self.endpoint_url},
            **{"model_kwargs": self.model_kwargs},
        }

    @property
    def _llm_type(self) -> str:
        """Return type of llm."""
        return "cerebriumai"

    def _call(
        self,
        prompt: str,
        stop: Optional[List[str]] = None,
        run_manager: Optional[CallbackManagerForLLMRun] = None,
        **kwargs: Any,
    ) -> str:
        # Authenticate with the raw API key and send the prompt (plus any model
        # kwargs) to the configured Cerebrium endpoint as a JSON payload.
        headers: Dict = {
            "Authorization": cast(
                SecretStr, self.cerebriumai_api_key
            ).get_secret_value(),
            "Content-Type": "application/json",
        }
        params = self.model_kwargs or {}
        payload = {"prompt": prompt, **params, **kwargs}
        response = requests.post(self.endpoint_url, json=payload, headers=headers)
        if response.status_code == 200:
            data = response.json()
            text = data["result"]
            if stop is not None:
                # I believe this is required since the stop tokens
                # are not enforced by the model parameters
                text = enforce_stop_tokens(text, stop)
            return text
        else:
            response.raise_for_status()
        return ""
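
# Usage sketch: a minimal example of calling this class, assuming a Cerebrium
# deployment that accepts {"prompt": ...} and returns {"result": "..."}.
# The endpoint URL and the `max_length` parameter below are hypothetical
# placeholders, not values defined by this module.
#
#     from langchain_community.llms import CerebriumAI
#
#     llm = CerebriumAI(
#         endpoint_url="https://example.cerebrium.ai/predict",  # placeholder URL
#         cerebriumai_api_key="my-api-key",  # or set CEREBRIUMAI_API_KEY
#         max_length=100,  # unknown field: build_extra moves it into model_kwargs
#     )
#     print(llm.invoke("Tell me a joke", stop=["\n"]))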