# Source code for langchain_community.embeddings.oci_generative_ai
from enum import Enum
from typing import Any, Dict, List, Mapping, Optional
from langchain_core.embeddings import Embeddings
from langchain_core.pydantic_v1 import BaseModel, Extra, root_validator
# Model ids that begin with this OCID prefix refer to a dedicated (custom)
# serving endpoint; see OCIGenAIEmbeddings.embed_documents, which selects
# DedicatedServingMode vs. OnDemandServingMode based on this prefix.
CUSTOM_ENDPOINT_PREFIX = "ocid1.generativeaiendpoint"
class OCIAuthType(Enum):
    """OCI authentication types as an enumerator.

    The member *names* (not values) are compared against the ``auth_type``
    string passed to ``OCIGenAIEmbeddings``.
    """

    API_KEY = 1
    SECURITY_TOKEN = 2
    INSTANCE_PRINCIPAL = 3
    RESOURCE_PRINCIPAL = 4
class OCIGenAIEmbeddings(BaseModel, Embeddings):
    """OCI embedding models.

    To authenticate, the OCI client uses the methods described in
    https://docs.oracle.com/en-us/iaas/Content/API/Concepts/sdk_authentication_methods.htm

    The authentication method is passed through ``auth_type`` and should be one of:
    API_KEY (default), SECURITY_TOKEN, INSTANCE_PRINCIPAL, RESOURCE_PRINCIPAL.

    Make sure you have the required policies (profile/roles) to access the OCI
    Generative AI service. If a specific config profile is used, you must pass
    the name of the profile (from ~/.oci/config) through ``auth_profile``.

    To use, you must provide the compartment id along with the endpoint url
    and model id as named parameters to the constructor.

    Example:
        .. code-block:: python

            from langchain.embeddings import OCIGenAIEmbeddings

            embeddings = OCIGenAIEmbeddings(
                model_id="MY_EMBEDDING_MODEL",
                service_endpoint="https://inference.generativeai.us-chicago-1.oci.oraclecloud.com",
                compartment_id="MY_OCID"
            )
    """

    client: Any  #: :meta private:

    service_models: Any  #: :meta private:

    auth_type: Optional[str] = "API_KEY"
    """Authentication type; one of:
    API_KEY,
    SECURITY_TOKEN,
    INSTANCE_PRINCIPAL,
    RESOURCE_PRINCIPAL

    If not specified, API_KEY will be used.
    """

    auth_profile: Optional[str] = "DEFAULT"
    """The name of the profile in ~/.oci/config.
    If not specified, DEFAULT will be used.
    """

    model_id: str = None  # type: ignore[assignment]
    """Id of the model to call, e.g., cohere.embed-english-light-v2.0"""

    model_kwargs: Optional[Dict] = None
    """Keyword arguments to pass to the model"""

    service_endpoint: str = None  # type: ignore[assignment]
    """service endpoint url"""

    compartment_id: str = None  # type: ignore[assignment]
    """OCID of the compartment"""

    truncate: Optional[str] = "END"
    """Truncate embeddings that are too long from start or end ("NONE"|"START"|"END")"""

    class Config:
        """Configuration for this pydantic object."""

        extra = Extra.forbid

    @root_validator()
    def validate_environment(  # pylint: disable=no-self-argument
        cls, values: Dict
    ) -> Dict:
        """Validate that an OCI config and the ``oci`` package are available.

        Builds an authenticated ``GenerativeAiInferenceClient`` and stores it
        in ``values["client"]`` unless a client was already supplied.

        Raises:
            ImportError: If the ``oci`` python package is not installed.
            ValueError: If authentication fails or ``auth_type`` is invalid.
        """
        # Skip creating a new client if one was passed in the constructor.
        if values["client"] is not None:
            return values

        try:
            import oci

            client_kwargs = {
                "config": {},
                "signer": None,
                "service_endpoint": values["service_endpoint"],
                "retry_strategy": oci.retry.DEFAULT_RETRY_STRATEGY,
                "timeout": (10, 240),  # default timeout config for OCI Gen AI service
            }

            if values["auth_type"] == OCIAuthType(1).name:
                # API_KEY: credentials come entirely from ~/.oci/config,
                # so no explicit signer is needed.
                client_kwargs["config"] = oci.config.from_file(
                    profile_name=values["auth_profile"]
                )
                client_kwargs.pop("signer", None)
            elif values["auth_type"] == OCIAuthType(2).name:
                # SECURITY_TOKEN: sign requests with the private key plus the
                # session security token referenced by the config profile.
                def make_security_token_signer(oci_config):  # type: ignore[no-untyped-def]
                    pk = oci.signer.load_private_key_from_file(
                        oci_config.get("key_file"), None
                    )

                    with open(
                        oci_config.get("security_token_file"), encoding="utf-8"
                    ) as f:
                        st_string = f.read()

                    return oci.auth.signers.SecurityTokenSigner(st_string, pk)

                client_kwargs["config"] = oci.config.from_file(
                    profile_name=values["auth_profile"]
                )
                client_kwargs["signer"] = make_security_token_signer(
                    oci_config=client_kwargs["config"]
                )
            elif values["auth_type"] == OCIAuthType(3).name:
                # INSTANCE_PRINCIPAL: identity is derived from the compute
                # instance the code runs on; no local config required.
                client_kwargs[
                    "signer"
                ] = oci.auth.signers.InstancePrincipalsSecurityTokenSigner()
            elif values["auth_type"] == OCIAuthType(4).name:
                # RESOURCE_PRINCIPAL: identity is derived from the OCI
                # resource (e.g. OCI Functions) the code runs as.
                client_kwargs[
                    "signer"
                ] = oci.auth.signers.get_resource_principals_signer()
            else:
                raise ValueError("Please provide valid value to auth_type")

            values["client"] = oci.generative_ai_inference.GenerativeAiInferenceClient(
                **client_kwargs
            )

        except ImportError as ex:
            raise ImportError(
                "Could not import oci python package. "
                "Please make sure you have the oci package installed."
            ) from ex
        except Exception as e:
            # NOTE: fixed the auth-type names in this message to match the
            # actual OCIAuthType member names (was PRINCIPLE, a typo).
            raise ValueError(
                "Could not authenticate with OCI client. "
                "Please check if ~/.oci/config exists. "
                "If INSTANCE_PRINCIPAL or RESOURCE_PRINCIPAL is used, "
                "Please check the specified "
                "auth_profile and auth_type are valid."
            ) from e

        return values

    @property
    def _identifying_params(self) -> Mapping[str, Any]:
        """Get the identifying parameters."""
        _model_kwargs = self.model_kwargs or {}
        return {
            **{"model_kwargs": _model_kwargs},
        }

    def embed_documents(self, texts: List[str]) -> List[List[float]]:
        """Call out to OCIGenAI's embedding endpoint.

        Args:
            texts: The list of texts to embed.

        Returns:
            List of embeddings, one for each text.
        """
        from oci.generative_ai_inference import models

        # A model_id that looks like an endpoint OCID targets a dedicated
        # (custom) serving endpoint; anything else is an on-demand model name.
        if self.model_id.startswith(CUSTOM_ENDPOINT_PREFIX):
            serving_mode = models.DedicatedServingMode(endpoint_id=self.model_id)
        else:
            serving_mode = models.OnDemandServingMode(model_id=self.model_id)

        invocation_obj = models.EmbedTextDetails(
            serving_mode=serving_mode,
            compartment_id=self.compartment_id,
            truncate=self.truncate,
            inputs=texts,
        )

        response = self.client.embed_text(invocation_obj)

        return response.data.embeddings

    def embed_query(self, text: str) -> List[float]:
        """Call out to OCIGenAI's embedding endpoint for a single text.

        Args:
            text: The text to embed.

        Returns:
            Embedding for the text.
        """
        return self.embed_documents([text])[0]