# Source code for langchain.chains.hyde.base

"""假设性文档嵌入。

https://arxiv.org/abs/2212.10496
"""
from __future__ import annotations

from typing import Any, Dict, List, Optional

import numpy as np
from langchain_core.callbacks import CallbackManagerForChainRun
from langchain_core.embeddings import Embeddings
from langchain_core.language_models import BaseLanguageModel
from langchain_core.prompts import BasePromptTemplate
from langchain_core.pydantic_v1 import Extra

from langchain.chains.base import Chain
from langchain.chains.hyde.prompts import PROMPT_MAP
from langchain.chains.llm import LLMChain


class HypotheticalDocumentEmbedder(Chain, Embeddings):
    """Generate a hypothetical document for a query, then embed that.

    Based on https://arxiv.org/abs/2212.10496
    """

    # Embedding model used to embed the generated hypothetical documents.
    base_embeddings: Embeddings
    # Chain that turns the query into one or more hypothetical documents.
    llm_chain: LLMChain

    class Config:
        """Configuration for this pydantic object."""

        extra = Extra.forbid
        arbitrary_types_allowed = True

    @property
    def input_keys(self) -> List[str]:
        """Input keys for Hyde's LLM chain."""
        return self.llm_chain.input_keys

    @property
    def output_keys(self) -> List[str]:
        """Output keys for Hyde's LLM chain."""
        return self.llm_chain.output_keys

    def embed_documents(self, texts: List[str]) -> List[List[float]]:
        """Call the base embeddings.

        Args:
            texts: Documents to embed.

        Returns:
            One embedding vector per input text, from ``base_embeddings``.
        """
        return self.base_embeddings.embed_documents(texts)

    def combine_embeddings(self, embeddings: List[List[float]]) -> List[float]:
        """Combine embeddings into a final embedding.

        The component embeddings are averaged element-wise, which assumes
        they all have the same dimensionality.
        """
        return list(np.array(embeddings).mean(axis=0))

    def embed_query(self, text: str) -> List[float]:
        """Generate a hypothetical document and embed it.

        Args:
            text: The query to embed.

        Returns:
            The mean of the embeddings of the generated documents.
        """
        # The LLM chain's first input variable receives the raw query text.
        var_name = self.llm_chain.input_keys[0]
        result = self.llm_chain.generate([{var_name: text}])
        # One prompt was sent, so all generations live in result.generations[0].
        documents = [generation.text for generation in result.generations[0]]
        embeddings = self.embed_documents(documents)
        return self.combine_embeddings(embeddings)

    def _call(
        self,
        inputs: Dict[str, Any],
        run_manager: Optional[CallbackManagerForChainRun] = None,
    ) -> Dict[str, str]:
        """Call the internal llm chain, propagating callbacks to children."""
        _run_manager = run_manager or CallbackManagerForChainRun.get_noop_manager()
        return self.llm_chain(inputs, callbacks=_run_manager.get_child())

    @classmethod
    def from_llm(
        cls,
        llm: BaseLanguageModel,
        base_embeddings: Embeddings,
        prompt_key: Optional[str] = None,
        custom_prompt: Optional[BasePromptTemplate] = None,
        **kwargs: Any,
    ) -> HypotheticalDocumentEmbedder:
        """Load and use an LLMChain with either a specific prompt key or custom prompt.

        Args:
            llm: Language model used to generate hypothetical documents.
            base_embeddings: Embedding model applied to those documents.
            prompt_key: Key into ``PROMPT_MAP`` selecting a canned prompt;
                ignored when ``custom_prompt`` is given.
            custom_prompt: A prompt template to use instead of a canned one.

        Raises:
            ValueError: If neither a ``custom_prompt`` nor a valid
                ``prompt_key`` is provided.
        """
        if custom_prompt is not None:
            prompt = custom_prompt
        elif prompt_key is not None and prompt_key in PROMPT_MAP:
            prompt = PROMPT_MAP[prompt_key]
        else:
            raise ValueError(
                f"Must specify prompt_key if custom_prompt not provided. Should be one "
                f"of {list(PROMPT_MAP.keys())}."
            )
        llm_chain = LLMChain(llm=llm, prompt=prompt)
        return cls(base_embeddings=base_embeddings, llm_chain=llm_chain, **kwargs)

    @property
    def _chain_type(self) -> str:
        return "hyde_chain"