Source code for langchain_experimental.tot.thought_generation

"""
我们为思维树(ToT)框架提供了两种生成思维的策略,以避免重复:

这些策略确保语言模型生成多样且不重复的思维,这对于需要探索的解决问题任务至关重要。
"""
from abc import abstractmethod
from typing import Any, Dict, List, Tuple

from langchain.chains.llm import LLMChain
from langchain_core.prompts.base import BasePromptTemplate

from langchain_experimental.pydantic_v1 import Field
from langchain_experimental.tot.prompts import get_cot_prompt, get_propose_prompt


class BaseThoughtGenerationStrategy(LLMChain):
    """Base class for a thought-generation strategy in the Tree of Thoughts
    (ToT) framework.

    Concrete subclasses implement ``next_thought`` to produce candidate
    thoughts for the search.
    """

    c: int = 3
    """The number of children thoughts to propose at each step."""

    @abstractmethod
    def next_thought(
        self,
        problem_description: str,
        thoughts_path: Tuple[str, ...] = (),
        **kwargs: Any,
    ) -> str:
        """Generate the next thought given the problem description and the
        thoughts generated so far.

        Args:
            problem_description: Natural-language statement of the problem.
            thoughts_path: Thoughts produced so far along the current path.
            **kwargs: Extra arguments forwarded to the underlying chain.

        Returns:
            The next candidate thought as a string.
        """
class SampleCoTStrategy(BaseThoughtGenerationStrategy):
    """Sample thoughts from a Chain-of-Thought (CoT) prompt.

    This strategy works better when the thought space is rich, e.g. when each
    thought is a full paragraph: independent, identically distributed samples
    are diverse on their own, which helps avoid repetition.
    """

    prompt: BasePromptTemplate = Field(default_factory=get_cot_prompt)

    def next_thought(
        self,
        problem_description: str,
        thoughts_path: Tuple[str, ...] = (),
        **kwargs: Any,
    ) -> str:
        """Sample one new thought for the given problem and thought path.

        Args:
            problem_description: Natural-language statement of the problem.
            thoughts_path: Thoughts produced so far along the current path.
            **kwargs: Extra arguments forwarded to the underlying chain.

        Returns:
            The sampled thought, or ``""`` if the parsed model output is not
            a string.
        """
        # One independent sample per call; diversity comes from sampling.
        result = self.predict_and_parse(
            problem_description=problem_description,
            thoughts=thoughts_path,
            **kwargs,
        )
        # Non-string parser output is treated as "no thought".
        if isinstance(result, str):
            return result
        return ""
class ProposePromptStrategy(BaseThoughtGenerationStrategy):
    """Sequentially propose thoughts using a "propose prompt".

    This strategy works better when the thought space is more constrained,
    e.g. when each thought is just a word or a single line: proposing several
    distinct thoughts within one prompt completion helps avoid duplication.
    """

    prompt: BasePromptTemplate = Field(default_factory=get_propose_prompt)
    # Cache of proposed-but-unused thoughts keyed by the thought path that
    # produced them; consumed one at a time by ``next_thought``.
    tot_memory: Dict[Tuple[str, ...], List[str]] = Field(default_factory=dict)

    def next_thought(
        self,
        problem_description: str,
        thoughts_path: Tuple[str, ...] = (),
        **kwargs: Any,
    ) -> str:
        """Return the next cached proposal for this path, refilling the cache
        with ``self.c`` new proposals when it is empty.

        Args:
            problem_description: Natural-language statement of the problem.
            thoughts_path: Thoughts produced so far along the current path.
            **kwargs: Extra arguments forwarded to the underlying chain.

        Returns:
            The next proposed thought, or ``""`` when the model produced no
            usable proposals.
        """
        cached = self.tot_memory.get(thoughts_path)
        if not cached:
            proposals = self.predict_and_parse(
                problem_description=problem_description,
                thoughts=thoughts_path,
                n=self.c,
                **kwargs,
            )
            if not proposals:
                return ""
            if not isinstance(proposals, list):
                # Parser gave something unusable; yield no thought.
                return ""
            # Store reversed so ``pop()`` hands proposals out in order.
            self.tot_memory[thoughts_path] = proposals[::-1]
        return self.tot_memory[thoughts_path].pop()