
Self discover

SelfDiscoverPack #

Bases: BaseLlamaPack

Self-Discover pack.

Source code in llama_index/packs/self_discover/base.py
class SelfDiscoverPack(BaseLlamaPack):
    """自我发现包。"""

    def __init__(
        self,
        llm: Optional[Any] = None,
        verbose: bool = True,
    ) -> None:
        """初始化参数。"""
        self.llm = llm or OpenAI(model="gpt-3.5-turbo")
        self.reasoning_modules = _REASONING_MODULES
        self.verbose = verbose

    def get_modules(self) -> Dict[str, Any]:
        """获取模块。"""
        return {"llm": self.llm, "reasoning_modules": self.reasoning_modules}

    def run(self, task):
        """运行配置的管道,针对指定的任务和推理模块。"""
        configurator = PipelineConfigurator(
            task, self.reasoning_modules, self.verbose, self.llm
        )
        pipeline = configurator.configure()
        return pipeline.run(task=task, reasoning_modules=self.reasoning_modules)
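
A minimal usage sketch, assuming the pack is importable from `llama_index.packs.self_discover` and that an OpenAI API key is configured in the environment; the model name and task string below are illustrative only.

```python
from llama_index.llms.openai import OpenAI
from llama_index.packs.self_discover import SelfDiscoverPack

# Construct the pack with an explicit LLM; omitting `llm` falls back to gpt-3.5-turbo.
pack = SelfDiscoverPack(llm=OpenAI(model="gpt-4"), verbose=True)

# Run the configured pipeline on a task; the pack's built-in reasoning modules are used.
response = pack.run("Plan a three-course dinner for four people on a $50 budget.")
print(response)
```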

get_modules #

get_modules() -> Dict[str, Any]

Get modules.

Source code in llama_index/packs/self_discover/base.py
def get_modules(self) -> Dict[str, Any]:
    """获取模块。"""
    return {"llm": self.llm, "reasoning_modules": self.reasoning_modules}

run #

run(task)

Run the configured pipeline on the specified task with the reasoning modules.

Source code in llama_index/packs/self_discover/base.py
def run(self, task):
    """运行配置的管道,针对指定的任务和推理模块。"""
    configurator = PipelineConfigurator(
        task, self.reasoning_modules, self.verbose, self.llm
    )
    pipeline = configurator.configure()
    return pipeline.run(task=task, reasoning_modules=self.reasoning_modules)