
Agents LLM Compiler

LLMCompilerAgentPack #

Bases: BaseLlamaPack

LLMCompilerAgent pack.

Source code in llama_index/packs/agents_llm_compiler/base.py
class LLMCompilerAgentPack(BaseLlamaPack):
    """LLMCompilerAgent包。

    Args:
        tools(List[BaseTool]):要使用的工具列表。
        llm(Optional[LLM]):要使用的LLM。"""

    def __init__(
        self,
        tools: List[BaseTool],
        llm: Optional[LLM] = None,
        callback_manager: Optional[CallbackManager] = None,
        agent_worker_kwargs: Optional[Dict[str, Any]] = None,
        agent_runner_kwargs: Optional[Dict[str, Any]] = None,
    ) -> None:
        """初始化参数。"""
        self.llm = llm or OpenAI(model="gpt-4")
        self.callback_manager = callback_manager or self.llm.callback_manager
        self.agent_worker = LLMCompilerAgentWorker.from_tools(
            tools,
            llm=llm,
            verbose=True,
            callback_manager=self.callback_manager,
            **(agent_worker_kwargs or {})
        )
        self.agent = AgentRunner(
            self.agent_worker,
            callback_manager=self.callback_manager,
            **(agent_runner_kwargs or {})
        )

    def get_modules(self) -> Dict[str, Any]:
        """获取模块。"""
        return {
            "llm": self.llm,
            "callback_manager": self.callback_manager,
            "agent_worker": self.agent_worker,
            "agent": self.agent,
        }

    def run(self, *args: Any, **kwargs: Any) -> Any:
        """运行流水线。"""
        return self.agent.chat(*args, **kwargs)
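
Example usage, as a minimal sketch rather than part of the pack's source. It assumes the pack is installed via pip install llama-index-packs-agents-llm-compiler and exported from llama_index.packs.agents_llm_compiler; the add tool and the model choice are purely illustrative.

from llama_index.core.tools import FunctionTool
from llama_index.llms.openai import OpenAI
from llama_index.packs.agents_llm_compiler import LLMCompilerAgentPack


def add(a: int, b: int) -> int:
    """Add two integers."""
    return a + b


# Wrap the plain function as a tool the agent can call.
add_tool = FunctionTool.from_defaults(fn=add)

# Build the pack; it wires up an LLMCompilerAgentWorker and an AgentRunner.
pack = LLMCompilerAgentPack(tools=[add_tool], llm=OpenAI(model="gpt-4"))

# run() forwards to the underlying agent's chat() method.
response = pack.run("What is 123 + 456?")
print(str(response))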

get_modules #

get_modules() -> Dict[str, Any]

Get modules.

Source code in llama_index/packs/agents_llm_compiler/base.py
def get_modules(self) -> Dict[str, Any]:
    """获取模块。"""
    return {
        "llm": self.llm,
        "callback_manager": self.callback_manager,
        "agent_worker": self.agent_worker,
        "agent": self.agent,
    }
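
The returned dictionary can be used to inspect or reuse the individual components directly, as in the sketch below (pack is assumed to be an already constructed LLMCompilerAgentPack):

modules = pack.get_modules()
llm = modules["llm"]              # the configured LLM
worker = modules["agent_worker"]  # the LLMCompilerAgentWorker
agent = modules["agent"]          # the AgentRunner driving the loop

# For example, bypass the pack wrapper and talk to the agent runner directly.
print(agent.chat("Hello"))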

run #

run(*args: Any, **kwargs: Any) -> Any

Run the pipeline.

Source code in llama_index/packs/agents_llm_compiler/base.py
def run(self, *args: Any, **kwargs: Any) -> Any:
    """运行流水线。"""
    return self.agent.chat(*args, **kwargs)
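
Since run forwards its arguments to AgentRunner.chat, it accepts the same inputs and returns the same chat response object. A short sketch, where pack is assumed to be an already constructed LLMCompilerAgentPack:

response = pack.run("What tools do you have access to?")
print(str(response))  # prints the agent's final answer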