Llm

BaseLLMComponent #

Bases: QueryComponent

Base LLM component.

Source code in llama_index/core/llms/llm.py
class BaseLLMComponent(QueryComponent):
    """Base LLM component."""

    llm: LLM = Field(..., description="LLM")
    streaming: bool = Field(default=False, description="Streaming mode")

    class Config:
        arbitrary_types_allowed = True

    def set_callback_manager(self, callback_manager: Any) -> None:
        """Set callback manager."""
        self.llm.callback_manager = callback_manager

set_callback_manager #

set_callback_manager(callback_manager: Any) -> None

Set the callback manager on the wrapped LLM.

Source code in llama_index/core/llms/llm.py
def set_callback_manager(self, callback_manager: Any) -> None:
    """Set callback manager."""
    self.llm.callback_manager = callback_manager

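A minimal usage sketch. Since `BaseLLMComponent` is not meant to be instantiated directly, this uses the `LLMCompleteComponent` subclass documented below; the `OpenAI` LLM and `LlamaDebugHandler` are illustrative assumptions, and any `LLM` subclass and callback handler work the same way:

from llama_index.core.callbacks import CallbackManager, LlamaDebugHandler
from llama_index.core.llms.llm import LLMCompleteComponent
from llama_index.llms.openai import OpenAI  # assumed concrete LLM

llm = OpenAI(model="gpt-3.5-turbo")  # model name is illustrative
component = LLMCompleteComponent(llm=llm)

# Route the wrapped LLM's callbacks (traces, token counts, ...) through one manager.
component.set_callback_manager(CallbackManager([LlamaDebugHandler()]))
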
LLMCompleteComponent #

Bases: BaseLLMComponent

LLM completion component.

Source code in llama_index/core/llms/llm.py
class LLMCompleteComponent(BaseLLMComponent):
    """LLM completion component."""

    def _validate_component_inputs(self, input: Dict[str, Any]) -> Dict[str, Any]:
        """Validate component inputs during run_component."""
        if "prompt" not in input:
            raise ValueError("Prompt must be in input dict.")

        # special case: the prompt may be a list of chat messages
        if isinstance(input["prompt"], get_args(List[ChatMessage])):
            input["prompt"] = self.llm.messages_to_prompt(input["prompt"])
            input["prompt"] = validate_and_convert_stringable(input["prompt"])
        else:
            input["prompt"] = validate_and_convert_stringable(input["prompt"])
            input["prompt"] = self.llm.completion_to_prompt(input["prompt"])

        return input

    def _run_component(self, **kwargs: Any) -> Any:
        """Run component."""
        # TODO: only complete is supported for now;
        # non-trivial to figure out how to support chat/complete/etc.
        prompt = kwargs["prompt"]
        # ignore all other kwargs for now
        if self.streaming:
            response = self.llm.stream_complete(prompt, formatted=True)
        else:
            response = self.llm.complete(prompt, formatted=True)
        return {"output": response}

    async def _arun_component(self, **kwargs: Any) -> Any:
        """Run component (async)."""
        # TODO: only complete is supported for now;
        # non-trivial to figure out how to support chat/complete/etc.
        prompt = kwargs["prompt"]
        # ignore all other kwargs for now
        response = await self.llm.acomplete(prompt, formatted=True)
        return {"output": response}

    @property
    def input_keys(self) -> InputKeys:
        """Input keys."""
        # TODO: only complete is supported for now
        return InputKeys.from_keys({"prompt"})

    @property
    def output_keys(self) -> OutputKeys:
        """Output keys."""
        return OutputKeys.from_keys({"output"})

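A usage sketch, assuming the public `run_component` entry point inherited from `QueryComponent` and `OpenAI` as the concrete LLM (illustrative, not the only option):

from llama_index.core.llms import ChatMessage
from llama_index.core.llms.llm import LLMCompleteComponent
from llama_index.llms.openai import OpenAI  # assumed concrete LLM

component = LLMCompleteComponent(llm=OpenAI(model="gpt-3.5-turbo"))

# A plain string is normalized via completion_to_prompt() before completion.
result = component.run_component(prompt="Tell me a joke.")
print(result["output"].text)

# Chat messages are flattened via messages_to_prompt() instead.
result = component.run_component(
    prompt=[ChatMessage(role="user", content="Tell me a joke.")]
)

With streaming=True, "output" holds a response generator rather than a single CompletionResponse.
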
input_keys property #

input_keys: InputKeys

Input keys.

output_keys property #

output_keys: OutputKeys

Output keys.

LLMChatComponent #

Bases: BaseLLMComponent

LLM chat component.

Source code in llama_index/core/llms/llm.py
class LLMChatComponent(BaseLLMComponent):
    """LLM chat component."""

    def _validate_component_inputs(self, input: Dict[str, Any]) -> Dict[str, Any]:
        """Validate component inputs during run_component."""
        if "messages" not in input:
            raise ValueError("Messages must be in input dict.")

        # if `messages` is a string, convert it to a list of chat messages
        if isinstance(input["messages"], get_args(StringableInput)):
            input["messages"] = validate_and_convert_stringable(input["messages"])
            input["messages"] = prompt_to_messages(str(input["messages"]))

        for message in input["messages"]:
            if not isinstance(message, ChatMessage):
                raise ValueError("Messages must be a list of ChatMessage")
        return input

    def _run_component(self, **kwargs: Any) -> Any:
        """Run component."""
        # TODO: only chat is supported for now;
        # non-trivial to figure out how to support chat/complete/etc.
        messages = kwargs["messages"]
        if self.streaming:
            response = self.llm.stream_chat(messages)
        else:
            response = self.llm.chat(messages)
        return {"output": response}

    async def _arun_component(self, **kwargs: Any) -> Any:
        """Run component (async)."""
        # TODO: only chat is supported for now;
        # non-trivial to figure out how to support chat/complete/etc.
        messages = kwargs["messages"]
        if self.streaming:
            response = await self.llm.astream_chat(messages)
        else:
            response = await self.llm.achat(messages)
        return {"output": response}

    @property
    def input_keys(self) -> InputKeys:
        """Input keys."""
        # TODO: only chat is supported for now
        return InputKeys.from_keys({"messages"})

    @property
    def output_keys(self) -> OutputKeys:
        """Output keys."""
        return OutputKeys.from_keys({"output"})

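A matching sketch for the chat variant, under the same assumptions (`run_component` from `QueryComponent`, `OpenAI` as an illustrative LLM):

from llama_index.core.llms import ChatMessage
from llama_index.core.llms.llm import LLMChatComponent
from llama_index.llms.openai import OpenAI  # assumed concrete LLM

component = LLMChatComponent(llm=OpenAI(model="gpt-3.5-turbo"))

# A plain string is converted to chat messages by the validator (via prompt_to_messages).
result = component.run_component(messages="What is a vector index?")
print(result["output"].message.content)

# Or pass explicit ChatMessage objects for multi-turn input.
result = component.run_component(
    messages=[
        ChatMessage(role="system", content="You are a concise assistant."),
        ChatMessage(role="user", content="What is a vector index?"),
    ]
)
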
input_keys property #

input_keys: InputKeys

Input keys.

output_keys property #

output_keys: OutputKeys

Output keys.
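
In practice these components are rarely constructed by hand; a query pipeline wraps an LLM in the appropriate component for you. A sketch of that pattern, assuming `QueryPipeline` from `llama_index.core.query_pipeline` and `PromptTemplate` from `llama_index.core.prompts`:

from llama_index.core.prompts import PromptTemplate
from llama_index.core.query_pipeline import QueryPipeline
from llama_index.llms.openai import OpenAI  # assumed concrete LLM

prompt_tmpl = PromptTemplate("Tell me a joke about {topic}.")
llm = OpenAI(model="gpt-3.5-turbo")

# The pipeline converts the raw LLM into an LLM component behind the scenes.
pipeline = QueryPipeline(chain=[prompt_tmpl, llm])
output = pipeline.run(topic="airplanes")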