Prompt

Bases: QueryComponent

Prompt component.

Source code in llama_index/core/prompts/base.py
class PromptComponent(QueryComponent):
    """组件提示。"""

    prompt: BasePromptTemplate = Field(..., description="Prompt")
    llm: Optional[BaseLLM] = Field(
        default=None, description="LLM to use for formatting prompt."
    )
    format_messages: bool = Field(
        default=False,
        description="Whether to format the prompt into a list of chat messages.",
    )

    class Config:
        arbitrary_types_allowed = True

    def set_callback_manager(self, callback_manager: Any) -> None:
        """设置回调管理器。"""

    def _validate_component_inputs(self, input: Dict[str, Any]) -> Dict[str, Any]:
        """在运行组件期间验证组件输入。"""
        keys = list(input.keys())
        for k in keys:
            input[k] = validate_and_convert_stringable(input[k])
        return input

    def _run_component(self, **kwargs: Any) -> Any:
        """运行组件。"""
        if self.format_messages:
            output: Union[str, List[ChatMessage]] = self.prompt.format_messages(
                llm=self.llm, **kwargs
            )
        else:
            output = self.prompt.format(llm=self.llm, **kwargs)
        return {"prompt": output}

    async def _arun_component(self, **kwargs: Any) -> Any:
        """运行组件。"""
        # NOTE: no native async for prompt
        return self._run_component(**kwargs)

    @property
    def input_keys(self) -> InputKeys:
        """输入键。"""
        return InputKeys.from_keys(
            set(self.prompt.template_vars) - set(self.prompt.kwargs)
        )

    @property
    def output_keys(self) -> OutputKeys:
        """输出键。"""
        return OutputKeys.from_keys({"prompt"})

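Below is a minimal usage sketch (not part of the generated reference above): it wraps a PromptTemplate in a PromptComponent and runs it directly. The import paths and the run_component call are assumptions based on the query-pipeline API; verify them against your installed llama_index version.

from llama_index.core.prompts import PromptTemplate
from llama_index.core.prompts.base import PromptComponent

# "text" is a free template variable, so it is derived into input_keys.
prompt = PromptTemplate("Summarize the following text:\n{text}")
component = PromptComponent(prompt=prompt, format_messages=False)

print(component.input_keys)  # input keys derived from free template variables
result = component.run_component(text="LlamaIndex composes LLM pipelines.")
print(result["prompt"])      # formatted prompt string under the "prompt" output key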
input_keys property #

input_keys: InputKeys

Input keys.

output_keys property #

output_keys: OutputKeys

Output keys.

set_callback_manager #

set_callback_manager(callback_manager: Any) -> None

Set callback manager.

Source code in llama_index/core/prompts/base.py
def set_callback_manager(self, callback_manager: Any) -> None:
    """设置回调管理器。"""