Source code for langchain_core.callbacks.streaming_stdout

"""回调处理程序在新的llm令牌上流向标准输出。"""
from __future__ import annotations

import sys
from typing import TYPE_CHECKING, Any, Dict, List

from langchain_core.callbacks.base import BaseCallbackHandler

if TYPE_CHECKING:
    from langchain_core.agents import AgentAction, AgentFinish
    from langchain_core.messages import BaseMessage
    from langchain_core.outputs import LLMResult


class StreamingStdOutCallbackHandler(BaseCallbackHandler):
    """Callback handler for streaming. Only works with LLMs that support streaming."""

    def on_llm_start(
        self, serialized: Dict[str, Any], prompts: List[str], **kwargs: Any
    ) -> None:
        """Run when the LLM starts running."""

    def on_chat_model_start(
        self,
        serialized: Dict[str, Any],
        messages: List[List[BaseMessage]],
        **kwargs: Any,
    ) -> None:
        """Run when the LLM starts running."""

    def on_llm_new_token(self, token: str, **kwargs: Any) -> None:
        """Run on a new LLM token. Only available when streaming is enabled."""
        sys.stdout.write(token)
        sys.stdout.flush()

    def on_llm_end(self, response: LLMResult, **kwargs: Any) -> None:
        """Run when the LLM ends running."""

    def on_llm_error(self, error: BaseException, **kwargs: Any) -> None:
        """Run when the LLM errors."""

    def on_chain_start(
        self, serialized: Dict[str, Any], inputs: Dict[str, Any], **kwargs: Any
    ) -> None:
        """Run when a chain starts running."""

    def on_chain_end(self, outputs: Dict[str, Any], **kwargs: Any) -> None:
        """Run when a chain ends running."""

    def on_chain_error(self, error: BaseException, **kwargs: Any) -> None:
        """Run when a chain errors."""

    def on_tool_start(
        self, serialized: Dict[str, Any], input_str: str, **kwargs: Any
    ) -> None:
        """Run when a tool starts running."""

    def on_agent_action(self, action: AgentAction, **kwargs: Any) -> Any:
        """Run on agent action."""

    def on_tool_end(self, output: Any, **kwargs: Any) -> None:
        """Run when a tool ends running."""

    def on_tool_error(self, error: BaseException, **kwargs: Any) -> None:
        """Run when a tool errors."""

    def on_text(self, text: str, **kwargs: Any) -> None:
        """Run on arbitrary text."""

    def on_agent_finish(self, finish: AgentFinish, **kwargs: Any) -> None:
        """Run on agent finish."""
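
As a usage sketch (not part of this module's source): the handler is typically passed in a model's callbacks so that on_llm_new_token prints each token to stdout as it arrives. The snippet below assumes the separate langchain-openai package is installed and an OpenAI API key is configured; any chat model that supports streaming callbacks could be substituted.

# Example usage (assumption: langchain-openai installed, OPENAI_API_KEY set).
from langchain_core.callbacks.streaming_stdout import StreamingStdOutCallbackHandler
from langchain_openai import ChatOpenAI

llm = ChatOpenAI(streaming=True, callbacks=[StreamingStdOutCallbackHandler()])
# Tokens are written to stdout as they are generated.
llm.invoke("Write a one-line haiku about streams.")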