import json
from typing import List, Sequence, Tuple

from langchain_core.agents import AgentAction, AgentActionMessageLog
from langchain_core.messages import AIMessage, BaseMessage, FunctionMessage


def _convert_agent_action_to_messages(
    agent_action: AgentAction, observation: str
) -> List[BaseMessage]:
    """Convert an agent action to messages.

    This is used to reconstruct the original AI message from the agent action.

    Args:
        agent_action: Agent action to convert.
        observation: The result of the corresponding tool invocation.

    Returns:
        Either a single AIMessage, or the previous messages plus a
        FunctionMessage that corresponds to the original tool invocation.
    """
    if isinstance(agent_action, AgentActionMessageLog):
        return list(agent_action.message_log) + [
            _create_function_message(agent_action, observation)
        ]
    else:
        return [AIMessage(content=agent_action.log)]


def _create_function_message(
    agent_action: AgentAction, observation: str
) -> FunctionMessage:
    """Convert an agent action and observation into a function message.

    Args:
        agent_action: The tool invocation request from the agent.
        observation: The result of the tool invocation.

    Returns:
        FunctionMessage that corresponds to the original tool invocation.
        Non-string observations are serialized to JSON when possible,
        otherwise converted with ``str``.
    """
    if not isinstance(observation, str):
        try:
            content = json.dumps(observation, ensure_ascii=False)
        except Exception:
            content = str(observation)
    else:
        content = observation
    return FunctionMessage(
        name=agent_action.tool,
        content=content,
    )
def format_to_openai_function_messages(
    intermediate_steps: Sequence[Tuple[AgentAction, str]],
) -> List[BaseMessage]:
    """Convert (AgentAction, tool output) tuples into FunctionMessages.

    Args:
        intermediate_steps: Steps the LLM has taken to date, along with
            their observations.

    Returns:
        List of messages to send to the LLM for the next prediction.
    """
    messages: List[BaseMessage] = []
    for agent_action, observation in intermediate_steps:
        messages.extend(_convert_agent_action_to_messages(agent_action, observation))
    return messages
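

# Illustrative usage sketch (not part of the original module): reconstructs the
# scratchpad for one hypothetical (AgentAction, observation) pair. The tool
# name, tool input, and observation below are made-up example values.
if __name__ == "__main__":
    example_action = AgentActionMessageLog(
        tool="search",  # hypothetical tool name
        tool_input={"query": "weather in SF"},  # hypothetical tool input
        log="\nInvoking: `search` with `{'query': 'weather in SF'}`\n",
        message_log=[
            AIMessage(
                content="",
                additional_kwargs={
                    "function_call": {
                        "name": "search",
                        "arguments": '{"query": "weather in SF"}',
                    }
                },
            )
        ],
    )
    example_observation = "It is sunny in SF."  # hypothetical tool output

    scratchpad = format_to_openai_function_messages(
        [(example_action, example_observation)]
    )
    # Expected: the original AIMessage (carrying the function_call), followed
    # by FunctionMessage(name="search", content="It is sunny in SF.").
    for message in scratchpad:
        print(type(message).__name__, repr(message.content))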