langchain DOC: how_to/migrate_agent/#memory => example does not work with other models

qcuzuvrc · posted 3 months ago

URL

https://python.langchain.com/v0.2/docs/how_to/migrate_agent/#memory

Checklist

  • I added a very descriptive title to this issue.
  • If applicable, I included a link to the documentation page I am referring to.

Issue with the current documentation:

If I replace model = ChatOpenAI(model="gpt-4o") with ChatAnthropicVertex(model_name="claude-3-haiku@20240307", location="us-east5", project="my_gcp_project"), the memory example throws the following error:

{
"name": "ValueError",
"message": "Message dict must contain 'role' and 'content' keys, got {'text': '\
\
The magic_function applied to the input of 3 returns the output of 5.', 'type': 'text', 'index': 0}",
"stack": "---------------------------------------------------------------------------
KeyError                                  Traceback (most recent call last)
File /usr/local/lib/python3.11/site-packages/langchain_core/messages/utils.py:271, in _convert_to_message(message)
270         msg_type = msg_kwargs.pop(\"type\")
--> 271     msg_content = msg_kwargs.pop(\"content\")
272 except KeyError:

KeyError: 'content'

During handling of the above exception, another exception occurred:

ValueError                                Traceback (most recent call last)
Cell In[79], line 55
49 print(
50     agent_with_chat_history.invoke(
51         {\"input\": \"Hi, I'm polly! What's the output of magic_function of 3?\"}, config
52     )[\"output\"]
53 )
54 print(\"---\")
---> 55 print(agent_with_chat_history.invoke({\"input\": \"Remember my name?\"}, config)[\"output\"])
56 print(\"---\")
57 print(
58     agent_with_chat_history.invoke({\"input\": \"what was that output again?\"}, config)[
59         \"output\"
60     ]
61 )

File /usr/local/lib/python3.11/site-packages/langchain_core/runnables/base.py:4580, in RunnableBindingBase.invoke(self, input, config, **kwargs)
4574 def invoke(
4575     self,
4576     input: Input,
4577     config: Optional[RunnableConfig] = None,
4578     **kwargs: Optional[Any],
4579 ) -> Output:
-> 4580     return self.bound.invoke(
4581         input,
4582         self._merge_configs(config),
4583         **{**self.kwargs, **kwargs},
4584     )

File /usr/local/lib/python3.11/site-packages/langchain_core/runnables/base.py:4580, in RunnableBindingBase.invoke(self, input, config, **kwargs)
4574 def invoke(
4575     self,
4576     input: Input,
4577     config: Optional[RunnableConfig] = None,
4578     **kwargs: Optional[Any],
4579 ) -> Output:
-> 4580     return self.bound.invoke(
4581         input,
4582         self._merge_configs(config),
4583         **{**self.kwargs, **kwargs},
4584     )

File /usr/local/lib/python3.11/site-packages/langchain_core/runnables/base.py:2499, in RunnableSequence.invoke(self, input, config, **kwargs)
2497             input = step.invoke(input, config, **kwargs)
2498         else:
-> 2499             input = step.invoke(input, config)
2500 # finish the root run
2501 except BaseException as e:

File /usr/local/lib/python3.11/site-packages/langchain_core/runnables/branch.py:212, in RunnableBranch.invoke(self, input, config, **kwargs)
210             break
211     else:
--> 212         output = self.default.invoke(
213             input,
214             config=patch_config(
215                 config, callbacks=run_manager.get_child(tag=\"branch:default\")
216             ),
217             **kwargs,
218         )
219 except BaseException as e:
220     run_manager.on_chain_error(e)

File /usr/local/lib/python3.11/site-packages/langchain_core/runnables/base.py:4580, in RunnableBindingBase.invoke(self, input, config, **kwargs)
4574 def invoke(
4575     self,
4576     input: Input,
4577     config: Optional[RunnableConfig] = None,
4578     **kwargs: Optional[Any],
4579 ) -> Output:
-> 4580     return self.bound.invoke(
4581         input,
4582         self._merge_configs(config),
4583         **{**self.kwargs, **kwargs},
4584     )

File /usr/local/lib/python3.11/site-packages/langchain/chains/base.py:166, in Chain.invoke(self, input, config, **kwargs)
164 except BaseException as e:
165     run_manager.on_chain_error(e)
--> 166     raise e
167 run_manager.on_chain_end(outputs)
169 if include_run_info:

File /usr/local/lib/python3.11/site-packages/langchain/chains/base.py:156, in Chain.invoke(self, input, config, **kwargs)
153 try:
154     self._validate_inputs(inputs)
155     outputs = (
--> 156         self._call(inputs, run_manager=run_manager)
157         if new_arg_supported
158         else self._call(inputs)
159     )
161     final_outputs: Dict[str, Any] = self.prep_outputs(
162         inputs, outputs, return_only_outputs
163     )
164 except BaseException as e:

File /usr/local/lib/python3.11/site-packages/langchain/agents/agent.py:1636, in AgentExecutor._call(self, inputs, run_manager)
1634 # We now enter the agent loop (until it returns something).
1635 while self._should_continue(iterations, time_elapsed):
-> 1636     next_step_output = self._take_next_step(
1637         name_to_tool_map,
1638         color_mapping,
1639         inputs,
1640         intermediate_steps,
1641         run_manager=run_manager,
1642     )
1643     if isinstance(next_step_output, AgentFinish):
1644         return self._return(
1645             next_step_output, intermediate_steps, run_manager=run_manager
1646         )

File /usr/local/lib/python3.11/site-packages/langchain/agents/agent.py:1342, in AgentExecutor._take_next_step(self, name_to_tool_map, color_mapping, inputs, intermediate_steps, run_manager)
1333 def _take_next_step(
1334     self,
1335     name_to_tool_map: Dict[str, BaseTool],
(...)
1339     run_manager: Optional[CallbackManagerForChainRun] = None,
1340 ) -> Union[AgentFinish, List[Tuple[AgentAction, str]]]:
1341     return self._consume_next_step(
-> 1342         [
1343             a
1344             for a in self._iter_next_step(
1345                 name_to_tool_map,
1346                 color_mapping,
1347                 inputs,
1348                 intermediate_steps,
1349                 run_manager,
1350             )
1351         ]
1352     )

File /usr/local/lib/python3.11/site-packages/langchain/agents/agent.py:1342, in <listcomp>(.0)
1333 def _take_next_step(
1334     self,
1335     name_to_tool_map: Dict[str, BaseTool],
(...)
1339     run_manager: Optional[CallbackManagerForChainRun] = None,
1340 ) -> Union[AgentFinish, List[Tuple[AgentAction, str]]]:
1341     return self._consume_next_step(
-> 1342         [
1343             a
1344             for a in self._iter_next_step(
1345                 name_to_tool_map,
1346                 color_mapping,
1347                 inputs,
1348                 intermediate_steps,
1349                 run_manager,
1350             )
1351         ]
1352     )

File /usr/local/lib/python3.11/site-packages/langchain/agents/agent.py:1370, in AgentExecutor._iter_next_step(self, name_to_tool_map, color_mapping, inputs, intermediate_steps, run_manager)
1367     intermediate_steps = self._prepare_intermediate_steps(intermediate_steps)
1369     # Call the LLM to see what to do.
-> 1370     output = self.agent.plan(
1371         intermediate_steps,
1372         callbacks=run_manager.get_child() if run_manager else None,
1373         **inputs,
1374     )
1375 except OutputParserException as e:
1376     if isinstance(self.handle_parsing_errors, bool):

File /usr/local/lib/python3.11/site-packages/langchain/agents/agent.py:580, in RunnableMultiActionAgent.plan(self, intermediate_steps, callbacks, **kwargs)
572 final_output: Any = None
573 if self.stream_runnable:
574     # Use streaming to make sure that the underlying LLM is invoked in a
575     # streaming
(...)
578     # Because the response from the plan is not a generator, we need to
579     # accumulate the output into final output and return that.
--> 580     for chunk in self.runnable.stream(inputs, config={\"callbacks\": callbacks}):
581         if final_output is None:
582             final_output = chunk

File /usr/local/lib/python3.11/site-packages/langchain_core/runnables/base.py:2877, in RunnableSequence.stream(self, input, config, **kwargs)
2871 def stream(
2872     self,
2873     input: Input,
2874     config: Optional[RunnableConfig] = None,
2875     **kwargs: Optional[Any],
2876 ) -> Iterator[Output]:
-> 2877     yield from self.transform(iter([input]), config, **kwargs)

File /usr/local/lib/python3.11/site-packages/langchain_core/runnables/base.py:2864, in RunnableSequence.transform(self, input, config, **kwargs)
2858 def transform(
2859     self,
2860     input: Iterator[Input],
2861     config: Optional[RunnableConfig] = None,
2862     **kwargs: Optional[Any],
2863 ) -> Iterator[Output]:
-> 2864     yield from self._transform_stream_with_config(
2865         input,
2866         self._transform,
2867         patch_config(config, run_name=(config or {}).get(\"run_name\") or self.name),
2868         **kwargs,
2869     )

File /usr/local/lib/python3.11/site-packages/langchain_core/runnables/base.py:1862, in Runnable._transform_stream_with_config(self, input, transformer, config, run_type, **kwargs)
1860 try:
1861     while True:
-> 1862         chunk: Output = context.run(next, iterator)  # type: ignore
1863         yield chunk
1864         if final_output_supported:

File /usr/local/lib/python3.11/site-packages/langchain_core/runnables/base.py:2826, in RunnableSequence._transform(self, input, run_manager, config, **kwargs)
2823     else:
2824         final_pipeline = step.transform(final_pipeline, config)
-> 2826 for output in final_pipeline:
2827     yield output

File /usr/local/lib/python3.11/site-packages/langchain_core/runnables/base.py:1157, in Runnable.transform(self, input, config, **kwargs)
1154 final: Input
1155 got_first_val = False
-> 1157 for ichunk in input:
1158     # The default implementation of transform is to buffer input and
1159     # then call stream.
1160     # It'll attempt to gather all input into a single chunk using
1161     # the `+` operator.
1162     # If the input is not addable, then we'll assume that we can
1163     # only operate on the last chunk,
1164     # and we'll iterate until we get to the last chunk.
1165     if not got_first_val:
1166         final = ichunk

File /usr/local/lib/python3.11/site-packages/langchain_core/runnables/base.py:4787, in RunnableBindingBase.transform(self, input, config, **kwargs)
4781 def transform(
4782     self,
4783     input: Iterator[Input],
4784     config: Optional[RunnableConfig] = None,
4785     **kwargs: Any,
4786 ) -> Iterator[Output]:
-> 4787     yield from self.bound.transform(
4788         input,
4789         self._merge_configs(config),
4790         **{**self.kwargs, **kwargs},
4791     )

File /usr/local/lib/python3.11/site-packages/langchain_core/runnables/base.py:1157, in Runnable.transform(self, input, config, **kwargs)
1154 final: Input
1155 got_first_val = False
-> 1157 for ichunk in input:
1158     # The default implementation of transform is to buffer input and
1159     # then call stream.
1160     # It'll attempt to gather all input into a single chunk using
1161     # the `+` operator.
1162     # If the input is not addable, then we'll assume that we can
1163     # only operate on the last chunk,
1164     # and we'll iterate until we get to the last chunk.
1165     if not got_first_val:
1166         final = ichunk

File /usr/local/lib/python3.11/site-packages/langchain_core/runnables/base.py:1175, in Runnable.transform(self, input, config, **kwargs)
1172             final = ichunk
1174 if got_first_val:
-> 1175     yield from self.stream(final, config, **kwargs)

File /usr/local/lib/python3.11/site-packages/langchain_core/runnables/base.py:812, in Runnable.stream(self, input, config, **kwargs)
802 def stream(
803     self,
804     input: Input,
805     config: Optional[RunnableConfig] = None,
806     **kwargs: Optional[Any],
807 ) -> Iterator[Output]:
808     \"\"\"
809     Default implementation of stream, which calls invoke.
810     Subclasses should override this method if they support streaming output.
811     \"\"\"
--> 812     yield self.invoke(input, config, **kwargs)

File /usr/local/lib/python3.11/site-packages/langchain_core/prompts/base.py:179, in BasePromptTemplate.invoke(self, input, config)
177 if self.tags:
178     config[\"tags\"] = config[\"tags\"] + self.tags
--> 179 return self._call_with_config(
180     self._format_prompt_with_error_handling,
181     input,
182     config,
183     run_type=\"prompt\",
184 )

File /usr/local/lib/python3.11/site-packages/langchain_core/runnables/base.py:1593, in Runnable._call_with_config(self, func, input, config, run_type, **kwargs)
1589     context = copy_context()
1590     context.run(_set_config_context, child_config)
1591     output = cast(
1592         Output,
-> 1593         context.run(
1594             call_func_with_variable_args,  # type: ignore[arg-type]
1595             func,  # type: ignore[arg-type]
1596             input,  # type: ignore[arg-type]
1597             config,
1598             run_manager,
1599             **kwargs,
1600         ),
1601     )
1602 except BaseException as e:
1603     run_manager.on_chain_error(e)

File /usr/local/lib/python3.11/site-packages/langchain_core/runnables/config.py:380, in call_func_with_variable_args(func, input, config, run_manager, **kwargs)
378 if run_manager is not None and accepts_run_manager(func):
379     kwargs[\"run_manager\"] = run_manager
--> 380 return func(input, **kwargs)

File /usr/local/lib/python3.11/site-packages/langchain_core/prompts/base.py:154, in BasePromptTemplate._format_prompt_with_error_handling(self, inner_input)
152 def _format_prompt_with_error_handling(self, inner_input: Dict) -> PromptValue:
153     _inner_input = self._validate_input(inner_input)
--> 154     return self.format_prompt(**_inner_input)

File /usr/local/lib/python3.11/site-packages/langchain_core/prompts/chat.py:765, in BaseChatPromptTemplate.format_prompt(self, **kwargs)
756 def format_prompt(self, **kwargs: Any) -> PromptValue:
757     \"\"\"Format prompt. Should return a PromptValue.
758 
759     Args:
(...)
763         PromptValue.
764     \"\"\"
--> 765     messages = self.format_messages(**kwargs)
766     return ChatPromptValue(messages=messages)

File /usr/local/lib/python3.11/site-packages/langchain_core/prompts/chat.py:1142, in ChatPromptTemplate.format_messages(self, **kwargs)
1138     result.extend([message_template])
1139 elif isinstance(
1140     message_template, (BaseMessagePromptTemplate, BaseChatPromptTemplate)
1141 ):
-> 1142     message = message_template.format_messages(**kwargs)
1143     result.extend(message)
1144 else:

File /usr/local/lib/python3.11/site-packages/langchain_core/prompts/chat.py:235, in MessagesPlaceholder.format_messages(self, **kwargs)
230 if not isinstance(value, list):
231     raise ValueError(
232         f\"variable {self.variable_name} should be a list of base messages, \"
233         f\"got {value}\"
234     )
--> 235 value = convert_to_messages(value)
236 if self.n_messages:
237     value = value[-self.n_messages :]

File /usr/local/lib/python3.11/site-packages/langchain_core/messages/utils.py:296, in convert_to_messages(messages)
285 def convert_to_messages(
286     messages: Sequence[MessageLikeRepresentation],
287 ) -> List[BaseMessage]:
288     \"\"\"Convert a sequence of messages to a list of messages.
289 
290     Args:
(...)
294         List of messages (BaseMessages).
295     \"\"\"
--> 296     return [_convert_to_message(m) for m in messages]

File /usr/local/lib/python3.11/site-packages/langchain_core/messages/utils.py:296, in <listcomp>(.0)
285 def convert_to_messages(
286     messages: Sequence[MessageLikeRepresentation],
287 ) -> List[BaseMessage]:
288     \"\"\"Convert a sequence of messages to a list of messages.
289 
290     Args:
(...)
294         List of messages (BaseMessages).
295     \"\"\"
--> 296     return [_convert_to_message(m) for m in messages]

File /usr/local/lib/python3.11/site-packages/langchain_core/messages/utils.py:273, in _convert_to_message(message)
271         msg_content = msg_kwargs.pop(\"content\")
272     except KeyError:
--> 273         raise ValueError(
274             f\"Message dict must contain 'role' and 'content' keys, got {message}\"
275         )
276     _message = _create_message_from_message_type(
277         msg_type, msg_content, **msg_kwargs
278     )
279 else:

ValueError: Message dict must contain 'role' and 'content' keys, got {'text': '\n\nThe magic_function applied to the input of 3 returns the output of 5.', 'type': 'text', 'index': 0}"
}

...and it is not clear why.
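For reference, this is roughly the code I am running: a reconstruction of the memory example from the linked docs page, with only the model construction swapped out (my_gcp_project is a placeholder):

from langchain.agents import AgentExecutor, create_tool_calling_agent
from langchain_core.chat_history import InMemoryChatMessageHistory
from langchain_core.prompts import ChatPromptTemplate
from langchain_core.runnables.history import RunnableWithMessageHistory
from langchain_core.tools import tool
from langchain_google_vertexai.model_garden import ChatAnthropicVertex

# Only this part differs from the docs example, which uses ChatOpenAI(model="gpt-4o").
model = ChatAnthropicVertex(
    model_name="claude-3-haiku@20240307",
    location="us-east5",
    project="my_gcp_project",  # placeholder
)

@tool
def magic_function(input: int) -> int:
    """Applies a magic function to an input."""
    return input + 2

tools = [magic_function]

prompt = ChatPromptTemplate.from_messages(
    [
        ("system", "You are a helpful assistant."),
        ("placeholder", "{chat_history}"),
        ("human", "{input}"),
        ("placeholder", "{agent_scratchpad}"),
    ]
)

memory = InMemoryChatMessageHistory()
agent = create_tool_calling_agent(model, tools, prompt)
agent_executor = AgentExecutor(agent=agent, tools=tools)

agent_with_chat_history = RunnableWithMessageHistory(
    agent_executor,
    # As in the docs example: a single in-memory history, regardless of session id.
    lambda session_id: memory,
    input_messages_key="input",
    history_messages_key="chat_history",
)

config = {"configurable": {"session_id": "test-session"}}
print(
    agent_with_chat_history.invoke(
        {"input": "Hi, I'm polly! What's the output of magic_function of 3?"}, config
    )["output"]
)
print("---")
# The second call, which reads the stored history back, is where the ValueError is raised.
print(agent_with_chat_history.invoke({"input": "Remember my name?"}, config)["output"])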
My installed langchain packages:

langchain                               0.2.7
langchain-community                     0.2.7
langchain-core                          0.2.12
langchain-google-vertexai               1.0.6
langchain-groq                          0.1.6
langchain-openai                        0.1.14
langchain-text-splitters                0.2.2
langchainhub                            0.1.20

Note: the example code works fine when using a Claude-3 model instead of model = ChatOpenAI(model="gpt-4o").

Ideas or requests for content:

It would be very helpful to show how the memory example code needs to change depending on which LLM is used; a sketch of what I have in mind follows.
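For example, something along these lines (an untested sketch of my own, not an official API): flatten Anthropic-style list-of-content-block outputs to plain text before RunnableWithMessageHistory records them, assuming the history wrapper stores whatever the wrapped runnable returns:

from langchain_core.runnables import RunnableLambda

def normalize_output(result: dict) -> dict:
    # Anthropic models return 'output' as a list of content blocks like
    # {'text': ..., 'type': 'text', 'index': 0}; OpenAI returns a plain string.
    output = result.get("output")
    if isinstance(output, list):
        result["output"] = "".join(
            block.get("text", "")
            for block in output
            if isinstance(block, dict) and block.get("type") == "text"
        )
    return result

agent_with_chat_history = RunnableWithMessageHistory(
    agent_executor | RunnableLambda(normalize_output),
    lambda session_id: memory,
    input_messages_key="input",
    history_messages_key="chat_history",
)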

Answer 1 (qv7cva1a):

The generated responses differ noticeably depending on the model:

gpt-4o

{'input': "Hi, I'm polly! What's the output of magic_function of 3?",
'chat_history': [],
'output': 'Hi Polly! The output of the magic function for the input 3 is 5.'}

Claude-Haiku

{'input': "Hi, I'm polly! What's the output of magic_function of 3?",
'chat_history': [],
'output': [{'text': '\n\nThe output of the magic_function for the input 3 is 5.',
'type': 'text',
'index': 0}]}
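That is, gpt-4o returns 'output' as a plain string, while Claude-Haiku returns a list of content-block dicts, and downstream code that expects a string (here, the chat-history machinery) fails on the list. A model-agnostic way to read the text out of either shape (a sketch, where result stands for the dict returned by invoke):

# 'result' is the dict returned by agent_with_chat_history.invoke(...).
output = result["output"]
if isinstance(output, str):  # gpt-4o style: plain string
    text = output
else:  # Claude style: list of content blocks
    text = "".join(
        block.get("text", "")
        for block in output
        if isinstance(block, dict) and block.get("type") == "text"
    )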
