Bug Description
LocalProtocolError: Illegal header value b'Bearer '
The above exception was the direct cause of the following exception:
APIConnectionError Traceback (most recent call last)
Cell In[21], line 1 ---> 1 agent.chat("What's 212 multiplied by 122? Make sure to use Tools")
File c:\Users\Admin\AppData\Local\Programs\Python\Python311\Lib\site-packages\llama_index\callbacks\utils.py:41, in trace_method..decorator..wrapper(self, *args, **kwargs)
39 callback_manager = cast(CallbackManager, callback_manager)
40 with callback_manager.as_trace(trace_id):
---> 41 return func(self, *args, **kwargs)
File c:\Users\Admin\AppData\Local\Programs\Python\Python311\Lib\site-packages\llama_index\agent\legacy\openai_agent.py:433, in BaseOpenAIAgent.chat(self, message, chat_history, tool_choice)
422 @trace_method("chat")
423 def chat(
424 self,
(...)
427 tool_choice: Union[str, dict] = "auto",
428 ) -> AgentChatResponse:
429 with self.callback_manager.event(
430 CBEventType.AGENT_STEP,
431 payload={EventPayload.MESSAGES: [message]},
432 ) as e:
--> 433 chat_response = self._chat(
434 message, chat_history, tool_choice, mode=ChatResponseMode.WAIT
435 )
436 assert isinstance(chat_response, AgentChatResponse)
437 e.on_end(payload={EventPayload.RESPONSE: chat_response})
File c:\Users\Admin\AppData\Local\Programs\Python\Python311\Lib\site-packages\llama_index\agent\legacy\openai_agent.py:355, in BaseOpenAIAgent._chat(self, message, chat_history, tool_choice, mode)
351 print(f"STARTING TURN {ix}\n---------------\n")
352 llm_chat_kwargs = self._get_llm_chat_kwargs(
353     openai_tools, current_tool_choice
354 )
--> 355 agent_chat_response = self._get_agent_response(mode=mode, **llm_chat_kwargs)
356 if not self._should_continue(self.latest_tool_calls, n_function_calls):
357 logger.debug("Break: should continue False")
File c:\Users\Admin\AppData\Local\Programs\Python\Python311\Lib\site-packages\llama_index\agent\legacy\openai_agent.py:317, in BaseOpenAIAgent._get_agent_response(self, mode, **llm_chat_kwargs)
313 def _get_agent_response(self, mode: ChatResponseMode, **llm_chat_kwargs: Any) -> AGENT_CHAT_RESPONSE_TYPE:
315 if mode == ChatResponseMode.WAIT:
--> 316 chat_response: ChatResponse = self._llm.chat(**llm_chat_kwargs)
317 return self._process_message(chat_response)
File c:\Users\Admin\AppData\Local\Programs\Python\Python311\Lib\site-packages\llama_index\llms\base.py:100, in llm_chat_callback..wrap..wrapped_llm_chat(self, messages, **kwargs)
91 with wrapper_logic(self) as callback_manager:
92 event_id = callback_manager.on_event_start(
93 CBEventType.LLM,
94 payload={
(...)
---> 98 },
99 ),
File c:\Users\Admin\AppData\Local\Programs\Python\Python311\Lib\site-packages\llama_index\llms\openai.py:237, in OpenAI.chat(self, messages, **kwargs)
237 else:
---> 238     chat_fn = completion_to_chat_decorator(self._complete)
(The traceback continues inside the OpenAI client library, openai/resources/chat/completions.py and openai/_base_client.py:)
681 "temperature": temperature,
682 "tool_choice": tool_choice,
683 "tools": tools,
684 "top_logprobs": top_logprobs,
685 "top_p": top_p,
686 "user": user,
687 },
688 completion_create_params.CompletionCreateParams,
689 ),
690 options=make_request_options(
691 extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
692 ),
693 cast_to=ChatCompletion,
694 stream=stream or False,
695 stream_cls=Stream[ChatCompletionChunk],
696 )
948 response_headers=None,
949 )
951 log.debug("Raising connection error")
--> 952 raise APIConnectionError(request=request) from err
954 log.debug(
955 'HTTP Request: %s %s "%i %s"', request.method, request.url, response.status_code, response.reason_phrase
956 )
958 try:
3 Answers
Answer #1 (by vnjpjtjt):
This error usually means that your API key is not set (the header value `b'Bearer '` is empty because no key was supplied).
Try passing the LLM explicitly as a kwarg to the retriever agent; otherwise it may default to OpenAI with an unset key.
Answer #2 (by bpzcxfmw): (no content)
Answer #3 (by vohkndzv):
I ran into the same error.
In my case, however, the error occurs on the LLM model's
.complete()
method — the model is an OpenAIMultiModal, where image_documents
is passed a list of ImageDocument
objects. Could that be related?