Bug description
I tried to use OpenAILike to run the example at https://docs.llamaindex.ai/en/stable/examples/query_engine/sec_tables/tesla_10q_table/, but the synchronous version of the code fails with two errors:
RuntimeError: no running event loop
AttributeError: 'str' object has no attribute 'choices'
So I wrapped the code in asyncio, but then hit another error:
RuntimeWarning: coroutine 'BaseElementNodeParser.extract_table_summaries.<locals>._get_table_output' was never awaited
Version
0.10.37
Steps to reproduce
With asyncio:
import asyncio
import logging
import os
import pickle
import sys
from pathlib import Path

import pandas as pd
from llama_index.core import Settings
from llama_index.llms.openai_like import OpenAILike
from llama_index.readers.file import FlatReader
from pydantic import BaseModel
from unstructured.partition.html import partition_html

logging.basicConfig(stream=sys.stdout, level=logging.DEBUG)
logging.getLogger().addHandler(logging.StreamHandler(stream=sys.stdout))

Settings.llm = OpenAILike(
    model="Yi-34B-Chat",
    is_chat_model=True,
    is_function_calling_model=True,
    api_base="xxxxxxxxxxxxxxxxxxxxxxx",
    api_key="xxxxxxxxxxxxxxxxxxxxxxx",
    temperature=0.2,
    context_window=4000,
)


async def main():
    reader = FlatReader()
    docs_2021 = reader.load_data(Path(r"examples\tesla_2021_10k.htm"))
    docs_2020 = reader.load_data(Path(r"examples\tesla_2020_10k.htm"))

    from llama_index.core.node_parser import UnstructuredElementNodeParser

    node_parser = UnstructuredElementNodeParser()
    if not os.path.exists("2020_nodes.pkl"):
        raw_nodes_2020 = node_parser.get_nodes_from_documents(docs_2020)
        pickle.dump(raw_nodes_2020, open("2020_nodes.pkl", "wb"))
    else:
        raw_nodes_2020 = pickle.load(open("2020_nodes.pkl", "rb"))


asyncio.run(main())
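Side note: the "Nested async detected" traceback below recommends using async entry points where possible. A minimal sketch of that idea, under the assumption that the installed llama-index-core exposes an async counterpart aget_nodes_from_documents (this method name is an assumption, not verified against 0.10.37):

import asyncio
from pathlib import Path

from llama_index.core.node_parser import UnstructuredElementNodeParser
from llama_index.readers.file import FlatReader


async def parse_2020():
    reader = FlatReader()
    docs_2020 = reader.load_data(Path(r"examples\tesla_2020_10k.htm"))
    node_parser = UnstructuredElementNodeParser()
    # Assumed async variant: awaiting it keeps all work on the single
    # running event loop, instead of the sync method spawning a nested
    # asyncio.run() of its own.
    return await node_parser.aget_nodes_from_documents(docs_2020)


raw_nodes_2020 = asyncio.run(parse_2020())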
Code without async:
import logging
import os
import pickle
import sys
from pathlib import Path

import pandas as pd
from llama_index.core import Settings
from llama_index.llms.openai_like import OpenAILike
from llama_index.readers.file import FlatReader
from pydantic import BaseModel
from unstructured.partition.html import partition_html

logging.basicConfig(stream=sys.stdout, level=logging.DEBUG)
logging.getLogger().addHandler(logging.StreamHandler(stream=sys.stdout))

Settings.llm = OpenAILike(
    model="Yi-34B-Chat",
    is_chat_model=True,
    is_function_calling_model=True,
    api_base="xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx",
    api_key="xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx",
    temperature=0.2,
    context_window=4000,
)

reader = FlatReader()
docs_2021 = reader.load_data(Path(r"examples\tesla_2021_10k.htm"))
docs_2020 = reader.load_data(Path(r"examples\tesla_2020_10k.htm"))

from llama_index.core.node_parser import UnstructuredElementNodeParser

node_parser = UnstructuredElementNodeParser()
if not os.path.exists("2020_nodes.pkl"):
    raw_nodes_2020 = node_parser.get_nodes_from_documents(docs_2020)
    pickle.dump(raw_nodes_2020, open("2020_nodes.pkl", "wb"))
else:
    raw_nodes_2020 = pickle.load(open("2020_nodes.pkl", "rb"))
Relevant logs / traceback
With async in the code:
Traceback (most recent call last):
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\site-packages\llama_index\core\async_utils.py", line 31, in asyncio_run
raise RuntimeError(
RuntimeError: Nested async detected. Use async functions where possible (`aquery`, `aretrieve`, `arun`, etc.). Otherwise, use `import nest_asyncio; nest_asyncio.apply()` to enable nested async or use in a jupyter notebook.
If you are experiencing while using async functions and not in a notebook, please raise an issue on github, as it indicates a bad design pattern.
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "D:\4-Working-Project\LLM-RAG-Knowloadege-Base\examples\parse_html_table_unstructured.py", line 47, in <module>
asyncio.run(main())
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\asyncio\runners.py", line 44, in run
return loop.run_until_complete(main)
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\asyncio\base_events.py", line 649, in run_until_complete
return future.result()
File "D:\4-Working-Project\LLM-RAG-Knowloadege-Base\examples\parse_html_table_unstructured.py", line 41, in main
raw_nodes_2020 = node_parser.get_nodes_from_documents(docs_2020)
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\site-packages\llama_index\core\node_parser\interface.py", line 129, in get_nodes_from_documents
nodes = self._parse_nodes(documents, show_progress=show_progress, **kwargs)
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\site-packages\llama_index\core\node_parser\relational\base_element.py", line 120, in _parse_nodes
nodes = self.get_nodes_from_node(node)
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\site-packages\llama_index\core\node_parser\relational\unstructured_element.py", line 67, in get_nodes_from_node
self.extract_table_summaries(table_elements)
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\site-packages\llama_index\core\node_parser\relational\base_element.py", line 208, in extract_table_summaries
summary_outputs = asyncio_run(summary_co)
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\site-packages\llama_index\core\async_utils.py", line 42, in asyncio_run
return asyncio.run(coro)
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\asyncio\runners.py", line 33, in run
raise RuntimeError(
RuntimeError: asyncio.run() cannot be called from a running event loop
sys:1: RuntimeWarning: coroutine 'Dispatcher.span.<locals>.async_wrapper' was never awaited
sys:1: RuntimeWarning: coroutine 'BaseElementNodeParser.extract_table_summaries.<locals>._get_table_output' was never awaited
Without async in the code:
Traceback (most recent call last):
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\site-packages\llama_index\core\async_utils.py", line 29, in asyncio_run
loop = asyncio.get_running_loop()
RuntimeError: no running event loop
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "D:\4-Working-Project\LLM-RAG-Knowloadege-Base\examples\parse_html_table_unstructured.py", line 41, in <module>
raw_nodes_2020 = node_parser.get_nodes_from_documents(docs_2020)
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\site-packages\llama_index\core\node_parser\interface.py", line 129, in get_nodes_from_documents
nodes = self._parse_nodes(documents, show_progress=show_progress, **kwargs)
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\site-packages\llama_index\core\node_parser\relational\base_element.py", line 120, in _parse_nodes
nodes = self.get_nodes_from_node(node)
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\site-packages\llama_index\core\node_parser\relational\unstructured_element.py", line 67, in get_nodes_from_node
self.extract_table_summaries(table_elements)
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\site-packages\llama_index\core\node_parser\relational\base_element.py", line 208, in extract_table_summaries
summary_outputs = asyncio_run(summary_co)
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\site-packages\llama_index\core\async_utils.py", line 42, in asyncio_run
return asyncio.run(coro)
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\asyncio\runners.py", line 44, in run
return loop.run_until_complete(main)
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\asyncio\base_events.py", line 649, in run_until_complete
return future.result()
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\site-packages\llama_index\core\instrumentation\dispatcher.py", line 248, in async_wrapper
result = await func(*args, **kwargs)
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\site-packages\llama_index\core\async_utils.py", line 144, in run_jobs
results = await tqdm_asyncio.gather(*pool_jobs, desc=desc)
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\site-packages\tqdm\asyncio.py", line 79, in gather
res = [await f for f in cls.as_completed(ifs, loop=loop, timeout=timeout,
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\site-packages\tqdm\asyncio.py", line 79, in <listcomp>
res = [await f for f in cls.as_completed(ifs, loop=loop, timeout=timeout,
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\asyncio\tasks.py", line 571, in _wait_for_one
return f.result() # May raise f.exception().
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\site-packages\tqdm\asyncio.py", line 76, in wrap_awaitable
return i, await f
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\site-packages\llama_index\core\instrumentation\dispatcher.py", line 248, in async_wrapper
result = await func(*args, **kwargs)
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\site-packages\llama_index\core\async_utils.py", line 137, in worker
return await job
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\site-packages\llama_index\core\node_parser\relational\base_element.py", line 192, in _get_table_output
response = await query_engine.aquery(summary_query_str)
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\site-packages\llama_index\core\instrumentation\dispatcher.py", line 248, in async_wrapper
result = await func(*args, **kwargs)
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\site-packages\llama_index\core\base\base_query_engine.py", line 63, in aquery
query_result = await self._aquery(str_or_query_bundle)
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\site-packages\llama_index\core\instrumentation\dispatcher.py", line 248, in async_wrapper
result = await func(*args, **kwargs)
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\site-packages\llama_index\core\query_engine\retriever_query_engine.py", line 206, in _aquery
response = await self._response_synthesizer.asynthesize(
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\site-packages\llama_index\core\instrumentation\dispatcher.py", line 248, in async_wrapper
result = await func(*args, **kwargs)
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\site-packages\llama_index\core\response_synthesizers\base.py", line 305, in asynthesize
response_str = await self.aget_response(
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\site-packages\llama_index\core\instrumentation\dispatcher.py", line 248, in async_wrapper
result = await func(*args, **kwargs)
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\site-packages\llama_index\core\response_synthesizers\compact_and_refine.py", line 23, in aget_response
return await super().aget_response(
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\site-packages\llama_index\core\instrumentation\dispatcher.py", line 248, in async_wrapper
result = await func(*args, **kwargs)
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\site-packages\llama_index\core\response_synthesizers\refine.py", line 363, in aget_response
response = await self._agive_response_single(
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\site-packages\llama_index\core\response_synthesizers\refine.py", line 482, in _agive_response_single
structured_response = await program.acall(
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\site-packages\llama_index\core\response_synthesizers\refine.py", line 92, in acall
answer = await self._llm.astructured_predict(
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\site-packages\llama_index\core\instrumentation\dispatcher.py", line 248, in async_wrapper
result = await func(*args, **kwargs)
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\site-packages\llama_index\core\llms\llm.py", line 395, in astructured_predict
result = await program.acall(**prompt_args)
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\site-packages\llama_index\program\openai\base.py", line 209, in acall
chat_response = await self._llm.achat(
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\site-packages\llama_index\core\instrumentation\dispatcher.py", line 248, in async_wrapper
result = await func(*args, **kwargs)
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\site-packages\llama_index\llms\openai_like\base.py", line 147, in achat
return await super().achat(messages, **kwargs)
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\site-packages\llama_index\core\instrumentation\dispatcher.py", line 248, in async_wrapper
result = await func(*args, **kwargs)
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\site-packages\llama_index\core\llms\callbacks.py", line 75, in wrapped_async_llm_chat
f_return_val = await f(_self, messages, **kwargs)
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\site-packages\llama_index\llms\openai\base.py", line 598, in achat
return await achat_fn(messages, **kwargs)
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\site-packages\tenacity\_asyncio.py", line 142, in async_wrapped
return await fn(*args, **kwargs)
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\site-packages\tenacity\_asyncio.py", line 58, in __call__
do = await self.iter(retry_state=retry_state)
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\site-packages\tenacity\_asyncio.py", line 110, in iter
result = await action(retry_state)
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\site-packages\tenacity\_asyncio.py", line 78, in inner
return fn(*args, **kwargs)
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\site-packages\tenacity\__init__.py", line 390, in <lambda>
self._add_action_func(lambda rs: rs.outcome.result())
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\concurrent\futures\_base.py", line 451, in result
return self.__get_result()
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\concurrent\futures\_base.py", line 403, in __get_result
raise self._exception
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\site-packages\tenacity\_asyncio.py", line 61, in __call__
result = await fn(*args, **kwargs)
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\site-packages\llama_index\llms\openai\base.py", line 656, in _achat
openai_message = response.choices[0].message
AttributeError: 'str' object has no attribute 'choices'
sys:1: RuntimeWarning: coroutine 'BaseElementNodeParser.extract_table_summaries.<locals>._get_table_output' was never awaited
0%| | 0/91 [00:01<?, ?it/s]
ERROR:asyncio:Task exception was never retrieved
future: <Task finished name='Task-3' coro=<tqdm_asyncio.gather.<locals>.wrap_awaitable() done, defined at C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\site-packages\tqdm\asyncio.py:75> exception=AttributeError("'str' object has no attribute 'choices'")>
Traceback (most recent call last):
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\site-packages\llama_index\core\async_utils.py", line 29, in asyncio_run
loop = asyncio.get_running_loop()
RuntimeError: no running event loop
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\site-packages\tqdm\asyncio.py", line 76, in wrap_awaitable
return i, await f
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\site-packages\llama_index\core\instrumentation\dispatcher.py", line 248, in async_wrapper
result = await func(*args, **kwargs)
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\site-packages\llama_index\core\async_utils.py", line 137, in worker
return await job
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\site-packages\llama_index\core\node_parser\relational\base_element.py", line 192, in _get_table_output
response = await query_engine.aquery(summary_query_str)
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\site-packages\llama_index\core\instrumentation\dispatcher.py", line 248, in async_wrapper
result = await func(*args, **kwargs)
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\site-packages\llama_index\core\base\base_query_engine.py", line 63, in aquery
query_result = await self._aquery(str_or_query_bundle)
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\site-packages\llama_index\core\instrumentation\dispatcher.py", line 248, in async_wrapper
result = await func(*args, **kwargs)
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\site-packages\llama_index\core\query_engine\retriever_query_engine.py", line 206, in _aquery
response = await self._response_synthesizer.asynthesize(
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\site-packages\llama_index\core\instrumentation\dispatcher.py", line 248, in async_wrapper
result = await func(*args, **kwargs)
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\site-packages\llama_index\core\response_synthesizers\base.py", line 305, in asynthesize
response_str = await self.aget_response(
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\site-packages\llama_index\core\instrumentation\dispatcher.py", line 248, in async_wrapper
result = await func(*args, **kwargs)
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\site-packages\llama_index\core\response_synthesizers\compact_and_refine.py", line 23, in aget_response
return await super().aget_response(
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\site-packages\llama_index\core\instrumentation\dispatcher.py", line 248, in async_wrapper
result = await func(*args, **kwargs)
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\site-packages\llama_index\core\response_synthesizers\refine.py", line 363, in aget_response
response = await self._agive_response_single(
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\site-packages\llama_index\core\response_synthesizers\refine.py", line 482, in _agive_response_single
structured_response = await program.acall(
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\site-packages\llama_index\core\response_synthesizers\refine.py", line 92, in acall
answer = await self._llm.astructured_predict(
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\site-packages\llama_index\core\instrumentation\dispatcher.py", line 248, in async_wrapper
result = await func(*args, **kwargs)
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\site-packages\llama_index\core\llms\llm.py", line 395, in astructured_predict
result = await program.acall(**prompt_args)
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\site-packages\llama_index\program\openai\base.py", line 209, in acall
chat_response = await self._llm.achat(
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\site-packages\llama_index\core\instrumentation\dispatcher.py", line 248, in async_wrapper
result = await func(*args, **kwargs)
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\site-packages\llama_index\llms\openai_like\base.py", line 147, in achat
return await super().achat(messages, **kwargs)
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\site-packages\llama_index\core\instrumentation\dispatcher.py", line 248, in async_wrapper
result = await func(*args, **kwargs)
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\site-packages\llama_index\core\llms\callbacks.py", line 75, in wrapped_async_llm_chat
f_return_val = await f(_self, messages, **kwargs)
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\site-packages\llama_index\llms\openai\base.py", line 598, in achat
return await achat_fn(messages, **kwargs)
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\site-packages\tenacity\_asyncio.py", line 142, in async_wrapped
return await fn(*args, **kwargs)
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\site-packages\tenacity\_asyncio.py", line 58, in __call__
do = await self.iter(retry_state=retry_state)
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\site-packages\tenacity\_asyncio.py", line 110, in iter
result = await action(retry_state)
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\site-packages\tenacity\_asyncio.py", line 78, in inner
return fn(*args, **kwargs)
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\site-packages\tenacity\__init__.py", line 390, in <lambda>
self._add_action_func(lambda rs: rs.outcome.result())
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\concurrent\futures\_base.py", line 451, in result
return self.__get_result()
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\concurrent\futures\_base.py", line 403, in __get_result
raise self._exception
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\site-packages\tenacity\_asyncio.py", line 61, in __call__
result = await fn(*args, **kwargs)
File "C:\Users\yong\miniconda3\envs\llm-rga-kb\lib\site-packages\llama_index\llms\openai\base.py", line 656, in _achat
openai_message = response.choices[0].message
AttributeError: 'str' object has no attribute 'choices'
2 answers

zazmityj (#1)
@shizidushu as the first error stated, I'm pretty sure you just need to enable nested async
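For reference, a minimal sketch of that suggestion applied to the top of the synchronous script (assumes the nest_asyncio package is installed; this mirrors the hint printed in the RuntimeError above):

import nest_asyncio

# Patch asyncio so that asyncio.run() can be invoked from code that is
# already running inside an event loop; call this once, before any
# LlamaIndex parsing or querying.
nest_asyncio.apply()

# ... the rest of the script (Settings.llm = OpenAILike(...), FlatReader,
# UnstructuredElementNodeParser, ...) stays unchanged.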
2admgd59 (#2)
After adding the code you mentioned and running the async version of the code, here is the traceback: