CancelledError: Cancelled by cancel scope 7fd22135b490 #3557

Status: Open
Labels: bug (Something isn't working)

linear bot opened this issue Jan 27, 2025 · 2 comments

linear bot commented Jan 27, 2025

Sentry Issue: PYTHON-FASTAPI-19X

CancelledError: Cancelled by cancel scope 7fd22135b490
  File "/usr/local/lib/python3.11/site-packages/langgraph/utils/runnable.py", line 576, in astream
    async for chunk in aiterator:
  File "langchain_core/tracers/log_stream.py", line 254, in tap_output_aiter
    async for chunk in output:
  File "langchain_core/runnables/base.py", line 1455, in atransform
    async for ichunk in input:
  File "langchain_core/runnables/base.py", line 1455, in atransform
    async for ichunk in input:
  File "langchain_core/runnables/base.py", line 1018, in astream
    yield await self.ainvoke(input, config, **kwargs)
  File "/usr/local/lib/python3.11/site-packages/langgraph/utils/runnable.py", line 236, in ainvoke
    ret = await asyncio.create_task(coro, context=context)
  File "quivr_core/rag/quivr_rag_langgraph.py", line 650, in retrieve
    await asyncio.gather(*(task[0] for task in async_jobs))
  File "langchain_core/retrievers.py", line 310, in ainvoke
    result = await self._aget_relevant_documents(
  File "langchain/retrievers/contextual_compression.py", line 70, in _aget_relevant_documents
    docs = await self.base_retriever.ainvoke(
  File "langchain_core/retrievers.py", line 310, in ainvoke
    result = await self._aget_relevant_documents(
  File "langchain_core/vectorstores/base.py", line 1101, in _aget_relevant_documents
    docs = await self.vectorstore.asimilarity_search(
  File "quivr_api/vectorstore/supabase.py", line 80, in asimilarity_search
    match_result = await self.vector_service.similarity_search(
  File "quivr_api/modules/vector/service/vector_service.py", line 55, in similarity_search
    vectors = await self.repository.similarity_search(
  File "quivr_api/modules/vector/repository/vectors_repository.py", line 105, in similarity_search
    result = await self.session.execute(sql_query, params=params)
  File "sqlmodel/ext/asyncio/session.py", line 143, in execute
    return await super().execute(
  File "sqlalchemy/ext/asyncio/session.py", line 461, in execute
    result = await greenlet_spawn(
  File "sqlalchemy/util/_concurrency_py3k.py", line 201, in greenlet_spawn
    result = context.throw(*sys.exc_info())
  File "sqlmodel/orm/session.py", line 127, in execute
    return super().execute(
  File "sqlalchemy/orm/session.py", line 2362, in execute
    return self._execute_internal(
  File "sqlalchemy/orm/session.py", line 2256, in _execute_internal
    result = conn.execute(
  File "sqlalchemy/engine/base.py", line 1418, in execute
    return meth(
  File "sqlalchemy/sql/elements.py", line 515, in _execute_on_connection
    return connection._execute_clauseelement(
  File "sqlalchemy/engine/base.py", line 1640, in _execute_clauseelement
    ret = self._execute_context(
  File "sqlalchemy/engine/base.py", line 1846, in _execute_context
    return self._exec_single_context(
  File "sqlalchemy/engine/base.py", line 1986, in _exec_single_context
    self._handle_dbapi_exception(
  File "sqlalchemy/engine/base.py", line 2358, in _handle_dbapi_exception
    raise exc_info[1].with_traceback(exc_info[2])
  File "sqlalchemy/engine/base.py", line 1967, in _exec_single_context
    self.dialect.do_execute(
  File "sqlalchemy/engine/default.py", line 941, in do_execute
    cursor.execute(statement, parameters)
  File "sqlalchemy/dialects/postgresql/asyncpg.py", line 568, in execute
    self._adapt_connection.await_(
  File "sqlalchemy/util/_concurrency_py3k.py", line 132, in await_only
    return current.parent.switch(awaitable)  # type: ignore[no-any-return,attr-defined] # noqa: E501
  File "sqlalchemy/util/_concurrency_py3k.py", line 196, in greenlet_spawn
    value = await result
  File "sqlalchemy/dialects/postgresql/asyncpg.py", line 510, in _prepare_and_execute
    prepared_stmt, attributes = await adapt_connection._prepare(
  File "sqlalchemy/dialects/postgresql/asyncpg.py", line 756, in _prepare
    prepared_stmt = await self._connection.prepare(
  File "asyncpg/connection.py", line 635, in prepare
    return await self._prepare(
  File "asyncpg/connection.py", line 653, in _prepare
    stmt = await self._get_statement(
  File "asyncpg/connection.py", line 432, in _get_statement
    statement = await self._protocol.prepare(
  File "asyncpg/protocol/protocol.pyx", line 165, in prepare
27T13:18:43.008107Z [error    ] [quivr_api.modules.rag_service.rag_service] msg=Exception during stream : string indices must be integers, not 'str'
╭─────────────────────────────── Traceback (most recent call last) ────────────────────────────────╮
│ /app/api/quivr_api/modules/rag_service/rag_service.py:321 in generate_answer_stream              │
│                                                                                                  │
│   318 │   │   │   │   │   brain_name=self.model_to_use,                                          │
│   319 │   │   │   │   )                                                                          │
│   320 │   │   │                                                                                  │
│ ❱ 321 │   │   │   async for response in brain_core.ask_streaming(                                │
│   322 │   │   │   │   question=question,                                                         │
│   323 │   │   │   │   retrieval_config=retrieval_config,                                         │
│   324 │   │   │   │   rag_pipeline=QuivrQARAGLangGraph,                                          │
│                                                                                                  │
│ /app/core/core/quivr_core/brain/brain.py:545 in ask_streaming                                    │
│                                                                                                  │
│   542 │   │   │   "langfuse_user_id": str(self.user_id),                                         │
│   543 │   │   │   "langfuse_session_id": str(self.chat_id),                                      │
│   544 │   │   }                                                                                  │
│ ❱ 545 │   │   async for response in rag_instance.answer_astream(                                 │
│   546 │   │   │   question=question,                                                             │
│   547 │   │   │   history=chat_history,                                                          │
│   548 │   │   │   list_files=list_files,                                                         │
│                                                                                                  │
│ /app/core/core/quivr_core/rag/quivr_rag_langgraph.py:967 in answer_astream                       │
│                                                                                                  │
│    964 │   │   │                                                                                 │
│    965 │   │   │   if self._is_final_node_and_chat_model_stream(event):                          │
│    966 │   │   │   │   chunk = event["data"]["chunk"]                                            │
│ ❱  967 │   │   │   │   rolling_message, new_content, previous_content = parse_chunk_response(    │
│    968 │   │   │   │   │   rolling_message,                                                      │
│    969 │   │   │   │   │   chunk,                                                                │
│    970 │   │   │   │   │   self.llm_endpoint.supports_func_calling(),                            │
│                                                                                                  │
│ /app/core/core/quivr_core/rag/utils.py:98 in parse_chunk_response                                │
│                                                                                                  │
│    95 │   Returns:                                                                               │
│    96 │   │   Tuple of (updated rolling message, new content only, full content)                 │
│    97 │   """                                                                                    │
│ ❱  98 │   rolling_msg += raw_chunk                                                               │
│    99 │                                                                                          │
│   100 │   tool_calls = rolling_msg.tool_calls                                                    │
│   101                                                                                            │
│                                                                                                  │
│ /usr/local/lib/python3.11/site-packages/langchain_core/messages/ai.py:395 in __add__             │
│                                                                                                  │
│   392 │                                                                                          │
│   393 │   def __add__(self, other: Any) -> BaseMessageChunk:  # type: ignore                     │
│   394 │   │   if isinstance(other, AIMessageChunk):                                              │
│ ❱ 395 │   │   │   return add_ai_message_chunks(self, other)                                      │
│   396 │   │   elif isinstance(other, (list, tuple)) and all(                                     │
│   397 │   │   │   isinstance(o, AIMessageChunk) for o in other                                   │
│   398 │   │   ):                                                                                 │
│                                                                                                  │
│ /usr/local/lib/python3.11/site-packages/langchain_core/messages/ai.py:411 in                     │
│ add_ai_message_chunks                                                                            │
│                                                                                                  │
│   408 │   │   msg = "Cannot concatenate AIMessageChunks with different example values."          │
│   409 │   │   raise ValueError(msg)                                                              │
│   410 │                                                                                          │
│ ❱ 411 │   content = merge_content(left.content, *(o.content for o in others))                    │
│   412 │   additional_kwargs = merge_dicts(                                                       │
│   413 │   │   left.additional_kwargs, *(o.additional_kwargs for o in others)                     │
│   414 │   )                                                                                      │
│                                                                                                  │
│ /usr/local/lib/python3.11/site-packages/langchain_core/messages/base.py:147 in merge_content     │
│                                                                                                  │
│   144 │   │   │   │   merged = [merged] + content  # type: ignore                                │
│   145 │   │   elif isinstance(content, list):                                                    │
│   146 │   │   │   # If both are lists                                                            │
│ ❱ 147 │   │   │   merged = merge_lists(cast(list, merged), content)  # type: ignore              │
│   148 │   │   # If the first content is a list, and the second content is a string               │
│   149 │   │   else:                                                                              │
│   150 │   │   │   # If the last element of the first content is a string                         │
│                                                                                                  │
│ /usr/local/lib/python3.11/site-packages/langchain_core/utils/_merge.py:91 in merge_lists         │
│                                                                                                  │
│    88 │   │   else:                                                                              │
│    89 │   │   │   for e in other:                                                                │
│    90 │   │   │   │   if isinstance(e, dict) and "index" in e and isinstance(e["index"], int):   │
│ ❱  91 │   │   │   │   │   to_merge = [                                                           │
│    92 │   │   │   │   │   │   i                                                                  │
│    93 │   │   │   │   │   │   for i, e_left in enumerate(merged)                                 │
│    94 │   │   │   │   │   │   if e_left["index"] == e["index"]                                   │
│                                                                                                  │
│ /usr/local/lib/python3.11/site-packages/langchain_core/utils/_merge.py:94 in <listcomp>          │
│                                                                                                  │
│    91 │   │   │   │   │   to_merge = [                                                           │
│    92 │   │   │   │   │   │   i                                                                  │
│    93 │   │   │   │   │   │   for i, e_left in enumerate(merged)                                 │
│ ❱  94 │   │   │   │   │   │   if e_left["index"] == e["index"]                                   │
│    95 │   │   │   │   │   ]                                                                      │
│    96 │   │   │   │   │   if to_merge:                                                           │
│    97 │   │   │   │   │   │   # TODO: Remove this once merge_dict is updated with special        │
╰──────────────────────────────────────────────────────────────────────────────────────────────────╯
TypeError: string indices must be integers, not 'str'
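
A minimal sketch of how this TypeError can arise, assuming (not a confirmed reproduction) that the rolling message's content list holds a plain string block while the incoming stream chunk carries provider-style indexed dict blocks; with the langchain_core version in the trace, merge_lists indexes every already-merged element with e_left["index"], which fails when that element is a str:

from langchain_core.messages import AIMessageChunk

# rolling message accumulated so far: content list holding a bare string block
rolling = AIMessageChunk(content=["partial answer so far"])

# incoming chunk: content list holding an indexed dict block
chunk = AIMessageChunk(content=[{"index": 0, "type": "text", "text": " next token"}])

try:
    # same operation as rolling_msg += raw_chunk in quivr_core/rag/utils.py
    rolling = rolling + chunk
except TypeError as exc:
    print(exc)  # string indices must be integers, not 'str'

If that mixed-content state is indeed what the stream produces, one possible mitigation on the quivr_core side is to normalize both sides to a single content representation (all strings or all dict blocks) before the +=, though the real fix may belong upstream in how the chunks are emitted.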
dosubot bot added the bug (Something isn't working) label on Jan 27, 2025