-
Notifications
You must be signed in to change notification settings - Fork 2k
Description
Description
Session LLM ollama
Reasoning class <class 'ktem.reasoning.simple.FullDecomposeQAPipeline'>
Reasoning state {'app': {'regen': False}, 'pipeline': {}}
Thinking ...
Chosen rewrite pipeline DecomposeQuestionPipeline(
(llm): ChatOpenAI(api_key=ollama, base_url=http://localhos..., frequency_penalty=None, logit_bias=None, logprobs=None, max_retries=None, max_retries_=2, max_tokens=None, model=deepseek-r1:1.5b, n=1, organization=None, presence_penalty=None, stop=None, temperature=None, timeout=None, tool_choice=None, tools=None, top_logprobs=None, top_p=None)
)
Traceback (most recent call last):
File "D:\ProgramData\Anaconda3\envs\kotaemon_py311\Lib\site-packages\gradio\queueing.py", line 575, in process_events
response = await route_utils.call_process_api(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\ProgramData\Anaconda3\envs\kotaemon_py311\Lib\site-packages\gradio\route_utils.py", line 276, in call_process_api
output = await app.get_blocks().process_api(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\ProgramData\Anaconda3\envs\kotaemon_py311\Lib\site-packages\gradio\blocks.py", line 1923, in process_api
result = await self.call_function(
^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\ProgramData\Anaconda3\envs\kotaemon_py311\Lib\site-packages\gradio\blocks.py", line 1520, in call_function
prediction = await utils.async_iteration(iterator)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\ProgramData\Anaconda3\envs\kotaemon_py311\Lib\site-packages\gradio\utils.py", line 663, in async_iteration
return await iterator.__anext__()
^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\ProgramData\Anaconda3\envs\kotaemon_py311\Lib\site-packages\gradio\utils.py", line 656, in __anext__
return await anyio.to_thread.run_sync(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\ProgramData\Anaconda3\envs\kotaemon_py311\Lib\site-packages\anyio\to_thread.py", line 56, in run_sync
return await get_async_backend().run_sync_in_worker_thread(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\ProgramData\Anaconda3\envs\kotaemon_py311\Lib\site-packages\anyio\_backends\_asyncio.py", line 2485, in run_sync_in_worker_thread
return await future
^^^^^^^^^^^^
File "D:\ProgramData\Anaconda3\envs\kotaemon_py311\Lib\site-packages\anyio\_backends\_asyncio.py", line 976, in run
result = context.run(func, *args)
^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\ProgramData\Anaconda3\envs\kotaemon_py311\Lib\site-packages\gradio\utils.py", line 639, in run_sync_iterator_async
return next(iterator)
^^^^^^^^^^^^^^
File "D:\ProgramData\Anaconda3\envs\kotaemon_py311\Lib\site-packages\gradio\utils.py", line 801, in gen_wrapper
response = next(iterator)
^^^^^^^^^^^^^^
File "D:\work\kotaemon\libs\ktem\ktem\pages\chat\__init__.py", line 1321, in chat_fn
for response in pipeline.stream(chat_input, conversation_id, chat_history):
File "D:\work\kotaemon\libs\ktem\ktem\reasoning\simple.py", line 527, in stream
result = self.rewrite_pipeline(question=message)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\ProgramData\Anaconda3\envs\kotaemon_py311\Lib\site-packages\theflow\base.py", line 1097, in call
raise e from None
File "D:\ProgramData\Anaconda3\envs\kotaemon_py311\Lib\site-packages\theflow\base.py", line 1088, in call
output = self.fl.exec(func, args, kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\ProgramData\Anaconda3\envs\kotaemon_py311\Lib\site-packages\theflow\backends\base.py", line 151, in exec
return run(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^
File "D:\ProgramData\Anaconda3\envs\kotaemon_py311\Lib\site-packages\theflow\middleware.py", line 144, in call
raise e from None
File "D:\ProgramData\Anaconda3\envs\kotaemon_py311\Lib\site-packages\theflow\middleware.py", line 141, in call
_output = self.next_call(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\ProgramData\Anaconda3\envs\kotaemon_py311\Lib\site-packages\theflow\middleware.py", line 117, in call
return self.next_call(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\ProgramData\Anaconda3\envs\kotaemon_py311\Lib\site-packages\theflow\base.py", line 1017, in _runx
return self.run(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\work\kotaemon\libs\ktem\ktem\reasoning\prompt_optimization\decompose_question.py", line 67, in run
result = self.llm(messages, **llm_kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\ProgramData\Anaconda3\envs\kotaemon_py311\Lib\site-packages\theflow\base.py", line 1261, in exec
return child(*args, **kwargs, fl_runstates=fl_runstates)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\ProgramData\Anaconda3\envs\kotaemon_py311\Lib\site-packages\theflow\base.py", line 1097, in call
raise e from None
File "D:\ProgramData\Anaconda3\envs\kotaemon_py311\Lib\site-packages\theflow\base.py", line 1088, in call
output = self.fl.exec(func, args, kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\ProgramData\Anaconda3\envs\kotaemon_py311\Lib\site-packages\theflow\backends\base.py", line 151, in exec
return run(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^
File "D:\ProgramData\Anaconda3\envs\kotaemon_py311\Lib\site-packages\theflow\middleware.py", line 144, in call
raise e from None
File "D:\ProgramData\Anaconda3\envs\kotaemon_py311\Lib\site-packages\theflow\middleware.py", line 141, in call
_output = self.next_call(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\ProgramData\Anaconda3\envs\kotaemon_py311\Lib\site-packages\theflow\middleware.py", line 117, in call
return self.next_call(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\ProgramData\Anaconda3\envs\kotaemon_py311\Lib\site-packages\theflow\base.py", line 1017, in _runx
return self.run(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\work\kotaemon\libs\kotaemon\kotaemon\llms\base.py", line 25, in run
return self.invoke(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\work\kotaemon\libs\kotaemon\kotaemon\llms\chats\openai.py", line 216, in invoke
resp = self.openai_response(
^^^^^^^^^^^^^^^^^^^^^
File "D:\work\kotaemon\libs\kotaemon\kotaemon\llms\chats\openai.py", line 334, in openai_response
return client.chat.completions.create(**params)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\ProgramData\Anaconda3\envs\kotaemon_py311\Lib\site-packages\openai\_utils\_utils.py", line 286, in wrapper
return func(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^
File "D:\ProgramData\Anaconda3\envs\kotaemon_py311\Lib\site-packages\openai\resources\chat\completions\completions.py", line 1147, in create
return self._post(
^^^^^^^^^^^
File "D:\ProgramData\Anaconda3\envs\kotaemon_py311\Lib\site-packages\openai\_base_client.py", line 1259, in post
return cast(ResponseT, self.request(cast_to, opts, stream=stream, stream_cls=stream_cls))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\ProgramData\Anaconda3\envs\kotaemon_py311\Lib\site-packages\openai\_base_client.py", line 1047, in request
raise self._make_status_error_from_response(err.response) from None
openai.BadRequestError: Error code: 400 - {'error': {'message': 'registry.ollama.ai/library/deepseek-r1:1.5b does not support tools', 'type': 'api_error', 'param': None, 'code': None}}
User-id: None, can see public conversations: False
User-id: d45b91283e094de693df0f42ee0ea0e6, can see public conversations: True
D:\ProgramData\Anaconda3\envs\kotaemon_py311\Lib\site-packages\gradio\components\dropdown.py:188: UserWarning:
The value passed into gr.Dropdown() is not in the list of choices. Please update the list of choices to include: 3ae5a6e0-3e0f-4e5a-a2a6-4700809afdb5 or set allow_custom_value=True.
D:\ProgramData\Anaconda3\envs\kotaemon_py311\Lib\site-packages\gradio\components\dropdown.py:188: UserWarning:
The value passed into gr.Dropdown() is not in the list of choices. Please update the list of choices to include: 1470d086-da9f-4568-938d-b97b391f541b or set allow_custom_value=True.
D:\ProgramData\Anaconda3\envs\kotaemon_py311\Lib\site-packages\gradio\components\dropdown.py:188: UserWarning:
The value passed into gr.Dropdown() is not in the list of choices. Please update the list of choices to include: c2daaeea-e0a7-403f-9c34-4b4c7aee7394 or set allow_custom_value=True.
D:\ProgramData\Anaconda3\envs\kotaemon_py311\Lib\site-packages\gradio\components\dropdown.py:188: UserWarning:
The value passed into gr.Dropdown() is not in the list of choices. Please update the list of choices to include: 2c6f7f72-dcc3-4a05-8d25-9aad6416ef61 or set allow_custom_value=True.
User-id: d45b91283e094de693df0f42ee0ea0e6, can see public conversations: True
Session reasoning type complex use mindmap True use citation highlight language zh
Session LLM ollama
Reasoning class <class 'ktem.reasoning.simple.FullDecomposeQAPipeline'>
Reasoning state {'app': {'regen': False}, 'pipeline': {}}
Thinking ...
Chosen rewrite pipeline DecomposeQuestionPipeline(
(llm): ChatOpenAI(api_key=ollama, base_url=http://localhos..., frequency_penalty=None, logit_bias=None, logprobs=None, max_retries=None, max_retries_=2, max_tokens=None, model=deepseek-r1:7b, n=1, organization=None, presence_penalty=None, stop=None, temperature=None, timeout=None, tool_choice=None, tools=None, top_logprobs=None, top_p=None)
)
Traceback (most recent call last):
File "D:\ProgramData\Anaconda3\envs\kotaemon_py311\Lib\site-packages\gradio\queueing.py", line 575, in process_events
response = await route_utils.call_process_api(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\ProgramData\Anaconda3\envs\kotaemon_py311\Lib\site-packages\gradio\route_utils.py", line 276, in call_process_api
output = await app.get_blocks().process_api(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\ProgramData\Anaconda3\envs\kotaemon_py311\Lib\site-packages\gradio\blocks.py", line 1923, in process_api
result = await self.call_function(
^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\ProgramData\Anaconda3\envs\kotaemon_py311\Lib\site-packages\gradio\blocks.py", line 1520, in call_function
prediction = await utils.async_iteration(iterator)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\ProgramData\Anaconda3\envs\kotaemon_py311\Lib\site-packages\gradio\utils.py", line 663, in async_iteration
return await iterator.__anext__()
^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\ProgramData\Anaconda3\envs\kotaemon_py311\Lib\site-packages\gradio\utils.py", line 656, in __anext__
return await anyio.to_thread.run_sync(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\ProgramData\Anaconda3\envs\kotaemon_py311\Lib\site-packages\anyio\to_thread.py", line 56, in run_sync
return await get_async_backend().run_sync_in_worker_thread(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\ProgramData\Anaconda3\envs\kotaemon_py311\Lib\site-packages\anyio\_backends\_asyncio.py", line 2485, in run_sync_in_worker_thread
return await future
^^^^^^^^^^^^
File "D:\ProgramData\Anaconda3\envs\kotaemon_py311\Lib\site-packages\anyio\_backends\_asyncio.py", line 976, in run
result = context.run(func, *args)
^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\ProgramData\Anaconda3\envs\kotaemon_py311\Lib\site-packages\gradio\utils.py", line 639, in run_sync_iterator_async
return next(iterator)
^^^^^^^^^^^^^^
File "D:\ProgramData\Anaconda3\envs\kotaemon_py311\Lib\site-packages\gradio\utils.py", line 801, in gen_wrapper
response = next(iterator)
^^^^^^^^^^^^^^
File "D:\work\kotaemon\libs\ktem\ktem\pages\chat\__init__.py", line 1321, in chat_fn
for response in pipeline.stream(chat_input, conversation_id, chat_history):
File "D:\work\kotaemon\libs\ktem\ktem\reasoning\simple.py", line 527, in stream
result = self.rewrite_pipeline(question=message)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\ProgramData\Anaconda3\envs\kotaemon_py311\Lib\site-packages\theflow\base.py", line 1097, in call
raise e from None
File "D:\ProgramData\Anaconda3\envs\kotaemon_py311\Lib\site-packages\theflow\base.py", line 1088, in call
output = self.fl.exec(func, args, kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\ProgramData\Anaconda3\envs\kotaemon_py311\Lib\site-packages\theflow\backends\base.py", line 151, in exec
return run(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^
File "D:\ProgramData\Anaconda3\envs\kotaemon_py311\Lib\site-packages\theflow\middleware.py", line 144, in call
raise e from None
File "D:\ProgramData\Anaconda3\envs\kotaemon_py311\Lib\site-packages\theflow\middleware.py", line 141, in call
_output = self.next_call(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\ProgramData\Anaconda3\envs\kotaemon_py311\Lib\site-packages\theflow\middleware.py", line 117, in call
return self.next_call(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\ProgramData\Anaconda3\envs\kotaemon_py311\Lib\site-packages\theflow\base.py", line 1017, in _runx
return self.run(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\work\kotaemon\libs\ktem\ktem\reasoning\prompt_optimization\decompose_question.py", line 67, in run
result = self.llm(messages, **llm_kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\ProgramData\Anaconda3\envs\kotaemon_py311\Lib\site-packages\theflow\base.py", line 1261, in exec
return child(*args, **kwargs, fl_runstates=fl_runstates)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\ProgramData\Anaconda3\envs\kotaemon_py311\Lib\site-packages\theflow\base.py", line 1097, in call
raise e from None
File "D:\ProgramData\Anaconda3\envs\kotaemon_py311\Lib\site-packages\theflow\base.py", line 1088, in call
output = self.fl.exec(func, args, kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\ProgramData\Anaconda3\envs\kotaemon_py311\Lib\site-packages\theflow\backends\base.py", line 151, in exec
return run(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^
File "D:\ProgramData\Anaconda3\envs\kotaemon_py311\Lib\site-packages\theflow\middleware.py", line 144, in call
raise e from None
File "D:\ProgramData\Anaconda3\envs\kotaemon_py311\Lib\site-packages\theflow\middleware.py", line 141, in call
_output = self.next_call(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\ProgramData\Anaconda3\envs\kotaemon_py311\Lib\site-packages\theflow\middleware.py", line 117, in call
return self.next_call(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\ProgramData\Anaconda3\envs\kotaemon_py311\Lib\site-packages\theflow\base.py", line 1017, in _runx
return self.run(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\work\kotaemon\libs\kotaemon\kotaemon\llms\base.py", line 25, in run
return self.invoke(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\work\kotaemon\libs\kotaemon\kotaemon\llms\chats\openai.py", line 216, in invoke
resp = self.openai_response(
^^^^^^^^^^^^^^^^^^^^^
File "D:\work\kotaemon\libs\kotaemon\kotaemon\llms\chats\openai.py", line 334, in openai_response
return client.chat.completions.create(**params)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\ProgramData\Anaconda3\envs\kotaemon_py311\Lib\site-packages\openai\_utils\_utils.py", line 286, in wrapper
return func(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^
File "D:\ProgramData\Anaconda3\envs\kotaemon_py311\Lib\site-packages\openai\resources\chat\completions\completions.py", line 1147, in create
return self._post(
^^^^^^^^^^^
File "D:\ProgramData\Anaconda3\envs\kotaemon_py311\Lib\site-packages\openai\_base_client.py", line 1259, in post
return cast(ResponseT, self.request(cast_to, opts, stream=stream, stream_cls=stream_cls))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\ProgramData\Anaconda3\envs\kotaemon_py311\Lib\site-packages\openai\_base_client.py", line 1047, in request
raise self._make_status_error_from_response(err.response) from None
openai.BadRequestError: Error code: 400 - {'error': {'message': 'registry.ollama.ai/library/deepseek-r1:7b does not support tools', 'type': 'api_error', 'param': None, 'code': None}}
Hi all, I've run into a problem. Everything executes completely when I use the OpenAI or DeepSeek API. But when I use a local Ollama model such as deepseek-r1:14b, it shows "openai.BadRequestError: Error code: 400 - {'error': {'message': 'registry.ollama.ai/library/deepseek-r1:7b does not support tools', 'type': 'api_error', 'param': None, 'code': None}}". I don't understand why this happens — the request apparently includes a `tools` parameter that the local Ollama model does not support. Can anyone help me? Thanks so much!
Reproduction steps
1. Go to '...'
2. Click on '....'
3. Scroll down to '....'
4. See errorScreenshots
Logs
Browsers
No response
OS
No response
Additional information
No response