Skip to content

Instantly share code, notes, and snippets.

@nagyv
Created August 2, 2025 05:02
Show Gist options
  • Save nagyv/77e2670cb4e341529d9f25d922deed35 to your computer and use it in GitHub Desktop.
Error encountered when routing requests from Claude through the LiteLLM proxy to Qwen3-Coder running in Ollama (full traceback below):
```
06:58:54 - LiteLLM Proxy:ERROR: endpoints.py:191 - litellm.proxy.proxy_server.anthropic_response(): Exception occured - Error calling litellm.acompletion for non-Anthropic model: litellm.APIConnectionError: 'str' object has no attribute 'get'
Traceback (most recent call last):
File "/Users/nagyv/Projects/local-llm/.venv/lib/python3.12/site-packages/litellm/main.py", line 3084, in completion
response = base_llm_http_handler.completion(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/nagyv/Projects/local-llm/.venv/lib/python3.12/site-packages/litellm/llms/custom_httpx/llm_http_handler.py", line 329, in completion
data = provider_config.transform_request(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/nagyv/Projects/local-llm/.venv/lib/python3.12/site-packages/litellm/llms/ollama/completion/transformation.py", line 342, in transform_request
modified_prompt = ollama_pt(model=model, messages=messages)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/nagyv/Projects/local-llm/.venv/lib/python3.12/site-packages/litellm/litellm_core_utils/prompt_templates/factory.py", line 222, in ollama_pt
system_content_str, msg_i = _handle_ollama_system_message(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/nagyv/Projects/local-llm/.venv/lib/python3.12/site-packages/litellm/litellm_core_utils/prompt_templates/factory.py", line 180, in _handle_ollama_system_message
msg_content = convert_content_list_to_str(messages[msg_i])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/nagyv/Projects/local-llm/.venv/lib/python3.12/site-packages/litellm/litellm_core_utils/prompt_templates/common_utils.py", line 132, in convert_content_list_to_str
text_content = c.get("text")
^^^^^
AttributeError: 'str' object has no attribute 'get'
Traceback (most recent call last):
File "/Users/nagyv/Projects/local-llm/.venv/lib/python3.12/site-packages/litellm/main.py", line 3084, in completion
response = base_llm_http_handler.completion(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/nagyv/Projects/local-llm/.venv/lib/python3.12/site-packages/litellm/llms/custom_httpx/llm_http_handler.py", line 329, in completion
data = provider_config.transform_request(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/nagyv/Projects/local-llm/.venv/lib/python3.12/site-packages/litellm/llms/ollama/completion/transformation.py", line 342, in transform_request
modified_prompt = ollama_pt(model=model, messages=messages)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/nagyv/Projects/local-llm/.venv/lib/python3.12/site-packages/litellm/litellm_core_utils/prompt_templates/factory.py", line 222, in ollama_pt
system_content_str, msg_i = _handle_ollama_system_message(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/nagyv/Projects/local-llm/.venv/lib/python3.12/site-packages/litellm/litellm_core_utils/prompt_templates/factory.py", line 180, in _handle_ollama_system_message
msg_content = convert_content_list_to_str(messages[msg_i])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/nagyv/Projects/local-llm/.venv/lib/python3.12/site-packages/litellm/litellm_core_utils/prompt_templates/common_utils.py", line 132, in convert_content_list_to_str
text_content = c.get("text")
^^^^^
AttributeError: 'str' object has no attribute 'get'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/Users/nagyv/Projects/local-llm/.venv/lib/python3.12/site-packages/litellm/llms/anthropic/experimental_pass_through/adapters/handler.py", line 157, in async_anthropic_messages_handler
completion_response = await litellm.acompletion(**completion_kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/nagyv/Projects/local-llm/.venv/lib/python3.12/site-packages/litellm/utils.py", line 1584, in wrapper_async
raise e
File "/Users/nagyv/Projects/local-llm/.venv/lib/python3.12/site-packages/litellm/utils.py", line 1435, in wrapper_async
result = await original_function(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/nagyv/Projects/local-llm/.venv/lib/python3.12/site-packages/litellm/main.py", line 560, in acompletion
raise exception_type(
File "/Users/nagyv/Projects/local-llm/.venv/lib/python3.12/site-packages/litellm/main.py", line 533, in acompletion
init_response = await loop.run_in_executor(None, func_with_context)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/nix/store/v0vy1c59f0vf71h64caw17hzymvg0zms-python3-3.12.10/lib/python3.12/concurrent/futures/thread.py", line 59, in run
result = self.fn(*self.args, **self.kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/nagyv/Projects/local-llm/.venv/lib/python3.12/site-packages/litellm/utils.py", line 1058, in wrapper
result = original_function(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/nagyv/Projects/local-llm/.venv/lib/python3.12/site-packages/litellm/main.py", line 3428, in completion
raise exception_type(
^^^^^^^^^^^^^^^
File "/Users/nagyv/Projects/local-llm/.venv/lib/python3.12/site-packages/litellm/litellm_core_utils/exception_mapping_utils.py", line 2301, in exception_type
raise e
File "/Users/nagyv/Projects/local-llm/.venv/lib/python3.12/site-packages/litellm/litellm_core_utils/exception_mapping_utils.py", line 2277, in exception_type
raise APIConnectionError(
litellm.exceptions.APIConnectionError: litellm.APIConnectionError: 'str' object has no attribute 'get'
Traceback (most recent call last):
File "/Users/nagyv/Projects/local-llm/.venv/lib/python3.12/site-packages/litellm/main.py", line 3084, in completion
response = base_llm_http_handler.completion(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/nagyv/Projects/local-llm/.venv/lib/python3.12/site-packages/litellm/llms/custom_httpx/llm_http_handler.py", line 329, in completion
data = provider_config.transform_request(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/nagyv/Projects/local-llm/.venv/lib/python3.12/site-packages/litellm/llms/ollama/completion/transformation.py", line 342, in transform_request
modified_prompt = ollama_pt(model=model, messages=messages)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/nagyv/Projects/local-llm/.venv/lib/python3.12/site-packages/litellm/litellm_core_utils/prompt_templates/factory.py", line 222, in ollama_pt
system_content_str, msg_i = _handle_ollama_system_message(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/nagyv/Projects/local-llm/.venv/lib/python3.12/site-packages/litellm/litellm_core_utils/prompt_templates/factory.py", line 180, in _handle_ollama_system_message
msg_content = convert_content_list_to_str(messages[msg_i])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/nagyv/Projects/local-llm/.venv/lib/python3.12/site-packages/litellm/litellm_core_utils/prompt_templates/common_utils.py", line 132, in convert_content_list_to_str
text_content = c.get("text")
^^^^^
AttributeError: 'str' object has no attribute 'get'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/Users/nagyv/Projects/local-llm/.venv/lib/python3.12/site-packages/litellm/proxy/anthropic_endpoints/endpoints.py", line 137, in anthropic_response
response = await llm_response
^^^^^^^^^^^^^^^^^^
File "/Users/nagyv/Projects/local-llm/.venv/lib/python3.12/site-packages/litellm/router.py", line 3421, in async_wrapper
return await self._ageneric_api_call_with_fallbacks(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/nagyv/Projects/local-llm/.venv/lib/python3.12/site-packages/litellm/router.py", line 2560, in _ageneric_api_call_with_fallbacks
raise e
File "/Users/nagyv/Projects/local-llm/.venv/lib/python3.12/site-packages/litellm/router.py", line 2547, in _ageneric_api_call_with_fallbacks
response = await self.async_function_with_fallbacks(**kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/nagyv/Projects/local-llm/.venv/lib/python3.12/site-packages/litellm/router.py", line 3734, in async_function_with_fallbacks
raise original_exception
File "/Users/nagyv/Projects/local-llm/.venv/lib/python3.12/site-packages/litellm/router.py", line 3548, in async_function_with_fallbacks
response = await self.async_function_with_retries(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/nagyv/Projects/local-llm/.venv/lib/python3.12/site-packages/litellm/router.py", line 3922, in async_function_with_retries
raise original_exception
File "/Users/nagyv/Projects/local-llm/.venv/lib/python3.12/site-packages/litellm/router.py", line 3813, in async_function_with_retries
response = await self.make_call(original_function, *args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/nagyv/Projects/local-llm/.venv/lib/python3.12/site-packages/litellm/router.py", line 3931, in make_call
response = await response
^^^^^^^^^^^^^^
File "/Users/nagyv/Projects/local-llm/.venv/lib/python3.12/site-packages/litellm/router.py", line 2639, in _ageneric_api_call_with_fallbacks_helper
raise e
File "/Users/nagyv/Projects/local-llm/.venv/lib/python3.12/site-packages/litellm/router.py", line 2625, in _ageneric_api_call_with_fallbacks_helper
response = await response # type: ignore
^^^^^^^^^^^^^^
File "/Users/nagyv/Projects/local-llm/.venv/lib/python3.12/site-packages/litellm/utils.py", line 1584, in wrapper_async
raise e
File "/Users/nagyv/Projects/local-llm/.venv/lib/python3.12/site-packages/litellm/utils.py", line 1435, in wrapper_async
result = await original_function(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/nagyv/Projects/local-llm/.venv/lib/python3.12/site-packages/litellm/llms/anthropic/experimental_pass_through/messages/handler.py", line 90, in anthropic_messages
response = await init_response
^^^^^^^^^^^^^^^^^^^
File "/Users/nagyv/Projects/local-llm/.venv/lib/python3.12/site-packages/litellm/llms/anthropic/experimental_pass_through/adapters/handler.py", line 179, in async_anthropic_messages_handler
raise ValueError(
ValueError: Error calling litellm.acompletion for non-Anthropic model: litellm.APIConnectionError: 'str' object has no attribute 'get'
Traceback (most recent call last):
File "/Users/nagyv/Projects/local-llm/.venv/lib/python3.12/site-packages/litellm/main.py", line 3084, in completion
response = base_llm_http_handler.completion(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/nagyv/Projects/local-llm/.venv/lib/python3.12/site-packages/litellm/llms/custom_httpx/llm_http_handler.py", line 329, in completion
data = provider_config.transform_request(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/nagyv/Projects/local-llm/.venv/lib/python3.12/site-packages/litellm/llms/ollama/completion/transformation.py", line 342, in transform_request
modified_prompt = ollama_pt(model=model, messages=messages)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/nagyv/Projects/local-llm/.venv/lib/python3.12/site-packages/litellm/litellm_core_utils/prompt_templates/factory.py", line 222, in ollama_pt
system_content_str, msg_i = _handle_ollama_system_message(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/nagyv/Projects/local-llm/.venv/lib/python3.12/site-packages/litellm/litellm_core_utils/prompt_templates/factory.py", line 180, in _handle_ollama_system_message
msg_content = convert_content_list_to_str(messages[msg_i])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/nagyv/Projects/local-llm/.venv/lib/python3.12/site-packages/litellm/litellm_core_utils/prompt_templates/common_utils.py", line 132, in convert_content_list_to_str
text_content = c.get("text")
^^^^^
AttributeError: 'str' object has no attribute 'get'
```
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment