If you are using private models, there is an error. The callback to Ollama fails with the following traceback:

```
callback to ollama Error: litellm.APIConnectionError: 'name'
Traceback (most recent call last):
  File "/home/usuario/cai-framework/lib/python3.13/site-packages/litellm/llms/openai/openai.py", line 724, in completion
    raise e
  File "/home/usuario/cai-framework/lib/python3.13/site-packages/litellm/llms/openai/openai.py", line 652, in completion
    ) = self.make_sync_openai_chat_completion_request(
        ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~^
        openai_client=openai_client,
        ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
    ...<2 lines>...
        logging_obj=logging_obj,
        ^^^^^^^^^^^^^^^^^^^^^^^^
    )
    ^
  File "/home/usuario/cai-framework/lib/python3.13/site-packages/litellm/litellm_core_utils/logging_utils.py", line 149, in sync_wrapper
    result = func(*args, **kwargs)
  File "/home/usuario/cai-framework/lib/python3.13/site-packages/litellm/llms/openai/openai.py", line 471, in make_sync_openai_chat_completion_request
    raise e
  File "/home/usuario/cai-framework/lib/python3.13/site-packages/litellm/llms/openai/openai.py", line 453, in make_sync_openai_chat_completion_request
    raw_response = openai_client.chat.completions.with_raw_response.create(
        **data, timeout=timeout
    )
  File "/home/usuario/cai-framework/lib/python3.13/site-packages/openai/_legacy_response.py", line 364, in wrapped
    return cast(LegacyAPIResponse[R], func(*args, **kwargs))
                                      ~~~~^^^^^^^^^^^^^^^^^
  File "/home/usuario/cai-framework/lib/python3.13/site-packages/openai/_utils/_utils.py", line 287, in wrapper
    return func(*args, **kwargs)
  File "/home/usuario/cai-framework/lib/python3.13/site-packages/openai/resources/chat/completions/completions.py", line 925, in create
    return self._post(
           ~~~~~~~~~~^
        "/chat/completions",
        ^^^^^^^^^^^^^^^^^^^^
    ...<43 lines>...
        stream_cls=Stream[ChatCompletionChunk],
        ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
    )
    ^
  File "/home/usuario/cai-framework/lib/python3.13/site-packages/openai/_base_client.py", line 1239, in post
    return cast(ResponseT, self.request(cast_to, opts, stream=stream, stream_cls=stream_cls))
                           ~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "/home/usuario/cai-framework/lib/python3.13/site-packages/openai/_base_client.py", line 1034, in request
    raise self._make_status_error_from_response(err.response) from None
openai.NotFoundError: 404 page not found

During handling of the above exception, another exception occurred:

Traceback (most recent call last):
  File "/home/usuario/cai-framework/lib/python3.13/site-packages/litellm/main.py", line 1799, in completion
    raise e
  File "/home/usuario/cai-framework/lib/python3.13/site-packages/litellm/main.py", line 1772, in completion
    response = openai_chat_completions.completion(
        model=model,
    ...<15 lines>...
        custom_llm_provider=custom_llm_provider,
    )
  File "/home/usuario/cai-framework/lib/python3.13/site-packages/litellm/llms/openai/openai.py", line 735, in completion
    raise OpenAIError(
    ...<4 lines>...
    )
litellm.llms.openai.common_utils.OpenAIError: 404 page not found

During handling of the above exception, another exception occurred:

Traceback (most recent call last):
  File "/home/usuario/cai-framework/lib/python3.13/site-packages/cai/core.py", line 502, in get_chat_completion
    litellm_completion = litellm.completion(**ollama_params)
  File "/home/usuario/cai-framework/lib/python3.13/site-packages/litellm/utils.py", line 1255, in wrapper
    raise e
  File "/home/usuario/cai-framework/lib/python3.13/site-packages/litellm/utils.py", line 1133, in wrapper
    result = original_function(*args, **kwargs)
  File "/home/usuario/cai-framework/lib/python3.13/site-packages/litellm/main.py", line 3184, in completion
    raise exception_type(
        ~~~~~~~~~~~~~~^
        model=model,
        ^^^^^^^^^^^^
    ...<3 lines>...
        extra_kwargs=kwargs,
        ^^^^^^^^^^^^^^^^^^^^
    )
    ^
  File "/home/usuario/cai-framework/lib/python3.13/site-packages/litellm/litellm_core_utils/exception_mapping_utils.py", line 2217, in exception_type
    raise e
  File "/home/usuario/cai-framework/lib/python3.13/site-packages/litellm/litellm_core_utils/exception_mapping_utils.py", line 405, in exception_type
    raise NotFoundError(
    ...<5 lines>...
    )
litellm.exceptions.NotFoundError: litellm.NotFoundError: NotFoundError: OpenAIException - 404 page not found

During handling of the above exception, another exception occurred:

Traceback (most recent call last):
  File "/home/usuario/cai-framework/lib/python3.13/site-packages/litellm/main.py", line 2872, in completion
    response = base_llm_http_handler.completion(
        model=model,
    ...<13 lines>...
        client=client,
    )
  File "/home/usuario/cai-framework/lib/python3.13/site-packages/litellm/llms/custom_httpx/llm_http_handler.py", line 415, in completion
    return provider_config.transform_response(
           ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~^
        model=model,
        ^^^^^^^^^^^^
    ...<9 lines>...
        json_mode=json_mode,
        ^^^^^^^^^^^^^^^^^^^^
    )
    ^
  File "/home/usuario/cai-framework/lib/python3.13/site-packages/litellm/llms/ollama/completion/transformation.py", line 266, in transform_response
    "name": function_call["name"],
            ~~~~~~~~~~~~~^^^^^^^^
KeyError: 'name'
```
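For context on the root failure: Ollama answers `404 page not found` for any path it does not serve, and the first traceback shows the OpenAI client posting to `/chat/completions`. That is the response you get when an OpenAI-compatible client is pointed at the bare Ollama root instead of its `/v1` prefix. Below is a minimal sketch of the two litellm configurations, assuming a local Ollama on the default port; the model name `qwen2.5:14b` and the URLs are placeholders, not taken from this report:

```python
import litellm

messages = [{"role": "user", "content": "ping"}]

# Native Ollama provider: litellm builds the /api/... request itself,
# so api_base is the bare Ollama root.
resp = litellm.completion(
    model="ollama/qwen2.5:14b",    # placeholder model name
    messages=messages,
    api_base="http://localhost:11434",
)

# OpenAI-compatible route: Ollama only serves this under /v1; without
# that prefix the server answers "404 page not found", as in the
# traceback above.
resp = litellm.completion(
    model="openai/qwen2.5:14b",
    messages=messages,
    api_base="http://localhost:11434/v1",
    api_key="ollama",              # Ollama ignores the key, but the client requires one
)
print(resp.choices[0].message.content)
```

Which of the two routes CAI actually takes depends on how it builds `ollama_params` in `cai/core.py` (line 502 in the traceback); the sketch only illustrates why the `/v1` prefix matters.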
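The `KeyError: 'name'` that finally surfaces looks like a secondary symptom: after the 404, litellm falls back to its native Ollama handler, whose `transform_response` assumes any function call it parses out of the reply carries a `name` field. A sketch of the failing pattern, reconstructed from the traceback rather than copied from the litellm source:

```python
# What transformation.py line 266 effectively does, per the traceback:
# index a parsed function-call dict that may lack the "name" key.
function_call = {"arguments": "{}"}  # hypothetical parsed reply without "name"

try:
    tool_call = {"name": function_call["name"]}  # raises KeyError: 'name'
except KeyError as exc:
    print(f"KeyError: {exc}")  # the same error the report ends with

# A lookup with a default would fail soft instead of burying the real
# problem (the 404) under an unrelated KeyError:
tool_call = {"name": function_call.get("name", "")}
```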