-
Notifications
You must be signed in to change notification settings - Fork 1.4k
Closed
Labels
bug — Something isn't working
Milestone
Description
Initial Checks
- I confirm that I'm using the latest version of Pydantic AI
- I confirm that I searched for my issue in https://github.com/pydantic/pydantic-ai/issues before opening this issue
Description
When the Ollama service is down or unreachable, the FallbackModel does not automatically fall back to the secondary model (OpenAI). Instead, the application crashes with the error below.
Traceback (most recent call last):
File "/private/tmp/agent-example/.venv/lib/python3.14/site-packages/httpx/_transports/default.py", line 101, in map_httpcore_exceptions
yield
File "/private/tmp/agent-example/.venv/lib/python3.14/site-packages/httpx/_transports/default.py", line 394, in handle_async_request
resp = await self._pool.handle_async_request(req)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/private/tmp/agent-example/.venv/lib/python3.14/site-packages/httpcore/_async/connection_pool.py", line 256, in handle_async_request
raise exc from None
File "/private/tmp/agent-example/.venv/lib/python3.14/site-packages/httpcore/_async/connection_pool.py", line 236, in handle_async_request
response = await connection.handle_async_request(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
pool_request.request
^^^^^^^^^^^^^^^^^^^^
)
^
File "/private/tmp/agent-example/.venv/lib/python3.14/site-packages/httpcore/_async/connection.py", line 101, in handle_async_request
raise exc
File "/private/tmp/agent-example/.venv/lib/python3.14/site-packages/httpcore/_async/connection.py", line 78, in handle_async_request
stream = await self._connect(request)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/private/tmp/agent-example/.venv/lib/python3.14/site-packages/httpcore/_async/connection.py", line 124, in _connect
stream = await self._network_backend.connect_tcp(**kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/private/tmp/agent-example/.venv/lib/python3.14/site-packages/httpcore/_backends/auto.py", line 31, in connect_tcp
return await self._backend.connect_tcp(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
...<5 lines>...
)
^
File "/private/tmp/agent-example/.venv/lib/python3.14/site-packages/httpcore/_backends/anyio.py", line 113, in connect_tcp
with map_exceptions(exc_map):
~~~~~~~~~~~~~~^^^^^^^^^
File "/opt/homebrew/Cellar/[email protected]/3.14.0_1/Frameworks/Python.framework/Versions/3.14/lib/python3.14/contextlib.py", line 162, in __exit__
self.gen.throw(value)
~~~~~~~~~~~~~~^^^^^^^
File "/private/tmp/agent-example/.venv/lib/python3.14/site-packages/httpcore/_exceptions.py", line 14, in map_exceptions
raise to_exc(exc) from exc
httpcore.ConnectError: All connection attempts failed
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "/private/tmp/agent-example/.venv/lib/python3.14/site-packages/openai/_base_client.py", line 1529, in request
response = await self._client.send(
^^^^^^^^^^^^^^^^^^^^^^^^
...<3 lines>...
)
^
File "/private/tmp/agent-example/.venv/lib/python3.14/site-packages/httpx/_client.py", line 1629, in send
response = await self._send_handling_auth(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
...<4 lines>...
)
^
File "/private/tmp/agent-example/.venv/lib/python3.14/site-packages/httpx/_client.py", line 1657, in _send_handling_auth
response = await self._send_handling_redirects(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
...<3 lines>...
)
^
File "/private/tmp/agent-example/.venv/lib/python3.14/site-packages/httpx/_client.py", line 1694, in _send_handling_redirects
response = await self._send_single_request(request)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/private/tmp/agent-example/.venv/lib/python3.14/site-packages/httpx/_client.py", line 1730, in _send_single_request
response = await transport.handle_async_request(request)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/private/tmp/agent-example/.venv/lib/python3.14/site-packages/httpx/_transports/default.py", line 393, in handle_async_request
with map_httpcore_exceptions():
~~~~~~~~~~~~~~~~~~~~~~~^^
File "/opt/homebrew/Cellar/[email protected]/3.14.0_1/Frameworks/Python.framework/Versions/3.14/lib/python3.14/contextlib.py", line 162, in __exit__
self.gen.throw(value)
~~~~~~~~~~~~~~^^^^^^^
File "/private/tmp/agent-example/.venv/lib/python3.14/site-packages/httpx/_transports/default.py", line 118, in map_httpcore_exceptions
raise mapped_exc(message) from exc
httpx.ConnectError: All connection attempts failed
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "/private/tmp/agent-example/main.py", line 18, in <module>
response = agent.run_sync("What is the capital of France?")
File "/private/tmp/agent-example/.venv/lib/python3.14/site-packages/pydantic_ai/agent/abstract.py", line 346, in run_sync
return _utils.get_event_loop().run_until_complete(
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~^
self.run(
^^^^^^^^^
...<14 lines>...
)
^
)
^
File "/opt/homebrew/Cellar/[email protected]/3.14.0_1/Frameworks/Python.framework/Versions/3.14/lib/python3.14/asyncio/base_events.py", line 719, in run_until_complete
return future.result()
~~~~~~~~~~~~~^^
File "/private/tmp/agent-example/.venv/lib/python3.14/site-packages/pydantic_ai/agent/abstract.py", line 225, in run
async with self.iter(
~~~~~~~~~^
user_prompt=user_prompt,
^^^^^^^^^^^^^^^^^^^^^^^^
...<10 lines>...
builtin_tools=builtin_tools,
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
) as agent_run:
^
File "/opt/homebrew/Cellar/[email protected]/3.14.0_1/Frameworks/Python.framework/Versions/3.14/lib/python3.14/contextlib.py", line 235, in __aexit__
await self.gen.athrow(value)
File "/private/tmp/agent-example/.venv/lib/python3.14/site-packages/pydantic_ai/agent/__init__.py", line 649, in iter
async with graph.iter(
~~~~~~~~~~^
inputs=user_prompt_node,
^^^^^^^^^^^^^^^^^^^^^^^^
...<3 lines>...
infer_name=False,
^^^^^^^^^^^^^^^^^
) as graph_run:
^
File "/opt/homebrew/Cellar/[email protected]/3.14.0_1/Frameworks/Python.framework/Versions/3.14/lib/python3.14/contextlib.py", line 235, in __aexit__
await self.gen.athrow(value)
File "/private/tmp/agent-example/.venv/lib/python3.14/site-packages/pydantic_graph/beta/graph.py", line 271, in iter
async with GraphRun[StateT, DepsT, OutputT](
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~^
graph=self,
^^^^^^^^^^^
...<3 lines>...
traceparent=traceparent,
^^^^^^^^^^^^^^^^^^^^^^^^
) as graph_run:
^
File "/private/tmp/agent-example/.venv/lib/python3.14/site-packages/pydantic_graph/beta/graph.py", line 400, in __aexit__
await self._async_exit_stack.__aexit__(exc_type, exc_val, exc_tb)
File "/opt/homebrew/Cellar/[email protected]/3.14.0_1/Frameworks/Python.framework/Versions/3.14/lib/python3.14/contextlib.py", line 768, in __aexit__
raise exc
File "/opt/homebrew/Cellar/[email protected]/3.14.0_1/Frameworks/Python.framework/Versions/3.14/lib/python3.14/contextlib.py", line 749, in __aexit__
cb_suppress = cb(*exc_details)
File "/opt/homebrew/Cellar/[email protected]/3.14.0_1/Frameworks/Python.framework/Versions/3.14/lib/python3.14/contextlib.py", line 162, in __exit__
self.gen.throw(value)
~~~~~~~~~~~~~~^^^^^^^
File "/private/tmp/agent-example/.venv/lib/python3.14/site-packages/pydantic_graph/beta/graph.py", line 939, in _unwrap_exception_groups
raise exception
File "/private/tmp/agent-example/.venv/lib/python3.14/site-packages/pydantic_graph/beta/graph.py", line 711, in _run_tracked_task
result = await self._run_task(t_)
^^^^^^^^^^^^^^^^^^^^^^^^
File "/private/tmp/agent-example/.venv/lib/python3.14/site-packages/pydantic_graph/beta/graph.py", line 740, in _run_task
output = await node.call(step_context)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/private/tmp/agent-example/.venv/lib/python3.14/site-packages/pydantic_graph/beta/step.py", line 253, in _call_node
return await node.run(GraphRunContext(state=ctx.state, deps=ctx.deps))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/private/tmp/agent-example/.venv/lib/python3.14/site-packages/pydantic_ai/_agent_graph.py", line 424, in run
return await self._make_request(ctx)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/private/tmp/agent-example/.venv/lib/python3.14/site-packages/pydantic_ai/_agent_graph.py", line 466, in _make_request
model_response = await ctx.deps.model.request(message_history, model_settings, model_request_parameters)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/private/tmp/agent-example/.venv/lib/python3.14/site-packages/pydantic_ai/models/fallback.py", line 89, in request
raise exc
File "/private/tmp/agent-example/.venv/lib/python3.14/site-packages/pydantic_ai/models/fallback.py", line 84, in request
response = await model.request(messages, model_settings, model_request_parameters)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/private/tmp/agent-example/.venv/lib/python3.14/site-packages/pydantic_ai/models/openai.py", line 435, in request
response = await self._completions_create(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
messages, False, cast(OpenAIChatModelSettings, model_settings or {}), model_request_parameters
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
)
^
File "/private/tmp/agent-example/.venv/lib/python3.14/site-packages/pydantic_ai/models/openai.py", line 517, in _completions_create
return await self.client.chat.completions.create(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
...<26 lines>...
)
^
File "/private/tmp/agent-example/.venv/lib/python3.14/site-packages/openai/resources/chat/completions/completions.py", line 2672, in create
return await self._post(
^^^^^^^^^^^^^^^^^
...<49 lines>...
)
^
File "/private/tmp/agent-example/.venv/lib/python3.14/site-packages/openai/_base_client.py", line 1794, in post
return await self.request(cast_to, opts, stream=stream, stream_cls=stream_cls)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/private/tmp/agent-example/.venv/lib/python3.14/site-packages/openai/_base_client.py", line 1561, in request
raise APIConnectionError(request=request) from err
openai.APIConnectionError: Connection error.

Example Code
# Reproduction script: FallbackModel should fall back from a local Ollama
# server to OpenAI, but crashes with openai.APIConnectionError when the
# Ollama endpoint at localhost:11434 is unreachable (see traceback above).
from pydantic_ai import Agent
from pydantic_ai.models.fallback import FallbackModel
from pydantic_ai.models.openai import OpenAIChatModel
from pydantic_ai.providers.ollama import OllamaProvider
import dotenv

# Load OPENAI_API_KEY (and any other settings) from a local .env file.
dotenv.load_dotenv()

# Primary model: a local Ollama server exposed through its OpenAI-compatible
# API. If nothing is listening on this port, the connection attempt fails.
ollama_model = OpenAIChatModel(
    model_name="llama3.2:1b",
    provider=OllamaProvider(base_url="http://localhost:11434/v1"),
)
# Secondary model: hosted OpenAI, intended as the fallback target.
openai_model = OpenAIChatModel("gpt-5")

# Expectation: when the first model raises, FallbackModel tries the next one.
# Observed: the httpx/openai connection error propagates out of request()
# instead (pydantic_ai/models/fallback.py re-raises at line 89 per traceback).
model = FallbackModel(ollama_model, openai_model)
agent = Agent(model=model)
response = agent.run_sync("What is the capital of France?")
print(response.all_messages()[-1].model_name)

Python, Pydantic AI & LLM client version
Python Version: 3.14
Pydantic and LLM versions:
- pydantic-ai-slim[openai]>=1.18.0
- openai==2.8.0
Metadata
Metadata
Assignees
Labels
bug — Something isn't working