python e2e.py localhost 5000
No available shields. Disable safety.
Using model: meta-llama/Llama-3.2-1B-Instruct
Created session_id=f077f8c3-4f01-413f-8389-1ac61d79c60a for Agent(793512c3-4740-4bea-861b-f11daf82f307)
inference> Traceback (most recent call last):
  File "/home/thoraxe/Red_Hat/openshift/llamaindex-experiments/ols-llamastack/e2e.py", line 93, in <module>
    fire.Fire(main)
  File "/home/thoraxe/.pyenv/versions/ols-llamastack/lib/python3.11/site-packages/fire/core.py", line 135, in Fire
    component_trace = _Fire(component, args, parsed_flag_args, context, name)
                      ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "/home/thoraxe/.pyenv/versions/ols-llamastack/lib/python3.11/site-packages/fire/core.py", line 468, in _Fire
    component, remaining_args = _CallAndUpdateTrace(
                                ^^^^^^^^^^^^^^^^^^^^
  File "/home/thoraxe/.pyenv/versions/ols-llamastack/lib/python3.11/site-packages/fire/core.py", line 684, in _CallAndUpdateTrace
    component = fn(*varargs, **kwargs)
                ^^^^^^^^^^^^^^^^^^^^^^
  File "/home/thoraxe/Red_Hat/openshift/llamaindex-experiments/ols-llamastack/e2e.py", line 89, in main
    asyncio.run(run_main(host, port))
  File "/home/thoraxe/.pyenv/versions/3.11.5/lib/python3.11/asyncio/runners.py", line 190, in run
    return runner.run(main)
           ^^^^^^^^^^^^^^^^
  File "/home/thoraxe/.pyenv/versions/3.11.5/lib/python3.11/asyncio/runners.py", line 118, in run
    return self._loop.run_until_complete(task)
           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "/home/thoraxe/.pyenv/versions/3.11.5/lib/python3.11/asyncio/base_events.py", line 653, in run_until_complete
    return future.result()
           ^^^^^^^^^^^^^^^
  File "/home/thoraxe/Red_Hat/openshift/llamaindex-experiments/ols-llamastack/e2e.py", line 84, in run_main
    for log in EventLogger().log(response):
  File "/home/thoraxe/.pyenv/versions/ols-llamastack/lib/python3.11/site-packages/llama_stack_client/lib/agents/event_logger.py", line 167, in log
    for chunk in event_generator:
  File "/home/thoraxe/.pyenv/versions/ols-llamastack/lib/python3.11/site-packages/llama_stack_client/lib/agents/agent.py", line 157, in _create_turn_streaming
    tool_calls = self._get_tool_calls(chunk)
                 ^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "/home/thoraxe/.pyenv/versions/ols-llamastack/lib/python3.11/site-packages/llama_stack_client/lib/agents/agent.py", line 58, in _get_tool_calls
    if chunk.event.payload.event_type != "turn_complete":
       ^^^^^^^^^^^^^^^^^^^
AttributeError: 'NoneType' object has no attribute 'payload'
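
For reference, the crash happens because _get_tool_calls dereferences chunk.event.payload on a streamed chunk whose event is None. Below is a minimal, hypothetical sketch of the kind of None-guard that avoids this AttributeError; the SimpleNamespace stand-ins only mimic the chunk shape implied by the traceback and are not the real llama_stack_client types.

from types import SimpleNamespace


def safe_event_type(chunk):
    # Return the chunk's event type, or None when the chunk carries no
    # event/payload (the failing case in the traceback above).
    event = getattr(chunk, "event", None)
    if event is None or getattr(event, "payload", None) is None:
        return None
    return event.payload.event_type


if __name__ == "__main__":
    # A chunk with event=None: the case agent.py line 58 trips over.
    empty_chunk = SimpleNamespace(event=None)
    # A chunk shaped like a normal turn-completion event.
    done_chunk = SimpleNamespace(
        event=SimpleNamespace(payload=SimpleNamespace(event_type="turn_complete"))
    )
    print(safe_event_type(empty_chunk))  # -> None
    print(safe_event_type(done_chunk))   # -> turn_complete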