@thoraxe
Created February 13, 2025 16:45
python client.py localhost 5000 "what pods are in the openshift-lightspeed namespace?"
No available shields. Disabling safety.
Created session_id=32feb5bb-c1eb-4a2e-92b1-5f548b55812e for Agent(453bd9b5-53d1-4f13-82c0-5cfee5b826ef)
inference> tool_call>Traceback (most recent call last):
File "/home/thoraxe/.pyenv/versions/ols-llamastack/lib/python3.11/site-packages/httpx/_transports/default.py", line 72, in map_httpcore_exceptions
yield
File "/home/thoraxe/.pyenv/versions/ols-llamastack/lib/python3.11/site-packages/httpx/_transports/default.py", line 116, in __iter__
for part in self._httpcore_stream:
File "/home/thoraxe/.pyenv/versions/ols-llamastack/lib/python3.11/site-packages/httpcore/_sync/connection_pool.py", line 407, in __iter__
raise exc from None
File "/home/thoraxe/.pyenv/versions/ols-llamastack/lib/python3.11/site-packages/httpcore/_sync/connection_pool.py", line 403, in __iter__
for part in self._stream:
File "/home/thoraxe/.pyenv/versions/ols-llamastack/lib/python3.11/site-packages/httpcore/_sync/http11.py", line 342, in __iter__
raise exc
File "/home/thoraxe/.pyenv/versions/ols-llamastack/lib/python3.11/site-packages/httpcore/_sync/http11.py", line 334, in __iter__
for chunk in self._connection._receive_response_body(**kwargs):
File "/home/thoraxe/.pyenv/versions/ols-llamastack/lib/python3.11/site-packages/httpcore/_sync/http11.py", line 203, in _receive_response_body
event = self._receive_event(timeout=timeout)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/thoraxe/.pyenv/versions/ols-llamastack/lib/python3.11/site-packages/httpcore/_sync/http11.py", line 213, in _receive_event
with map_exceptions({h11.RemoteProtocolError: RemoteProtocolError}):
File "/home/thoraxe/.pyenv/versions/3.11.5/lib/python3.11/contextlib.py", line 155, in __exit__
self.gen.throw(typ, value, traceback)
File "/home/thoraxe/.pyenv/versions/ols-llamastack/lib/python3.11/site-packages/httpcore/_exceptions.py", line 14, in map_exceptions
raise to_exc(exc) from exc
httpcore.RemoteProtocolError: peer closed connection without sending complete message body (incomplete chunked read)
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "/home/thoraxe/Red_Hat/openshift/llamaindex-experiments/ols-llamastack/client.py", line 103, in <module>
fire.Fire(main)
File "/home/thoraxe/.pyenv/versions/ols-llamastack/lib/python3.11/site-packages/fire/core.py", line 135, in Fire
component_trace = _Fire(component, args, parsed_flag_args, context, name)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/thoraxe/.pyenv/versions/ols-llamastack/lib/python3.11/site-packages/fire/core.py", line 468, in _Fire
component, remaining_args = _CallAndUpdateTrace(
^^^^^^^^^^^^^^^^^^^^
File "/home/thoraxe/.pyenv/versions/ols-llamastack/lib/python3.11/site-packages/fire/core.py", line 684, in _CallAndUpdateTrace
component = fn(*varargs, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^
File "/home/thoraxe/Red_Hat/openshift/llamaindex-experiments/ols-llamastack/client.py", line 99, in main
asyncio.run(run_main(host, port, user_query))
File "/home/thoraxe/.pyenv/versions/3.11.5/lib/python3.11/asyncio/runners.py", line 190, in run
return runner.run(main)
^^^^^^^^^^^^^^^^
File "/home/thoraxe/.pyenv/versions/3.11.5/lib/python3.11/asyncio/runners.py", line 118, in run
return self._loop.run_until_complete(task)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/thoraxe/.pyenv/versions/3.11.5/lib/python3.11/asyncio/base_events.py", line 653, in run_until_complete
return future.result()
^^^^^^^^^^^^^^^
File "/home/thoraxe/Red_Hat/openshift/llamaindex-experiments/ols-llamastack/client.py", line 95, in run_main
for log in EventLogger().log(response):
File "/home/thoraxe/.pyenv/versions/ols-llamastack/lib/python3.11/site-packages/llama_stack_client/lib/agents/event_logger.py", line 167, in log
for chunk in event_generator:
File "/home/thoraxe/.pyenv/versions/ols-llamastack/lib/python3.11/site-packages/llama_stack_client/lib/agents/agent.py", line 156, in _create_turn_streaming
for chunk in response:
File "/home/thoraxe/.pyenv/versions/ols-llamastack/lib/python3.11/site-packages/llama_stack_client/_streaming.py", line 45, in __iter__
for item in self._iterator:
File "/home/thoraxe/.pyenv/versions/ols-llamastack/lib/python3.11/site-packages/llama_stack_client/_streaming.py", line 57, in __stream__
for sse in iterator:
File "/home/thoraxe/.pyenv/versions/ols-llamastack/lib/python3.11/site-packages/llama_stack_client/_streaming.py", line 49, in _iter_events
yield from self._decoder.iter_bytes(self.response.iter_bytes())
File "/home/thoraxe/.pyenv/versions/ols-llamastack/lib/python3.11/site-packages/llama_stack_client/_streaming.py", line 203, in iter_bytes
for chunk in self._iter_chunks(iterator):
File "/home/thoraxe/.pyenv/versions/ols-llamastack/lib/python3.11/site-packages/llama_stack_client/_streaming.py", line 214, in _iter_chunks
for chunk in iterator:
File "/home/thoraxe/.pyenv/versions/ols-llamastack/lib/python3.11/site-packages/httpx/_models.py", line 831, in iter_bytes
for raw_bytes in self.iter_raw():
File "/home/thoraxe/.pyenv/versions/ols-llamastack/lib/python3.11/site-packages/httpx/_models.py", line 885, in iter_raw
for raw_stream_bytes in self.stream:
File "/home/thoraxe/.pyenv/versions/ols-llamastack/lib/python3.11/site-packages/httpx/_client.py", line 127, in __iter__
for chunk in self._stream:
File "/home/thoraxe/.pyenv/versions/ols-llamastack/lib/python3.11/site-packages/httpx/_transports/default.py", line 115, in __iter__
with map_httpcore_exceptions():
File "/home/thoraxe/.pyenv/versions/3.11.5/lib/python3.11/contextlib.py", line 155, in __exit__
self.gen.throw(typ, value, traceback)
File "/home/thoraxe/.pyenv/versions/ols-llamastack/lib/python3.11/site-packages/httpx/_transports/default.py", line 89, in map_httpcore_exceptions
raise mapped_exc(message) from exc
httpx.RemoteProtocolError: peer closed connection without sending complete message body (incomplete chunked read)
# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under the terms described in the LICENSE file in
# the root directory of this source tree.
import asyncio
import subprocess

import fire
from llama_stack_client import LlamaStackClient
from llama_stack_client.lib.agents.client_tool import client_tool
from llama_stack_client.lib.agents.agent import Agent
from llama_stack_client.lib.agents.event_logger import EventLogger
from llama_stack_client.types.agent_create_params import AgentConfig

# Directory that holds the `oc` binary used by the client tool.
pre_path = "/home/thoraxe/bin/"


@client_tool
async def get_object_namespace_list(kind: str, namespace: str) -> str:
    """Get the list of all objects in a namespace

    :param kind: the type of object
    :param namespace: the name of the namespace
    :returns: a plaintext list of the kind object in the namespace
    """
    output = subprocess.run(
        [pre_path + "oc", "get", kind, "-n", namespace, "-o", "name"],
        capture_output=True,
        timeout=2,
    )
    # capture_output yields bytes; decode so the tool returns the declared str.
    return output.stdout.decode()


async def run_main(host: str, port: int, user_query: str, disable_safety: bool = False):
    client = LlamaStackClient(
        base_url=f"http://{host}:{port}",
    )

    available_shields = [shield.identifier for shield in client.shields.list()]
    if not available_shields:
        print("No available shields. Disabling safety.")
    else:
        print(f"Available shields found: {available_shields}")

    client_tools = [get_object_namespace_list]

    agent_config = AgentConfig(
        model="meta-llama/Llama-3.2-1B-Instruct",
        instructions="""You are a helpful assistant with access to the following
        function calls. Your task is to produce a list of function calls
        necessary to generate response to the user utterance. Use the following
        function calls as required.""",
        toolgroups=[],
        client_tools=[tool.get_tool_definition() for tool in client_tools],
        tool_choice="auto",
        tool_prompt_format="python_list",
        enable_session_persistence=False,
    )

    agent = Agent(client, agent_config, client_tools)
    session_id = agent.create_session("test-session")
    print(f"Created session_id={session_id} for Agent({agent.agent_id})")

    response = agent.create_turn(
        messages=[
            {
                "role": "user",
                "content": user_query,
            }
        ],
        session_id=session_id,
    )

    for log in EventLogger().log(response):
        log.print()


def main(host: str, port: int, user_query: str):
    asyncio.run(run_main(host, port, user_query))


if __name__ == "__main__":
    fire.Fire(main)
INFO: ::1:39792 - "GET /v1/shields HTTP/1.1" 200 OK
16:40:44.024 [START] /v1/shields
16:40:44.025 [END] /v1/shields [StatusCode.OK] (1.41ms)
16:40:44.028 [START] /v1/agents
INFO: ::1:39792 - "POST /v1/agents HTTP/1.1" 200 OK
16:40:44.036 [END] /v1/agents [StatusCode.OK] (7.92ms)
16:40:44.038 [START] /v1/agents/453bd9b5-53d1-4f13-82c0-5cfee5b826ef/session
INFO: ::1:39792 - "POST /v1/agents/453bd9b5-53d1-4f13-82c0-5cfee5b826ef/session HTTP/1.1" 200 OK
16:40:44.050 [END] /v1/agents/453bd9b5-53d1-4f13-82c0-5cfee5b826ef/session [StatusCode.OK] (12.04ms)
16:40:44.053 [START] /v1/agents/453bd9b5-53d1-4f13-82c0-5cfee5b826ef/session/32feb5bb-c1eb-4a2e-92b1-5f548b55812e/turn
INFO: ::1:39792 - "POST /v1/agents/453bd9b5-53d1-4f13-82c0-5cfee5b826ef/session/32feb5bb-c1eb-4a2e-92b1-5f548b55812e/turn HTTP/1.1" 200 OK
16:40:44.063 [START] create_and_execute_turn
16:40:44.089 [START] inference
INFO 2025-02-13 11:40:44,310 httpx:1038: HTTP Request: POST http://192.168.1.252:4000/v1/chat/completions "HTTP/1.1 200 OK"
16:40:45.617 [END] inference [StatusCode.OK] (1528.26ms)
16:40:44.310 [INFO] HTTP Request: POST http://192.168.1.252:4000/v1/chat/completions "HTTP/1.1 200 OK"
16:40:45.618 [END] create_and_execute_turn [StatusCode.OK] (1555.13ms)
Traceback (most recent call last):
File "/home/thoraxe/Red_Hat/openshift/llamaindex-experiments/llama-stack/llama_stack/distribution/server/server.py", line 207, in sse_generator
async for item in event_gen:
File "/home/thoraxe/Red_Hat/openshift/llamaindex-experiments/llama-stack/llama_stack/providers/inline/agents/meta_reference/agents.py", line 169, in _create_agent_turn_streaming
async for event in agent.create_and_execute_turn(request):
File "/home/thoraxe/Red_Hat/openshift/llamaindex-experiments/llama-stack/llama_stack/providers/inline/agents/meta_reference/agent_instance.py", line 189, in create_and_execute_turn
async for chunk in self.run(
File "/home/thoraxe/Red_Hat/openshift/llamaindex-experiments/llama-stack/llama_stack/providers/inline/agents/meta_reference/agent_instance.py", line 258, in run
async for res in self._run(
File "/home/thoraxe/Red_Hat/openshift/llamaindex-experiments/llama-stack/llama_stack/providers/inline/agents/meta_reference/agent_instance.py", line 493, in _run
async for chunk in await self.inference_api.chat_completion(
File "/home/thoraxe/Red_Hat/openshift/llamaindex-experiments/llama-stack/llama_stack/distribution/routers/routers.py", line 167, in <genexpr>
return (chunk async for chunk in await provider.chat_completion(**params))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/thoraxe/Red_Hat/openshift/llamaindex-experiments/llama-stack/llama_stack/providers/remote/inference/vllm/vllm.py", line 290, in _stream_chat_completion
async for chunk in res:
File "/home/thoraxe/Red_Hat/openshift/llamaindex-experiments/llama-stack/llama_stack/providers/remote/inference/vllm/vllm.py", line 175, in _process_vllm_chat_completion_stream_response
tool_call = convert_tool_call(choice.delta.tool_calls[0])
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/thoraxe/Red_Hat/openshift/llamaindex-experiments/llama-stack/llama_stack/providers/utils/inference/openai_compat.py", line 443, in convert_tool_call
return ToolCall(
^^^^^^^^^
File "/home/thoraxe/.pyenv/versions/3.11.5/envs/ols-llamastack/lib/python3.11/site-packages/pydantic/main.py", line 214, in __init__
validated_self = self.__pydantic_validator__.validate_python(data, self_instance=self)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
pydantic_core._pydantic_core.ValidationError: 3 validation errors for ToolCall
call_id
Input should be a valid string [type=string_type, input_value=None, input_type=NoneType]
For further information visit https://errors.pydantic.dev/2.10/v/string_type
tool_name.enum[BuiltinTool]
Input should be 'brave_search', 'wolfram_alpha', 'photogen' or 'code_interpreter' [type=enum, input_value=None, input_type=NoneType]
For further information visit https://errors.pydantic.dev/2.10/v/enum
tool_name.str
Input should be a valid string [type=string_type, input_value=None, input_type=NoneType]
For further information visit https://errors.pydantic.dev/2.10/v/string_type
16:40:45.620 [END] /v1/agents/453bd9b5-53d1-4f13-82c0-5cfee5b826ef/session/32feb5bb-c1eb-4a2e-92b1-5f548b55812e/turn [StatusCode.OK] (1566.69ms)
Traceback (most recent call last):
File "/home/thoraxe/.pyenv/versions/3.11.5/envs/ols-llamastack/lib/python3.11/site-packages/starlette/responses.py", line 268, in __call__
await wrap(partial(self.listen_for_disconnect, receive))
File "/home/thoraxe/.pyenv/versions/3.11.5/envs/ols-llamastack/lib/python3.11/site-packages/starlette/responses.py", line 264, in wrap
await func()
File "/home/thoraxe/.pyenv/versions/3.11.5/envs/ols-llamastack/lib/python3.11/site-packages/starlette/responses.py", line 233, in listen_for_disconnect
message = await receive()
^^^^^^^^^^^^^^^
File "/home/thoraxe/.pyenv/versions/3.11.5/envs/ols-llamastack/lib/python3.11/site-packages/uvicorn/protocols/http/h11_impl.py", line 531, in receive
await self.message_event.wait()
File "/home/thoraxe/.pyenv/versions/3.11.5/lib/python3.11/asyncio/locks.py", line 213, in wait
await fut
asyncio.exceptions.CancelledError: Cancelled by cancel scope 7f59331c5510
During handling of the above exception, another exception occurred:
+ Exception Group Traceback (most recent call last):
| File "/home/thoraxe/.pyenv/versions/3.11.5/envs/ols-llamastack/lib/python3.11/site-packages/starlette/middleware/errors.py", line 165, in __call__
| await self.app(scope, receive, _send)
| File "/home/thoraxe/Red_Hat/openshift/llamaindex-experiments/llama-stack/llama_stack/distribution/server/server.py", line 311, in __call__
| return await self.app(scope, receive, send)
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
| File "/home/thoraxe/Red_Hat/openshift/llamaindex-experiments/llama-stack/llama_stack/distribution/server/server.py", line 269, in __call__
| return await self.app(scope, receive, send)
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
| File "/home/thoraxe/.pyenv/versions/3.11.5/envs/ols-llamastack/lib/python3.11/site-packages/starlette/middleware/exceptions.py", line 62, in __call__
| await wrap_app_handling_exceptions(self.app, conn)(scope, receive, send)
| File "/home/thoraxe/.pyenv/versions/3.11.5/envs/ols-llamastack/lib/python3.11/site-packages/starlette/_exception_handler.py", line 53, in wrapped_app
| raise exc
| File "/home/thoraxe/.pyenv/versions/3.11.5/envs/ols-llamastack/lib/python3.11/site-packages/starlette/_exception_handler.py", line 42, in wrapped_app
| await app(scope, receive, sender)
| File "/home/thoraxe/.pyenv/versions/3.11.5/envs/ols-llamastack/lib/python3.11/site-packages/starlette/routing.py", line 715, in __call__
| await self.middleware_stack(scope, receive, send)
| File "/home/thoraxe/.pyenv/versions/3.11.5/envs/ols-llamastack/lib/python3.11/site-packages/starlette/routing.py", line 735, in app
| await route.handle(scope, receive, send)
| File "/home/thoraxe/.pyenv/versions/3.11.5/envs/ols-llamastack/lib/python3.11/site-packages/starlette/routing.py", line 288, in handle
| await self.app(scope, receive, send)
| File "/home/thoraxe/.pyenv/versions/3.11.5/envs/ols-llamastack/lib/python3.11/site-packages/starlette/routing.py", line 76, in app
| await wrap_app_handling_exceptions(app, request)(scope, receive, send)
| File "/home/thoraxe/.pyenv/versions/3.11.5/envs/ols-llamastack/lib/python3.11/site-packages/starlette/_exception_handler.py", line 53, in wrapped_app
| raise exc
| File "/home/thoraxe/.pyenv/versions/3.11.5/envs/ols-llamastack/lib/python3.11/site-packages/starlette/_exception_handler.py", line 42, in wrapped_app
| await app(scope, receive, sender)
| File "/home/thoraxe/.pyenv/versions/3.11.5/envs/ols-llamastack/lib/python3.11/site-packages/starlette/routing.py", line 74, in app
| await response(scope, receive, send)
| File "/home/thoraxe/.pyenv/versions/3.11.5/envs/ols-llamastack/lib/python3.11/site-packages/starlette/responses.py", line 261, in __call__
| async with anyio.create_task_group() as task_group:
| File "/home/thoraxe/.pyenv/versions/3.11.5/envs/ols-llamastack/lib/python3.11/site-packages/anyio/_backends/_asyncio.py", line 767, in __aexit__
| raise BaseExceptionGroup(
| ExceptionGroup: unhandled errors in a TaskGroup (1 sub-exception)
+-+---------------- 1 ----------------
| Traceback (most recent call last):
| File "/home/thoraxe/Red_Hat/openshift/llamaindex-experiments/llama-stack/llama_stack/distribution/server/server.py", line 207, in sse_generator
| async for item in event_gen:
| File "/home/thoraxe/Red_Hat/openshift/llamaindex-experiments/llama-stack/llama_stack/providers/inline/agents/meta_reference/agents.py", line 169, in _create_agent_turn_streaming
| async for event in agent.create_and_execute_turn(request):
| File "/home/thoraxe/Red_Hat/openshift/llamaindex-experiments/llama-stack/llama_stack/providers/inline/agents/meta_reference/agent_instance.py", line 189, in create_and_execute_turn
| async for chunk in self.run(
| File "/home/thoraxe/Red_Hat/openshift/llamaindex-experiments/llama-stack/llama_stack/providers/inline/agents/meta_reference/agent_instance.py", line 258, in run
| async for res in self._run(
| File "/home/thoraxe/Red_Hat/openshift/llamaindex-experiments/llama-stack/llama_stack/providers/inline/agents/meta_reference/agent_instance.py", line 493, in _run
| async for chunk in await self.inference_api.chat_completion(
| File "/home/thoraxe/Red_Hat/openshift/llamaindex-experiments/llama-stack/llama_stack/distribution/routers/routers.py", line 167, in <genexpr>
| return (chunk async for chunk in await provider.chat_completion(**params))
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
| File "/home/thoraxe/Red_Hat/openshift/llamaindex-experiments/llama-stack/llama_stack/providers/remote/inference/vllm/vllm.py", line 290, in _stream_chat_completion
| async for chunk in res:
| File "/home/thoraxe/Red_Hat/openshift/llamaindex-experiments/llama-stack/llama_stack/providers/remote/inference/vllm/vllm.py", line 175, in _process_vllm_chat_completion_stream_response
| tool_call = convert_tool_call(choice.delta.tool_calls[0])
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
| File "/home/thoraxe/Red_Hat/openshift/llamaindex-experiments/llama-stack/llama_stack/providers/utils/inference/openai_compat.py", line 443, in convert_tool_call
| return ToolCall(
| ^^^^^^^^^
| File "/home/thoraxe/.pyenv/versions/3.11.5/envs/ols-llamastack/lib/python3.11/site-packages/pydantic/main.py", line 214, in __init__
| validated_self = self.__pydantic_validator__.validate_python(data, self_instance=self)
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
| pydantic_core._pydantic_core.ValidationError: 3 validation errors for ToolCall
| call_id
| Input should be a valid string [type=string_type, input_value=None, input_type=NoneType]
| For further information visit https://errors.pydantic.dev/2.10/v/string_type
| tool_name.enum[BuiltinTool]
| Input should be 'brave_search', 'wolfram_alpha', 'photogen' or 'code_interpreter' [type=enum, input_value=None, input_type=NoneType]
| For further information visit https://errors.pydantic.dev/2.10/v/enum
| tool_name.str
| Input should be a valid string [type=string_type, input_value=None, input_type=NoneType]
| For further information visit https://errors.pydantic.dev/2.10/v/string_type
|
| During handling of the above exception, another exception occurred:
|
| Traceback (most recent call last):
| File "/home/thoraxe/.pyenv/versions/3.11.5/envs/ols-llamastack/lib/python3.11/site-packages/starlette/responses.py", line 264, in wrap
| await func()
| File "/home/thoraxe/.pyenv/versions/3.11.5/envs/ols-llamastack/lib/python3.11/site-packages/starlette/responses.py", line 245, in stream_response
| async for chunk in self.body_iterator:
| File "/home/thoraxe/Red_Hat/openshift/llamaindex-experiments/llama-stack/llama_stack/distribution/server/server.py", line 218, in sse_generator
| "message": str(translate_exception(e)),
| ^^^^^^^^^^^^^^^^^^^^^^
| File "/home/thoraxe/Red_Hat/openshift/llamaindex-experiments/llama-stack/llama_stack/distribution/server/server.py", line 88, in translate_exception
| exc = RequestValidationError(exc.raw_errors)
| ^^^^^^^^^^^^^^
| AttributeError: 'pydantic_core._pydantic_core.ValidationError' object has no attribute 'raw_errors'
+------------------------------------
ERROR: Exception in ASGI application
Traceback (most recent call last):
File "/home/thoraxe/.pyenv/versions/3.11.5/envs/ols-llamastack/lib/python3.11/site-packages/starlette/responses.py", line 268, in __call__
await wrap(partial(self.listen_for_disconnect, receive))
File "/home/thoraxe/.pyenv/versions/3.11.5/envs/ols-llamastack/lib/python3.11/site-packages/starlette/responses.py", line 264, in wrap
await func()
File "/home/thoraxe/.pyenv/versions/3.11.5/envs/ols-llamastack/lib/python3.11/site-packages/starlette/responses.py", line 233, in listen_for_disconnect
message = await receive()
^^^^^^^^^^^^^^^
File "/home/thoraxe/.pyenv/versions/3.11.5/envs/ols-llamastack/lib/python3.11/site-packages/uvicorn/protocols/http/h11_impl.py", line 531, in receive
await self.message_event.wait()
File "/home/thoraxe/.pyenv/versions/3.11.5/lib/python3.11/asyncio/locks.py", line 213, in wait
await fut
asyncio.exceptions.CancelledError: Cancelled by cancel scope 7f59331c5510
During handling of the above exception, another exception occurred:
+ Exception Group Traceback (most recent call last):
| File "/home/thoraxe/.pyenv/versions/3.11.5/envs/ols-llamastack/lib/python3.11/site-packages/uvicorn/protocols/http/h11_impl.py", line 403, in run_asgi
| result = await app( # type: ignore[func-returns-value]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
| File "/home/thoraxe/.pyenv/versions/3.11.5/envs/ols-llamastack/lib/python3.11/site-packages/uvicorn/middleware/proxy_headers.py", line 60, in __call__
| return await self.app(scope, receive, send)
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
| File "/home/thoraxe/.pyenv/versions/3.11.5/envs/ols-llamastack/lib/python3.11/site-packages/fastapi/applications.py", line 1054, in __call__
| await super().__call__(scope, receive, send)
| File "/home/thoraxe/.pyenv/versions/3.11.5/envs/ols-llamastack/lib/python3.11/site-packages/starlette/applications.py", line 112, in __call__
| await self.middleware_stack(scope, receive, send)
| File "/home/thoraxe/.pyenv/versions/3.11.5/envs/ols-llamastack/lib/python3.11/site-packages/starlette/middleware/errors.py", line 187, in __call__
| raise exc
| File "/home/thoraxe/.pyenv/versions/3.11.5/envs/ols-llamastack/lib/python3.11/site-packages/starlette/middleware/errors.py", line 165, in __call__
| await self.app(scope, receive, _send)
| File "/home/thoraxe/Red_Hat/openshift/llamaindex-experiments/llama-stack/llama_stack/distribution/server/server.py", line 311, in __call__
| return await self.app(scope, receive, send)
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
| File "/home/thoraxe/Red_Hat/openshift/llamaindex-experiments/llama-stack/llama_stack/distribution/server/server.py", line 269, in __call__
| return await self.app(scope, receive, send)
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
| File "/home/thoraxe/.pyenv/versions/3.11.5/envs/ols-llamastack/lib/python3.11/site-packages/starlette/middleware/exceptions.py", line 62, in __call__
| await wrap_app_handling_exceptions(self.app, conn)(scope, receive, send)
| File "/home/thoraxe/.pyenv/versions/3.11.5/envs/ols-llamastack/lib/python3.11/site-packages/starlette/_exception_handler.py", line 53, in wrapped_app
| raise exc
| File "/home/thoraxe/.pyenv/versions/3.11.5/envs/ols-llamastack/lib/python3.11/site-packages/starlette/_exception_handler.py", line 42, in wrapped_app
| await app(scope, receive, sender)
| File "/home/thoraxe/.pyenv/versions/3.11.5/envs/ols-llamastack/lib/python3.11/site-packages/starlette/routing.py", line 715, in __call__
| await self.middleware_stack(scope, receive, send)
| File "/home/thoraxe/.pyenv/versions/3.11.5/envs/ols-llamastack/lib/python3.11/site-packages/starlette/routing.py", line 735, in app
| await route.handle(scope, receive, send)
| File "/home/thoraxe/.pyenv/versions/3.11.5/envs/ols-llamastack/lib/python3.11/site-packages/starlette/routing.py", line 288, in handle
| await self.app(scope, receive, send)
| File "/home/thoraxe/.pyenv/versions/3.11.5/envs/ols-llamastack/lib/python3.11/site-packages/starlette/routing.py", line 76, in app
| await wrap_app_handling_exceptions(app, request)(scope, receive, send)
| File "/home/thoraxe/.pyenv/versions/3.11.5/envs/ols-llamastack/lib/python3.11/site-packages/starlette/_exception_handler.py", line 53, in wrapped_app
| raise exc
| File "/home/thoraxe/.pyenv/versions/3.11.5/envs/ols-llamastack/lib/python3.11/site-packages/starlette/_exception_handler.py", line 42, in wrapped_app
| await app(scope, receive, sender)
| File "/home/thoraxe/.pyenv/versions/3.11.5/envs/ols-llamastack/lib/python3.11/site-packages/starlette/routing.py", line 74, in app
| await response(scope, receive, send)
| File "/home/thoraxe/.pyenv/versions/3.11.5/envs/ols-llamastack/lib/python3.11/site-packages/starlette/responses.py", line 261, in __call__
| async with anyio.create_task_group() as task_group:
| File "/home/thoraxe/.pyenv/versions/3.11.5/envs/ols-llamastack/lib/python3.11/site-packages/anyio/_backends/_asyncio.py", line 767, in __aexit__
| raise BaseExceptionGroup(
| ExceptionGroup: unhandled errors in a TaskGroup (1 sub-exception)
+-+---------------- 1 ----------------
| Traceback (most recent call last):
| File "/home/thoraxe/Red_Hat/openshift/llamaindex-experiments/llama-stack/llama_stack/distribution/server/server.py", line 207, in sse_generator
| async for item in event_gen:
| File "/home/thoraxe/Red_Hat/openshift/llamaindex-experiments/llama-stack/llama_stack/providers/inline/agents/meta_reference/agents.py", line 169, in _create_agent_turn_streaming
| async for event in agent.create_and_execute_turn(request):
| File "/home/thoraxe/Red_Hat/openshift/llamaindex-experiments/llama-stack/llama_stack/providers/inline/agents/meta_reference/agent_instance.py", line 189, in create_and_execute_turn
| async for chunk in self.run(
| File "/home/thoraxe/Red_Hat/openshift/llamaindex-experiments/llama-stack/llama_stack/providers/inline/agents/meta_reference/agent_instance.py", line 258, in run
| async for res in self._run(
| File "/home/thoraxe/Red_Hat/openshift/llamaindex-experiments/llama-stack/llama_stack/providers/inline/agents/meta_reference/agent_instance.py", line 493, in _run
| async for chunk in await self.inference_api.chat_completion(
| File "/home/thoraxe/Red_Hat/openshift/llamaindex-experiments/llama-stack/llama_stack/distribution/routers/routers.py", line 167, in <genexpr>
| return (chunk async for chunk in await provider.chat_completion(**params))
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
| File "/home/thoraxe/Red_Hat/openshift/llamaindex-experiments/llama-stack/llama_stack/providers/remote/inference/vllm/vllm.py", line 290, in _stream_chat_completion
| async for chunk in res:
| File "/home/thoraxe/Red_Hat/openshift/llamaindex-experiments/llama-stack/llama_stack/providers/remote/inference/vllm/vllm.py", line 175, in _process_vllm_chat_completion_stream_response
| tool_call = convert_tool_call(choice.delta.tool_calls[0])
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
| File "/home/thoraxe/Red_Hat/openshift/llamaindex-experiments/llama-stack/llama_stack/providers/utils/inference/openai_compat.py", line 443, in convert_tool_call
| return ToolCall(
| ^^^^^^^^^
| File "/home/thoraxe/.pyenv/versions/3.11.5/envs/ols-llamastack/lib/python3.11/site-packages/pydantic/main.py", line 214, in __init__
| validated_self = self.__pydantic_validator__.validate_python(data, self_instance=self)
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
| pydantic_core._pydantic_core.ValidationError: 3 validation errors for ToolCall
| call_id
| Input should be a valid string [type=string_type, input_value=None, input_type=NoneType]
| For further information visit https://errors.pydantic.dev/2.10/v/string_type
| tool_name.enum[BuiltinTool]
| Input should be 'brave_search', 'wolfram_alpha', 'photogen' or 'code_interpreter' [type=enum, input_value=None, input_type=NoneType]
| For further information visit https://errors.pydantic.dev/2.10/v/enum
| tool_name.str
| Input should be a valid string [type=string_type, input_value=None, input_type=NoneType]
| For further information visit https://errors.pydantic.dev/2.10/v/string_type
|
| During handling of the above exception, another exception occurred:
|
| Traceback (most recent call last):
| File "/home/thoraxe/.pyenv/versions/3.11.5/envs/ols-llamastack/lib/python3.11/site-packages/starlette/responses.py", line 264, in wrap
| await func()
| File "/home/thoraxe/.pyenv/versions/3.11.5/envs/ols-llamastack/lib/python3.11/site-packages/starlette/responses.py", line 245, in stream_response
| async for chunk in self.body_iterator:
| File "/home/thoraxe/Red_Hat/openshift/llamaindex-experiments/llama-stack/llama_stack/distribution/server/server.py", line 218, in sse_generator
| "message": str(translate_exception(e)),
| ^^^^^^^^^^^^^^^^^^^^^^
| File "/home/thoraxe/Red_Hat/openshift/llamaindex-experiments/llama-stack/llama_stack/distribution/server/server.py", line 88, in translate_exception
| exc = RequestValidationError(exc.raw_errors)
| ^^^^^^^^^^^^^^
| AttributeError: 'pydantic_core._pydantic_core.ValidationError' object has no attribute 'raw_errors'
+------------------------------------
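
Notes on the failure, as read from the server traceback above: _process_vllm_chat_completion_stream_response passes a streamed delta straight to convert_tool_call, but in OpenAI-compatible streaming the tool call id and function name are typically present only in the first delta; later deltas carry just argument fragments with those fields set to None, so constructing a ToolCall from such a delta fails pydantic validation (the three validation errors on call_id and tool_name above). The secondary AttributeError occurs because translate_exception reads exc.raw_errors, a pydantic v1 attribute that no longer exists in pydantic v2 (the equivalent data comes from exc.errors()), and the client-side httpx.RemoteProtocolError is just the downstream symptom of the SSE stream being cut off mid-body.

A minimal sketch of accumulating streamed deltas before validating, assuming OpenAI-style delta objects with id, function.name, and function.arguments fields (illustrative only, not the upstream fix):

def accumulate_tool_call_deltas(deltas):
    """Merge a sequence of streamed tool_call deltas into one complete call dict."""
    call = {"id": None, "name": None, "arguments": ""}
    for delta in deltas:
        # The first delta usually carries id and function.name; later deltas
        # usually carry only argument fragments with those fields set to None.
        if delta.id is not None:
            call["id"] = delta.id
        if delta.function.name is not None:
            call["name"] = delta.function.name
        if delta.function.arguments:
            call["arguments"] += delta.function.arguments
    return call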