Skip to content

Instantly share code, notes, and snippets.

@thoraxe
Created March 31, 2025 18:33
Show Gist options
  • Save thoraxe/3c3dbe400a562924484bc71bab3709ef to your computer and use it in GitHub Desktop.
inference> I'll help you get a list of all available Prometheus metrics. Let me retrieve that information for you.get_prometheus_metrics(get_prometheus_metrics({})
tool_execution> Tool:get_prometheus_metrics Args:{}
tool_execution> Tool:get_prometheus_metrics Response:Unknown tool `get_prometheus_metrics` was called.
import os
import fire
from llama_stack_client import LlamaStackClient
from llama_stack_client.lib.agents.agent import Agent
from llama_stack_client.lib.agents.client_tool import client_tool
from llama_stack_client.lib.agents.event_logger import EventLogger
from llama_stack_client.types.agent_create_params import AgentConfig
from llama_stack_client.types.toolgroup_register_params import McpEndpoint
from prometheus_api_client import PrometheusConnect
from termcolor import colored
# set up the prometheus client
# Requires PROMETHEUS_URL and PROMETHEUS_TOKEN in the environment; a missing
# variable raises KeyError at import time.
# NOTE(review): TLS verification is disabled (disable_ssl=True) and the token
# is sent as a bearer Authorization header — confirm this matches the target
# Prometheus deployment.
prom = PrometheusConnect(url=os.environ['PROMETHEUS_URL'], disable_ssl=True, headers={"Authorization": f"bearer {os.environ['PROMETHEUS_TOKEN']}"})
@client_tool
def get_prometheus_metrics():
    """Get the list of all of the prometheus metrics available

    :returns: a plaintext list of every metric name known to the connected
        Prometheus instance
    """
    # all_metrics() returns a list of metric-name strings; stringify the list
    # so the agent tool layer receives plain text rather than a Python object.
    output = prom.all_metrics()
    return str(output)
def main(host: str, port: int, user_query: str):
    """Run a single agent turn against a llama-stack server.

    :param host: hostname of the llama-stack server
    :param port: port of the llama-stack server
    :param user_query: the user's question, sent as the one and only turn
    """
    client = LlamaStackClient(
        base_url=f"http://{host}:{port}",
    )

    available_shields = [shield.identifier for shield in client.shields.list()]
    if not available_shields:
        print(colored("No available shields. Disabling safety.", "yellow"))
    else:
        print(f"Available shields found: {available_shields}")

    available_models = [
        model.identifier for model in client.models.list() if model.model_type == "llm"
    ]

    # the model decision logic is basic: bail if nothing is available,
    # otherwise just take the first LLM the server reports
    if not available_models:
        print(colored("No available models. Exiting.", "red"))
        return
    selected_model = available_models[0]
    print(f"Using model: {selected_model}")

    #mcp_ep : McpEndpoint = { 'uri' : "http://localhost:8989/sse" }
    #client.toolgroups.register(
    #    toolgroup_id="mcp::grafana",
    #    provider_id="model-context-protocol",
    #    mcp_endpoint=mcp_ep,
    #)

    client_tools = [get_prometheus_metrics]

    agent_config = AgentConfig(
        model=selected_model,
        instructions="You are a helpful assistant.",
        # "You will be able to understand the request and choose the best tools to use. You are also an expert in Ansible Automation Platform(AAP) and you know how to use mcp:aap_api too to answer questions about a AAP instance",
        sampling_params={
            "strategy": {"type": "top_p", "temperature": 1.0, "top_p": 0.9},
        },
        toolgroups=[],
        # loop variable renamed from `client_tool`, which shadowed the
        # imported `client_tool` decorator
        client_tools=[tool.get_tool_definition() for tool in client_tools],
        tool_choice="auto",
        input_shields=[],  # available_shields if available_shields else [],
        output_shields=[],  # available_shields if available_shields else [],
        enable_session_persistence=False,
    )

    agent = Agent(client, agent_config)
    session_id = agent.create_session("test-session")

    response = agent.create_turn(
        messages=[
            {
                "role": "user",
                "content": user_query,
            }
        ],
        session_id=session_id,
    )

    # stream the turn's events (inference, tool calls, responses) to stdout
    for log in EventLogger().log(response):
        log.print()
# Fire turns main's signature into a CLI: `python <script> HOST PORT "QUERY"`.
if __name__ == "__main__":
    fire.Fire(main)
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment