Created April 10, 2025 08:39
Minimum viable agent using Google ADK
import os

import httpx
from google.adk.agents import Agent
from google.adk.models.lite_llm import LiteLlm
from google.adk.runners import Runner
from google.adk.sessions import InMemorySessionService
from google.genai import types

# TODO: native openai https://github.com/google/adk-python/issues/27
os.environ["OPENAI_API_BASE"] = os.getenv("OPENAI_BASE_URL")


# TODO: major_version: int = 0 https://github.com/google/adk-python/issues/26
def get_latest_elasticsearch_version(major_version: int) -> str:
    """Returns the latest GA version of Elasticsearch in "X.Y.Z" format.

    Args:
        major_version: Major version to filter by (e.g. 7, 8). Defaults to latest.
    """
    response = httpx.get("https://artifacts.elastic.co/releases/stack.json")
    response.raise_for_status()
    releases = response.json()["releases"]

    # Fetch releases and filter out non-release versions (e.g., -rc1) or
    # those not matching major_version. In any case, remove the " GA" suffix.
    versions = []
    for r in releases:
        v = r["version"].removesuffix(" GA")
        if "-" in r["version"]:
            continue
        if major_version and int(v.split(".")[0]) != major_version:
            continue
        versions.append(v)

    if not versions:
        raise ValueError("No valid versions found")

    # "8.9.1" > "8.10.0" as strings, so coerce to a numeric tuple: (8, 9, 1) < (8, 10, 0)
    return max(versions, key=lambda v: tuple(map(int, v.split("."))))


def main():
    # Configure the agent to use our LLM and tools
    app_name = "elasticsearch_version_agent"
    model = os.getenv("CHAT_MODEL", "gpt-4o-mini")
    agent = Agent(name=app_name, model=LiteLlm(model="openai/" + model), tools=[get_latest_elasticsearch_version])

    # Make the runner ephemeral, storing conversational state in memory
    session_service = InMemorySessionService()
    runner = Runner(app_name=app_name, agent=agent, session_service=session_service)
    session = session_service.create_session(app_name=app_name, user_id="unused", session_id="unused")

    # Run the agent, beginning with our user's question
    message = types.Content(
        role="user", parts=[types.Part.from_text(text="What is the latest version of Elasticsearch 8?")]
    )
    events = list(runner.run(new_message=message, user_id=session.user_id, session_id=session.id))

    # For simplicity, assume success and treat the last message as the answer
    print(events[-1].content.parts[0].text)


if __name__ == "__main__":
    main()
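
To sanity-check the tool function without an LLM or network access, here is a minimal pytest-style sketch. It is not part of the gist: the file names (agent.py, test_version_tool.py) and the fake release payload are assumptions, and httpx.get is stubbed with unittest.mock so nothing is fetched from artifacts.elastic.co. It also illustrates why the tool compares versions as numeric tuples rather than strings.

# test_version_tool.py -- hypothetical test file; assumes the gist above is saved as agent.py
from unittest.mock import MagicMock, patch

from agent import get_latest_elasticsearch_version


def test_returns_highest_ga_version_for_major_8():
    # Fake stack.json payload covering the tool's filtering rules
    fake_response = MagicMock()
    fake_response.json.return_value = {
        "releases": [
            {"version": "8.9.1 GA"},     # valid, but not the newest 8.x
            {"version": "8.10.0 GA"},    # numerically newest 8.x
            {"version": "8.11.0-rc1"},   # pre-release, skipped by the "-" check
            {"version": "7.17.20 GA"},   # wrong major version, skipped
        ]
    }
    fake_response.raise_for_status.return_value = None

    # Patch httpx.get as referenced from the agent module, so no request is made
    with patch("agent.httpx.get", return_value=fake_response):
        result = get_latest_elasticsearch_version(8)

    # A plain string comparison would pick "8.9.1" ("9" > "1"); the numeric
    # tuple key in the tool makes (8, 10, 0) win instead.
    assert result == "8.10.0"

With pytest installed, running pytest test_version_tool.py should exercise the filtering and sorting logic in isolation.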