Skip to content

Instantly share code, notes, and snippets.

@dgomesbr
Created April 1, 2026 13:48
Show Gist options
  • Select an option

  • Save dgomesbr/71907d845223eec691a9c84a6b2fcd9d to your computer and use it in GitHub Desktop.

Select an option

Save dgomesbr/71907d845223eec691a9c84a6b2fcd9d to your computer and use it in GitHub Desktop.
feat: AWS Bedrock provider — full patch for issue #33
diff --git a/.env.example b/.env.example
index 44bd34e..7441dc9 100644
--- a/.env.example
+++ b/.env.example
@@ -57,6 +57,15 @@ LLM_API_KEY=sk-xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
# OMICSCLAW_MODEL=qwen2.5:7b
# LLM_BASE_URL=http://localhost:11434/v1
+# --- Option 4: AWS Bedrock (uses AWS credential chain, no API key needed) ---
+# Requires: pip install -e ".[aws]"
+# Uses standard AWS auth: profiles, env vars, or IAM roles.
+# Default model: Claude Sonnet. Default region: us-east-1.
+# LLM_PROVIDER=bedrock
+# AWS_PROFILE=my-profile # Optional: named profile from ~/.aws/credentials
+# AWS_BEDROCK_REGION=us-east-1 # Optional: overrides profile/AWS_DEFAULT_REGION
+# OMICSCLAW_MODEL=anthropic.claude-sonnet-4-20250514-v1:0 # Optional: override model
+
# Override base URL (optional — auto-set by provider)
# LLM_BASE_URL=https://api.deepseek.com/v1
# Per-provider base URL overrides: <PROVIDER>_BASE_URL (e.g. ANTHROPIC_BASE_URL)
diff --git a/bot/core.py b/bot/core.py
index d842ac3..000bf7f 100644
--- a/bot/core.py
+++ b/bot/core.py
@@ -57,7 +57,8 @@ PROVIDER_PRESETS: dict[str, tuple[str, str, str]] = {
"ollama": ("http://localhost:11434/v1", "qwen2.5:7b", ""),
"custom": ("", "", ""),
- # --- Legacy alias (backward compat — same as gemini) ---
+ # --- Tier 4: Cloud-native (AWS SDK auth, no static API key) ---
+ "bedrock": ("", "anthropic.claude-sonnet-4-20250514-v1:0", ""),
}
# Ordered list for auto-detection: when LLM_PROVIDER is not set, we pick the
@@ -65,6 +66,7 @@ PROVIDER_PRESETS: dict[str, tuple[str, str, str]] = {
_PROVIDER_DETECT_ORDER = [
"deepseek", "openai", "anthropic", "gemini", "nvidia",
"siliconflow", "openrouter", "volcengine", "dashscope", "zhipu",
+ "bedrock",
]
@@ -87,12 +89,32 @@ def resolve_provider(
# Auto-detect provider from available API keys
if not provider_key and not api_key:
for p in _PROVIDER_DETECT_ORDER:
+ if p == "bedrock":
+ try:
+ from omicsclaw.core.aws_bedrock import has_aws_credentials
+ if has_aws_credentials():
+ provider_key = "bedrock"
+ break
+ except ImportError:
+ pass
+ continue
env_var = PROVIDER_PRESETS[p][2]
if env_var and os.environ.get(env_var):
provider_key = p
api_key = os.environ[env_var]
break
+ # Bedrock uses SigV4 auth, not API keys
+ if provider_key == "bedrock":
+ try:
+ from omicsclaw.core.aws_bedrock import get_bedrock_region
+ region = get_bedrock_region()
+ except ImportError:
+ region = os.environ.get("AWS_BEDROCK_REGION") or os.environ.get("AWS_DEFAULT_REGION") or "us-east-1"
+ resolved_url = base_url or f"https://bedrock-runtime.{region}.amazonaws.com/v1"
+ resolved_model = model or "anthropic.claude-sonnet-4-20250514-v1:0"
+ return resolved_url, resolved_model, "bedrock-sigv4"
+
# Look up preset
preset = PROVIDER_PRESETS.get(provider_key, ("", "", ""))
preset_url, preset_model, preset_key_env = preset
@@ -640,6 +662,19 @@ def init(
kw: dict = {"api_key": resolved_key or api_key}
if resolved_url:
kw["base_url"] = resolved_url
+
+ # Bedrock: use SigV4-signed HTTP client instead of API key auth
+ if (provider and provider.lower() == "bedrock") or resolved_key == "bedrock-sigv4":
+ LLM_PROVIDER_NAME = "bedrock"
+ kw["api_key"] = "bedrock" # placeholder — SigV4 handles auth
+ try:
+ from omicsclaw.core.aws_bedrock import create_bedrock_http_client
+ kw["http_client"] = create_bedrock_http_client()
+ except ImportError:
+ raise ImportError(
+ "AWS Bedrock requires boto3. Install with: pip install -e '.[aws]'"
+ )
+
llm = AsyncOpenAI(**kw)
logger.info(
diff --git a/bot/onboard.py b/bot/onboard.py
index b33efb5..90cf42d 100644
--- a/bot/onboard.py
+++ b/bot/onboard.py
@@ -63,7 +63,7 @@ def run_onboard():
env_vars = load_env()
# ── 1. LLM Provider ──
- providers = ["deepseek", "openai", "anthropic", "gemini", "siliconflow", "zhipu", "dashscope", "volcengine", "openrouter", "ollama", "custom"]
+ providers = ["deepseek", "openai", "anthropic", "gemini", "bedrock", "siliconflow", "zhipu", "dashscope", "volcengine", "openrouter", "ollama", "custom"]
current_provider = env_vars.get("LLM_PROVIDER", "deepseek")
provider = questionary.select(
@@ -79,7 +79,7 @@ def run_onboard():
env_vars["LLM_PROVIDER"] = provider
# ── 2. LLM API Key ──
- if provider != "ollama":
+ if provider not in ("ollama", "bedrock"):
current_k = env_vars.get("LLM_API_KEY", "")
if not current_k:
specific_k = f"{provider.upper()}_API_KEY"
@@ -105,6 +105,14 @@ def run_onboard():
elif provider == "ollama":
model = questionary.text("Enter Ollama model name (e.g. qwen2.5:7b):", default=env_vars.get("OMICSCLAW_MODEL", "qwen2.5:7b")).ask()
if model: env_vars["OMICSCLAW_MODEL"] = model.strip()
+ elif provider == "bedrock":
+ console.print("[dim]Bedrock uses your AWS credentials (profile, env vars, or IAM role). No API key needed.[/dim]")
+ profile = questionary.text("AWS Profile name (leave blank for default):", default=env_vars.get("AWS_PROFILE", "")).ask()
+ if profile is None: return
+ if profile.strip(): env_vars["AWS_PROFILE"] = profile.strip()
+ region = questionary.text("AWS Region (leave blank for us-east-1):", default=env_vars.get("AWS_BEDROCK_REGION", "us-east-1")).ask()
+ if region is None: return
+ if region.strip(): env_vars["AWS_BEDROCK_REGION"] = region.strip()
# ── 3. Channels Multi-select ──
all_channels = [
diff --git a/omicsclaw/agents/pipeline.py b/omicsclaw/agents/pipeline.py
index 032d817..81354d0 100644
--- a/omicsclaw/agents/pipeline.py
+++ b/omicsclaw/agents/pipeline.py
@@ -384,6 +384,23 @@ class ResearchPipeline:
anthropic_api_key=os.getenv("ANTHROPIC_API_KEY") or api_key or None,
temperature=0.3,
)
+ elif provider == "bedrock":
+ from langchain_aws import ChatBedrockConverse
+ import boto3
+ region = (
+ os.getenv("AWS_BEDROCK_REGION")
+ or os.getenv("AWS_DEFAULT_REGION")
+ or "us-east-1"
+ )
+ session = boto3.Session(
+ profile_name=os.getenv("AWS_PROFILE"),
+ region_name=region,
+ )
+ return ChatBedrockConverse(
+ model=model or "anthropic.claude-sonnet-4-20250514-v1:0",
+ client=session.client("bedrock-runtime"),
+ temperature=0.3,
+ )
else:
# Generic OpenAI-compatible
return SafeChatOpenAI(
diff --git a/omicsclaw/core/aws_bedrock.py b/omicsclaw/core/aws_bedrock.py
new file mode 100644
index 0000000..72ffc36
--- /dev/null
+++ b/omicsclaw/core/aws_bedrock.py
@@ -0,0 +1,62 @@
+"""AWS Bedrock SigV4 authentication for OpenAI-compatible client."""
+from __future__ import annotations
+
+import os
+
+import httpx
+
+
def get_bedrock_region() -> str:
    """Resolve the AWS region to use for Bedrock calls.

    Precedence: the ``AWS_BEDROCK_REGION`` environment variable, then
    ``AWS_DEFAULT_REGION``, then whatever region the botocore config chain
    resolves (active profile, ``~/.aws/config``), and finally the
    ``us-east-1`` fallback.
    """
    for env_name in ("AWS_BEDROCK_REGION", "AWS_DEFAULT_REGION"):
        env_region = os.environ.get(env_name)
        if env_region:
            return env_region
    # Ask botocore's config chain last; tolerate botocore being absent or
    # misconfigured — any failure simply means "no region found".
    try:
        import botocore.session
        profile_region = botocore.session.get_session().get_config_variable("region")
    except Exception:
        profile_region = None
    return profile_region or "us-east-1"
+
+
def has_aws_credentials() -> bool:
    """Return True when some AWS credential source appears to be available.

    Fast path: ``AWS_ACCESS_KEY_ID`` or ``AWS_PROFILE`` present in the
    environment. Slow path: let botocore walk its full provider chain
    (config files, container/instance roles, ...). Any failure — including
    botocore not being installed — counts as "no credentials".
    """
    env = os.environ
    if env.get("AWS_ACCESS_KEY_ID") or env.get("AWS_PROFILE"):
        return True
    try:
        import botocore.session
    except Exception:
        return False
    try:
        return botocore.session.get_session().get_credentials() is not None
    except Exception:
        return False
+
+
class BedrockSigV4Auth(httpx.Auth):
    """httpx Auth that signs requests with AWS SigV4 for Bedrock.

    Attached to an ``httpx.AsyncClient`` so every outgoing request is
    signed via the standard AWS credential chain instead of a static key.
    """

    def __init__(self):
        # boto3 is imported lazily so this module stays importable without
        # the optional "[aws]" extra; constructing the class requires it.
        import boto3
        region = get_bedrock_region()
        # An unset AWS_PROFILE yields None, which boto3 treats as
        # "use the default credential resolution chain".
        self._session = boto3.Session(
            profile_name=os.environ.get("AWS_PROFILE"),
            region_name=region,
        )
        self._region = region

    def auth_flow(self, request: httpx.Request):
        """Sign *request* in place with SigV4, then yield it (single pass)."""
        from botocore.auth import SigV4Auth
        from botocore.awsrequest import AWSRequest

        # Credentials are re-resolved per request so refreshable providers
        # (STS sessions, instance roles) can rotate. NOTE(review): if the
        # chain resolves to None this raises AttributeError — consider a
        # clearer "no AWS credentials found" error.
        creds = self._session.get_credentials().get_frozen_credentials()
        # NOTE(review): request.content assumes a fully-buffered body;
        # streaming uploads would fail here — confirm callers never stream.
        aws_req = AWSRequest(
            method=request.method,
            url=str(request.url),
            data=request.content,
            headers=dict(request.headers),
        )
        # "bedrock" is used as the SigV4 signing-service name — presumably
        # matching bedrock-runtime endpoints; verify against AWS docs.
        SigV4Auth(creds, "bedrock", self._region).add_auth(aws_req)
        # Merge back the headers signing added (Authorization, X-Amz-Date).
        request.headers.update(dict(aws_req.headers))
        yield request
+
+
def create_bedrock_http_client() -> httpx.AsyncClient:
    """Build an async httpx client whose requests are SigV4-signed for Bedrock."""
    signer = BedrockSigV4Auth()
    return httpx.AsyncClient(auth=signer)
diff --git a/omicsclaw/routing/llm_router.py b/omicsclaw/routing/llm_router.py
index b10713f..2e4a6a4 100644
--- a/omicsclaw/routing/llm_router.py
+++ b/omicsclaw/routing/llm_router.py
@@ -42,10 +42,12 @@ except ImportError:
"zhipu": ("https://open.bigmodel.cn/api/paas/v4", "glm-4-flash", "ZHIPU_API_KEY"),
"ollama": ("http://localhost:11434/v1", "qwen2.5:7b", ""),
"custom": ("", "", ""),
+ "bedrock": ("", "anthropic.claude-sonnet-4-20250514-v1:0", ""),
}
_PROVIDER_DETECT_ORDER = [
"deepseek", "openai", "anthropic", "gemini", "nvidia",
"siliconflow", "openrouter", "volcengine", "dashscope", "zhipu",
+ "bedrock",
]
@@ -63,7 +65,16 @@ def _resolve_llm_config() -> Tuple[str, str, str]:
api_key = os.getenv("LLM_API_KEY", "")
# If provider is set, use its preset
- if provider and provider in PROVIDER_PRESETS:
+ if provider == "bedrock":
+ try:
+ from omicsclaw.core.aws_bedrock import get_bedrock_region
+ region = get_bedrock_region()
+ except ImportError:
+ region = os.getenv("AWS_BEDROCK_REGION") or os.getenv("AWS_DEFAULT_REGION") or "us-east-1"
+ base_url = base_url or f"https://bedrock-runtime.{region}.amazonaws.com/v1"
+ model = model or "anthropic.claude-sonnet-4-20250514-v1:0"
+ return "bedrock-sigv4", base_url, model
+ elif provider and provider in PROVIDER_PRESETS:
purl, pmodel, pkey_env = PROVIDER_PRESETS[provider]
base_url = base_url or os.getenv(f"{provider.upper()}_BASE_URL", "") or purl
model = model or pmodel
diff --git a/pyproject.toml b/pyproject.toml
index 107f06c..b2c9659 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -228,6 +228,14 @@ banksy = [
"pybanksy>=1.3.0",
]
+# --------------------------------------------------------------------------- #
+# aws — AWS Bedrock LLM provider (SigV4 auth, profiles, IAM roles) #
+# --------------------------------------------------------------------------- #
+aws = [
+ "boto3>=1.34",
+ "langchain-aws>=0.2.0",
+]
+
# --------------------------------------------------------------------------- #
# Tier 4: memory — persistent bot conversation context (optional) #
# --------------------------------------------------------------------------- #
@@ -284,6 +292,7 @@ research = [
"langchain>=1.2.12",
"langchain-openai>=1.1",
"langchain-anthropic>=1.4.0",
+ "langchain-aws>=0.2.0",
"langgraph-checkpoint-sqlite>=3.0.0",
"tavily-python>=0.7",
"httpx>=0.27",
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment