Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
147 changes: 142 additions & 5 deletions src/strands/experimental/agent_config.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,9 +9,33 @@
agent = config_to_agent("config.json")
# Add tools that need code-based instantiation
agent.tool_registry.process_tools([ToolWithConfigArg(HttpsConnection("localhost"))])

The ``model`` field supports two formats:

**String format (backward compatible — defaults to Bedrock):**
{"model": "us.anthropic.claude-sonnet-4-20250514-v1:0"}

**Object format (supports all providers):**
{
"model": {
"provider": "anthropic",
"model_id": "claude-sonnet-4-20250514",
"max_tokens": 10000,
"client_args": {"api_key": "$ANTHROPIC_API_KEY"}
}
}

Environment variable references (``$VAR`` or ``${VAR}``) in model config values are resolved
automatically before provider instantiation.

Note: The following constructor parameters cannot be specified from JSON because they require
code-based instantiation: ``boto_session`` (Bedrock, SageMaker), ``client`` (OpenAI, Gemini),
``gemini_tools`` (Gemini). Use ``region_name`` / ``client_args`` as JSON-friendly alternatives.
"""

import json
import os
import re
from pathlib import Path
from typing import Any

Expand All @@ -19,7 +43,7 @@
from jsonschema import ValidationError

# JSON Schema for agent configuration
AGENT_CONFIG_SCHEMA = {

Check warning on line 46 in src/strands/experimental/agent_config.py

View workflow job for this annotation

GitHub Actions / check-api

AGENT_CONFIG_SCHEMA

Attribute value was changed: `{'$schema': 'http://json-schema.org/draft-07/schema#', 'title': 'Agent Configuration', 'description': 'Configuration schema for creating agents', 'type': 'object', 'properties': {'name': {'description': 'Name of the agent', 'type': ['string', 'null'], 'default': None}, 'model': {'description': 'The model ID to use for this agent. If not specified, uses the default model.', 'type': ['string', 'null'], 'default': None}, 'prompt': {'description': 'The system prompt for the agent. Provides high level context to the agent.', 'type': ['string', 'null'], 'default': None}, 'tools': {'description': 'List of tools the agent can use. Can be file paths, Python module names, or @tool annotated functions in files.', 'type': 'array', 'items': {'type': 'string'}, 'default': []}}, 'additionalProperties': False}` -> `{'$schema': 'http://json-schema.org/draft-07/schema#', 'title': 'Agent Configuration', 'description': 'Configuration schema for creating agents', 'type': 'object', 'properties': {'name': {'description': 'Name of the agent', 'type': ['string', 'null'], 'default': None}, 'model': {'description': "The model to use for this agent. Can be a string (Bedrock model_id) or an object with a 'provider' field for any supported provider.", 'oneOf': [{'type': 'string'}, {'type': 'null'}, {'type': 'object', 'properties': {'provider': {'description': 'The model provider name', 'type': 'string'}}, 'required': ['provider'], 'additionalProperties': True}], 'default': None}, 'prompt': {'description': 'The system prompt for the agent. Provides high level context to the agent.', 'type': ['string', 'null'], 'default': None}, 'tools': {'description': 'List of tools the agent can use. Can be file paths, Python module names, or @tool annotated functions in files.', 'type': 'array', 'items': {'type': 'string'}, 'default': []}}, 'additionalProperties': False}`
"$schema": "http://json-schema.org/draft-07/schema#",
"title": "Agent Configuration",
"description": "Configuration schema for creating agents",
Expand All @@ -27,8 +51,25 @@
"properties": {
"name": {"description": "Name of the agent", "type": ["string", "null"], "default": None},
"model": {
"description": "The model ID to use for this agent. If not specified, uses the default model.",
"type": ["string", "null"],
"description": (
"The model to use for this agent. Can be a string (Bedrock model_id) "
"or an object with a 'provider' field for any supported provider."
),
"oneOf": [
{"type": "string"},
{"type": "null"},
{
"type": "object",
"properties": {
"provider": {
"description": "The model provider name",
"type": "string",
}
},
"required": ["provider"],
"additionalProperties": True,
},
],
"default": None,
},
"prompt": {
Expand All @@ -50,6 +91,87 @@
# Pre-compile validator for better performance
_VALIDATOR = jsonschema.Draft7Validator(AGENT_CONFIG_SCHEMA)

# Pattern for matching environment variable references.
# NOTE: only full-string references are resolved — a value that is exactly
# "$VAR" or "${VAR}". Inline interpolation ("prefix-$VAR-suffix") is
# intentionally NOT supported; such strings pass through unchanged.
_ENV_VAR_PATTERN = re.compile(r"^\$\{([^}]+)\}$|^\$([A-Za-z_][A-Za-z0-9_]*)$")
Copy link
Copy Markdown

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Issue: The regex ^\$\{([^}]+)\}$|^\$([A-Za-z_][A-Za-z0-9_]*)$ only matches full-string env var references (anchored with ^ and $). This means "prefix-$VAR-suffix" won't be resolved, which may surprise users coming from shell-like environments.

Suggestion: This is a reasonable design choice for security and simplicity, but it should be explicitly documented — either in the module docstring or as a comment near the pattern. Something like:

# Only full-string env var references are resolved (no inline interpolation).
# "prefix-$VAR" is NOT resolved; use the object format to construct values programmatically.


# Provider name -> model class name. The class name (not the class object) is
# stored so it can be resolved lazily through strands.models' __getattr__,
# which keeps optional provider dependencies unimported until requested.
PROVIDER_MAP: dict[str, str] = {
    "bedrock": "BedrockModel",
    "anthropic": "AnthropicModel",
    "openai": "OpenAIModel",
    "gemini": "GeminiModel",
    "ollama": "OllamaModel",
    "litellm": "LiteLLMModel",
    "mistral": "MistralModel",
    "llamaapi": "LlamaAPIModel",
    "llamacpp": "LlamaCppModel",
    "sagemaker": "SageMakerAIModel",
    "writer": "WriterModel",
    "openai_responses": "OpenAIResponsesModel",
}


def _resolve_env_vars(value: Any) -> Any:
    """Recursively resolve environment variable references in config values.

    A string that consists entirely of ``$VAR_NAME`` or ``${VAR_NAME}`` is
    replaced by the corresponding environment variable's value. Dicts and
    lists are traversed recursively; every other value passes through as-is.

    Args:
        value: The value to resolve (string, dict, list, or any other type).

    Returns:
        The value with environment variable references substituted.

    Raises:
        ValueError: If a referenced environment variable is not set.
    """
    # Containers first: recurse into values (dict keys are left untouched).
    if isinstance(value, dict):
        return {key: _resolve_env_vars(item) for key, item in value.items()}
    if isinstance(value, list):
        return [_resolve_env_vars(item) for item in value]
    if not isinstance(value, str):
        return value

    ref = _ENV_VAR_PATTERN.match(value)
    if ref is None:
        # Not a full-string env var reference — leave the string unchanged.
        return value
    var_name = ref.group(1) or ref.group(2)
    resolved = os.environ.get(var_name)
    if resolved is None:
        raise ValueError(f"Environment variable '{var_name}' is not set")
    return resolved


def _create_model_from_dict(model_config: dict[str, Any]) -> Any:
    """Create a Model instance from a provider config dict.

    Looks up the model class named in ``PROVIDER_MAP`` for the config's
    ``provider`` field and delegates construction to that class's
    ``from_dict``. The ``strands.models`` import is deferred so a provider's
    optional dependencies are only loaded when that provider is requested.

    Args:
        model_config: Dict containing at least a ``provider`` key and provider-specific params.

    Returns:
        A configured Model instance for the specified provider.

    Raises:
        ValueError: If the provider name is not recognized.
        ImportError: If the provider's optional dependencies are not installed.
    """
    # Copy before popping so the caller's dict is left untouched.
    remaining = model_config.copy()
    provider = remaining.pop("provider")

    if (class_name := PROVIDER_MAP.get(provider)) is None:
        supported = ", ".join(sorted(PROVIDER_MAP))
        raise ValueError(f"Unknown model provider: '{provider}'. Supported providers: {supported}")

    from .. import models

    return getattr(models, class_name).from_dict(remaining)


def config_to_agent(config: str | dict[str, Any], **kwargs: dict[str, Any]) -> Any:
Copy link
Copy Markdown

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Issue: The type annotation for **kwargs is dict[str, Any], but **kwargs already captures keyword arguments as a dict — the annotation should be the value type, not the full dict type.

Suggestion:

def config_to_agent(config: str | dict[str, Any], **kwargs: Any) -> Any:

"""Create an Agent from a configuration file or dictionary.
Expand Down Expand Up @@ -83,6 +205,12 @@
Create agent from dictionary:
>>> config = {"model": "anthropic.claude-3-5-sonnet-20241022-v2:0", "tools": ["calculator"]}
>>> agent = config_to_agent(config)

Create agent with object model config:
>>> config = {
... "model": {"provider": "openai", "model_id": "gpt-4o", "client_args": {"api_key": "$OPENAI_API_KEY"}}
... }
>>> agent = config_to_agent(config)
"""
# Parse configuration
if isinstance(config, str):
Expand Down Expand Up @@ -114,11 +242,20 @@
raise ValueError(f"Configuration validation error at {error_path}: {e.message}") from e

# Prepare Agent constructor arguments
agent_kwargs = {}
agent_kwargs: dict[str, Any] = {}

# Handle model field — string vs object format
model_value = config_dict.get("model")
if isinstance(model_value, dict):
# Object format: resolve env vars and create Model instance via factory
resolved_config = _resolve_env_vars(model_value)
agent_kwargs["model"] = _create_model_from_dict(resolved_config)
elif model_value is not None:
# String format (backward compat): pass directly as model_id to Agent
agent_kwargs["model"] = model_value

# Map configuration keys to Agent constructor parameters
# Map remaining configuration keys to Agent constructor parameters
config_mapping = {
"model": "model",
"prompt": "system_prompt",
"tools": "tools",
"name": "name",
Expand Down
26 changes: 26 additions & 0 deletions src/strands/models/bedrock.py
Original file line number Diff line number Diff line change
Expand Up @@ -127,6 +127,32 @@ class BedrockConfig(TypedDict, total=False):
temperature: float | None
top_p: float | None

@classmethod
def from_dict(cls, config: dict[str, Any]) -> "BedrockModel":
    """Create a BedrockModel from a configuration dictionary.

    Extracts ``region_name`` and ``endpoint_url`` as constructor parameters
    and converts ``boto_client_config`` from a plain dict to
    ``botocore.config.Config``. The input dict is not modified.

    Args:
        config: Model configuration dictionary.

    Returns:
        A configured BedrockModel instance.
    """
    # Work on a copy so the pops below never mutate the caller's dict.
    config = dict(config)
    kwargs: dict[str, Any] = {}

    if "region_name" in config:
        kwargs["region_name"] = config.pop("region_name")
    if "endpoint_url" in config:
        kwargs["endpoint_url"] = config.pop("endpoint_url")
    if "boto_client_config" in config:
        raw = config.pop("boto_client_config")
        # Accept either a ready-made botocore Config or a JSON-friendly dict.
        kwargs["boto_client_config"] = BotocoreConfig(**raw) if isinstance(raw, dict) else raw

    # Remaining keys (model_id, temperature, ...) pass through to the constructor.
    kwargs.update(config)
    return cls(**kwargs)

def __init__(
self,
*,
Expand Down
20 changes: 20 additions & 0 deletions src/strands/models/llamacpp.py
Original file line number Diff line number Diff line change
Expand Up @@ -131,6 +131,26 @@ class LlamaCppConfig(TypedDict, total=False):
model_id: str
params: dict[str, Any] | None

@classmethod
def from_dict(cls, config: dict[str, Any]) -> "LlamaCppModel":
    """Create a LlamaCppModel from a configuration dictionary.

    Extracts ``base_url`` and ``timeout`` as separate constructor parameters;
    all remaining keys are forwarded as model config. The input dict is not
    modified.

    Args:
        config: Model configuration dictionary.

    Returns:
        A configured LlamaCppModel instance.
    """
    # Work on a copy so the pops below never mutate the caller's dict.
    config = dict(config)
    kwargs: dict[str, Any] = {}
    for param in ("base_url", "timeout"):
        # Only forward when present so constructor defaults still apply.
        if param in config:
            kwargs[param] = config.pop(param)
    kwargs.update(config)
    return cls(**kwargs)

def __init__(
self,
base_url: str = "http://localhost:8080",
Expand Down
22 changes: 22 additions & 0 deletions src/strands/models/mistral.py
Original file line number Diff line number Diff line change
Expand Up @@ -53,6 +53,28 @@ class MistralConfig(TypedDict, total=False):
top_p: float | None
stream: bool | None

@classmethod
def from_dict(cls, config: dict[str, Any]) -> "MistralModel":
    """Create a MistralModel from a configuration dictionary.

    Extracts ``api_key`` and ``client_args`` as separate constructor
    parameters. The input dict is not modified.

    Args:
        config: Model configuration dictionary.

    Returns:
        A configured MistralModel instance.
    """
    # Work on a copy so the pops below never mutate the caller's dict.
    config = dict(config)
    kwargs: dict[str, Any] = {}
    # api_key / client_args are constructor params, not model config; only
    # forward them when present so constructor defaults still apply.
    api_key = config.pop("api_key", None)
    if api_key is not None:
        kwargs["api_key"] = api_key
    client_args = config.pop("client_args", None)
    if client_args is not None:
        kwargs["client_args"] = client_args
    kwargs.update(config)
    return cls(**kwargs)

def __init__(
self,
api_key: str | None = None,
Expand Down
23 changes: 23 additions & 0 deletions src/strands/models/model.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,7 @@
"""Abstract base class for Agent model providers."""

from __future__ import annotations

import abc
import logging
from collections.abc import AsyncGenerator, AsyncIterable
Expand Down Expand Up @@ -51,6 +53,27 @@ def stateful(self) -> bool:
"""
return False

@classmethod
def from_dict(cls, config: dict[str, Any]) -> Model:
    """Create a Model instance from a configuration dictionary.

    The default implementation extracts ``client_args`` (if present) and passes
    all remaining keys as keyword arguments to the constructor. Subclasses with
    non-standard constructor signatures should override this method. The input
    dict is not modified.

    Args:
        config: Provider-specific configuration dictionary.

    Returns:
        A configured Model instance.
    """
    # Work on a copy so the pop below never mutates the caller's dict.
    config = dict(config)
    kwargs: dict[str, Any] = {}
    client_args = config.pop("client_args", None)
    # Only forward client_args when present so constructor defaults apply.
    if client_args is not None:
        kwargs["client_args"] = client_args
    kwargs.update(config)
    return cls(**kwargs)

@abc.abstractmethod
# pragma: no cover
def update_config(self, **model_config: Any) -> None:
Expand Down
21 changes: 21 additions & 0 deletions src/strands/models/ollama.py
Original file line number Diff line number Diff line change
Expand Up @@ -56,6 +56,27 @@ class OllamaConfig(TypedDict, total=False):
temperature: float | None
top_p: float | None

@classmethod
def from_dict(cls, config: dict[str, Any]) -> "OllamaModel":
    """Create an OllamaModel from a configuration dictionary.

    Extracts ``host`` as a positional argument and maps ``client_args`` to the
    ``ollama_client_args`` constructor parameter. The input dict is not
    modified.

    Args:
        config: Model configuration dictionary.

    Returns:
        A configured OllamaModel instance.
    """
    # Work on a copy so the pops below never mutate the caller's dict.
    config = dict(config)
    host = config.pop("host", None)
    client_args = config.pop("client_args", None)
    kwargs: dict[str, Any] = {}
    if client_args is not None:
        # JSON key "client_args" maps to the ollama_client_args parameter.
        kwargs["ollama_client_args"] = client_args
    kwargs.update(config)
    return cls(host, **kwargs)

def __init__(
self,
host: str | None,
Expand Down
21 changes: 21 additions & 0 deletions src/strands/models/sagemaker.py
Original file line number Diff line number Diff line change
Expand Up @@ -133,6 +133,27 @@ class SageMakerAIEndpointConfig(TypedDict, total=False):
target_variant: str | None | None
additional_args: dict[str, Any] | None

@classmethod
def from_dict(cls, config: dict[str, Any]) -> "SageMakerAIModel":
    """Create a SageMakerAIModel from a configuration dictionary.

    Extracts ``endpoint_config`` and ``payload_config`` and converts
    ``boto_client_config`` from a plain dict to ``botocore.config.Config``.
    The input dict is not modified.

    Args:
        config: Model configuration dictionary.

    Returns:
        A configured SageMakerAIModel instance.
    """
    # Work on a copy so the pops below never mutate the caller's dict.
    config = dict(config)
    kwargs: dict[str, Any] = {}
    kwargs["endpoint_config"] = config.pop("endpoint_config", {})
    kwargs["payload_config"] = config.pop("payload_config", {})
    if "boto_client_config" in config:
        raw = config.pop("boto_client_config")
        # Accept either a ready-made botocore Config or a JSON-friendly dict.
        kwargs["boto_client_config"] = BotocoreConfig(**raw) if isinstance(raw, dict) else raw
    # Forward any remaining keys (e.g. region_name) instead of silently
    # dropping them — consistent with the other providers' from_dict methods.
    kwargs.update(config)
    return cls(**kwargs)

def __init__(
self,
endpoint_config: SageMakerAIEndpointConfig,
Expand Down
Loading
Loading