From 7f5d38632063c1ec38a10a96925e9a5f6a253e3f Mon Sep 17 00:00:00 2001 From: Mackenzie Zastrow Date: Tue, 15 Jul 2025 11:04:25 -0400 Subject: [PATCH] Update docs to reference 4.0 as the default model To guide them towards the new default model --- docs/examples/deploy_to_eks/README.md | 2 +- .../model-providers/amazon-bedrock.md | 28 +++++++++---------- .../concepts/model-providers/anthropic.md | 6 ++-- .../observability-evaluation/evaluation.md | 8 +++--- .../observability-evaluation/logs.md | 2 +- .../observability-evaluation/traces.md | 8 +++--- docs/user-guide/quickstart.md | 14 +++++----- 7 files changed, 34 insertions(+), 34 deletions(-) diff --git a/docs/examples/deploy_to_eks/README.md b/docs/examples/deploy_to_eks/README.md index f91a78a6..c519ec81 100644 --- a/docs/examples/deploy_to_eks/README.md +++ b/docs/examples/deploy_to_eks/README.md @@ -17,7 +17,7 @@ The example deploys a weather forecaster application that runs as a containerize - Either: - [Podman](https://podman.io/) installed and running - (or) [Docker](https://www.docker.com/) installed and running -- Amazon Bedrock Anthropic Claude 3.7 model enabled in your AWS environment +- Amazon Bedrock Anthropic Claude 4 model enabled in your AWS environment You'll need to enable model access in the Amazon Bedrock console following the [AWS documentation](https://docs.aws.amazon.com/bedrock/latest/userguide/model-access-modify.html) ## Project Structure diff --git a/docs/user-guide/concepts/model-providers/amazon-bedrock.md b/docs/user-guide/concepts/model-providers/amazon-bedrock.md index 5a4b761a..febc60f7 100644 --- a/docs/user-guide/concepts/model-providers/amazon-bedrock.md +++ b/docs/user-guide/concepts/model-providers/amazon-bedrock.md @@ -100,7 +100,7 @@ session = boto3.Session( # Create a Bedrock model with the custom session bedrock_model = BedrockModel( - model_id="us.anthropic.claude-3-7-sonnet-20250219-v1:0", + model_id="anthropic.claude-sonnet-4-20250514-v1:0", boto_session=session ) ``` @@ -109,7 +109,7 @@ For complete details on credential configuration and resolution, see the [boto3 ## Basic Usage -The [`BedrockModel`](../../../api-reference/models.md#strands.models.bedrock) provider is used by default when creating a basic Agent, and uses the [Claude 3.7 Sonnet](https://docs.aws.amazon.com/bedrock/latest/userguide/model-parameters-anthropic-claude-37.html) model by default. This basic example creates an agent using this default setup: +The [`BedrockModel`](../../../api-reference/models.md#strands.models.bedrock) provider is used by default when creating a basic Agent, and uses the [Claude 4 Sonnet](https://aws.amazon.com/blogs/aws/claude-opus-4-anthropics-most-powerful-model-for-coding-is-now-in-amazon-bedrock/) model by default. 
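If you want to confirm exactly which default model identifier your installed version resolves to, you can inspect the agent's model config at runtime. This is a minimal check, assuming AWS credentials and Bedrock model access are already configured; the default ID may differ between SDK releases:

```python
from strands import Agent

# Create an agent without specifying a model so it falls back to the default BedrockModel
agent = Agent()

# Inspect the resolved configuration rather than assuming a hard-coded model ID
print(agent.model.config)
```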
This basic example creates an agent using this default setup: ```python from strands import Agent @@ -125,7 +125,7 @@ You can specify which Bedrock model to use by passing in the model ID string dir from strands import Agent # Create an agent with a specific model by passing the model ID string -agent = Agent(model="us.anthropic.claude-3-7-sonnet-20250219-v1:0") +agent = Agent(model="anthropic.claude-sonnet-4-20250514-v1:0") response = agent("Tell me about Amazon Bedrock.") ``` @@ -156,7 +156,7 @@ The [`BedrockModel`](../../../api-reference/models.md#strands.models.bedrock) su | Parameter | Description | Default | | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------- | -| [`model_id`](https://docs.aws.amazon.com/bedrock/latest/userguide/models-supported.html) | The Bedrock model identifier | "us.anthropic.claude-3-7-sonnet-20250219-v1:0" | +| [`model_id`](https://docs.aws.amazon.com/bedrock/latest/userguide/models-supported.html) | The Bedrock model identifier | "anthropic.claude-sonnet-4-20250514-v1:0" | | [`boto_session`](https://boto3.amazonaws.com/v1/documentation/api/latest/reference/core/session.html) | Boto Session to use when creating the Boto3 Bedrock Client | Boto Session with region: "us-west-2" | | [`boto_client_config`](https://botocore.amazonaws.com/v1/documentation/api/latest/reference/config.html) | Botocore Configuration used when creating the Boto3 Bedrock Client | - | | [`region_name`](https://docs.aws.amazon.com/general/latest/gr/bedrock.html) | AWS region to use for the Bedrock service | "us-west-2" | @@ -195,7 +195,7 @@ boto_config = BotocoreConfig( # Create a configured Bedrock model bedrock_model = BedrockModel( - model_id="us.anthropic.claude-3-7-sonnet-20250219-v1:0", + model_id="anthropic.claude-sonnet-4-20250514-v1:0", region_name="us-east-1", # Specify a different region than the default temperature=0.3, top_p=0.8, @@ -220,7 +220,7 @@ in order to use these models. 
Both modes provide the same event structure and fu ```python # Streaming model (default) streaming_model = BedrockModel( - model_id="us.anthropic.claude-3-7-sonnet-20250219-v1:0", + model_id="anthropic.claude-sonnet-4-20250514-v1:0", streaming=True, # This is the default ) @@ -243,7 +243,7 @@ from strands.models import BedrockModel # Create a Bedrock model that supports multimodal inputs bedrock_model = BedrockModel( - model_id="us.anthropic.claude-3-7-sonnet-20250219-v1:0" + model_id="anthropic.claude-sonnet-4-20250514-v1:0" ) agent = Agent(model=bedrock_model) @@ -278,7 +278,7 @@ from strands.models import BedrockModel # Using guardrails with BedrockModel bedrock_model = BedrockModel( - model_id="us.anthropic.claude-3-7-sonnet-20250219-v1:0", + model_id="anthropic.claude-sonnet-4-20250514-v1:0", guardrail_id="your-guardrail-id", guardrail_version="DRAFT", guardrail_trace="enabled", # Options: "enabled", "disabled", "enabled_full" @@ -320,7 +320,7 @@ from strands.models import BedrockModel # Using system prompt caching with BedrockModel bedrock_model = BedrockModel( - model_id="us.anthropic.claude-3-7-sonnet-20250219-v1:0", + model_id="anthropic.claude-sonnet-4-20250514-v1:0", cache_prompt="default" ) @@ -350,7 +350,7 @@ from strands_tools import calculator, current_time # Using tool caching with BedrockModel bedrock_model = BedrockModel( - model_id="us.anthropic.claude-3-7-sonnet-20250219-v1:0", + model_id="anthropic.claude-sonnet-4-20250514-v1:0", cache_tools="default" ) @@ -426,7 +426,7 @@ You can update the model configuration during runtime: ```python # Create the model with initial configuration bedrock_model = BedrockModel( - model_id="us.anthropic.claude-3-7-sonnet-20250219-v1:0", + model_id="anthropic.claude-sonnet-4-20250514-v1:0", temperature=0.7 ) @@ -478,7 +478,7 @@ from strands.models import BedrockModel # Create a Bedrock model with reasoning configuration bedrock_model = BedrockModel( - model_id="us.anthropic.claude-3-7-sonnet-20250219-v1:0", + model_id="anthropic.claude-sonnet-4-20250514-v1:0", additional_request_fields={ "thinking": { "type": "enabled", @@ -555,13 +555,13 @@ This typically indicates that the model requires Cross-Region Inference, as docu Instead of: ``` -anthropic.claude-3-7-sonnet-20250219-v1:0 +anthropic.claude-sonnet-4-20250514-v1:0 ``` Use: ``` -us.anthropic.claude-3-7-sonnet-20250219-v1:0 +us.anthropic.claude-sonnet-4-20250514-v1:0 ``` ## Related Resources diff --git a/docs/user-guide/concepts/model-providers/anthropic.md b/docs/user-guide/concepts/model-providers/anthropic.md index 6ca64c2f..47175c23 100644 --- a/docs/user-guide/concepts/model-providers/anthropic.md +++ b/docs/user-guide/concepts/model-providers/anthropic.md @@ -25,7 +25,7 @@ model = AnthropicModel( }, # **model_config max_tokens=1028, - model_id="claude-3-7-sonnet-20250219", + model_id="claude-sonnet-4-20250514", params={ "temperature": 0.7, } @@ -49,7 +49,7 @@ The `model_config` configures the underlying model selected for inference. 
The supported configurations are:

| Parameter | Description | Example | Options |
|------------|-------------|---------|---------|
| `max_tokens` | Maximum number of tokens to generate before stopping | `1028` | [reference](https://docs.anthropic.com/en/api/messages#body-max-tokens)
-| `model_id` | ID of a model to use | `claude-3-7-sonnet-20250219` | [reference](https://docs.anthropic.com/en/api/messages#body-model)
+| `model_id` | ID of a model to use | `claude-sonnet-4-20250514` | [reference](https://docs.anthropic.com/en/api/messages#body-model)
| `params` | Model specific parameters | `{"max_tokens": 1000, "temperature": 0.7}` | [reference](https://docs.anthropic.com/en/api/messages)

## Troubleshooting
@@ -82,7 +82,7 @@ model = AnthropicModel(
        "api_key": "",
    },
    max_tokens=1028,
-    model_id="claude-3-7-sonnet-20250219",
+    model_id="claude-sonnet-4-20250514",
    params={
        "temperature": 0.7,
    }
diff --git a/docs/user-guide/observability-evaluation/evaluation.md b/docs/user-guide/observability-evaluation/evaluation.md
index 503c2206..5f66d8b5 100644
--- a/docs/user-guide/observability-evaluation/evaluation.md
+++ b/docs/user-guide/observability-evaluation/evaluation.md
@@ -68,7 +68,7 @@ from strands_tools import calculator

# Create agent with specific configuration
agent = Agent(
-    model="us.anthropic.claude-3-7-sonnet-20250219-v1:0",
+    model="us.anthropic.claude-sonnet-4-20250514-v1:0",
    system_prompt="You are a helpful assistant specialized in data analysis.",
    tools=[calculator]
)
@@ -94,7 +94,7 @@ with open("test_cases.json", "r") as f:
    test_cases = json.load(f)

# Create agent
-agent = Agent(model="us.anthropic.claude-3-7-sonnet-20250219-v1:0")
+agent = Agent(model="us.anthropic.claude-sonnet-4-20250514-v1:0")

# Run tests and collect results
results = []
@@ -138,7 +138,7 @@ agent = Agent(model="anthropic.claude-3-5-sonnet-20241022-v2:0")

# Create an evaluator agent with a stronger model
evaluator = Agent(
-    model="us.anthropic.claude-3-7-sonnet-20250219-v1:0",
+    model="us.anthropic.claude-sonnet-4-20250514-v1:0",
    system_prompt="""
    You are an expert AI evaluator. Your job is to assess the quality of AI responses based on:
    1. Accuracy - factual correctness of the response
@@ -199,7 +199,7 @@ from strands import Agent
from strands_tools import calculator, file_read, current_time
# Create agent with multiple tools
agent = Agent(
-    model="us.anthropic.claude-3-7-sonnet-20250219-v1:0",
+    model="us.anthropic.claude-sonnet-4-20250514-v1:0",
    tools=[calculator, file_read, current_time],
    record_direct_tool_call = True
)
diff --git a/docs/user-guide/observability-evaluation/logs.md b/docs/user-guide/observability-evaluation/logs.md
index caa903f1..2607744c 100644
--- a/docs/user-guide/observability-evaluation/logs.md
+++ b/docs/user-guide/observability-evaluation/logs.md
@@ -99,7 +99,7 @@ DEBUG | strands.event_loop.error_handler | message_index=<5> | found message wit

Logs related to interactions with foundation models:

```
-DEBUG | strands.models.bedrock | config=<{'model_id': 'anthropic.claude-3-7-sonnet-20250219-v1:0'}> | initializing
+DEBUG | strands.models.bedrock | config=<{'model_id': 'us.anthropic.claude-sonnet-4-20250514-v1:0'}> | initializing
WARNING | strands.models.bedrock | bedrock threw context window overflow error
DEBUG | strands.models.bedrock | Found blocked output guardrail. Redacting output.
``` diff --git a/docs/user-guide/observability-evaluation/traces.md b/docs/user-guide/observability-evaluation/traces.md index c8ff378c..45e6baaf 100644 --- a/docs/user-guide/observability-evaluation/traces.md +++ b/docs/user-guide/observability-evaluation/traces.md @@ -106,7 +106,7 @@ from strands import Agent # Option 1: Skip StrandsTelemetry if global tracer provider and/or meter provider are already configured # (your existing OpenTelemetry setup will be used automatically) agent = Agent( - model="us.anthropic.claude-3-7-sonnet-20250219-v1:0", + model="us.anthropic.claude-sonnet-4-20250514-v1:0", system_prompt="You are a helpful AI assistant" ) @@ -131,7 +131,7 @@ strands_telemetry.setup_otlp_exporter().setup_console_exporter() # Chaining sup # Create agent (tracing will be enabled automatically) agent = Agent( - model="us.anthropic.claude-3-7-sonnet-20250219-v1:0", + model="us.anthropic.claude-sonnet-4-20250514-v1:0", system_prompt="You are a helpful AI assistant" ) @@ -200,7 +200,7 @@ Strands traces include rich attributes that provide context for each operation: | `gen_ai.agent.name` | Name of the agent | | `gen_ai.user.message` | Formatted prompt sent to the model | | `gen_ai.assistant.message` | Formatted assistant prompt sent to the model | -| `gen_ai.request.model` | Model ID (e.g., "us.anthropic.claude-3-7-sonnet-20250219-v1:0") | +| `gen_ai.request.model` | Model ID (e.g., "us.anthropic.claude-sonnet-4-20250514-v1:0") | | `gen_ai.event.start_time` | When model invocation began | | `gen_ai.event.end_time` | When model invocation completed | | `gen_ai.choice` | Response from the model (may include tool calls) | @@ -340,7 +340,7 @@ strands_telemetry.setup_console_exporter() # Print traces to console # Create agent agent = Agent( - model="us.anthropic.claude-3-7-sonnet-20250219-v1:0", + model="us.anthropic.claude-sonnet-4-20250514-v1:0", system_prompt="You are a helpful AI assistant" ) diff --git a/docs/user-guide/quickstart.md b/docs/user-guide/quickstart.md index 011ce92a..8e1f870c 100644 --- a/docs/user-guide/quickstart.md +++ b/docs/user-guide/quickstart.md @@ -35,15 +35,15 @@ pip install strands-agents-tools strands-agents-builder ## Configuring Credentials -Strands supports many different model providers. By default, agents use the Amazon Bedrock model provider with the Claude 3.7 model. +Strands supports many different model providers. By default, agents use the Amazon Bedrock model provider with the Claude 4 model. -To use the examples in this guide, you'll need to configure your environment with AWS credentials that have permissions to invoke the Claude 3.7 model. You can set up your credentials in several ways: +To use the examples in this guide, you'll need to configure your environment with AWS credentials that have permissions to invoke the Claude 4 model. You can set up your credentials in several ways: 1. **Environment variables**: Set `AWS_ACCESS_KEY_ID`, `AWS_SECRET_ACCESS_KEY`, and optionally `AWS_SESSION_TOKEN` 2. **AWS credentials file**: Configure credentials using `aws configure` CLI command 3. **IAM roles**: If running on AWS services like EC2, ECS, or Lambda, use IAM roles -Make sure your AWS credentials have the necessary permissions to access Amazon Bedrock and invoke the Claude 3.7 model. You'll need to enable model access in the Amazon Bedrock console following the [AWS documentation](https://docs.aws.amazon.com/bedrock/latest/userguide/model-access-modify.html). 
+Make sure your AWS credentials have the necessary permissions to access Amazon Bedrock and invoke the Claude 4 model. You'll need to enable model access in the Amazon Bedrock console following the [AWS documentation](https://docs.aws.amazon.com/bedrock/latest/userguide/model-access-modify.html). ## Project Setup @@ -170,7 +170,7 @@ agent("Hello!") ### Identifying a configured model -Strands defaults to the Bedrock model provider using Claude 3.7 Sonnet. The model your agent is using can be retrieved by accessing [`model.config`](../api-reference/models.md#strands.models.model.Model.get_config): +Strands defaults to the Bedrock model provider using Claude 4 Sonnet. The model your agent is using can be retrieved by accessing [`model.config`](../api-reference/models.md#strands.models.model.Model.get_config): ```python from strands import Agent @@ -178,7 +178,7 @@ from strands import Agent agent = Agent() print(agent.model.config) -# {'model_id': 'us.anthropic.claude-3-7-sonnet-20250219-v1:0'} +# {'model_id': 'us.anthropic.claude-sonnet-4-20250514-v1:0'} ``` You can specify a different model in two ways: @@ -194,7 +194,7 @@ The simplest way to specify a model is to pass the model ID string directly: from strands import Agent # Create an agent with a specific model by passing the model ID string -agent = Agent(model="us.anthropic.claude-3-7-sonnet-20250219-v1:0") +agent = Agent(model="anthropic.claude-sonnet-4-20250514-v1:0") ``` ### Amazon Bedrock (Default) @@ -208,7 +208,7 @@ from strands.models import BedrockModel # Create a BedrockModel bedrock_model = BedrockModel( - model_id="us.anthropic.claude-3-7-sonnet-20250219-v1:0", + model_id="anthropic.claude-sonnet-4-20250514-v1:0", region_name="us-west-2", temperature=0.3, )
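# The lines below are a minimal sketch of wiring the configured model into an
# agent, mirroring the Agent(model=...) usage shown earlier in this guide; they
# assume the AWS credentials and Bedrock model access described above are in place.
agent = Agent(model=bedrock_model)

# Inspect the resolved configuration rather than assuming a hard-coded model ID
print(agent.model.config)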