id | str | "anthropic.claude-3-5-sonnet-20241022-v2:0" | The id of the AWS Bedrock Claude model to use |
name | str | "BedrockClaude" | The name of the model |
provider | str | "AWS" | The provider of the model |
max_tokens | Optional[int] | 4096 | Maximum number of tokens to generate in the chat completion |
thinking | Optional[Dict[str, Any]] | None | Configuration for the thinking (reasoning) process |
temperature | Optional[float] | None | Controls randomness in the model’s output |
stop_sequences | Optional[List[str]] | None | A list of strings that, when encountered, will cause the model to stop generating further text |
top_p | Optional[float] | None | Controls diversity via nucleus sampling |
top_k | Optional[int] | None | Controls diversity via top-k sampling |
cache_system_prompt | Optional[bool] | False | Whether to cache the system prompt for improved performance |
extended_cache_time | Optional[bool] | False | Whether to use extended cache time (1 hour instead of the default) |
request_params | Optional[Dict[str, Any]] | None | Additional parameters to include in the request |
aws_region | Optional[str] | None | The AWS region to use (defaults to AWS_REGION env var) |
aws_access_key_id | Optional[str] | None | AWS access key ID (defaults to AWS_ACCESS_KEY_ID env var) |
aws_secret_access_key | Optional[str] | None | AWS secret access key (defaults to AWS_SECRET_ACCESS_KEY env var) |
aws_session_token | Optional[str] | None | AWS session token (defaults to AWS_SESSION_TOKEN env var) |
aws_profile | Optional[str] | None | AWS profile to use (defaults to AWS_PROFILE env var) |
client_params | Optional[Dict[str, Any]] | None | Additional parameters for client configuration |