oai.bedrock
Create a compatible client for the Amazon Bedrock Converse API.
Example usage:
Install the boto3 package by running pip install --upgrade boto3.
import autogen
config_list = [
    {
        "api_type": "bedrock",
        "model": "meta.llama3-1-8b-instruct-v1:0",
        "aws_region": "us-west-2",
        "aws_access_key": "",
        "aws_secret_key": "",
        "price": [0.003, 0.015],
    }
]
assistant = autogen.AssistantAgent("assistant", llm_config={"config_list": config_list})
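The config can then be used like any other AutoGen LLM configuration. As an illustrative follow-up sketch (the agent name, settings, and task below are arbitrary and not part of the client itself):

user_proxy = autogen.UserProxyAgent(
    "user_proxy",
    human_input_mode="NEVER",
    code_execution_config=False,
)
user_proxy.initiate_chat(assistant, message="Count to three.")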
BedrockClient
class BedrockClient()
Client for Amazon's Bedrock Converse API.
__init__
def __init__(**kwargs: Any)
Initialises BedrockClient for Amazon's Bedrock Converse API.
message_retrieval
def message_retrieval(response)
Retrieve the messages from the response.
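As a sketch, assuming an OpenAI-style ChatCompletion response object (the helper name below is illustrative, not the client's actual code):

def message_retrieval_sketch(response) -> list:
    # Return the message object from each choice in the completion.
    return [choice.message for choice in response.choices]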
parse_custom_params
def parse_custom_params(params: Dict[str, Any])
Parses custom parameters used for logic within this client class.
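A rough sketch of what such parsing might involve; treating price and supports_system_prompts (seen in the config example above and in the oai_messages_to_bedrock_messages signature below) as client-side parameters is an assumption:

def parse_custom_params_sketch(params: dict) -> dict:
    # Keep client-side settings that should not be sent to the Bedrock API.
    return {
        "price": params.get("price"),  # optional [input, output] price per 1K tokens
        "supports_system_prompts": params.get("supports_system_prompts", True),
    }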
parse_params
def parse_params(
params: Dict[str, Any]) -> tuple[Dict[str, Any], Dict[str, Any]]
Loads the valid parameters required to invoke Bedrock Converse. Returns a tuple of (base_params, additional_params).
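For illustration, a minimal split along these lines (the key names temperature, topP, maxTokens, and stopSequences come from the Bedrock Converse inferenceConfig; exactly which keys the real client treats as base versus additional is an assumption):

def split_params_sketch(params: dict) -> tuple:
    # Base inference parameters understood by Converse directly.
    base_keys = {"temperature", "topP", "maxTokens", "stopSequences"}
    base_params = {k: v for k, v in params.items() if k in base_keys}
    # Anything else model-specific is passed via additionalModelRequestFields.
    additional_params = {
        k: v for k, v in params.items() if k not in base_keys and k not in {"model", "api_type"}
    }
    return base_params, additional_params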
create
def create(params)
Run Amazon Bedrock inference and return an AutoGen response.
cost
def cost(response: ChatCompletion) -> float
Calculate the cost of the response.
get_usage
@staticmethod
def get_usage(response) -> Dict
Get token usage and cost information.
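A sketch of the kind of dictionary returned, assuming AutoGen's usual usage keys and an OpenAI-style ChatCompletion with a cost attribute attached:

def get_usage_sketch(response) -> dict:
    return {
        "prompt_tokens": response.usage.prompt_tokens,
        "completion_tokens": response.usage.completion_tokens,
        "total_tokens": response.usage.total_tokens,
        "cost": getattr(response, "cost", 0.0),
        "model": response.model,
    }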
extract_system_messages
def extract_system_messages(messages: List[dict]) -> List
Extract the system messages from the list of messages.
Arguments:
messages (list[dict]) - List of messages.
Returns:
List[SystemMessage] - List of system messages.
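As a sketch, assuming the Converse API's system prompt format of a list of {"text": ...} blocks:

def extract_system_messages_sketch(messages: list) -> list:
    # Collect system-role messages and wrap them as Converse system blocks.
    return [{"text": m["content"]} for m in messages if m.get("role") == "system"]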
oai_messages_to_bedrock_messages
def oai_messages_to_bedrock_messages(
messages: List[Dict[str, Any]], has_tools: bool,
supports_system_prompts: bool) -> List[Dict]
Convert messages from OAI format to Bedrock format. We correct for any specific role orderings and types. AWS Bedrock requires messages to alternate between user and assistant roles, so this function ensures the messages are in the correct order and format for Bedrock by inserting "Please continue" messages as needed. This is the same approach as in the AutoGen Anthropic client.
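A minimal sketch of the alternation rule described above (the real conversion also handles tool calls and content formatting, which this omits):

def enforce_alternation_sketch(messages: list) -> list:
    # Insert filler turns so roles strictly alternate between user and assistant.
    fixed = []
    for msg in messages:
        if fixed and fixed[-1]["role"] == msg["role"]:
            filler_role = "assistant" if msg["role"] == "user" else "user"
            fixed.append({"role": filler_role, "content": [{"text": "Please continue."}]})
        fixed.append(msg)
    return fixed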
parse_image
def parse_image(image_url: str) -> Tuple[bytes, str]
Try to get the raw data from an image URL.
Ref: https://docs.aws.amazon.com/bedrock/latest/APIReference/API_runtime_ImageSource.html
Returns a tuple of (Image Data, Content Type).
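A sketch of one way to do this, handling either a base64 data URI or an HTTP(S) URL; using the requests library here is an assumption, not necessarily what the client does:

import base64
import re

import requests

def parse_image_sketch(image_url: str) -> tuple:
    # Inline base64 data URI, e.g. data:image/png;base64,....
    match = re.match(r"data:(image/[a-zA-Z]+);base64,(.+)", image_url)
    if match:
        return base64.b64decode(match.group(2)), match.group(1)
    # Otherwise fetch the raw bytes over HTTP.
    resp = requests.get(image_url, timeout=10)
    resp.raise_for_status()
    return resp.content, resp.headers.get("Content-Type", "image/png")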
format_tool_calls
def format_tool_calls(content)
Converts Converse API response tool calls to AutoGen format.
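A sketch of the conversion, assuming Converse content blocks of the form {"toolUse": {"toolUseId": ..., "name": ..., "input": {...}}} and plain-dict OpenAI-style tool calls on the output side:

import json

def format_tool_calls_sketch(content: list) -> list:
    # Map Converse toolUse blocks to OpenAI-style tool_calls entries.
    tool_calls = []
    for block in content:
        if "toolUse" in block:
            tool_use = block["toolUse"]
            tool_calls.append({
                "id": tool_use["toolUseId"],
                "type": "function",
                "function": {
                    "name": tool_use["name"],
                    "arguments": json.dumps(tool_use.get("input", {})),
                },
            })
    return tool_calls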
convert_stop_reason_to_finish_reason
def convert_stop_reason_to_finish_reason(
stop_reason: str
) -> Literal["stop", "length", "tool_calls", "content_filter"]
Converts Bedrock stop reasons to OpenAI-style finish reasons:
- stop: if the model hit a natural stop point or a provided stop sequence,
- length: if the maximum number of tokens specified in the request was reached,
- content_filter: if content was omitted due to a flag from our content filters,
- tool_calls: if the model called a tool
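A sketch of such a mapping; the Bedrock stop-reason strings shown (end_turn, stop_sequence, max_tokens, tool_use, content_filtered, guardrail_intervened) are assumptions based on the Converse API documentation:

def convert_stop_reason_sketch(stop_reason: str) -> str:
    mapping = {
        "end_turn": "stop",
        "stop_sequence": "stop",
        "max_tokens": "length",
        "tool_use": "tool_calls",
        "content_filtered": "content_filter",
        "guardrail_intervened": "content_filter",
    }
    return mapping.get(stop_reason, "stop")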
calculate_cost
def calculate_cost(input_tokens: int, output_tokens: int,
model_id: str) -> float
Calculate the cost of the completion using the Bedrock pricing.
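For illustration, a sketch assuming a per-1,000-token price table keyed by model id; the prices shown are placeholders, not actual Bedrock pricing:

# Placeholder per-1K-token prices: {model_id: (input_price, output_price)}.
PRICES_PER_1K = {
    "meta.llama3-1-8b-instruct-v1:0": (0.0003, 0.0006),
}

def calculate_cost_sketch(input_tokens: int, output_tokens: int, model_id: str) -> float:
    input_price, output_price = PRICES_PER_1K.get(model_id, (0.0, 0.0))
    return (input_tokens / 1000) * input_price + (output_tokens / 1000) * output_price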