Skip to content

Commit

Permalink
add unit tests
Browse files Browse the repository at this point in the history
  • Loading branch information
SmartManoj committed Feb 14, 2025
1 parent 59b946a commit 597ec09
Show file tree
Hide file tree
Showing 2 changed files with 72 additions and 0 deletions.
35 changes: 35 additions & 0 deletions libs/aws/tests/unit_tests/chat_models/test_bedrock.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@

"""Test chat model integration."""

import os
from contextlib import nullcontext
from typing import Any, Callable, Dict, Literal, Type, cast

Expand Down Expand Up @@ -472,3 +473,37 @@ def test__get_provider(model_id, provider, expected_provider, expectation) -> No
llm = ChatBedrock(model_id=model_id, provider=provider, region_name="us-west-2")
with expectation:
assert llm._get_provider() == expected_provider


def test_chat_bedrock_different_regions() -> None:
    """ChatBedrock should honor an explicitly supplied ``region_name``."""
    for aws_region in ("us-east-1", "us-west-2", "ap-south-2"):
        chat = ChatBedrock(
            model_id="anthropic.claude-3-sonnet-20240229-v1:0",
            region_name=aws_region,
        )
        assert chat.region_name == aws_region


def test_chat_bedrock_environment_variable() -> None:
    """ChatBedrock should pick up the region from the ``AWS_REGION`` env var.

    The environment variable is restored afterwards so this test does not
    leak a mutated ``AWS_REGION`` into other tests in the session.
    """
    original = os.environ.get("AWS_REGION")
    try:
        for region in ["us-east-1", "us-west-2", "ap-south-2"]:
            os.environ["AWS_REGION"] = region
            llm = ChatBedrock(model_id="anthropic.claude-3-sonnet-20240229-v1:0")
            assert llm.region_name == region
    finally:
        # Restore the pre-test state: remove the key if it was unset before.
        if original is None:
            os.environ.pop("AWS_REGION", None)
        else:
            os.environ["AWS_REGION"] = original


def test_chat_bedrock_scenarios() -> None:
    """ChatBedrock should apply optional ``temperature``/``max_tokens`` kwargs."""
    model = "anthropic.claude-3-sonnet-20240229-v1:0"
    cases = (
        {"model_id": model, "temperature": 0.5},
        {"model_id": model, "max_tokens": 50},
        {"model_id": model, "temperature": 0.5, "max_tokens": 50},
    )
    for case in cases:
        chat = ChatBedrock(region_name="us-west-2", **case)
        assert chat.model_id == case["model_id"]
        expected_temperature = case.get("temperature")
        if expected_temperature is not None:
            assert chat.temperature == expected_temperature
        expected_max_tokens = case.get("max_tokens")
        if expected_max_tokens is not None:
            assert chat.max_tokens == expected_max_tokens
37 changes: 37 additions & 0 deletions libs/aws/tests/unit_tests/chat_models/test_bedrock_converse.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
"""Test chat model integration."""

import base64
import os
from typing import Dict, List, Tuple, Type, Union, cast

import pytest
Expand Down Expand Up @@ -502,3 +503,39 @@ def test__extract_response_metadata() -> None:
}
response_metadata = _extract_response_metadata(response)
assert response_metadata["metrics"]["latencyMs"] == [191]


def test_chat_bedrock_converse_different_regions() -> None:
    """ChatBedrockConverse should honor an explicitly supplied ``region_name``."""
    for aws_region in ("us-east-1", "us-west-2", "ap-south-2"):
        chat = ChatBedrockConverse(
            model="anthropic.claude-3-sonnet-20240229-v1:0",
            region_name=aws_region,
        )
        assert chat.region_name == aws_region


def test_chat_bedrock_converse_environment_variable() -> None:
    """ChatBedrockConverse should pick up the region from ``AWS_REGION``.

    The environment variable is restored afterwards so this test does not
    leak a mutated ``AWS_REGION`` into other tests in the session.
    """
    original = os.environ.get("AWS_REGION")
    try:
        for region in ["us-east-1", "us-west-2", "ap-south-2"]:
            os.environ["AWS_REGION"] = region
            llm = ChatBedrockConverse(model="anthropic.claude-3-sonnet-20240229-v1:0")
            assert llm.region_name == region
    finally:
        # Restore the pre-test state: remove the key if it was unset before.
        if original is None:
            os.environ.pop("AWS_REGION", None)
        else:
            os.environ["AWS_REGION"] = original


def test_chat_bedrock_converse_scenarios() -> None:
    """ChatBedrockConverse should apply optional ``temperature``/``max_tokens``."""
    model = "anthropic.claude-3-sonnet-20240229-v1:0"
    cases = (
        {"model": model, "temperature": 0.5},
        {"model": model, "max_tokens": 50},
        {"model": model, "temperature": 0.5, "max_tokens": 50},
    )
    for case in cases:
        chat = ChatBedrockConverse(region_name="us-west-2", **case)
        assert chat.model_id == case["model"]
        expected_temperature = case.get("temperature")
        if expected_temperature is not None:
            assert chat.temperature == expected_temperature
        expected_max_tokens = case.get("max_tokens")
        if expected_max_tokens is not None:
            assert chat.max_tokens == expected_max_tokens

0 comments on commit 597ec09

Please sign in to comment.