Adds Schema tests + 100% coverage
bruvduroiu committed Nov 27, 2023
1 parent d1694f6 commit 5823473
Showing 4 changed files with 123 additions and 3 deletions.
1 change: 1 addition & 0 deletions semantic_router/schema.py
@@ -19,6 +19,7 @@ class Decision(BaseModel):
class EncoderType(Enum):
OPENAI = "openai"
COHERE = "cohere"
HUGGINGFACE = "huggingface"


@dataclass
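
The schema change itself is just the new enum member. As a quick illustration (not part of the diff), the string value used throughout the tests resolves to it directly:

from semantic_router.schema import EncoderType

# "huggingface" now resolves to the new member; the schema tests further
# down expect Encoder to raise NotImplementedError for it for now.
assert EncoderType("huggingface") is EncoderType.HUGGINGFACE
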
68 changes: 65 additions & 3 deletions tests/encoders/test_openai.py
@@ -1,8 +1,9 @@
import os

import pytest
import openai
from openai.error import RateLimitError
from semantic_router.encoders import OpenAIEncoder


@pytest.fixture
@@ -29,8 +30,8 @@ def test_call_method_success(self, openai_encoder, mocker):
assert isinstance(result, list), "Result should be a list"
assert len(result) == 1 and len(result[0]) == 3, "Result list size is incorrect"

def test_call_method_rate_limit_error__raises_value_error_after_max_retries(self, openai_encoder, mocker):
mocker.patch("semantic_router.encoders.openai.sleep")
mocker.patch(
"openai.Embedding.create", side_effect=RateLimitError(message="rate limit exceeded", http_status=429)
)
@@ -43,3 +44,64 @@ def test_call_method_failure(self, openai_encoder, mocker):

with pytest.raises(ValueError):
openai_encoder(["test"])

def test_call_method_rate_limit_error__exponential_backoff_single_retry(self, openai_encoder, mocker):
mock_sleep = mocker.patch("semantic_router.encoders.openai.sleep")
mocker.patch(
"openai.Embedding.create",
side_effect=[
RateLimitError("rate limit exceeded"),
{"data": [{"embedding": [1, 2, 3]}]},
],
)

openai_encoder(["sample text"])

mock_sleep.assert_called_once_with(1) # 2**0

def test_call_method_rate_limit_error__exponential_backoff_multiple_retries(self, openai_encoder, mocker):
mock_sleep = mocker.patch("semantic_router.encoders.openai.sleep")
mocker.patch(
"openai.Embedding.create",
side_effect=[
RateLimitError("rate limit exceeded"),
RateLimitError("rate limit exceeded"),
{"data": [{"embedding": [1, 2, 3]}]},
],
)

openai_encoder(["sample text"])

assert mock_sleep.call_count == 2
mock_sleep.assert_any_call(1) # 2**0
mock_sleep.assert_any_call(2) # 2**1

def test_call_method_rate_limit_error__exponential_backoff_max_retries_exceeded(self, openai_encoder, mocker):
mock_sleep = mocker.patch("semantic_router.encoders.openai.sleep")
mocker.patch("openai.Embedding.create", side_effect=RateLimitError("rate limit exceeded"))

with pytest.raises(ValueError):
openai_encoder(["sample text"])

assert mock_sleep.call_count == 5 # Assuming 5 retries
mock_sleep.assert_any_call(1) # 2**0
mock_sleep.assert_any_call(2) # 2**1
mock_sleep.assert_any_call(4) # 2**2
mock_sleep.assert_any_call(8) # 2**3
mock_sleep.assert_any_call(16) # 2**4

def test_call_method_rate_limit_error__exponential_backoff_successful(self, openai_encoder, mocker):
mock_sleep = mocker.patch("semantic_router.encoders.openai.sleep")
mocker.patch(
"openai.Embedding.create",
side_effect=[
RateLimitError("rate limit exceeded"),
RateLimitError("rate limit exceeded"),
{"data": [{"embedding": [1, 2, 3]}]},
],
)

embeddings = openai_encoder(["sample text"])

assert mock_sleep.call_count == 2
assert embeddings == [[1, 2, 3]]
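
These tests pin down retry-with-exponential-backoff behaviour in OpenAIEncoder.__call__: sleep 2**attempt seconds after each RateLimitError and give up with a ValueError once the retries are exhausted. A minimal sketch consistent with the assertions above — the helper name, the engine argument and the five-retry default are assumptions, since the encoder implementation itself is not part of this diff (only the mocked "semantic_router.encoders.openai.sleep" and "openai.Embedding.create" call paths are):

from time import sleep

import openai
from openai.error import RateLimitError


def embed_with_backoff(texts: list[str], engine: str, retries: int = 5) -> list[list[float]]:
    # Hypothetical sketch of the loop implied by the mocked sleep/Embedding.create calls.
    for attempt in range(retries):
        try:
            response = openai.Embedding.create(input=texts, engine=engine)
            return [record["embedding"] for record in response["data"]]
        except RateLimitError:
            sleep(2**attempt)  # 1, 2, 4, 8, 16 seconds, matching the sleep mocks
    raise ValueError("No embeddings returned; maximum retries exceeded.")
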
9 changes: 9 additions & 0 deletions tests/test_layer.py
@@ -17,6 +17,11 @@ def mock_encoder_call(utterances):
return [mock_responses.get(u, [0, 0, 0]) for u in utterances]


@pytest.fixture
def base_encoder():
return BaseEncoder(name="test-encoder")


@pytest.fixture
def cohere_encoder(mocker):
mocker.patch.object(CohereEncoder, "__call__", side_effect=mock_encoder_call)
@@ -102,5 +107,9 @@ def test_pass_threshold(self, openai_encoder):
assert not decision_layer._pass_threshold([], 0.5)
assert decision_layer._pass_threshold([0.6, 0.7], 0.5)

def test_failover_similarity_threshold(self, base_encoder):
decision_layer = DecisionLayer(encoder=base_encoder)
assert decision_layer.similarity_threshold == 0.82


# Add more tests for edge cases and error handling as needed.
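
test_failover_similarity_threshold relies on DecisionLayer falling back to a 0.82 similarity threshold when it is handed a bare BaseEncoder. A sketch of that fallback, purely to illustrate what the assertion checks — the constructor signature and import paths here are assumptions, and the real DecisionLayer may set encoder-specific thresholds before reaching this default:

from semantic_router.encoders import BaseEncoder
from semantic_router.schema import Decision


class DecisionLayer:
    def __init__(self, encoder: BaseEncoder, decisions: list[Decision] | None = None):
        self.encoder = encoder
        self.decisions = decisions or []
        # Encoder-specific thresholds would be chosen here; anything
        # unrecognised (such as a plain BaseEncoder) falls back to 0.82.
        self.similarity_threshold = 0.82
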
48 changes: 48 additions & 0 deletions tests/test_schema.py
@@ -0,0 +1,48 @@
import pytest
from semantic_router.schema import Decision, Encoder, EncoderType, OpenAIEncoder, CohereEncoder, SemanticSpace


class TestEncoderDataclass:
def test_encoder_initialization_openai(self, mocker):
mocker.patch.dict("os.environ", {"OPENAI_API_KEY": "test"})
encoder = Encoder(type="openai", name="test-engine")
assert encoder.type == EncoderType.OPENAI
assert isinstance(encoder.model, OpenAIEncoder)

def test_encoder_initialization_cohere(self, mocker):
mocker.patch.dict("os.environ", {"COHERE_API_KEY": "test"})
encoder = Encoder(type="cohere", name="test-engine")
assert encoder.type == EncoderType.COHERE
assert isinstance(encoder.model, CohereEncoder)

def test_encoder_initialization_unsupported_type(self):
with pytest.raises(ValueError):
Encoder(type="unsupported", name="test-engine")

def test_encoder_initialization_huggingface(self):
with pytest.raises(NotImplementedError):
Encoder(type="huggingface", name="test-engine")

def test_encoder_call_method(self, mocker):
mocker.patch.dict("os.environ", {"OPENAI_API_KEY": "test"})
mocker.patch("semantic_router.encoders.openai.OpenAIEncoder.__call__", return_value=[0.1, 0.2, 0.3])
encoder = Encoder(type="openai", name="test-engine")
result = encoder(["test"])
assert result == [0.1, 0.2, 0.3]


class TestSemanticSpaceDataclass:
def test_semanticspace_initialization(self):
semantic_space = SemanticSpace()
assert semantic_space.id == ""
assert semantic_space.decisions == []

def test_semanticspace_add_decision(self):
decision = Decision(name="test", utterances=["hello", "hi"], description="greeting")
semantic_space = SemanticSpace()
semantic_space.add(decision)

assert len(semantic_space.decisions) == 1
assert semantic_space.decisions[0].name == "test"
assert semantic_space.decisions[0].utterances == ["hello", "hi"]
assert semantic_space.decisions[0].description == "greeting"
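
For reference, a sketch of the Encoder and SemanticSpace objects these tests exercise. The real definitions live in semantic_router/schema.py and are not shown in this diff, so the constructor details here (API-key handling, how the underlying model is built) are assumptions consistent with the tests rather than the actual implementation:

from dataclasses import dataclass, field

from semantic_router.encoders import CohereEncoder, OpenAIEncoder
from semantic_router.schema import Decision, EncoderType


@dataclass
class Encoder:
    type: EncoderType
    name: str

    def __init__(self, type: str, name: str):
        self.type = EncoderType(type)  # raises ValueError for unsupported strings
        self.name = name
        if self.type == EncoderType.HUGGINGFACE:
            raise NotImplementedError  # not wired up yet
        elif self.type == EncoderType.OPENAI:
            self.model = OpenAIEncoder(name=name)  # reads OPENAI_API_KEY from the env
        elif self.type == EncoderType.COHERE:
            self.model = CohereEncoder(name=name)  # reads COHERE_API_KEY from the env

    def __call__(self, texts: list[str]) -> list[list[float]]:
        return self.model(texts)


@dataclass
class SemanticSpace:
    id: str = ""
    decisions: list[Decision] = field(default_factory=list)

    def add(self, decision: Decision):
        self.decisions.append(decision)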
