Styling and library updates
bruvduroiu committed Nov 27, 2023
1 parent 5823473 commit 171ad8c
Showing 9 changed files with 57 additions and 60 deletions.
3 changes: 3 additions & 0 deletions .gitignore
@@ -12,3 +12,6 @@ venv/
 .env*.local
 .env
 mac.env
+
+# Code coverage history
+.coverage
1 change: 1 addition & 0 deletions .python-version
@@ -0,0 +1 @@
+3.11
44 changes: 1 addition & 43 deletions poetry.lock

Some generated files are not rendered by default.

1 change: 0 additions & 1 deletion pyproject.toml
@@ -16,7 +16,6 @@ pydantic = "^1.8.2"
 openai = "^0.28.1"
 cohere = "^4.32"
 numpy = "^1.26.2"
-scipy = "^1.11.4"
 
 
 [tool.poetry.group.dev.dependencies]
1 change: 1 addition & 0 deletions tests/encoders/test_base.py
@@ -1,4 +1,5 @@
 import pytest
+
 from semantic_router.encoders import BaseEncoder
 
 
11 changes: 8 additions & 3 deletions tests/encoders/test_cohere.py
@@ -1,5 +1,6 @@
-import pytest
 import cohere
+import pytest
+
 from semantic_router.encoders import CohereEncoder
 
 
@@ -12,7 +13,9 @@ def cohere_encoder(mocker):
 class TestCohereEncoder:
     def test_initialization_with_api_key(self, cohere_encoder):
         assert cohere_encoder.client is not None, "Client should be initialized"
-        assert cohere_encoder.name == "embed-english-v3.0", "Default name not set correctly"
+        assert (
+            cohere_encoder.name == "embed-english-v3.0"
+        ), "Default name not set correctly"
 
     def test_initialization_without_api_key(self, mocker, monkeypatch):
         monkeypatch.delenv("COHERE_API_KEY", raising=False)
@@ -27,7 +30,9 @@ def test_call_method(self, cohere_encoder, mocker):
 
         result = cohere_encoder(["test"])
         assert isinstance(result, list), "Result should be a list"
-        assert all(isinstance(sublist, list) for sublist in result), "Each item in result should be a list"
+        assert all(
+            isinstance(sublist, list) for sublist in result
+        ), "Each item in result should be a list"
         cohere_encoder.client.embed.assert_called_once()
 
     def test_call_with_uninitialized_client(self, mocker):
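
For context on what these Cohere tests exercise: below is a minimal sketch of the encoder behaviour they assume, inferred only from the mocked client.embed call and the default model name asserted above, not taken from the library source. The CohereEncoderSketch name, the error message, and the embed keyword arguments are illustrative assumptions.

import os

import cohere


class CohereEncoderSketch:
    # Illustrative stand-in for semantic_router's CohereEncoder, based only on
    # what the tests above assert; the real implementation may differ.
    name = "embed-english-v3.0"  # default model name checked by the tests

    def __init__(self, cohere_api_key: str | None = None):
        key = cohere_api_key or os.getenv("COHERE_API_KEY")
        if key is None:
            raise ValueError("Cohere API key cannot be 'None'.")
        self.client = cohere.Client(key)

    def __call__(self, texts: list[str]) -> list[list[float]]:
        # The tests mock client.embed and expect a list of embedding lists back.
        embeds = self.client.embed(texts, input_type="search_query", model=self.name)
        return embeds.embeddings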
35 changes: 26 additions & 9 deletions tests/encoders/test_openai.py
@@ -1,8 +1,9 @@
 import os
+
-import pytest
 import openai
+import pytest
 from openai.error import RateLimitError
 
 from semantic_router.encoders import OpenAIEncoder
 
 
@@ -24,16 +25,22 @@ def test_initialization_without_api_key(self, mocker, monkeypatch):
             OpenAIEncoder(name="test-engine")
 
     def test_call_method_success(self, openai_encoder, mocker):
-        mocker.patch("openai.Embedding.create", return_value={"data": [{"embedding": [0.1, 0.2, 0.3]}]})
+        mocker.patch(
+            "openai.Embedding.create",
+            return_value={"data": [{"embedding": [0.1, 0.2, 0.3]}]},
+        )
 
         result = openai_encoder(["test"])
         assert isinstance(result, list), "Result should be a list"
         assert len(result) == 1 and len(result[0]) == 3, "Result list size is incorrect"
 
-    def test_call_method_rate_limit_error__raises_value_error_after_max_retries(self, openai_encoder, mocker):
+    def test_call_method_rate_limit_error__raises_value_error_after_max_retries(
+        self, openai_encoder, mocker
+    ):
         mocker.patch("semantic_router.encoders.openai.sleep")
         mocker.patch(
-            "openai.Embedding.create", side_effect=RateLimitError(message="rate limit exceeded", http_status=429)
+            "openai.Embedding.create",
+            side_effect=RateLimitError(message="rate limit exceeded", http_status=429),
         )
 
         with pytest.raises(ValueError):
@@ -45,7 +52,9 @@ def test_call_method_failure(self, openai_encoder, mocker):
         with pytest.raises(ValueError):
             openai_encoder(["test"])
 
-    def test_call_method_rate_limit_error__exponential_backoff_single_retry(self, openai_encoder, mocker):
+    def test_call_method_rate_limit_error__exponential_backoff_single_retry(
+        self, openai_encoder, mocker
+    ):
         mock_sleep = mocker.patch("semantic_router.encoders.openai.sleep")
         mocker.patch(
             "openai.Embedding.create",
@@ -59,7 +68,9 @@ def test_call_method_rate_limit_error__exponential_backoff_single_retry(self, openai_encoder, mocker):
 
         mock_sleep.assert_called_once_with(1) # 2**0
 
-    def test_call_method_rate_limit_error__exponential_backoff_multiple_retries(self, openai_encoder, mocker):
+    def test_call_method_rate_limit_error__exponential_backoff_multiple_retries(
+        self, openai_encoder, mocker
+    ):
         mock_sleep = mocker.patch("semantic_router.encoders.openai.sleep")
         mocker.patch(
             "openai.Embedding.create",
@@ -76,9 +87,13 @@ def test_call_method_rate_limit_error__exponential_backoff_multiple_retries(self, openai_encoder, mocker):
         mock_sleep.assert_any_call(1) # 2**0
         mock_sleep.assert_any_call(2) # 2**1
 
-    def test_call_method_rate_limit_error__exponential_backoff_max_retries_exceeded(self, openai_encoder, mocker):
+    def test_call_method_rate_limit_error__exponential_backoff_max_retries_exceeded(
+        self, openai_encoder, mocker
+    ):
         mock_sleep = mocker.patch("semantic_router.encoders.openai.sleep")
-        mocker.patch("openai.Embedding.create", side_effect=RateLimitError("rate limit exceeded"))
+        mocker.patch(
+            "openai.Embedding.create", side_effect=RateLimitError("rate limit exceeded")
+        )
 
         with pytest.raises(ValueError):
             openai_encoder(["sample text"])
@@ -90,7 +105,9 @@ def test_call_method_rate_limit_error__exponential_backoff_max_retries_exceeded(self, openai_encoder, mocker):
         mock_sleep.assert_any_call(8) # 2**3
         mock_sleep.assert_any_call(16) # 2**4
 
-    def test_call_method_rate_limit_error__exponential_backoff_successful(self, openai_encoder, mocker):
+    def test_call_method_rate_limit_error__exponential_backoff_successful(
+        self, openai_encoder, mocker
+    ):
         mock_sleep = mocker.patch("semantic_router.encoders.openai.sleep")
         mocker.patch(
             "openai.Embedding.create",
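
The rate-limit tests above mock openai.Embedding.create and semantic_router.encoders.openai.sleep, then assert back-off delays of 1, 2, 4, 8 and 16 seconds before a ValueError is raised. Below is a minimal sketch of the retry loop those assertions imply, written as an assumption rather than a quote of the actual encoder source; the function name and the max_retries default are illustrative.

from time import sleep

import openai
from openai.error import RateLimitError


def embed_with_backoff(
    texts: list[str], engine: str, max_retries: int = 5
) -> list[list[float]]:
    """Retry openai.Embedding.create on RateLimitError with exponential back-off."""
    for attempt in range(max_retries):
        try:
            res = openai.Embedding.create(input=texts, engine=engine)
            return [record["embedding"] for record in res["data"]]
        except RateLimitError:
            sleep(2**attempt)  # 1, 2, 4, 8, 16 seconds, matching the assertions above
    raise ValueError("No embeddings returned. Maximum retries exceeded.")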
2 changes: 1 addition & 1 deletion tests/test_layer.py
@@ -1,8 +1,8 @@
 import pytest
 
 from semantic_router.encoders import BaseEncoder, CohereEncoder, OpenAIEncoder
-from semantic_router.schema import Decision
 from semantic_router.layer import DecisionLayer  # Replace with the actual module name
+from semantic_router.schema import Decision
 
 
 def mock_encoder_call(utterances):
19 changes: 16 additions & 3 deletions tests/test_schema.py
@@ -1,5 +1,13 @@
 import pytest
-from semantic_router.schema import Decision, Encoder, EncoderType, OpenAIEncoder, CohereEncoder, SemanticSpace
+
+from semantic_router.schema import (
+    CohereEncoder,
+    Decision,
+    Encoder,
+    EncoderType,
+    OpenAIEncoder,
+    SemanticSpace,
+)
 
 
 class TestEncoderDataclass:
@@ -25,7 +33,10 @@ def test_encoder_initialization_huggingface(self):
 
     def test_encoder_call_method(self, mocker):
         mocker.patch.dict("os.environ", {"OPENAI_API_KEY": "test"})
-        mocker.patch("semantic_router.encoders.openai.OpenAIEncoder.__call__", return_value=[0.1, 0.2, 0.3])
+        mocker.patch(
+            "semantic_router.encoders.openai.OpenAIEncoder.__call__",
+            return_value=[0.1, 0.2, 0.3],
+        )
         encoder = Encoder(type="openai", name="test-engine")
         result = encoder(["test"])
         assert result == [0.1, 0.2, 0.3]
@@ -38,7 +49,9 @@ def test_semanticspace_initialization(self):
         assert semantic_space.decisions == []
 
     def test_semanticspace_add_decision(self):
-        decision = Decision(name="test", utterances=["hello", "hi"], description="greeting")
+        decision = Decision(
+            name="test", utterances=["hello", "hi"], description="greeting"
+        )
         semantic_space = SemanticSpace()
         semantic_space.add(decision)
 
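
As background for the schema tests: the following is a rough sketch of the Encoder wrapper they exercise, inferred from the calls Encoder(type="openai", name="test-engine"), the EncoderType import, and the patched OpenAIEncoder.__call__. The dispatch shown here is an assumption for illustration, not the actual semantic_router.schema source.

from enum import Enum

from semantic_router.encoders import CohereEncoder, OpenAIEncoder


class EncoderType(Enum):
    OPENAI = "openai"
    COHERE = "cohere"
    HUGGINGFACE = "huggingface"


class Encoder:
    def __init__(self, type: str, name: str):
        self.type = EncoderType(type)  # raises ValueError for unknown encoder types
        self.name = name
        if self.type == EncoderType.OPENAI:
            self.model = OpenAIEncoder(name=name)
        elif self.type == EncoderType.COHERE:
            self.model = CohereEncoder(name=name)
        else:
            raise NotImplementedError(f"No encoder wired up for type {self.type}")

    def __call__(self, texts: list[str]) -> list:
        # test_encoder_call_method patches OpenAIEncoder.__call__ to return
        # [0.1, 0.2, 0.3] and expects that value straight back from Encoder.
        return self.model(texts)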
