✅ add unit tests
ajndkr committed Nov 24, 2023
1 parent 60deda3 commit 8791721
Showing 2 changed files with 163 additions and 0 deletions.
tests/adapters/openai/test_openai_responses.py (104 additions, 0 deletions)
@@ -0,0 +1,104 @@
from typing import Type
from unittest.mock import AsyncMock, MagicMock, call

import pytest
from starlette.types import Send

from lanarky.adapters.openai.resources import ChatCompletionResource
from lanarky.adapters.openai.responses import (
    HTTPStatusDetail,
    StreamingResponse,
    status,
)
from lanarky.events import Events, ServerSentEvent, ensure_bytes


@pytest.fixture
def resource() -> Type[ChatCompletionResource]:
    return MagicMock(spec=ChatCompletionResource)


@pytest.mark.asyncio
async def test_stream_response_successful(
    send: Send, resource: Type[ChatCompletionResource]
):
    # simulate a resource whose stream yields no tokens
    resource.stream_response.__aiter__ = AsyncMock(return_value="")

    response = StreamingResponse(
        resource=resource,
        messages=[],
    )

    await response.stream_response(send)

    resource.stream_response.assert_called_once()

    # the response should emit the ASGI start message followed by an empty,
    # final body message
    expected_calls = [
        call(
            {
                "type": "http.response.start",
                "status": response.status_code,
                "headers": response.raw_headers,
            }
        ),
        call(
            {
                "type": "http.response.body",
                "body": b"",
                "more_body": False,
            }
        ),
    ]

    send.assert_has_calls(expected_calls, any_order=False)


@pytest.mark.asyncio
async def test_stream_response_error(
    send: Send, resource: Type[ChatCompletionResource]
):
    # make the resource raise while streaming
    resource.stream_response = AsyncMock(side_effect=Exception("Some error occurred"))

    response = StreamingResponse(
        resource=resource,
        messages=[],
    )

    await response.stream_response(send)

    resource.stream_response.assert_called_once()

    # on failure, an error server-sent event is emitted before the stream closes
    expected_calls = [
        call(
            {
                "type": "http.response.start",
                "status": response.status_code,
                "headers": response.raw_headers,
            }
        ),
        call(
            {
                "type": "http.response.body",
                "body": ensure_bytes(
                    ServerSentEvent(
                        data=dict(
                            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
                            detail=HTTPStatusDetail.INTERNAL_SERVER_ERROR,
                        ),
                        event=Events.ERROR,
                    ),
                    None,
                ),
                "more_body": True,
            }
        ),
        call(
            {
                "type": "http.response.body",
                "body": b"",
                "more_body": False,
            }
        ),
    ]

    send.assert_has_calls(expected_calls, any_order=False)
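
Both tests above rely on a "send" fixture that is not part of this diff; it presumably lives in the test suite's shared conftest.py. A minimal sketch of what such a fixture could look like, assuming it is simply an awaitable mock standing in for the ASGI send callable (the implementation below is an assumption, not taken from this commit):

from unittest.mock import AsyncMock

import pytest
from starlette.types import Send


@pytest.fixture
def send() -> Send:
    # hypothetical fixture: an awaitable mock standing in for the ASGI send
    # callable; each call records the ASGI message so the tests can assert
    # on the exact sequence with assert_has_calls
    return AsyncMock()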
tests/adapters/openai/test_openai_routing.py (59 additions, 0 deletions)
@@ -0,0 +1,59 @@
from unittest.mock import create_autospec

import pytest

from lanarky.adapters.openai.resources import ChatCompletionResource
from lanarky.adapters.openai.routing import OpenAIAPIRoute, OpenAIAPIRouter


def test_openai_api_router():
    router = OpenAIAPIRouter()

    assert isinstance(router, OpenAIAPIRouter)
    assert isinstance(router.routes, list)
    assert router.route_class == OpenAIAPIRoute

    def mock_endpoint():
        pass

    # endpoints that are not resource factories are rejected
    with pytest.raises(TypeError):
        router.add_api_websocket_route(
            "/test",
            endpoint=mock_endpoint,
            name="test_ws_route",
        )

    def mock_chain_factory():
        return create_autospec(ChatCompletionResource)

    router.add_api_websocket_route(
        "/test",
        endpoint=mock_chain_factory,
        name="test_ws_route",
    )

    assert len(router.routes) == 1
    assert router.routes[0].path == "/test"
    assert router.routes[0].name == "test_ws_route"


def test_openai_api_route():
    def mock_endpoint():
        pass

    # endpoints that are not resource factories are rejected
    with pytest.raises(TypeError):
        OpenAIAPIRoute(
            "/test",
            endpoint=mock_endpoint,
        )

    def mock_chain_factory():
        return create_autospec(ChatCompletionResource)

    route = OpenAIAPIRoute(
        "/test",
        endpoint=mock_chain_factory,
    )

    assert isinstance(route, OpenAIAPIRoute)
    assert route.path == "/test"
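
Assuming pytest and pytest-asyncio are installed (the asyncio markers in the first file require the latter), the new tests can be run from the repository root with a command along the lines of:

pytest tests/adapters/openai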
