import pytest
import unittest.mock as mock
from mistralai.async_client import MistralAsyncClient
from mistralai.models.chat_completion import ChatCompletionResponse, ChatMessage, ChatCompletionStreamResponse
from .utils import mock_response, mock_async_stream_response, mock_chat_response_payload, mock_chat_response_streaming_payload

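# Fixture: a MistralAsyncClient whose internal HTTP client is mocked out, so
# tests never hit the real API. `stream` is replaced with a plain Mock because
# it is called (not awaited) to obtain the streaming response.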
@pytest.fixture()
def client():
    client = MistralAsyncClient()
    client._client = mock.AsyncMock()
    client._client.stream = mock.Mock()
    return client


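# Covers both the awaitable chat call and its async streaming variant.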
class TestAsyncChat:
    @pytest.mark.asyncio
    async def test_chat(self, client):
        client._client.request.return_value = mock_response(
            200,
            mock_chat_response_payload(),
        )

        result = await client.chat(
            model="mistral-small",
            messages=[ChatMessage(role="user", content="What is the best French cheese?")],
        )

        client._client.request.assert_awaited_once_with(
            "post",
            "https://api.mistral.ai/v1/chat/completions",
            headers={
                "Accept": "application/json",
                "Authorization": "Bearer None",
                "Content-Type": "application/json",
            },
            json={'model': 'mistral-small', 'messages': [{'role': 'user', 'content': 'What is the best French cheese?'}], 'safe_prompt': False, 'stream': False},
        )

        assert isinstance(
            result, ChatCompletionResponse
        ), "Should return a ChatCompletionResponse"
        assert len(result.choices) == 1
        assert result.choices[0].index == 0
        assert result.object == "chat.completion"

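    # Streaming: chat_stream returns an async generator, so the chunks are
    # collected with an async comprehension before being asserted on.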
    @pytest.mark.asyncio
    async def test_chat_streaming(self, client):
        client._client.stream.return_value = mock_async_stream_response(
            200,
            mock_chat_response_streaming_payload(),
        )

        result = client.chat_stream(
            model="mistral-small",
            messages=[ChatMessage(role="user", content="What is the best French cheese?")],
        )

        results = [r async for r in result]

        client._client.stream.assert_called_once_with(
            "post",
            "https://api.mistral.ai/v1/chat/completions",
            headers={
                "Accept": "application/json",
                "Authorization": "Bearer None",
                "Content-Type": "application/json",
            },
            json={'model': 'mistral-small', 'messages': [{'role': 'user', 'content': 'What is the best French cheese?'}], 'safe_prompt': False, 'stream': True},
        )

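        # The first chunk only carries the assistant role; subsequent chunks
        # carry the incremental content pieces.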
        for i, result in enumerate(results):
            if i == 0:
                assert isinstance(
                    result, ChatCompletionStreamResponse
                ), "Should return a ChatCompletionStreamResponse"
                assert len(result.choices) == 1
                assert result.choices[0].index == 0
                assert result.choices[0].delta.role == "assistant"
            else:
                assert isinstance(
                    result, ChatCompletionStreamResponse
                ), "Should return a ChatCompletionStreamResponse"
                assert len(result.choices) == 1
                assert result.choices[0].index == i - 1
                assert result.choices[0].delta.content == f"stream response {i-1}"
                assert result.object == "chat.completion.chunk"