# test_chat_async.py (forked from mistralai/client-python)

import unittest.mock as mock

import pytest
from mistralai.async_client import MistralAsyncClient
from mistralai.models.chat_completion import (
    ChatCompletionResponse,
    ChatCompletionStreamResponse,
    ChatMessage,
)

from .utils import (
    mock_async_stream_response,
    mock_chat_response_payload,
    mock_chat_response_streaming_payload,
    mock_response,
)


@pytest.fixture()
def client():
    client = MistralAsyncClient()
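    # Replace the underlying HTTP client with mocks so the tests never hit the real API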
    client._client = mock.AsyncMock()
    client._client.stream = mock.Mock()
    return client


class TestAsyncChat:
    @pytest.mark.asyncio
    async def test_chat(self, client):
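        # Stub a canned 200 chat completion payload on the mocked transport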
        client._client.request.return_value = mock_response(
            200,
            mock_chat_response_payload(),
        )

        result = await client.chat(
            model="mistral-small",
            messages=[
                ChatMessage(role="user", content="What is the best French cheese?")
            ],
        )
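
        # Exactly one POST should be awaited against the chat completions
        # endpoint, with streaming and safe_prompt disabled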
        client._client.request.assert_awaited_once_with(
            "post",
            "https://api.mistral.ai/v1/chat/completions",
            headers={
                "User-Agent": f"mistral-client-python/{client._version}",
                "Accept": "application/json",
                "Authorization": "Bearer None",
                "Content-Type": "application/json",
            },
            json={
                "model": "mistral-small",
                "messages": [
                    {"role": "user", "content": "What is the best French cheese?"}
                ],
                "safe_prompt": False,
                "stream": False,
            },
        )

        assert isinstance(
            result, ChatCompletionResponse
        ), "Should return a ChatCompletionResponse"
        assert len(result.choices) == 1
        assert result.choices[0].index == 0
        assert result.object == "chat.completion"

    @pytest.mark.asyncio
    async def test_chat_streaming(self, client):
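        # Stub a canned streaming payload on the mocked stream call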
        client._client.stream.return_value = mock_async_stream_response(
            200,
            mock_chat_response_streaming_payload(),
        )

        result = client.chat_stream(
            model="mistral-small",
            messages=[
                ChatMessage(role="user", content="What is the best French cheese?")
            ],
        )
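
        # Drain the async generator to collect every streamed chunk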
        results = [r async for r in result]

        client._client.stream.assert_called_once_with(
            "post",
            "https://api.mistral.ai/v1/chat/completions",
            headers={
                "Accept": "text/event-stream",
                "User-Agent": f"mistral-client-python/{client._version}",
                "Authorization": "Bearer None",
                "Content-Type": "application/json",
            },
            json={
                "model": "mistral-small",
                "messages": [
                    {"role": "user", "content": "What is the best French cheese?"}
                ],
                "safe_prompt": False,
                "stream": True,
            },
        )
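
        # The first chunk carries the assistant role; subsequent chunks carry
        # incremental content deltas in order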
        for i, result in enumerate(results):
            if i == 0:
                assert isinstance(
                    result, ChatCompletionStreamResponse
                ), "Should return a ChatCompletionStreamResponse"
                assert len(result.choices) == 1
                assert result.choices[0].index == 0
                assert result.choices[0].delta.role == "assistant"
            else:
                assert isinstance(
                    result, ChatCompletionStreamResponse
                ), "Should return a ChatCompletionStreamResponse"
                assert len(result.choices) == 1
                assert result.choices[0].index == i - 1
                assert result.choices[0].delta.content == f"stream response {i-1}"
                assert result.object == "chat.completion.chunk"