Skip to content

Commit b021a56

Browse files
committed
cleaning some examples a bit
1 parent dfc3827 commit b021a56

9 files changed

Lines changed: 22 additions & 9 deletions

README.md

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -32,8 +32,8 @@ poetry run python chat_no_streaming.py
3232
### Using poetry shell
3333

3434
```bash
35-
cd examples
3635
poetry shell
36+
cd examples
3737

3838
>> python chat_no_streaming.py
3939
```

examples/async_chat_no_streaming.py

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -15,7 +15,10 @@ async def main():
1515
model=model,
1616
messages=[ChatMessage(role="user", content="What is the best French cheese?")],
1717
)
18-
print(chat_response)
18+
19+
print(chat_response.choices[0].message.content)
20+
21+
await client.close()
1922

2023

2124
if __name__ == "__main__":

examples/async_chat_with_streaming.py

Lines changed: 7 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -11,11 +11,17 @@ async def main():
1111

1212
client = MistralAsyncClient(api_key=api_key)
1313

14+
print("Chat response:")
1415
async for chunk in client.chat_stream(
1516
model=model,
1617
messages=[ChatMessage(role="user", content="What is the best French cheese?")],
1718
):
18-
print(chunk)
19+
if chunk.choices[0].delta.content is not None:
20+
print(chunk.choices[0].delta.content, end="")
21+
22+
print("\n")
23+
24+
await client.close()
1925

2026

2127
if __name__ == "__main__":

examples/chat_no_streaming.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -14,7 +14,7 @@ def main():
1414
model=model,
1515
messages=[ChatMessage(role="user", content="What is the best French cheese?")],
1616
)
17-
print(chat_response)
17+
print(chat_response.choices[0].message.content)
1818

1919

2020
if __name__ == "__main__":

examples/chat_with_streaming.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -14,7 +14,8 @@ def main():
1414
model=model,
1515
messages=[ChatMessage(role="user", content="What is the best French cheese?")],
1616
):
17-
print(chunk)
17+
if chunk.choices[0].delta.content is not None:
18+
print(chunk.choices[0].delta.content, end="")
1819

1920

2021
if __name__ == "__main__":

src/mistralai/async_client.py

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -174,6 +174,9 @@ def __init__(
174174
timeout=timeout,
175175
)
176176

177+
async def close(self) -> None:
178+
await self._backend.close()
179+
177180
async def _request(
178181
self,
179182
method: str,
@@ -266,7 +269,6 @@ async def chat_stream(
266269

267270
async with async_response as response:
268271
async for line in response.content:
269-
self._logger.debug(f"Received line: {line.decode()}")
270272
if line == b"\n":
271273
continue
272274

src/mistralai/client.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,8 @@
1+
import os
12
import posixpath
23
from json import JSONDecodeError
34
from typing import Any, Dict, Iterable, List, Optional, Union
4-
import os
5+
56
import orjson
67
import requests
78
from requests import Response

src/mistralai/client_base.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -35,7 +35,7 @@ def _make_chat_request(
3535
request_data: Dict[str, Any] = {
3636
"model": model,
3737
"messages": [msg.model_dump() for msg in messages],
38-
"safe_mode": safe_mode,
38+
"safe_prompt": safe_mode,
3939
}
4040
if temperature is not None:
4141
request_data["temperature"] = temperature

src/mistralai/constants.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -2,4 +2,4 @@
22

33
RETRY_STATUS_CODES = {429, 500, 502, 503, 504}
44

5-
ENDPOINT = "http://api.mistral.ai"
5+
ENDPOINT = "https://api.mistral.ai"

0 commit comments

Comments (0)