Skip to content
This repository was archived by the owner on Aug 5, 2025. It is now read-only.

Commit 9b2159e

Browse files
committed
fix: update mistralai instrumentation for 1.0.0
1 parent 89e966c commit 9b2159e

File tree

3 files changed

+43
-39
lines changed

3 files changed

+43
-39
lines changed

literalai/instrumentation/mistralai.py

Lines changed: 32 additions & 27 deletions
Original file line numberDiff line numberDiff line change
@@ -11,79 +11,84 @@
1111

1212
from literalai.context import active_steps_var, active_thread_var
1313
from literalai.helper import ensure_values_serializable
14-
from literalai.observability.generation import GenerationMessage, CompletionGeneration, ChatGeneration, GenerationType
14+
from literalai.observability.generation import (
15+
ChatGeneration,
16+
CompletionGeneration,
17+
GenerationMessage,
18+
GenerationType,
19+
)
1520
from literalai.wrappers import AfterContext, BeforeContext, wrap_all
1621

17-
REQUIREMENTS = ["mistralai>=0.2.0"]
22+
REQUIREMENTS = ["mistralai>=1.0.0"]
1823

1924
APIS_TO_WRAP = [
2025
{
21-
"module": "mistralai.client",
22-
"object": "MistralClient",
23-
"method": "chat",
26+
"module": "mistralai",
27+
"object": "Mistral",
28+
"method": "chat.complete",
2429
"metadata": {
2530
"type": GenerationType.CHAT,
2631
},
2732
"async": False,
2833
},
2934
{
30-
"module": "mistralai.client",
31-
"object": "MistralClient",
32-
"method": "chat_stream",
35+
"module": "mistralai",
36+
"object": "Mistral",
37+
"method": "chat.stream",
3338
"metadata": {
3439
"type": GenerationType.CHAT,
3540
},
3641
"async": False,
3742
},
3843
{
39-
"module": "mistralai.async_client",
40-
"object": "MistralAsyncClient",
41-
"method": "chat",
44+
"module": "mistralai",
45+
"object": "Mistral",
46+
"method": "chat.complete_async",
4247
"metadata": {
4348
"type": GenerationType.CHAT,
4449
},
4550
"async": True,
4651
},
4752
{
48-
"module": "mistralai.async_client",
49-
"object": "MistralAsyncClient",
50-
"method": "chat_stream",
53+
"module": "mistralai",
54+
"object": "Mistral",
55+
"method": "chat.stream_async",
5156
"metadata": {
5257
"type": GenerationType.CHAT,
5358
},
5459
"async": True,
5560
},
5661
{
57-
"module": "mistralai.client",
58-
"object": "MistralClient",
59-
"method": "completion",
62+
"module": "mistralai",
63+
"object": "Mistral",
64+
"method": "fim.complete",
6065
"metadata": {
6166
"type": GenerationType.COMPLETION,
6267
},
6368
"async": False,
6469
},
6570
{
66-
"module": "mistralai.client",
67-
"object": "MistralClient",
68-
"method": "completion_stream",
71+
"module": "mistralai",
72+
"object": "Mistral",
73+
"method": "fim.stream",
6974
"metadata": {
7075
"type": GenerationType.COMPLETION,
7176
},
7277
"async": False,
7378
},
7479
{
75-
"module": "mistralai.async_client",
76-
"object": "MistralAsyncClient",
77-
"method": "completion",
80+
"module": "mistralai",
81+
"object": "Mistral",
82+
"method": "fim.complete_async",
7883
"metadata": {
7984
"type": GenerationType.COMPLETION,
8085
},
8186
"async": True,
8287
},
8388
{
84-
"module": "mistralai.async_client",
85-
"object": "MistralAsyncClient",
86-
"method": "completion_stream",
89+
"module": "mistralai",
90+
"object": "Mistral",
91+
"method": "fim.stream_async",
8792
"metadata": {
8893
"type": GenerationType.COMPLETION,
8994
},
@@ -239,7 +244,7 @@ async def before(context: BeforeContext, *args, **kwargs):
239244

240245
return before
241246

242-
from mistralai.models.chat_completion import DeltaMessage
247+
from mistralai import DeltaMessage
243248

244249
def process_delta(new_delta: DeltaMessage, message_completion: GenerationMessage):
245250
if new_delta.tool_calls:

requirements-dev.txt

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -8,4 +8,4 @@ mypy
88
langchain
99
llama-index
1010
pytest_httpx
11-
mistralai < 1.0.0
11+
mistralai

tests/e2e/test_mistralai.py

Lines changed: 10 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -3,12 +3,11 @@
33
from asyncio import sleep
44

55
import pytest
6-
from mistralai.async_client import MistralAsyncClient
7-
from mistralai.client import MistralClient
6+
from mistralai import Mistral
87
from pytest_httpx import HTTPXMock
98

109
from literalai.client import LiteralClient
11-
from literalai.observability.generation import CompletionGeneration, ChatGeneration
10+
from literalai.observability.generation import ChatGeneration, CompletionGeneration
1211

1312

1413
@pytest.fixture
@@ -63,13 +62,13 @@ async def test_chat(self, client: "LiteralClient", httpx_mock: "HTTPXMock"):
6362
},
6463
}
6564
)
66-
mai_client = MistralClient(api_key="j3s4V1z4")
65+
mai_client = Mistral(api_key="j3s4V1z4")
6766
thread_id = None
6867

6968
@client.thread
7069
def main():
7170
# https://docs.mistral.ai/api/#operation/createChatCompletion
72-
mai_client.chat(
71+
mai_client.chat.complete(
7372
model="open-mistral-7b",
7473
messages=[
7574
{
@@ -124,13 +123,13 @@ async def test_completion(self, client: "LiteralClient", httpx_mock: "HTTPXMock"
124123
},
125124
)
126125

127-
mai_client = MistralClient(api_key="j3s4V1z4")
126+
mai_client = Mistral(api_key="j3s4V1z4")
128127
thread_id = None
129128

130129
@client.thread
131130
def main():
132131
# https://docs.mistral.ai/api/#operation/createFIMCompletion
133-
mai_client.completion(
132+
mai_client.fim.complete(
134133
model="codestral-2405",
135134
prompt="1+1=",
136135
temperature=0,
@@ -183,13 +182,13 @@ async def test_async_chat(self, client: "LiteralClient", httpx_mock: "HTTPXMock"
183182
},
184183
)
185184

186-
mai_client = MistralAsyncClient(api_key="j3s4V1z4")
185+
mai_client = Mistral(api_key="j3s4V1z4")
187186
thread_id = None
188187

189188
@client.thread
190189
async def main():
191190
# https://docs.mistral.ai/api/#operation/createChatCompletion
192-
await mai_client.chat(
191+
await mai_client.chat.complete_async(
193192
model="open-mistral-7b",
194193
messages=[
195194
{
@@ -246,13 +245,13 @@ async def test_async_completion(
246245
},
247246
)
248247

249-
mai_client = MistralAsyncClient(api_key="j3s4V1z4")
248+
mai_client = Mistral(api_key="j3s4V1z4")
250249
thread_id = None
251250

252251
@client.thread
253252
async def main():
254253
# https://docs.mistral.ai/api/#operation/createFIMCompletion
255-
await mai_client.completion(
254+
await mai_client.fim.complete_async(
256255
model="codestral-2405",
257256
prompt="1+1=",
258257
temperature=0,

0 commit comments

Comments (0)