Skip to content
This repository was archived by the owner on Aug 5, 2025. It is now read-only.

Commit 4c4ce14

Browse files
committed
fix: mistral instrumentation
1 parent: 89ddcfd · commit: 4c4ce14

File tree

2 files changed

+13
-13
lines changed

2 files changed

+13
-13
lines changed

literalai/helper.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -18,9 +18,9 @@ def ensure_values_serializable(data):
1818
pass
1919

2020
try:
21-
from mistralai.models.chat_completion import ChatMessage
21+
from mistralai import UserMessage
2222

23-
if isinstance(data, ChatMessage):
23+
if isinstance(data, UserMessage):
2424
return filter_none_values(data.model_dump())
2525
except ImportError:
2626
pass

literalai/instrumentation/mistralai.py

Lines changed: 11 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -248,7 +248,7 @@ def process_delta(new_delta: DeltaMessage, message_completion: GenerationMessage
248248
if new_delta.tool_calls:
249249
if "tool_calls" not in message_completion:
250250
message_completion["tool_calls"] = []
251-
delta_tool_call = new_delta.tool_calls[0]
251+
delta_tool_call = new_delta.tool_calls[0] # type: ignore
252252
delta_function = delta_tool_call.function
253253
if not delta_function:
254254
return False
@@ -303,22 +303,22 @@ def streaming_response(
303303
token_count += 1
304304
elif generation and isinstance(generation, CompletionGeneration):
305305
if (
306-
len(chunk.choices) > 0
307-
and chunk.choices[0].message.content is not None
306+
len(chunk.data.choices) > 0
307+
and chunk.data.choices[0].delta.content is not None
308308
):
309309
if generation.tt_first_token is None:
310310
generation.tt_first_token = (
311311
time.time() - context["start"]
312312
) * 1000
313313
token_count += 1
314-
completion += chunk.choices[0].message.content
314+
completion += chunk.data.choices[0].delta.content
315315

316316
if (
317317
generation
318318
and getattr(chunk, "model", None)
319-
and generation.model != chunk.model
319+
and generation.model != chunk.data.model
320320
):
321-
generation.model = chunk.model
321+
generation.model = chunk.data.model
322322

323323
yield chunk
324324

@@ -415,22 +415,22 @@ async def async_streaming_response(
415415
token_count += 1
416416
elif generation and isinstance(generation, CompletionGeneration):
417417
if (
418-
len(chunk.choices) > 0
419-
and chunk.choices[0].message.content is not None
418+
len(chunk.data.choices) > 0
419+
and chunk.data.choices[0].delta is not None
420420
):
421421
if generation.tt_first_token is None:
422422
generation.tt_first_token = (
423423
time.time() - context["start"]
424424
) * 1000
425425
token_count += 1
426-
completion += chunk.choices[0].message.content
426+
completion += chunk.data.choices[0].delta.content or ""
427427

428428
if (
429429
generation
430430
and getattr(chunk, "model", None)
431-
and generation.model != chunk.model
431+
and generation.model != chunk.data.model
432432
):
433-
generation.model = chunk.model
433+
generation.model = chunk.data.model
434434

435435
yield chunk
436436

0 commit comments

Comments (0)