Skip to content

Commit 5e71b0d

Browse files
authored
Allow empty choices list (#26)
1 parent 2740e1f commit 5e71b0d

File tree

2 files changed

+4
-19
lines changed

any_llm_client/clients/openai.py

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -86,7 +86,7 @@ class OneStreamingChoice(pydantic.BaseModel):
86   86
87   87
88   88       class ChatCompletionsStreamingEvent(pydantic.BaseModel):
89      -         choices: typing.Annotated[list[OneStreamingChoice], annotated_types.MinLen(1)]
     89 +         choices: list[OneStreamingChoice]
90   90
91   91
92   92       class OneNotStreamingChoiceMessage(pydantic.BaseModel):
@@ -269,7 +269,8 @@ async def _iter_response_chunks(self, response: httpx.Response) -> typing.AsyncI
269  269          _handle_validation_error(content=event.data.encode(), original_error=validation_error)
270  270
271  271          if not (
272       -          (validated_delta := validated_response.choices[0].delta)
     272  +          (validated_choices := validated_response.choices)
     273  +          and (validated_delta := validated_choices[0].delta)
273  274             and (validated_delta.content or validated_delta.reasoning_content)
274  275          ):
275  276             continue

tests/test_openai_client.py

Lines changed: 1 addition & 17 deletions
Original file line numberDiff line numberDiff line change
@@ -91,6 +91,7 @@ async def test_ok(self, faker: faker.Faker, func_request: LLMFuncRequest) -> Non
91   91           + ChatCompletionsStreamingEvent(choices=[OneStreamingChoice(delta=one_message)]).model_dump_json()
92   92           for one_message in generated_messages
93   93       )
     94 +       + f"\n\ndata: {ChatCompletionsStreamingEvent(choices=[]).model_dump_json()}"
94   95         + f"\n\ndata: [DONE]\n\ndata: {faker.pystr()}\n\n"
95   96       )
96   97       response: typing.Final = httpx.Response(
@@ -104,23 +105,6 @@ async def test_ok(self, faker: faker.Faker, func_request: LLMFuncRequest) -> Non
104  105
105  106          assert result == expected_result
106  107
107       -      async def test_fails_without_alternatives(self) -> None:
108       -          response_content: typing.Final = (
109       -              f"data: {ChatCompletionsStreamingEvent.model_construct(choices=[]).model_dump_json()}\n\n"
110       -          )
111       -          response: typing.Final = httpx.Response(
112       -              200,
113       -              headers={"Content-Type": "text/event-stream"},
114       -              content=response_content,
115       -          )
116       -          client: typing.Final = any_llm_client.get_client(
117       -              OpenAIConfigFactory.build(),
118       -              transport=httpx.MockTransport(lambda _: response),
119       -          )
120       -
121       -          with pytest.raises(LLMResponseValidationError):
122       -              await consume_llm_message_chunks(client.stream_llm_message_chunks(**LLMFuncRequestFactory.build()))
123       -
124  108
125  109      class TestOpenAILLMErrors:
126  110          @pytest.mark.parametrize("stream", [True, False])

0 commit comments

Comments (0)