Skip to content

Commit 95de451

Browse files
committed
Merge pull request 'chore: gpt-5 not support topP' (#460) from spec_config into development
Reviewed-on: https://git.biggo.com/Funmula/dive-mcp-host/pulls/460
2 parents cab43db + a3a979c commit 95de451

File tree

2 files changed

+16
-8
lines changed

2 files changed

+16
-8
lines changed

dive_mcp_host/host/conf/llm.py

Lines changed: 11 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -161,15 +161,19 @@ def temperature_top_p(self) -> Self:
161161
):
162162
self.configuration.top_p = None
163163

164-
if (
165-
"gpt-5" in self.model
166-
and self.configuration
167-
and (temperature := self.configuration.temperature)
168-
):
169-
if temperature > 0:
164+
if "gpt-5" in self.model and self.configuration:
165+
temperature = self.configuration.temperature
166+
top_p = self.configuration.top_p
167+
168+
if temperature and temperature > 0:
170169
self.configuration.temperature = 1
171-
else:
170+
elif temperature == 0:
172171
self.configuration.temperature = None
172+
173+
# gpt-5 does not support top_p
174+
if top_p is not None:
175+
self.configuration.top_p = None
176+
173177
return self
174178

175179

tests/test_models.py

Lines changed: 5 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -304,11 +304,12 @@ def test_gpt_5_temperature() -> None:
304304
assert "temperature" not in kwargs
305305

306306
simple_2 = raw_config.copy()
307-
simple_2["configuration"] = {"temperature": 0.5}
307+
simple_2["configuration"] = {"temperature": 0.5, "top_p": 0.5}
308308
llm_config = LLMConfig.model_validate(simple_2)
309309
kwargs = llm_config.to_load_model_kwargs()
310310
assert "temperature" in kwargs
311311
assert kwargs["temperature"] == 1
312+
assert "top_p" not in kwargs
312313

313314
simple_3 = raw_config.copy()
314315
simple_3["configuration"] = {"temperature": 0}
@@ -323,6 +324,7 @@ def test_gpt_5_temperature() -> None:
323324
kwargs = llm_config.to_load_model_kwargs()
324325
assert "temperature" in kwargs
325326
assert kwargs["temperature"] == 1
327+
assert "top_p" not in kwargs
326328

327329
# test general llm config
328330
simple_5 = simple_2.copy()
@@ -331,3 +333,5 @@ def test_gpt_5_temperature() -> None:
331333
kwargs = llm_config.to_load_model_kwargs()
332334
assert "temperature" in kwargs
333335
assert kwargs["temperature"] == 0.5
336+
assert "top_p" in kwargs
337+
assert kwargs["top_p"] == 0.5

0 commit comments

Comments
 (0)