Commit e918df8

Merge branch 'development' of ssh://git.biggo.com:222/Funmula/dive-mcp-host into development

2 parents: 0d31c10 + f75588a

File tree

3 files changed: +56 -1 lines changed

  dive_mcp_host/host/conf/llm.py
  dive_mcp_host/host/tools/server_session_store.py
  tests/test_models.py

dive_mcp_host/host/conf/llm.py

Lines changed: 14 additions & 0 deletions

@@ -2,6 +2,7 @@
 
 from typing import Annotated, Literal, Self
 
+from httpx import AsyncClient, Client
 from pydantic import (
     BaseModel,
     BeforeValidator,
@@ -80,6 +81,7 @@ class LLMConfiguration(BaseModel):
     """Configuration for the LLM model."""
 
     base_url: str | None = Field(default=None, alias="baseURL")
+    skip_tls_verify: bool | None = Field(default=None)
     temperature: float | None = Field(default=0)
     top_p: float | None = Field(default=None)
 
@@ -90,6 +92,8 @@ def to_load_model_kwargs(self) -> dict:
         kwargs = {}
         if self.base_url:
             kwargs["base_url"] = self.base_url
+        if self.skip_tls_verify:
+            kwargs["skip_tls_verify"] = self.skip_tls_verify
         if self.temperature:
             kwargs["temperature"] = self.temperature
         if self.top_p:
@@ -125,6 +129,16 @@ def to_load_model_kwargs(self: LLMConfig) -> dict:
         remove_keys = []
         if self.model_provider == "openai" and self.model == "o3-mini":
             remove_keys.extend(["temperature", "top_p"])
+        if kwargs.get("skip_tls_verify"):
+            if self.model_provider == "ollama":
+                kwargs.update({"client_kwargs": {"verify": False}})
+            elif self.model_provider == "openai":
+                kwargs.update(
+                    {
+                        "http_client": Client(verify=False),  # noqa: S501
+                        "http_async_client": AsyncClient(verify=False),  # noqa: S501
+                    }
+                )
         for key in remove_keys:
             kwargs.pop(key, None)
         return to_snake_dict(kwargs)
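
Note (illustration only, not part of this commit): at the httpx level, skip_tls_verify amounts to building clients with verify=False. A minimal sketch of what the two branches above hand to their providers, assuming httpx is installed:

# Sketch: the effect of the skip_tls_verify branches above, expressed with
# the standard library's ssl module and plain httpx clients.
import ssl

from httpx import AsyncClient, Client

# httpx's verify=False yields an SSL context equivalent to this one:
insecure_ctx = ssl.create_default_context()
insecure_ctx.check_hostname = False
insecure_ctx.verify_mode = ssl.CERT_NONE

# "openai" branch: build the clients up front and pass them through
# http_client / http_async_client.
sync_client = Client(verify=False)        # noqa: S501
async_client = AsyncClient(verify=False)  # noqa: S501

# "ollama" branch: forward client_kwargs={"verify": False} instead and let
# the Ollama client construct its own httpx client the same way.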

dive_mcp_host/host/tools/server_session_store.py

Lines changed: 1 addition & 1 deletion

@@ -43,7 +43,7 @@ class _SessionStoreItem:
 
     async def waiting_loop(self) -> None:
         while True:
-            await asyncio.sleep(1)
+            await asyncio.sleep(60)
             if (
                 time.time() - self.active_ts > MAX_IDLE_TIME
                 and len(self.client_tasks) == 0
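
Context (not part of the diff): this change only lengthens the polling interval of the idle-session reaper from 1 second to 60 seconds. A self-contained sketch of the pattern the loop follows; MAX_IDLE_TIME and the cleanup step are placeholders, only the names taken from the hunk are real:

# Standalone sketch of the waiting_loop pattern shown above; not the real
# _SessionStoreItem class. MAX_IDLE_TIME and the cleanup body are stand-ins.
import asyncio
import time

MAX_IDLE_TIME = 30 * 60  # placeholder: 30 minutes of allowed idleness


class SessionItemSketch:
    def __init__(self) -> None:
        self.active_ts = time.time()  # last time the session saw activity
        self.client_tasks: set[asyncio.Task] = set()

    async def waiting_loop(self) -> None:
        while True:
            # Poll once a minute instead of once a second; idle detection
            # does not need sub-second resolution.
            await asyncio.sleep(60)
            if (
                time.time() - self.active_ts > MAX_IDLE_TIME
                and len(self.client_tasks) == 0
            ):
                # Placeholder cleanup: the real code tears down the session.
                break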

tests/test_models.py

Lines changed: 41 additions & 0 deletions

@@ -1,3 +1,4 @@
+from ssl import CERT_NONE
 from typing import Any
 
 import pytest
@@ -189,3 +190,43 @@ def test_model_single_config_validate() -> None:
     assert config.azure_deployment == "fake"
     assert config.api_key == SecretStr("fake")
     assert config.api_version == "2023-03-15-preview"
+
+
+def test_openai_skip_tls_verify() -> None:
+    """Test the OpenAI skip TLS verify."""
+    config = LLMConfig(
+        model="gpt-4o",
+        model_provider="openai",
+        api_key=SecretStr("fake"),
+        configuration=LLMConfiguration(
+            skip_tls_verify=True,
+        ),
+    )
+    model = load_model(
+        config.model_provider, config.model, **(config.to_load_model_kwargs())
+    )
+    assert (
+        model.client._client._client._transport._pool._ssl_context.verify_mode  # type: ignore
+        == CERT_NONE
+    )
+    assert (
+        model.async_client._client._client._transport._pool._ssl_context.verify_mode  # type: ignore
+        == CERT_NONE
+    )
+
+
+def test_ollama_skip_tls_verify() -> None:
+    """Test the Ollama skip TLS verify."""
+    config = LLMConfig(
+        model="llama3.1",
+        model_provider="ollama",
+        api_key=SecretStr("fake"),
+        configuration=LLMConfiguration(
+            skip_tls_verify=True,
+        ),
+    )
+    model = load_model(
+        config.model_provider, config.model, **(config.to_load_model_kwargs())
+    )
+
+    assert model._client._client._transport._pool._ssl_context.verify_mode == CERT_NONE  # type: ignore
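
Aside (illustration only): the assertions reach through private attributes of the chat-model wrappers down to the underlying httpx SSL context. The same check applied to a bare httpx client looks like this; the _transport/_pool/_ssl_context chain mirrors the one used in the tests and is private API that may move between httpx/httpcore releases:

# Sketch: the verify-mode check from the tests above, applied directly to a
# plain httpx Client. Private attributes; may differ across httpx versions.
from ssl import CERT_NONE

from httpx import Client

client = Client(verify=False)  # noqa: S501
assert client._transport._pool._ssl_context.verify_mode == CERT_NONE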
