# See the License for the specific language governing permissions and
# limitations under the License.

+ import os
from collections import deque
from typing import List
+ from unittest.mock import MagicMock, patch

import pytest
+ from rai.agents.langchain import invoke_llm_with_tracing
from rai.agents.langchain.agent import LangChainAgent, newMessageBehaviorType
+ from rai.initialization import get_tracing_callbacks


@pytest.mark.parametrize(
@@ -39,3 +43,110 @@ def test_reduce_messages(
    output_ = LangChainAgent._apply_reduction_behavior(new_message_behavior, buffer)
    assert output == output_
    assert buffer == deque(out_buffer)
+
+
+ class TestTracingConfiguration:
+     """Test tracing configuration integration with langchain agents."""
+
+     def test_tracing_with_missing_config_file(self):
+         """Test that tracing gracefully handles missing config.toml file in langchain context."""
+         # This should not crash even without config.toml
+         callbacks = get_tracing_callbacks()
+         assert len(callbacks) == 0
+
+     def test_tracing_with_config_file_present(self, test_config_toml):
+         """Test that tracing works when config.toml is present in langchain context."""
+         config_path, cleanup = test_config_toml(
+             langfuse_enabled=True, langsmith_enabled=False
+         )
+
+         try:
+             # Mock environment variables to avoid actual API calls
+             with patch.dict(
+                 os.environ,
+                 {
+                     "LANGFUSE_PUBLIC_KEY": "test_key",
+                     "LANGFUSE_SECRET_KEY": "test_secret",
+                 },
+             ):
+                 callbacks = get_tracing_callbacks(config_path=config_path)
+                 # Should return 1 callback for langfuse
+                 assert len(callbacks) == 1
+         finally:
+             cleanup()
+
+
+ class TestInvokeLLMWithTracing:
+     """Test the invoke_llm_with_tracing function."""
+
+     def test_invoke_llm_without_tracing(self):
+         """Test that invoke_llm_with_tracing works when no tracing callbacks are available."""
+         # Mock LLM
+         mock_llm = MagicMock()
+         mock_llm.invoke.return_value = "test response"
+
+         # Mock messages
+         mock_messages = ["test message"]
+
+         # Mock get_tracing_callbacks to return empty list (no config.toml)
+         with patch(
+             "rai.agents.langchain.invocation_helpers.get_tracing_callbacks"
+         ) as mock_get_callbacks:
+             mock_get_callbacks.return_value = []
+
+             result = invoke_llm_with_tracing(mock_llm, mock_messages)
+
+             mock_llm.invoke.assert_called_once_with(mock_messages, config=None)
+             assert result == "test response"
+
+     def test_invoke_llm_with_tracing(self):
+         """Test that invoke_llm_with_tracing works when tracing callbacks are available."""
+         # Mock LLM
+         mock_llm = MagicMock()
+         mock_llm.invoke.return_value = "test response"
+
+         # Mock messages
+         mock_messages = ["test message"]
+
+         # Mock get_tracing_callbacks to return some callbacks
+         with patch(
+             "rai.agents.langchain.invocation_helpers.get_tracing_callbacks"
+         ) as mock_get_callbacks:
+             mock_get_callbacks.return_value = ["tracing_callback"]
+
+             _ = invoke_llm_with_tracing(mock_llm, mock_messages)
+
+             # Verify that the LLM was called with enhanced config
+             mock_llm.invoke.assert_called_once()
+             call_args = mock_llm.invoke.call_args
+             assert call_args[0][0] == mock_messages
+             assert "callbacks" in call_args[1]["config"]
+             assert "tracing_callback" in call_args[1]["config"]["callbacks"]
+
+     def test_invoke_llm_with_existing_config(self):
+         """Test that invoke_llm_with_tracing preserves existing config."""
+         # Mock LLM
+         mock_llm = MagicMock()
+         mock_llm.invoke.return_value = "test response"
+
+         # Mock messages
+         mock_messages = ["test message"]
+
+         # Mock existing config
+         existing_config = {"callbacks": ["existing_callback"]}
+
+         # Mock get_tracing_callbacks to return some callbacks
+         with patch(
+             "rai.agents.langchain.invocation_helpers.get_tracing_callbacks"
+         ) as mock_get_callbacks:
+             mock_get_callbacks.return_value = ["tracing_callback"]
+
+             _ = invoke_llm_with_tracing(mock_llm, mock_messages, existing_config)
+
+             # Verify that the LLM was called with enhanced config
+             mock_llm.invoke.assert_called_once()
+             call_args = mock_llm.invoke.call_args
+             assert call_args[0][0] == mock_messages
+             assert "callbacks" in call_args[1]["config"]
+             assert "existing_callback" in call_args[1]["config"]["callbacks"]
+             assert "tracing_callback" in call_args[1]["config"]["callbacks"]