feat: enhance log for llm providers and settings #275

Merged · 2 commits · Mar 22, 2025
5 changes: 4 additions & 1 deletion apps/agent-tars/src/main/ipcRoutes/llm.ts
@@ -52,6 +52,7 @@ export const llmRoute = t.router({
messages,
requestId: input.requestId,
});
logger.info('[llmRoute.askLLMText] response', response);
return response;
}),

@@ -65,7 +66,6 @@ export const llmRoute = t.router({
.handle(async ({ input }) => {
logger.info('[llmRoute.askLLMTool] input', input);
const messages = input.messages.map((msg) => new Message(msg));
const llm = createLLM(currentLLMConfigRef.current);
logger.info(
'[llmRoute.askLLMTool] Current LLM Config',
maskSensitiveData(currentLLMConfigRef.current),
@@ -74,13 +74,15 @@ export const llmRoute = t.router({
'[llmRoute.askLLMTool] Current Search Config',
maskSensitiveData(SettingStore.get('search')),
);
const llm = createLLM(currentLLMConfigRef.current);
logger.info('[llmRoute.askLLMTool] tools', extractToolNames(input.tools));
const response = await llm.askTool({
messages,
tools: input.tools,
mcpServerKeys: input.mcpServerKeys,
requestId: input.requestId,
});
logger.info('[llmRoute.askLLMTool] response', response);
return response;
}),

@@ -104,6 +106,7 @@ export const llmRoute = t.router({
const windows = BrowserWindow.getAllWindows();
try {
const stream = llm.askLLMTextStream({ messages, requestId });
logger.info('[llmRoute.askLLMTextStream] stream', !!stream);

for await (const chunk of stream) {
if (!windows.length) {
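Note: `maskSensitiveData` is imported throughout this PR, but its implementation is not part of the diff. A minimal sketch of what such a helper might look like — the key list and the `'***'` placeholder are assumptions, not the actual `@main/utils/maskSensitiveData`:

// Hypothetical sketch only; the real @main/utils/maskSensitiveData may differ.
const SENSITIVE_KEYS = new Set(['apiKey', 'secretKey', 'token', 'password']);

export function maskSensitiveData<T extends Record<string, unknown>>(data: T): T {
  const masked = { ...data } as Record<string, unknown>;
  for (const key of Object.keys(masked)) {
    if (SENSITIVE_KEYS.has(key) && typeof masked[key] === 'string') {
      masked[key] = '***'; // redact before the value reaches any log sink
    }
  }
  return masked as T;
}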
25 changes: 23 additions & 2 deletions apps/agent-tars/src/main/ipcRoutes/settings.ts
@@ -1,25 +1,46 @@
import { AppSettings } from '@agent-infra/shared';
import { SettingStore } from '@main/store/setting';
import { initIpc } from '@ui-tars/electron-ipc/main';
import { logger } from '@main/utils/logger';
import { maskSensitiveData } from '@main/utils/maskSensitiveData';

const t = initIpc.create();

export const settingsRoute = t.router({
getSettings: t.procedure.input<void>().handle(async () => {
return SettingStore.getStore();
const settings = SettingStore.getStore();
// Called too often; keep this log disabled to avoid redundant output.
// logger.info(
// '[settingsRoute.getSettings] result',
// maskSensitiveData(settings),
// );
return settings;
}),
getFileSystemSettings: t.procedure.input<void>().handle(async () => {
return SettingStore.get('fileSystem');
const settings = SettingStore.get('fileSystem');
logger.info(
'[settingsRoute.getFileSystemSettings] result',
maskSensitiveData(settings),
);
return settings;
}),
updateAppSettings: t.procedure
.input<AppSettings>()
.handle(async ({ input }) => {
logger.info(
'[settingsRoute.updateAppSettings]',
maskSensitiveData(input),
);
SettingStore.setStore(input);
return true;
}),
updateFileSystemSettings: t.procedure
.input<AppSettings['fileSystem']>()
.handle(async ({ input }) => {
logger.info(
'[settingsRoute.updateFileSystemSettings]',
maskSensitiveData(input),
);
SettingStore.set('fileSystem', input);
return true;
}),
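On the commented-out `getSettings` log above: one hedged alternative to disabling it entirely is throttling, so the route stays observable without flooding the log. A sketch — the interval and helper name are hypothetical, not part of this PR:

// Hypothetical throttle: log getSettings at most once per minute
// instead of commenting the call out.
let lastSettingsLogAt = 0;

function logSettingsThrottled(settings: AppSettings): void {
  const now = Date.now();
  if (now - lastSettingsLogAt >= 60_000) {
    lastSettingsLogAt = now;
    logger.info('[settingsRoute.getSettings] result', maskSensitiveData(settings));
  }
}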
6 changes: 6 additions & 0 deletions apps/agent-tars/src/main/llmProvider/ProviderFactory.ts
@@ -5,6 +5,7 @@ import { AzureOpenAIProvider } from './providers/AzureOpenAIProvider';
import { GeminiProvider } from './providers/GeminiProvider';
import { MistralProvider } from './providers/MistralProvider';
import { logger } from '@main/utils/logger';
import { maskSensitiveData } from '@main/utils/maskSensitiveData';

// Define model prefixes that will be used to determine the provider
const MODEL_PREFIXES = {
@@ -80,6 +81,11 @@ export class ProviderFactory {
providerName: string,
config: LLMConfig,
): LLMProvider {
logger.info(
'[createProviderByName]',
providerName,
maskSensitiveData(config),
);
switch (providerName.toLowerCase()) {
case 'openai':
return new OpenAIProvider(config);
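For context on the `MODEL_PREFIXES` table mentioned above: the resolution logic is not shown in this diff, but prefix matching presumably works along these lines. The prefix values here are illustrative assumptions:

// Illustrative sketch of prefix-based provider resolution; the actual
// table contents and fallback behavior are not visible in this diff.
const EXAMPLE_MODEL_PREFIXES: Record<string, string[]> = {
  openai: ['gpt-'],
  anthropic: ['claude-'],
  gemini: ['gemini-'],
  mistral: ['mistral-'],
};

function resolveProviderName(model: string): string | undefined {
  const lower = model.toLowerCase();
  for (const [provider, prefixes] of Object.entries(EXAMPLE_MODEL_PREFIXES)) {
    if (prefixes.some((prefix) => lower.startsWith(prefix))) {
      return provider; // feeds into createProviderByName above
    }
  }
  return undefined; // caller falls back to its default provider
}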
1 change: 1 addition & 0 deletions apps/agent-tars/src/main/llmProvider/index.ts
@@ -34,6 +34,7 @@ export class LLM {

constructor(config: LLMConfig = {}) {
this.config = config;
// FIXME: config.configName does not exist !!!
// Create provider instance based on the model or explicit provider setting
this.provider = ProviderFactory.createProvider(config, config.configName);
}
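On the FIXME above: since `configName` is not declared on `LLMConfig`, the second argument is silently `undefined`. Until the field is added to the type, one hedged way to make the gap visible at the call site (the field name is the one the comment assumes):

// Sketch only: read the assumed optional override defensively so the
// missing-field problem is explicit rather than hidden by the type system.
const providerName = (config as LLMConfig & { configName?: string }).configName;
this.provider = ProviderFactory.createProvider(config, providerName);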
apps/agent-tars/src/main/llmProvider/providers/AnthropicProvider.ts
@@ -3,6 +3,8 @@ import { Message, ToolCall } from '@agent-infra/shared';
import { ChatCompletionTool } from 'openai/resources';
import { BaseProvider } from './BaseProvider';
import { LLMConfig, LLMResponse, ToolChoice } from '../interfaces/LLMProvider';
import { logger } from '@main/utils/logger';
import { maskSensitiveData } from '@main/utils/maskSensitiveData';

/**
* Helper to convert OpenAI tool format to Anthropic tool format
@@ -42,6 +44,7 @@ export class AnthropicProvider extends BaseProvider {
);
}

logger.info('[AnthropicProvider]', maskSensitiveData({ apiKey, baseURL }));
this.client = new Anthropic({
apiKey,
...(baseURL && { baseURL }),
apps/agent-tars/src/main/llmProvider/providers/AzureOpenAIProvider.ts
@@ -5,6 +5,9 @@ import { ChatCompletionTool } from 'openai/resources';
import { BaseProvider } from './BaseProvider';
import { LLMConfig, LLMResponse, ToolChoice } from '../interfaces/LLMProvider';

import { logger } from '@main/utils/logger';
import { maskSensitiveData } from '@main/utils/maskSensitiveData';

/**
* Azure OpenAI provider implementation
*/
@@ -37,6 +40,11 @@ export class AzureOpenAIProvider extends BaseProvider {
);
}

logger.info(
'[AzureOpenAIProvider]',
maskSensitiveData({ apiKey, apiVersion, endpoint }),
);

this.client = new AzureOpenAI({ apiKey, apiVersion, endpoint });
this.model = config.model || process.env.AZURE_OPENAI_MODEL || 'gpt-4o';
}
apps/agent-tars/src/main/llmProvider/providers/GeminiProvider.ts
@@ -8,6 +8,8 @@ import {
} from '@google/generative-ai';
import { BaseProvider } from './BaseProvider';
import { LLMConfig, LLMResponse, ToolChoice } from '../interfaces/LLMProvider';
import { logger } from '@main/utils/logger';
import { maskSensitiveData } from '@main/utils/maskSensitiveData';

/**
* Google Gemini provider implementation
@@ -28,12 +30,19 @@ export class GeminiProvider extends BaseProvider {
);
}

logger.info('[GeminiProvider]', maskSensitiveData({ apiKey }));

// Initialize client with the actual SDK
this.client = new GoogleGenerativeAI(apiKey);

// Set default model or use provided one
this.model =
config.model || process.env.GEMINI_DEFAULT_MODEL || 'gemini-1.5-pro';

logger.info(
'[GeminiProvider]',
maskSensitiveData({ apiKey, model: this.model }),
);
}

/**
apps/agent-tars/src/main/llmProvider/providers/MistralProvider.ts
@@ -7,6 +7,8 @@ import { Mistral } from '@mistralai/mistralai';
import { BaseProvider } from './BaseProvider';
import { LLMConfig, LLMResponse, ToolChoice } from '../interfaces/LLMProvider';
import { ChatCompletionResponse } from '@mistralai/mistralai/models/components';
import { logger } from '@main/utils/logger';
import { maskSensitiveData } from '@main/utils/maskSensitiveData';

/**
* Mistral provider implementation
@@ -37,6 +39,11 @@ export class MistralProvider extends BaseProvider {
config.model ||
process.env.MISTRAL_DEFAULT_MODEL ||
'mistral-large-latest';

logger.info(
'[MistralProvider]',
maskSensitiveData({ apiKey, model: this.model }),
);
}

/**
apps/agent-tars/src/main/llmProvider/providers/OpenAIProvider.ts
@@ -3,6 +3,8 @@ import { Message, ToolCall } from '@agent-infra/shared';
import { ChatCompletionTool } from 'openai/resources';
import { BaseProvider } from './BaseProvider';
import { LLMConfig, LLMResponse, ToolChoice } from '../interfaces/LLMProvider';
import { logger } from '@main/utils/logger';
import { maskSensitiveData } from '@main/utils/maskSensitiveData';

/**
* OpenAI provider implementation
@@ -24,6 +26,7 @@ export class OpenAIProvider extends BaseProvider {
);
}

logger.info('[OpenAIProvider]', maskSensitiveData({ apiKey, baseURL }));
this.client = new OpenAI({ apiKey, baseURL });
this.model = config.model || process.env.OPENAI_DEFAULT_MODEL || 'gpt-4o';
}
6 changes: 5 additions & 1 deletion apps/agent-tars/src/main/store/setting.ts
@@ -13,6 +13,7 @@ import {
AppSettings,
} from '@agent-infra/shared';
import { logger } from '@main/utils/logger';
import { maskSensitiveData } from '@main/utils/maskSensitiveData';

const DEFAULT_MODEL_SETTINGS: ModelSettings = {
provider: ModelProvider.OPENAI,
@@ -49,7 +50,10 @@ export class SettingStore {

SettingStore.instance.onDidAnyChange((newValue, oldValue) => {
logger.info(
`SettingStore: ${JSON.stringify(oldValue)} changed to ${JSON.stringify(newValue)}`,
`[SettingStore] Before: ${JSON.stringify(maskSensitiveData(oldValue!))}`,
);
logger.info(
`[SettingStore] After: ${JSON.stringify(maskSensitiveData(newValue!))}`,
);
// Notify that value updated
BrowserWindow.getAllWindows().forEach((win) => {
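A possible follow-up (not in this PR): the handler above serializes the entire settings object twice on every change. Logging only the top-level keys whose values changed would keep these lines short. A sketch, assuming top-level granularity is enough and that `maskSensitiveData` accepts any plain object:

// Sketch (not part of this PR): replace the handler above with one that
// logs only the top-level keys that actually changed, still masked.
SettingStore.instance.onDidAnyChange((newValue, oldValue) => {
  const allKeys = Object.keys({ ...oldValue, ...newValue });
  for (const key of allKeys) {
    const before = (oldValue as Record<string, unknown> | undefined)?.[key];
    const after = (newValue as Record<string, unknown> | undefined)?.[key];
    if (JSON.stringify(before) === JSON.stringify(after)) continue;
    // Note: masks only one level deep; nested secrets may need a deep mask.
    const maskedAfter =
      after && typeof after === 'object'
        ? maskSensitiveData(after as Record<string, unknown>)
        : after;
    logger.info(`[SettingStore] '${key}' changed: ${JSON.stringify(maskedAfter)}`);
  }
});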