Merged

Changes from 28 commits

39 commits
fb90c9e
Add memory setting
wenzhengjiang Aug 29, 2025
32e42c5
Support recent conversation in memory
wenzhengjiang Aug 29, 2025
a51bbbe
Add conversation summary to recent conversations
wenzhengjiang Aug 30, 2025
1ee76ae
Load recent conversation to prompt
wenzhengjiang Aug 30, 2025
6bb8528
Implement user insights
wenzhengjiang Aug 31, 2025
0d86d43
Fix some bugs
wenzhengjiang Aug 31, 2025
9e89ec9
Refactor
wenzhengjiang Sep 1, 2025
16c6d98
Merge master into memory branch
wenzhengjiang Sep 4, 2025
52f7ad7
Support returning multiple insights
wenzhengjiang Sep 8, 2025
db08331
Use condensedMessage for recent conversation content
wenzhengjiang Sep 8, 2025
e012727
Remove user insights/long-term memory functionality
wenzhengjiang Sep 12, 2025
6dac374
Add maxRecentConversations setting and update memory formatting
wenzhengjiang Sep 12, 2025
e44c3d1
Support multiple conclusions and add unit tests
wenzhengjiang Sep 13, 2025
8841735
Merge master into memory branch
wenzhengjiang Sep 13, 2025
0a6b6fa
Fix user message formatting
wenzhengjiang Sep 13, 2025
24bf217
Fix PR issues
wenzhengjiang Sep 13, 2025
6c07880
Refactor UserMemoryManager to utilize utility function for folder cre…
wenzhengjiang Sep 13, 2025
addbefd
Use logError for error logging
wenzhengjiang Sep 13, 2025
06b635c
Improve documentation and remove unused code
wenzhengjiang Sep 15, 2025
3cd9eb5
Replace console.error with logError
wenzhengjiang Sep 15, 2025
2ff8020
Backfill condensedUserMessage
wenzhengjiang Sep 15, 2025
2a9ac44
Fix build error
wenzhengjiang Sep 15, 2025
c836de3
Refactor for a simpler implementation
wenzhengjiang Sep 15, 2025
cbe7b8c
Better memory prompt and fix conversation parsing
wenzhengjiang Sep 15, 2025
502d57a
Rename memory functions
wenzhengjiang Sep 15, 2025
1765366
Implement memory tool.
wenzhengjiang Sep 15, 2025
3da77b4
Update design doc
wenzhengjiang Sep 15, 2025
deac33b
Fix test
wenzhengjiang Sep 15, 2025
9e3f501
Update savedMemory setting
wenzhengjiang Sep 16, 2025
d867546
Fix memory setting check
wenzhengjiang Sep 16, 2025
e3af03c
Support "@memory" in non-agent mode
wenzhengjiang Sep 20, 2025
ea72057
Improve timestamp format
wenzhengjiang Sep 20, 2025
dd47de2
Update design doc
wenzhengjiang Sep 20, 2025
70d37fe
Merge branch '3.1.0-preview' into memory
wenzhengjiang Sep 21, 2025
094e3f8
Add support for "@memory" command in builtin tools.
wenzhengjiang Sep 21, 2025
351e23e
Merge branch '3.1.0-preview' into memory
wenzhengjiang Sep 22, 2025
00b2427
Merge branch '3.1.0-preview' into memory
wenzhengjiang Sep 22, 2025
a4ea53d
Minor fixes
wenzhengjiang Sep 22, 2025
1e3a2ae
Fix test
wenzhengjiang Sep 22, 2025
3 changes: 3 additions & 0 deletions src/LLMProviders/chainManager.ts
@@ -29,6 +29,7 @@ import { App, Notice } from "obsidian";
 import ChatModelManager from "./chatModelManager";
 import MemoryManager from "./memoryManager";
 import PromptManager from "./promptManager";
+import { UserMemoryManager } from "@/memory/UserMemoryManager";

 export default class ChainManager {
   // TODO: These chains are deprecated since we now use direct chat model calls in chain runners
@@ -45,13 +46,15 @@ export default class ChainManager {
   public chatModelManager: ChatModelManager;
   public memoryManager: MemoryManager;
   public promptManager: PromptManager;
+  public userMemoryManager: UserMemoryManager;

   constructor(app: App) {
     // Instantiate singletons
     this.app = app;
     this.memoryManager = MemoryManager.getInstance();
     this.chatModelManager = ChatModelManager.getInstance();
     this.promptManager = PromptManager.getInstance();
+    this.userMemoryManager = new UserMemoryManager(app);

     // Initialize async operations
     this.initialize();
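The memory module itself (src/memory/UserMemoryManager.ts) is not part of this diff. From its call sites in the PR (constructed with the Obsidian App, invoked as addRecentConversation(messages, chatModel) when a new chat starts, and configured via the memoryFolderName, maxRecentConversations, enableRecentConversations, and enableSavedMemory settings added in src/constants.ts), its public surface is roughly the sketch below. The method bodies and the getMemoryPrompt helper are assumptions, not the actual implementation.

// Hypothetical sketch of the UserMemoryManager surface implied by this PR.
// Only the constructor and addRecentConversation are confirmed by call sites.
import { App } from "obsidian";
import { BaseChatModel } from "@langchain/core/language_models/chat_models";
import { ChatMessage } from "@/types/message";
import { getSettings } from "@/settings/model";

export class UserMemoryManager {
  constructor(private app: App) {}

  // Summarize the finished chat with the model and persist it in the vault.
  async addRecentConversation(messages: ChatMessage[], chatModel: BaseChatModel): Promise<void> {
    const { memoryFolderName, maxRecentConversations } = getSettings();
    // Assumed flow: condense the conversation, append it to a note under
    // memoryFolderName, and prune entries beyond maxRecentConversations.
  }

  // Return formatted memory text for the system prompt (assumed helper).
  async getMemoryPrompt(): Promise<string> {
    return ""; // Assumed: reads recent-conversation summaries back from the vault.
  }
}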
22 changes: 14 additions & 8 deletions src/LLMProviders/chainRunner/AutonomousAgentChainRunner.ts
@@ -1,7 +1,8 @@
 import { MessageContent } from "@/imageProcessing/imageProcessor";
 import { logError, logInfo, logWarn } from "@/logger";
 import { checkIsPlusUser } from "@/plusUtils";
-import { getSettings, getSystemPrompt } from "@/settings/model";
+import { getSettings, getSystemPromptWithMemory } from "@/settings/model";
+import { UserMemoryManager } from "@/memory/UserMemoryManager";
 import { initializeBuiltinTools } from "@/tools/builtinTools";
 import { extractParametersFromZod, SimpleTool } from "@/tools/SimpleTool";
 import { ToolRegistry } from "@/tools/ToolRegistry";
@@ -78,11 +79,12 @@ ${params}
       .join("\n\n");
   }

-  public static generateSystemPrompt(
+  public static async generateSystemPrompt(
     availableTools: SimpleTool<any, any>[],
-    adapter: ModelAdapter
-  ): string {
-    const basePrompt = getSystemPrompt();
+    adapter: ModelAdapter,
+    userMemoryManager?: UserMemoryManager
+  ): Promise<string> {
+    const basePrompt = await getSystemPromptWithMemory(userMemoryManager);
     const toolDescriptions = AutonomousAgentChainRunner.generateToolDescriptions(availableTools);

     const toolNames = availableTools.map((tool) => tool.name);
@@ -96,14 +98,18 @@ ${params}
     return adapter.enhanceSystemPrompt(basePrompt, toolDescriptions, toolNames, toolMetadata);
   }

-  private generateSystemPrompt(): string {
+  private async generateSystemPrompt(): Promise<string> {
     const availableTools = this.getAvailableTools();

     // Use model adapter for clean model-specific handling
     const chatModel = this.chainManager.chatModelManager.getChatModel();
     const adapter = ModelAdapterFactory.createAdapter(chatModel);

-    return AutonomousAgentChainRunner.generateSystemPrompt(availableTools, adapter);
+    return AutonomousAgentChainRunner.generateSystemPrompt(
+      availableTools,
+      adapter,
+      this.chainManager.userMemoryManager
+    );
   }
@@ -144,7 +150,7 @@ ${params}
     const rawHistory = memoryVariables.history || [];

     // Build initial conversation messages
-    const customSystemPrompt = this.generateSystemPrompt();
+    const customSystemPrompt = await this.generateSystemPrompt();

     const chatModel = this.chainManager.chatModelManager.getChatModel();
     const adapter = ModelAdapterFactory.createAdapter(chatModel);
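getSystemPromptWithMemory replaces getSystemPrompt across these runners, but its definition in src/settings/model.ts is outside this diff. Judging from the call sites (it is awaited, optionally takes a UserMemoryManager, and ProjectChainRunner deliberately keeps plain getSystemPrompt), a plausible shape is sketched below; the gating on enableRecentConversations and the delimiter format are assumptions.

// Hypothetical sketch of getSystemPromptWithMemory (in src/settings/model.ts,
// where getSystemPrompt and getSettings are already defined).
import { UserMemoryManager } from "@/memory/UserMemoryManager";

export async function getSystemPromptWithMemory(
  userMemoryManager?: UserMemoryManager
): Promise<string> {
  const basePrompt = getSystemPrompt();
  if (!userMemoryManager || !getSettings().enableRecentConversations) {
    return basePrompt;
  }
  const memory = await userMemoryManager.getMemoryPrompt(); // assumed helper, see sketch above
  // Assumed: memory is appended as a clearly delimited section so the model
  // can tell it apart from the user's own system prompt.
  return memory ? `${basePrompt}\n\n<user_memory>\n${memory}\n</user_memory>` : basePrompt;
}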
4 changes: 2 additions & 2 deletions src/LLMProviders/chainRunner/CopilotPlusChainRunner.ts
@@ -15,7 +15,7 @@ import {
 import { BrevilabsClient } from "@/LLMProviders/brevilabsClient";
 import { logError, logInfo, logWarn } from "@/logger";
 import { checkIsPlusUser } from "@/plusUtils";
-import { getSettings, getSystemPrompt } from "@/settings/model";
+import { getSettings, getSystemPromptWithMemory } from "@/settings/model";
 import { writeToFileTool } from "@/tools/ComposerTools";
 import { ToolManager } from "@/tools/toolManager";
 import { ChatMessage } from "@/types/message";
@@ -783,6 +783,6 @@ export class CopilotPlusChainRunner extends BaseChainRunner {
   }

   protected async getSystemPrompt(): Promise<string> {
-    return getSystemPrompt();
+    return getSystemPromptWithMemory(this.chainManager.userMemoryManager);
   }
 }
4 changes: 2 additions & 2 deletions src/LLMProviders/chainRunner/LLMChainRunner.ts
@@ -1,6 +1,6 @@
 import { ABORT_REASON } from "@/constants";
 import { logInfo } from "@/logger";
-import { getSystemPrompt } from "@/settings/model";
+import { getSystemPromptWithMemory } from "@/settings/model";
 import { ChatMessage } from "@/types/message";
 import { extractChatHistory, getMessageRole, withSuppressedTokenWarnings } from "@/utils";
 import { BaseChainRunner } from "./BaseChainRunner";
@@ -30,7 +30,7 @@ export class LLMChainRunner extends BaseChainRunner {
     const messages: any[] = [];

     // Add system message if available
-    const systemPrompt = getSystemPrompt();
+    const systemPrompt = await getSystemPromptWithMemory(this.chainManager.userMemoryManager);
     const chatModel = this.chainManager.chatModelManager.getChatModel();

     if (systemPrompt) {
1 change: 1 addition & 0 deletions src/LLMProviders/chainRunner/ProjectChainRunner.ts
@@ -5,6 +5,7 @@ import { CopilotPlusChainRunner } from "./CopilotPlusChainRunner";

 export class ProjectChainRunner extends CopilotPlusChainRunner {
   protected async getSystemPrompt(): Promise<string> {
+    // NOTE: Currently memory is not enabled for project mode, so we don't need to use getSystemPromptWithMemory
     let finalPrompt = getSystemPrompt();
     const projectConfig = getCurrentProject();
     if (!projectConfig) {
34 changes: 24 additions & 10 deletions src/components/Chat.tsx
@@ -10,7 +10,7 @@ import {
 } from "@/aiParams";
 import { ChainType } from "@/chainFactory";
 import { useProjectContextStatus } from "@/hooks/useProjectContextStatus";
-import { logInfo } from "@/logger";
+import { logInfo, logError } from "@/logger";

 import { ChatControls, reloadCurrentProject } from "@/components/chat-components/ChatControls";
 import ChatInput from "@/components/chat-components/ChatInput";
@@ -255,7 +255,7 @@ const Chat: React.FC<ChatProps> = ({
         handleSaveAsNote();
       }
     } catch (error) {
-      console.error("Error sending message:", error);
+      logError("Error sending message:", error);
       new Notice("Failed to send message. Please try again.");
     } finally {
       safeSet.setLoading(false);
@@ -265,15 +265,15 @@ const Chat: React.FC<ChatProps> = ({

   const handleSaveAsNote = useCallback(async () => {
     if (!app) {
-      console.error("App instance is not available.");
+      logError("App instance is not available.");
       return;
     }

     try {
       // Use the new ChatManager persistence functionality
       await chatUIState.saveChat(currentModelKey);
     } catch (error) {
-      console.error("Error saving chat as note:", err2String(error));
+      logError("Error saving chat as note:", err2String(error));
       new Notice("Failed to save chat as note. Check console for details.");
     }
   }, [app, chatUIState, currentModelKey]);
@@ -338,7 +338,7 @@ const Chat: React.FC<ChatProps> = ({
         handleSaveAsNote();
       }
     } catch (error) {
-      console.error("Error regenerating message:", error);
+      logError("Error regenerating message:", error);
       new Notice("Failed to regenerate message. Please try again.");
     } finally {
       safeSet.setLoading(false);
@@ -399,7 +399,7 @@ const Chat: React.FC<ChatProps> = ({
         );
       }
     } catch (error) {
-      console.error("Error regenerating AI response:", error);
+      logError("Error regenerating AI response:", error);
       new Notice("Failed to regenerate AI response. Please try again.");
     } finally {
       safeSet.setLoading(false);
@@ -412,7 +412,7 @@ const Chat: React.FC<ChatProps> = ({
         handleSaveAsNote();
       }
     } catch (error) {
-      console.error("Error editing message:", error);
+      logError("Error editing message:", error);
       new Notice("Failed to edit message. Please try again.");
     }
   },
@@ -459,7 +459,7 @@ const Chat: React.FC<ChatProps> = ({
           new Notice(`${project.name} added and context loaded`);
         })
         .catch((error: Error) => {
-          console.error("Error loading project context:", error);
+          logError("Error loading project context:", error);
           new Notice(`${project.name} added but context loading failed`);
         });
     } else {
@@ -494,7 +494,7 @@ const Chat: React.FC<ChatProps> = ({
           new Notice(`${originP.name} updated and context reloaded`);
         })
         .catch((error: Error) => {
-          console.error("Error reloading project context:", error);
+          logError("Error reloading project context:", error);
           new Notice(`${originP.name} updated but context reload failed`);
         });
     } else {
@@ -528,7 +528,7 @@ const Chat: React.FC<ChatProps> = ({
         new Notice("Failed to delete message. Please try again.");
       }
     } catch (error) {
-      console.error("Error deleting message:", error);
+      logError("Error deleting message:", error);
       new Notice("Failed to delete message. Please try again.");
     }
   },
@@ -538,6 +538,17 @@ const Chat: React.FC<ChatProps> = ({
   const handleNewChat = useCallback(async () => {
     handleStopGenerating(ABORT_REASON.NEW_CHAT);

+    // Analyze chat messages for memory if enabled
+    if (settings.enableRecentConversations) {
+      try {
+        // Get the current chat model from the chain manager
+        const chatModel = chainManager.chatModelManager.getChatModel();
+        plugin.userMemoryManager.addRecentConversation(chatUIState.getMessages(), chatModel);
+      } catch (error) {
+        logInfo("Failed to analyze chat messages for memory:", error);
+      }
+    }
+
     // First autosave the current chat if the setting is enabled
     if (settings.autosaveChat) {
       await handleSaveAsNote();
@@ -559,12 +570,15 @@ const Chat: React.FC<ChatProps> = ({
     }
   }, [
     handleStopGenerating,
+    chainManager.chatModelManager,
     chatUIState,
     settings.autosaveChat,
+    settings.enableRecentConversations,
     settings.includeActiveNoteAsContext,
     selectedChain,
     handleSaveAsNote,
     safeSet,
+    plugin.userMemoryManager,
   ]);

   const handleLoadHistory = useCallback(() => {
5 changes: 5 additions & 0 deletions src/constants.ts
@@ -751,9 +751,14 @@ export const DEFAULT_SETTINGS: CopilotSettings = {
     "youtubeTranscription",
     "writeToFile",
     "replaceInFile",
+    "memoryTool",
   ],
   reasoningEffort: DEFAULT_MODEL_SETTING.REASONING_EFFORT,
   verbosity: DEFAULT_MODEL_SETTING.VERBOSITY,
+  memoryFolderName: "copilot/memory",
+  enableRecentConversations: false,
+  maxRecentConversations: 30,
+  enableSavedMemory: false,
 };

 export const EVENT_NAMES = {
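These four new defaults imply matching fields on the CopilotSettings type, which is declared elsewhere and not shown in this diff. A sketch of the assumed additions, with semantics inferred from the rest of the PR:

// Assumed additions to the CopilotSettings interface (field names taken from
// DEFAULT_SETTINGS above; comments are interpretations, not source).
interface CopilotSettings {
  // ...existing fields...
  memoryFolderName: string; // vault folder where memory notes are stored
  enableRecentConversations: boolean; // opt in to summarizing chats into memory on new chat
  maxRecentConversations: number; // cap on stored conversation summaries
  enableSavedMemory: boolean; // opt in to explicit "@memory" saved memories
}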
2 changes: 1 addition & 1 deletion src/integration_tests/AgentPrompt.test.ts
@@ -140,7 +140,7 @@ async function generateSystemPrompt(availableTools: any[]): Promise<string> {
   });

   const adapter = ModelAdapterFactory.createAdapter(mockModel);
-  return AutonomousAgentChainRunner.generateSystemPrompt(availableTools, adapter);
+  return AutonomousAgentChainRunner.generateSystemPrompt(availableTools, adapter, undefined);
 }

 // Helper function to mock tool execution
17 changes: 17 additions & 0 deletions src/main.ts
@@ -19,6 +19,7 @@ import { MessageRepository } from "@/core/MessageRepository";
 import { encryptAllKeys } from "@/encryptionService";
 import { logInfo } from "@/logger";
 import { logFileManager } from "@/logFileManager";
+import { UserMemoryManager } from "@/memory/UserMemoryManager";
 import { checkIsPlusUser } from "@/plusUtils";
 import VectorStoreManager from "@/search/vectorStoreManager";
 import { CopilotSettingTab } from "@/settings/SettingsPage";
@@ -57,6 +58,7 @@ export default class CopilotPlugin extends Plugin {
   settingsUnsubscriber?: () => void;
   private autocompleteService: AutocompleteService;
   chatUIState: ChatUIState;
+  userMemoryManager: UserMemoryManager;

   async onload(): Promise<void> {
     await this.loadSettings();
@@ -95,6 +97,9 @@ export default class CopilotPlugin extends Plugin {
     const chatManager = new ChatManager(messageRepo, chainManager, this.fileParserManager, this);
     this.chatUIState = new ChatUIState(chatManager);

+    // Initialize UserMemoryManager
+    this.userMemoryManager = new UserMemoryManager(this.app);
+
     this.registerView(CHAT_VIEWTYPE, (leaf: WorkspaceLeaf) => new CopilotView(leaf, this));
     this.registerView(APPLY_VIEW_TYPE, (leaf: WorkspaceLeaf) => new ApplyView(leaf));

@@ -381,6 +386,18 @@ export default class CopilotPlugin extends Plugin {
   }

   async handleNewChat() {
+    // Analyze chat messages for memory if enabled
+    if (getSettings().enableRecentConversations) {
+      try {
+        // Get the current chat model from the chain manager
+        const chainManager = this.projectManager.getCurrentChainManager();
+        const chatModel = chainManager.chatModelManager.getChatModel();
+        this.userMemoryManager.addRecentConversation(this.chatUIState.getMessages(), chatModel);
+      } catch (error) {
+        logInfo("Failed to analyze chat messages for memory:", error);
+      }
+    }
+
     // First autosave the current chat if the setting is enabled
     await this.autosaveCurrentChat();