Commits (30 total; this diff shows changes from 14 commits):
fb90c9e  Add memory setting (wenzhengjiang, Aug 29, 2025)
32e42c5  Support recent conversation in memory (wenzhengjiang, Aug 29, 2025)
a51bbbe  Add conversation summary to recent conversations (wenzhengjiang, Aug 30, 2025)
1ee76ae  Load recent conversation to prompt (wenzhengjiang, Aug 30, 2025)
6bb8528  Implement user insights (wenzhengjiang, Aug 31, 2025)
0d86d43  Fix some bugs (wenzhengjiang, Aug 31, 2025)
9e89ec9  Refactor (wenzhengjiang, Sep 1, 2025)
16c6d98  Merge master into memory branch (wenzhengjiang, Sep 4, 2025)
52f7ad7  Support returning multiple insights (wenzhengjiang, Sep 8, 2025)
db08331  Use condensedMessage for recent conversation content (wenzhengjiang, Sep 8, 2025)
e012727  Remove user insights/long-term memory functionality (wenzhengjiang, Sep 12, 2025)
6dac374  Add maxRecentConversations setting and update memory formatting (wenzhengjiang, Sep 12, 2025)
e44c3d1  Support multiple conclusions and add unit tests (wenzhengjiang, Sep 13, 2025)
8841735  Merge master into memory branch (wenzhengjiang, Sep 13, 2025)
0a6b6fa  Fix user message formatting (wenzhengjiang, Sep 13, 2025)
24bf217  Fix PR issues (wenzhengjiang, Sep 13, 2025)
6c07880  Refactor UserMemoryManager to utilize utility function for folder cre… (wenzhengjiang, Sep 13, 2025)
addbefd  Use logError for error logging (wenzhengjiang, Sep 13, 2025)
06b635c  Improve documentation and remove unused code (wenzhengjiang, Sep 15, 2025)
3cd9eb5  Replace console.error with logError (wenzhengjiang, Sep 15, 2025)
2ff8020  Backfill condensedUserMessage (wenzhengjiang, Sep 15, 2025)
2a9ac44  Fix build error (wenzhengjiang, Sep 15, 2025)
c836de3  Refactor for a simpler implementation (wenzhengjiang, Sep 15, 2025)
cbe7b8c  Better memory prompt and fix conversation parsing (wenzhengjiang, Sep 15, 2025)
502d57a  Rename memory functions (wenzhengjiang, Sep 15, 2025)
1765366  Implement memory tool. (wenzhengjiang, Sep 15, 2025)
3da77b4  Update design doc (wenzhengjiang, Sep 15, 2025)
deac33b  Fix test (wenzhengjiang, Sep 15, 2025)
9e3f501  Update savedMemory setting (wenzhengjiang, Sep 16, 2025)
d867546  Fix memory setting check (wenzhengjiang, Sep 16, 2025)
5 changes: 4 additions & 1 deletion src/LLMProviders/chainManager.ts
@@ -29,6 +29,7 @@ import { App, Notice } from "obsidian";
import ChatModelManager from "./chatModelManager";
import MemoryManager from "./memoryManager";
import PromptManager from "./promptManager";
import CopilotPlugin from "@/main";

export default class ChainManager {
// TODO: These chains are deprecated since we now use direct chat model calls in chain runners
@@ -45,10 +46,12 @@ export default class ChainManager {
public chatModelManager: ChatModelManager;
public memoryManager: MemoryManager;
public promptManager: PromptManager;
public plugin?: CopilotPlugin;

constructor(app: App) {
constructor(app: App, plugin?: CopilotPlugin) {
// Instantiate singletons
this.app = app;
this.plugin = plugin;
this.memoryManager = MemoryManager.getInstance();
this.chatModelManager = ChatModelManager.getInstance();
this.promptManager = PromptManager.getInstance();
23 changes: 15 additions & 8 deletions src/LLMProviders/chainRunner/AutonomousAgentChainRunner.ts
@@ -1,7 +1,8 @@
import { MessageContent } from "@/imageProcessing/imageProcessor";
import { logError, logInfo, logWarn } from "@/logger";
import { checkIsPlusUser } from "@/plusUtils";
import { getSettings, getSystemPrompt } from "@/settings/model";
import { getSettings, getSystemPromptWithMemory } from "@/settings/model";
import { UserMemoryManager } from "@/memory/UserMemoryManager";
import { initializeBuiltinTools } from "@/tools/builtinTools";
import { extractParametersFromZod, SimpleTool } from "@/tools/SimpleTool";
import { ToolRegistry } from "@/tools/ToolRegistry";
@@ -78,11 +79,12 @@ ${params}
.join("\n\n");
}

public static generateSystemPrompt(
public static async generateSystemPrompt(
availableTools: SimpleTool<any, any>[],
adapter: ModelAdapter
): string {
const basePrompt = getSystemPrompt();
adapter: ModelAdapter,
userMemoryManager?: UserMemoryManager
): Promise<string> {
const basePrompt = await getSystemPromptWithMemory(userMemoryManager);
const toolDescriptions = AutonomousAgentChainRunner.generateToolDescriptions(availableTools);

const toolNames = availableTools.map((tool) => tool.name);
@@ -96,14 +98,18 @@ ${params}
return adapter.enhanceSystemPrompt(basePrompt, toolDescriptions, toolNames, toolMetadata);
}

private generateSystemPrompt(): string {
private async generateSystemPrompt(): Promise<string> {
const availableTools = this.getAvailableTools();

// Use model adapter for clean model-specific handling
const chatModel = this.chainManager.chatModelManager.getChatModel();
const adapter = ModelAdapterFactory.createAdapter(chatModel);

return AutonomousAgentChainRunner.generateSystemPrompt(availableTools, adapter);
return AutonomousAgentChainRunner.generateSystemPrompt(
availableTools,
adapter,
this.chainManager.plugin?.userMemoryManager
);
}

private getTemporaryToolCallId(toolName: string, index: number): string {
@@ -144,7 +150,8 @@ ${params}
const rawHistory = memoryVariables.history || [];

// Build initial conversation messages
const customSystemPrompt = this.generateSystemPrompt();
const customSystemPrompt = await this.generateSystemPrompt();
console.log("customSystemPrompt", customSystemPrompt);

const chatModel = this.chainManager.chatModelManager.getChatModel();
const adapter = ModelAdapterFactory.createAdapter(chatModel);
4 changes: 2 additions & 2 deletions src/LLMProviders/chainRunner/CopilotPlusChainRunner.ts
@@ -15,7 +15,7 @@ import {
import { BrevilabsClient } from "@/LLMProviders/brevilabsClient";
import { logError, logInfo, logWarn } from "@/logger";
import { checkIsPlusUser } from "@/plusUtils";
import { getSettings, getSystemPrompt } from "@/settings/model";
import { getSettings, getSystemPromptWithMemory } from "@/settings/model";
import { writeToFileTool } from "@/tools/ComposerTools";
import { ToolManager } from "@/tools/toolManager";
import { ChatMessage } from "@/types/message";
@@ -783,6 +783,6 @@ export class CopilotPlusChainRunner extends BaseChainRunner {
}

protected async getSystemPrompt(): Promise<string> {
return getSystemPrompt();
return getSystemPromptWithMemory(this.chainManager.plugin?.userMemoryManager);
}
}
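
Note: getSystemPromptWithMemory itself is not part of this diff. Below is a minimal sketch of how it might compose the base system prompt with saved memory, assuming a helper such as getMemoryPrompt() on UserMemoryManager (both the helper name and the composition are assumptions, not the actual API).

import { UserMemoryManager } from "@/memory/UserMemoryManager";
// In the real codebase this function is exported from src/settings/model.ts,
// where getSettings and getSystemPrompt are module-local; imports are shown
// only to keep the sketch self-contained.
import { getSettings, getSystemPrompt } from "@/settings/model";

export async function getSystemPromptWithMemory(
  userMemoryManager?: UserMemoryManager
): Promise<string> {
  const basePrompt = getSystemPrompt();
  if (!userMemoryManager || !getSettings().enableMemory) {
    return basePrompt;
  }
  // Assumed helper that renders recent conversations / saved memory as prompt text.
  const memorySection = await userMemoryManager.getMemoryPrompt();
  return memorySection ? `${basePrompt}\n\n${memorySection}` : basePrompt;
}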
2 changes: 1 addition & 1 deletion src/LLMProviders/projectManager.ts
@@ -39,7 +39,7 @@ export default class ProjectManager {
this.app = app;
this.plugin = plugin;
this.currentProjectId = null;
this.chainMangerInstance = new ChainManager(app);
this.chainMangerInstance = new ChainManager(app, plugin);
this.projectContextCache = ProjectContextCache.getInstance();
this.fileParserManager = new FileParserManager(
BrevilabsClient.getInstance(),
14 changes: 14 additions & 0 deletions src/components/Chat.tsx
@@ -538,6 +538,17 @@ const Chat: React.FC<ChatProps> = ({
const handleNewChat = useCallback(async () => {
handleStopGenerating(ABORT_REASON.NEW_CHAT);

// Analyze chat messages for memory if enabled
if (settings.enableMemory) {
try {
// Get the current chat model from the chain manager
const chatModel = chainManager.chatModelManager.getChatModel();
plugin.userMemoryManager.updateUserMemory(chatUIState.getMessages(), chatModel);
} catch (error) {
logInfo("Failed to analyze chat messages for memory:", error);
}
}

// First autosave the current chat if the setting is enabled
if (settings.autosaveChat) {
await handleSaveAsNote();
@@ -559,12 +570,15 @@
}
}, [
handleStopGenerating,
chainManager.chatModelManager,
chatUIState,
settings.autosaveChat,
settings.enableMemory,
settings.includeActiveNoteAsContext,
selectedChain,
handleSaveAsNote,
safeSet,
plugin.userMemoryManager,
]);

const handleLoadHistory = useCallback(() => {
3 changes: 3 additions & 0 deletions src/constants.ts
@@ -754,6 +754,9 @@ export const DEFAULT_SETTINGS: CopilotSettings = {
],
reasoningEffort: DEFAULT_MODEL_SETTING.REASONING_EFFORT,
verbosity: DEFAULT_MODEL_SETTING.VERBOSITY,
memoryFolderName: "copilot/memory",
enableMemory: false,
maxRecentConversations: 30,
};

export const EVENT_NAMES = {
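
For reference, the matching fields on the CopilotSettings type are not shown in this hunk; they would presumably look something like the sketch below (field names taken from the defaults above, doc comments are assumptions).

// Assumed additions to the CopilotSettings interface (the interface itself is not in this diff).
interface CopilotSettingsMemoryFields {
  /** Vault folder where Copilot stores memory files, e.g. "copilot/memory". */
  memoryFolderName: string;
  /** Master switch for the memory feature. */
  enableMemory: boolean;
  /** Upper bound on the number of recent conversations kept in the memory prompt. */
  maxRecentConversations: number;
}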
31 changes: 31 additions & 0 deletions src/core/ChatManager.ts
@@ -140,6 +140,37 @@
// Update the processed content
currentRepo.updateProcessedText(messageId, processedContent);

// Create condensed message for user messages
if (message.sender === USER_SENDER && this.plugin.userMemoryManager) {
try {
const settings = getSettings();
if (settings.enableMemory) {
const chainManager = this.plugin.projectManager.getCurrentChainManager();
const chatModel = chainManager.chatModelManager.getChatModel();

// Create condensed message asynchronously (fire and forget)
this.plugin.userMemoryManager
.createCondensedMessage(displayText, chatModel)
.then((condensedMessage) => {
if (condensedMessage) {
currentRepo.updateCondensedMessage(messageId, condensedMessage);
logInfo(
`[ChatManager] Created condensed message for ${messageId}: "${condensedMessage}"`
);
}
})
.catch((error) => {
logInfo(
`[ChatManager] Failed to create condensed message for ${messageId}:`,
error
);
});
}
} catch (error) {
logInfo(`[ChatManager] Error setting up condensed message creation:`, error);
}
}

logInfo(`[ChatManager] Successfully sent message ${messageId}`);
return messageId;
} catch (error) {
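
createCondensedMessage is called fire-and-forget above but is not defined in this diff. A minimal sketch of what such a method could do, assuming it makes a single chat-model call to shorten the user message (prompt wording and return handling are guesses):

import { BaseChatModel } from "@langchain/core/language_models/chat_models";
import { logError } from "@/logger";

// Sketch only: the real UserMemoryManager.createCondensedMessage may differ.
export async function createCondensedMessage(
  displayText: string,
  chatModel: BaseChatModel
): Promise<string | null> {
  try {
    const response = await chatModel.invoke([
      ["system", "Condense the user's message into one short sentence that preserves its intent."],
      ["human", displayText],
    ]);
    const text =
      typeof response.content === "string" ? response.content : JSON.stringify(response.content);
    return text.trim() || null;
  } catch (error) {
    logError("Failed to condense user message:", error);
    return null;
  }
}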
21 changes: 21 additions & 0 deletions src/core/MessageRepository.ts
@@ -88,6 +88,7 @@ export class MessageRepository {
id,
displayText: message.message,
processedText: message.originalMessage || message.message,
condensedMessage: message.condensedMessage,
sender: message.sender,
timestamp,
context: message.context,
@@ -148,6 +149,21 @@
return true;
}

/**
* Update condensed message for a message
*/
updateCondensedMessage(id: string, condensedMessage: string): boolean {
const message = this.messages.find((msg) => msg.id === id);
if (!message) {
logInfo(`[MessageRepository] Message not found for condensed message update: ${id}`);
return false;
}

message.condensedMessage = condensedMessage;
logInfo(`[MessageRepository] Updated condensed message for message: ${id}`);
return true;
}

/**
* Delete a message
*/
@@ -201,6 +217,7 @@
id: msg.id,
message: msg.displayText,
originalMessage: msg.displayText,
condensedMessage: msg.condensedMessage,
sender: msg.sender,
timestamp: msg.timestamp,
isVisible: true,
@@ -223,6 +240,7 @@
id: msg.id,
message: msg.processedText,
originalMessage: msg.displayText,
condensedMessage: msg.condensedMessage,
sender: msg.sender,
timestamp: msg.timestamp,
isVisible: false, // LLM messages are not for display
@@ -242,6 +260,7 @@
id: msg.id,
message: msg.processedText,
originalMessage: msg.displayText,
condensedMessage: msg.condensedMessage,
sender: msg.sender,
timestamp: msg.timestamp,
isVisible: false,
@@ -263,6 +282,7 @@
id: msg.id,
message: msg.displayText,
originalMessage: msg.displayText,
condensedMessage: msg.condensedMessage,
sender: msg.sender,
timestamp: msg.timestamp,
isVisible: msg.isVisible,
@@ -283,6 +303,7 @@
id: msg.id || this.generateId(),
displayText: msg.message,
processedText: msg.originalMessage || msg.message,
condensedMessage: msg.condensedMessage,
sender: msg.sender,
timestamp: msg.timestamp || formatDateTime(new Date()),
context: msg.context,
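
For orientation, the stored message shape now carries the optional condensedMessage alongside the display and processed text; roughly as below (a trimmed sketch, the real type has more fields).

// Trimmed sketch of the stored message shape implied by this diff.
interface StoredMessageSketch {
  id: string;
  /** Text shown in the chat UI. */
  displayText: string;
  /** Text actually sent to the LLM (may include processed context). */
  processedText: string;
  /** Optional short LLM-generated summary of a user message, used by memory. */
  condensedMessage?: string;
  sender: string;
}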
2 changes: 1 addition & 1 deletion src/integration_tests/AgentPrompt.test.ts
@@ -140,7 +140,7 @@ async function generateSystemPrompt(availableTools: any[]): Promise<string> {
});

const adapter = ModelAdapterFactory.createAdapter(mockModel);
return AutonomousAgentChainRunner.generateSystemPrompt(availableTools, adapter);
return AutonomousAgentChainRunner.generateSystemPrompt(availableTools, adapter, undefined);
}

// Helper function to mock tool execution
17 changes: 17 additions & 0 deletions src/main.ts
@@ -19,6 +19,7 @@ import { MessageRepository } from "@/core/MessageRepository";
import { encryptAllKeys } from "@/encryptionService";
import { logInfo } from "@/logger";
import { logFileManager } from "@/logFileManager";
import { UserMemoryManager } from "@/memory/UserMemoryManager";
import { checkIsPlusUser } from "@/plusUtils";
import VectorStoreManager from "@/search/vectorStoreManager";
import { CopilotSettingTab } from "@/settings/SettingsPage";
@@ -57,6 +58,7 @@ export default class CopilotPlugin extends Plugin {
settingsUnsubscriber?: () => void;
private autocompleteService: AutocompleteService;
chatUIState: ChatUIState;
userMemoryManager: UserMemoryManager;

async onload(): Promise<void> {
await this.loadSettings();
@@ -95,6 +97,9 @@
const chatManager = new ChatManager(messageRepo, chainManager, this.fileParserManager, this);
this.chatUIState = new ChatUIState(chatManager);

// Initialize UserMemoryManager
this.userMemoryManager = new UserMemoryManager(this.app);

this.registerView(CHAT_VIEWTYPE, (leaf: WorkspaceLeaf) => new CopilotView(leaf, this));
this.registerView(APPLY_VIEW_TYPE, (leaf: WorkspaceLeaf) => new ApplyView(leaf));

@@ -381,6 +386,18 @@
}

async handleNewChat() {
// Analyze chat messages for memory if enabled
if (getSettings().enableMemory) {
try {
// Get the current chat model from the chain manager
const chainManager = this.projectManager.getCurrentChainManager();
const chatModel = chainManager.chatModelManager.getChatModel();
this.userMemoryManager.updateUserMemory(this.chatUIState.getMessages(), chatModel);
} catch (error) {
logInfo("Failed to analyze chat messages for memory:", error);
}
}

// First autosave the current chat if the setting is enabled
await this.autosaveCurrentChat();

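
Taken together, the call sites in this PR imply a UserMemoryManager surface roughly like the one below. This is inferred from usage only; the concrete class in src/memory/UserMemoryManager.ts is not part of this diff, and the return types are assumptions.

import { BaseChatModel } from "@langchain/core/language_models/chat_models";
import { ChatMessage } from "@/types/message";

// Inferred from call sites: new UserMemoryManager(this.app) in main.ts,
// updateUserMemory(...) on new chat, createCondensedMessage(...) in ChatManager.
interface UserMemoryManagerLike {
  /** Summarize the finished conversation and persist it under settings.memoryFolderName. */
  updateUserMemory(messages: ChatMessage[], chatModel: BaseChatModel): void;
  /** Produce a short condensed form of a user message, or null if none could be generated. */
  createCondensedMessage(displayText: string, chatModel: BaseChatModel): Promise<string | null>;
}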