diff --git a/package.json b/package.json index 313d3cf35..dda971fee 100644 --- a/package.json +++ b/package.json @@ -34,6 +34,7 @@ "packages/sdk/server-ai/examples/bedrock", "packages/sdk/server-ai/examples/openai", "packages/sdk/server-ai/examples/vercel-ai", + "packages/sdk/server-ai/examples/langchain-chat", "packages/telemetry/browser-telemetry", "contract-tests", "packages/sdk/combined-browser" diff --git a/packages/sdk/server-ai/examples/langchain-chat/README.md b/packages/sdk/server-ai/examples/langchain-chat/README.md new file mode 100644 index 000000000..841125cb2 --- /dev/null +++ b/packages/sdk/server-ai/examples/langchain-chat/README.md @@ -0,0 +1,76 @@ +# LangChain Chat Example + +This example demonstrates how to use the LaunchDarkly AI SDK with LangChain for chat interactions. + +## Prerequisites + +1. A LaunchDarkly account and SDK key +2. An OpenAI API key (for the LangChain integration) +3. Node.js 16 or later + +## Setup + +1. Install dependencies: + ```bash + yarn install + ``` + +2. Set up environment variables: + ```bash + cp .env.example .env + ``` + + Edit `.env` and add your keys: + ``` + LAUNCHDARKLY_SDK_KEY=your-sdk-key-here + OPENAI_API_KEY=your-openai-api-key-here + LAUNCHDARKLY_AI_CONFIG_KEY=sample-ai-config + ``` + +3. Create an AI Config in LaunchDarkly with the key `sample-ai-config`: + ```json + { + "_ldMeta": { + "variationKey": "1234", + "enabled": true, + "version": 1 + }, + "messages": [ + { + "content": "You are a helpful assistant for {{customerName}}. You should be friendly and informative.", + "role": "system" + } + ], + "model": { + "name": "gpt-3.5-turbo", + "parameters": { + "temperature": 0.7, + "maxTokens": 1000 + } + }, + "provider": { + "name": "langchain" + } + } + ``` + +## Running the Example + +```bash +yarn start +``` + +This will: +1. Initialize the LaunchDarkly client +2. Create a chat configuration using the AI Config +3. Send a message to the AI and display the response +4. 
Report success or log any errors from the model call +5. Automatically track interaction metrics (duration, tokens, success/error) + +## Features Demonstrated + +- **AI Config Integration**: Using LaunchDarkly to configure AI models and prompts +- **Variable Interpolation**: Using Mustache templates with runtime variables +- **Chat Conversations**: Multi-turn conversations with message history +- **Provider Integration**: Using LangChain as the AI provider +- **Metrics Tracking**: Automatic tracking of token usage and performance diff --git a/packages/sdk/server-ai/examples/langchain-chat/package.json b/packages/sdk/server-ai/examples/langchain-chat/package.json new file mode 100644 index 000000000..8c7ec33ce --- /dev/null +++ b/packages/sdk/server-ai/examples/langchain-chat/package.json @@ -0,0 +1,24 @@ +{ + "name": "langchain-chat-example", + "version": "1.0.0", + "description": "Example demonstrating LaunchDarkly AI SDK with LangChain", + "type": "module", + "scripts": { + "build": "tsc", + "start": "yarn build && node ./dist/index.js" + }, + "dependencies": { + "@langchain/core": "^0.3.78", + "@langchain/google-genai": "^0.2.18", + "@launchdarkly/node-server-sdk": "^9.0.0", + "@launchdarkly/server-sdk-ai": "0.11.4", + "@launchdarkly/server-sdk-ai-langchain": "0.1.0", + "dotenv": "^16.0.0", + "langchain": "^0.1.0" + }, + "devDependencies": { + "@types/node": "^20.0.0", + "tsx": "^4.0.0", + "typescript": "^5.0.0" + } +} diff --git a/packages/sdk/server-ai/examples/langchain-chat/src/index.ts b/packages/sdk/server-ai/examples/langchain-chat/src/index.ts new file mode 100644 index 000000000..6e2f8ceaa --- /dev/null +++ b/packages/sdk/server-ai/examples/langchain-chat/src/index.ts @@ -0,0 +1,74 @@ +/* eslint-disable no-console */ +import { init, type LDContext } from '@launchdarkly/node-server-sdk'; +import { initAi } from '@launchdarkly/server-sdk-ai'; + +// Environment variables +const sdkKey = process.env.LAUNCHDARKLY_SDK_KEY; +const aiConfigKey = 
process.env.LAUNCHDARKLY_AI_CONFIG_KEY || 'sample-ai-config'; + +// Validate required environment variables +if (!sdkKey) { + console.error('*** Please set the LAUNCHDARKLY_SDK_KEY env first'); + process.exit(1); +} + +// Initialize LaunchDarkly client +const ldClient = init(sdkKey); + +// Set up the context properties. This context should appear on your LaunchDarkly contexts dashboard +// soon after you run the demo. +const context: LDContext = { + kind: 'user', + key: 'example-user-key', + name: 'Sandy', +}; + +async function main(): Promise<void> { + try { + await ldClient.waitForInitialization({ timeout: 10 }); + console.log('*** SDK successfully initialized'); + } catch (error) { + console.log(`*** SDK failed to initialize: ${error}`); + process.exit(1); + } + + const aiClient = initAi(ldClient); + const defaultValue = { + enabled: true, + model: { name: 'gpt-3.5-turbo' }, + messages: [{ role: 'system' as const, content: 'You are a helpful assistant.' }], + provider: { name: 'openai' }, + }; + + // You provide a disabled default value + // const defaultValue = { + // enabled: false, + // }; + + // Get AI chat configuration from LaunchDarkly + const chat = await aiClient.initChat(aiConfigKey, context, defaultValue, { + customerName: 'Sandy', + }); + + if (!chat) { + console.log('*** AI chat configuration is not enabled'); + process.exit(0); + } + + // Example of using the chat functionality + console.log('\n*** Starting chat conversation:'); + try { + const userInput = 'Hello! 
Can you help me understand what LaunchDarkly is?'; + console.log('User Input:', userInput); + + const response = await chat.invoke(userInput); + + console.log('AI Response:', response.message.content); + + console.log('Success.'); + } catch (err) { + console.error('Error:', err); + } +} + +main().catch((err) => { console.error('Fatal:', err); process.exit(1); }); diff --git a/packages/sdk/server-ai/examples/langchain-chat/tsconfig.json b/packages/sdk/server-ai/examples/langchain-chat/tsconfig.json new file mode 100644 index 000000000..6916599c7 --- /dev/null +++ b/packages/sdk/server-ai/examples/langchain-chat/tsconfig.json @@ -0,0 +1,18 @@ +{ + "compilerOptions": { + "target": "ES2022", + "module": "ESNext", + "moduleResolution": "node", + "esModuleInterop": true, + "allowSyntheticDefaultImports": true, + "strict": true, + "skipLibCheck": true, + "forceConsistentCasingInFileNames": true, + "outDir": "./dist", + "rootDir": "./src", + "declaration": true, + "sourceMap": true + }, + "include": ["src/**/*"], + "exclude": ["node_modules", "dist"] +}