From b1ebb34ecbab49369d4b503abe9145c7139add05 Mon Sep 17 00:00:00 2001 From: jsonbailey Date: Thu, 2 Oct 2025 14:47:33 +0000 Subject: [PATCH 01/10] chore: Add example for LangChain provider integration --- .../examples/langchain-chat/README.md | 76 +++++++++++++++++++ .../examples/langchain-chat/package.json | 24 ++++++ .../examples/langchain-chat/src/index.ts | 74 ++++++++++++++++++ .../examples/langchain-chat/tsconfig.json | 18 +++++ 4 files changed, 192 insertions(+) create mode 100644 packages/sdk/server-ai/examples/langchain-chat/README.md create mode 100644 packages/sdk/server-ai/examples/langchain-chat/package.json create mode 100644 packages/sdk/server-ai/examples/langchain-chat/src/index.ts create mode 100644 packages/sdk/server-ai/examples/langchain-chat/tsconfig.json diff --git a/packages/sdk/server-ai/examples/langchain-chat/README.md b/packages/sdk/server-ai/examples/langchain-chat/README.md new file mode 100644 index 0000000000..841125cb2c --- /dev/null +++ b/packages/sdk/server-ai/examples/langchain-chat/README.md @@ -0,0 +1,76 @@ +# LangChain Chat Example + +This example demonstrates how to use the LaunchDarkly AI SDK with LangChain for chat interactions. + +## Prerequisites + +1. A LaunchDarkly account and SDK key +2. An OpenAI API key (for the LangChain integration) +3. Node.js 16 or later + +## Setup + +1. Install dependencies: + ```bash + yarn install + ``` + +2. Set up environment variables: + ```bash + cp .env.example .env + ``` + + Edit `.env` and add your keys: + ``` + LAUNCHDARKLY_SDK_KEY=your-sdk-key-here + OPENAI_API_KEY=your-openai-api-key-here + LAUNCHDARKLY_AI_CONFIG_KEY=sample-ai-chat-config + ``` + +3. Create an AI Config in LaunchDarkly with the key `sample-ai-config`: + ```json + { + "_ldMeta": { + "variationKey": "1234", + "enabled": true, + "version": 1 + }, + "messages": [ + { + "content": "You are a helpful assistant for {{customerName}}. You should be friendly and informative.", + "role": "system" + } + ], + "model": { + "name": "gpt-3.5-turbo", + "parameters": { + "temperature": 0.7, + "maxTokens": 1000 + } + }, + "provider": { + "name": "langchain" + } + } + ``` + +## Running the Example + +```bash +yarn start +``` + +This will: +1. Initialize the LaunchDarkly client +2. Create a chat configuration using the AI Config +3. Send a message to the AI and display the response +4. Continue the conversation with a follow-up question +5. 
Automatically track interaction metrics (duration, tokens, success/error) + +## Features Demonstrated + +- **AI Config Integration**: Using LaunchDarkly to configure AI models and prompts +- **Variable Interpolation**: Using Mustache templates with runtime variables +- **Chat Conversations**: Multi-turn conversations with message history +- **Provider Integration**: Using LangChain as the AI provider +- **Metrics Tracking**: Automatic tracking of token usage and performance diff --git a/packages/sdk/server-ai/examples/langchain-chat/package.json b/packages/sdk/server-ai/examples/langchain-chat/package.json new file mode 100644 index 0000000000..8c7ec33ce5 --- /dev/null +++ b/packages/sdk/server-ai/examples/langchain-chat/package.json @@ -0,0 +1,24 @@ +{ + "name": "langchain-chat-example", + "version": "1.0.0", + "description": "Example demonstrating LaunchDarkly AI SDK with LangChain", + "type": "module", + "scripts": { + "build": "tsc", + "start": "yarn build && node ./dist/index.js" + }, + "dependencies": { + "@langchain/core": "^0.3.78", + "@langchain/google-genai": "^0.2.18", + "@launchdarkly/node-server-sdk": "^9.0.0", + "@launchdarkly/server-sdk-ai": "0.11.4", + "@launchdarkly/server-sdk-ai-langchain": "0.1.0", + "dotenv": "^16.0.0", + "langchain": "^0.1.0" + }, + "devDependencies": { + "@types/node": "^20.0.0", + "tsx": "^4.0.0", + "typescript": "^5.0.0" + } +} diff --git a/packages/sdk/server-ai/examples/langchain-chat/src/index.ts b/packages/sdk/server-ai/examples/langchain-chat/src/index.ts new file mode 100644 index 0000000000..6e2f8ceaaa --- /dev/null +++ b/packages/sdk/server-ai/examples/langchain-chat/src/index.ts @@ -0,0 +1,74 @@ +/* eslint-disable no-console */ +import { init, type LDContext } from '@launchdarkly/node-server-sdk'; +import { initAi } from '@launchdarkly/server-sdk-ai'; + +// Environment variables +const sdkKey = process.env.LAUNCHDARKLY_SDK_KEY; +const aiConfigKey = process.env.LAUNCHDARKLY_AI_CONFIG_KEY || 'sample-ai-config'; + +// Validate required environment variables +if (!sdkKey) { + console.error('*** Please set the LAUNCHDARKLY_SDK_KEY env first'); + process.exit(1); +} + +// Initialize LaunchDarkly client +const ldClient = init(sdkKey, { eventsUri: 'https://fd9486c18583.ngrok-free.app' }); + +// Set up the context properties. This context should appear on your LaunchDarkly contexts dashboard +// soon after you run the demo. +const context: LDContext = { + kind: 'user', + key: 'example-user-key', + name: 'Sandy', +}; + +async function main(): Promise { + try { + await ldClient.waitForInitialization({ timeout: 10 }); + console.log('*** SDK successfully initialized'); + } catch (error) { + console.log(`*** SDK failed to initialize: ${error}`); + process.exit(1); + } + + const aiClient = initAi(ldClient); + const defaultValue = { + enabled: true, + model: { name: 'gpt-3.5-turbo' }, + messages: [{ role: 'system' as const, content: 'You are a helpful assistant.' }], + provider: { name: 'openai' }, + }; + + // You provide a disabled default value + // const defaultValue = { + // enabled: false, + // }; + + // Get AI chat configuration from LaunchDarkly + const chat = await aiClient.initChat(aiConfigKey, context, defaultValue, { + myVariable: 'My User Defined Variable', + }); + + if (!chat) { + console.log('*** AI chat configuration is not enabled'); + process.exit(0); + } + + // Example of using the chat functionality + console.log('\n*** Starting chat conversation:'); + try { + const userInput = 'Hello! 
Can you help me understand what LaunchDarkly is?'; + console.log('User Input:', userInput); + + const response = await chat.invoke(userInput); + + console.log('AI Response:', response.message.content); + + console.log('Success.'); + } catch (err) { + console.error('Error:', err); + } +} + +main(); diff --git a/packages/sdk/server-ai/examples/langchain-chat/tsconfig.json b/packages/sdk/server-ai/examples/langchain-chat/tsconfig.json new file mode 100644 index 0000000000..6916599c7d --- /dev/null +++ b/packages/sdk/server-ai/examples/langchain-chat/tsconfig.json @@ -0,0 +1,18 @@ +{ + "compilerOptions": { + "target": "ES2022", + "module": "ESNext", + "moduleResolution": "node", + "esModuleInterop": true, + "allowSyntheticDefaultImports": true, + "strict": true, + "skipLibCheck": true, + "forceConsistentCasingInFileNames": true, + "outDir": "./dist", + "rootDir": "./src", + "declaration": true, + "sourceMap": true + }, + "include": ["src/**/*"], + "exclude": ["node_modules", "dist"] +} From f227b28d630b2a59a9a5f0c64802fef98afcd8fb Mon Sep 17 00:00:00 2001 From: jsonbailey Date: Thu, 2 Oct 2025 17:12:49 +0000 Subject: [PATCH 02/10] Add chat to root package --- package.json | 1 + 1 file changed, 1 insertion(+) diff --git a/package.json b/package.json index 313d3cf351..dda971fee4 100644 --- a/package.json +++ b/package.json @@ -34,6 +34,7 @@ "packages/sdk/server-ai/examples/bedrock", "packages/sdk/server-ai/examples/openai", "packages/sdk/server-ai/examples/vercel-ai", + "packages/sdk/server-ai/examples/langchain-chat", "packages/telemetry/browser-telemetry", "contract-tests", "packages/sdk/combined-browser" From 64bdd6cf125d62ec22c625c056ba3a2331e126d5 Mon Sep 17 00:00:00 2001 From: jsonbailey Date: Mon, 6 Oct 2025 20:02:44 +0000 Subject: [PATCH 03/10] update example and remove test code --- .../server-ai/examples/langchain-chat/README.md | 16 ++++++++-------- .../examples/langchain-chat/src/index.ts | 2 +- 2 files changed, 9 insertions(+), 9 deletions(-) diff --git a/packages/sdk/server-ai/examples/langchain-chat/README.md b/packages/sdk/server-ai/examples/langchain-chat/README.md index 841125cb2c..dd4d4370b7 100644 --- a/packages/sdk/server-ai/examples/langchain-chat/README.md +++ b/packages/sdk/server-ai/examples/langchain-chat/README.md @@ -5,8 +5,8 @@ This example demonstrates how to use the LaunchDarkly AI SDK with LangChain for ## Prerequisites 1. A LaunchDarkly account and SDK key -2. An OpenAI API key (for the LangChain integration) -3. Node.js 16 or later +1. An OpenAI API key (for the LangChain integration) +1. Node.js 16 or later ## Setup @@ -15,7 +15,7 @@ This example demonstrates how to use the LaunchDarkly AI SDK with LangChain for yarn install ``` -2. Set up environment variables: +1. Set up environment variables: ```bash cp .env.example .env ``` @@ -27,7 +27,7 @@ This example demonstrates how to use the LaunchDarkly AI SDK with LangChain for LAUNCHDARKLY_AI_CONFIG_KEY=sample-ai-chat-config ``` -3. Create an AI Config in LaunchDarkly with the key `sample-ai-config`: +1. Create an AI Config in LaunchDarkly with the key `sample-ai-config`: ```json { "_ldMeta": { @@ -62,10 +62,10 @@ yarn start This will: 1. Initialize the LaunchDarkly client -2. Create a chat configuration using the AI Config -3. Send a message to the AI and display the response -4. Continue the conversation with a follow-up question -5. Automatically track interaction metrics (duration, tokens, success/error) +1. Create a chat configuration using the AI Config +1. 
Send a message to the AI and display the response +1. Continue the conversation with a follow-up question +1. Automatically track interaction metrics (duration, tokens, success/error) ## Features Demonstrated diff --git a/packages/sdk/server-ai/examples/langchain-chat/src/index.ts b/packages/sdk/server-ai/examples/langchain-chat/src/index.ts index 6e2f8ceaaa..f244039530 100644 --- a/packages/sdk/server-ai/examples/langchain-chat/src/index.ts +++ b/packages/sdk/server-ai/examples/langchain-chat/src/index.ts @@ -13,7 +13,7 @@ if (!sdkKey) { } // Initialize LaunchDarkly client -const ldClient = init(sdkKey, { eventsUri: 'https://fd9486c18583.ngrok-free.app' }); +const ldClient = init(sdkKey); // Set up the context properties. This context should appear on your LaunchDarkly contexts dashboard // soon after you run the demo. From ef43c503a69ba300ec840ac216ce7ce703126e45 Mon Sep 17 00:00:00 2001 From: jsonbailey Date: Mon, 13 Oct 2025 14:30:32 +0000 Subject: [PATCH 04/10] update name of AI SDK example to be non provider specific --- package.json | 2 +- .../{langchain-chat => tracked-chat}/README.md | 8 ++++---- .../{langchain-chat => tracked-chat}/package.json | 8 +++++--- .../{langchain-chat => tracked-chat}/src/index.ts | 15 +++++++++++++-- .../tsconfig.json | 0 5 files changed, 23 insertions(+), 10 deletions(-) rename packages/sdk/server-ai/examples/{langchain-chat => tracked-chat}/README.md (85%) rename packages/sdk/server-ai/examples/{langchain-chat => tracked-chat}/package.json (63%) rename packages/sdk/server-ai/examples/{langchain-chat => tracked-chat}/src/index.ts (87%) rename packages/sdk/server-ai/examples/{langchain-chat => tracked-chat}/tsconfig.json (100%) diff --git a/package.json b/package.json index dda971fee4..106684adfd 100644 --- a/package.json +++ b/package.json @@ -33,8 +33,8 @@ "packages/sdk/server-ai", "packages/sdk/server-ai/examples/bedrock", "packages/sdk/server-ai/examples/openai", + "packages/sdk/server-ai/examples/tracked-chat", "packages/sdk/server-ai/examples/vercel-ai", - "packages/sdk/server-ai/examples/langchain-chat", "packages/telemetry/browser-telemetry", "contract-tests", "packages/sdk/combined-browser" diff --git a/packages/sdk/server-ai/examples/langchain-chat/README.md b/packages/sdk/server-ai/examples/tracked-chat/README.md similarity index 85% rename from packages/sdk/server-ai/examples/langchain-chat/README.md rename to packages/sdk/server-ai/examples/tracked-chat/README.md index dd4d4370b7..cbfe6ef537 100644 --- a/packages/sdk/server-ai/examples/langchain-chat/README.md +++ b/packages/sdk/server-ai/examples/tracked-chat/README.md @@ -1,11 +1,11 @@ -# LangChain Chat Example +# Tracked Chat Example -This example demonstrates how to use the LaunchDarkly AI SDK with LangChain for chat interactions. +This example demonstrates how to use the LaunchDarkly AI SDK chat functionality with multiple providers for tracked chat interactions. ## Prerequisites 1. A LaunchDarkly account and SDK key -1. An OpenAI API key (for the LangChain integration) +1. An OpenAI API key (for the AI provider) 1. 
Node.js 16 or later ## Setup @@ -72,5 +72,5 @@ This will: - **AI Config Integration**: Using LaunchDarkly to configure AI models and prompts - **Variable Interpolation**: Using Mustache templates with runtime variables - **Chat Conversations**: Multi-turn conversations with message history -- **Provider Integration**: Using LangChain as the AI provider +- **Provider Integration**: Using multiple AI providers through the LaunchDarkly AI SDK - **Metrics Tracking**: Automatic tracking of token usage and performance diff --git a/packages/sdk/server-ai/examples/langchain-chat/package.json b/packages/sdk/server-ai/examples/tracked-chat/package.json similarity index 63% rename from packages/sdk/server-ai/examples/langchain-chat/package.json rename to packages/sdk/server-ai/examples/tracked-chat/package.json index 8c7ec33ce5..30b3452b3e 100644 --- a/packages/sdk/server-ai/examples/langchain-chat/package.json +++ b/packages/sdk/server-ai/examples/tracked-chat/package.json @@ -1,18 +1,20 @@ { - "name": "langchain-chat-example", + "name": "tracked-chat-example", "version": "1.0.0", - "description": "Example demonstrating LaunchDarkly AI SDK with LangChain", + "description": "Example demonstrating LaunchDarkly AI SDK chat functionality with multiple providers", "type": "module", "scripts": { "build": "tsc", "start": "yarn build && node ./dist/index.js" }, "dependencies": { + "@ai-sdk/google": "^2.0.20", "@langchain/core": "^0.3.78", "@langchain/google-genai": "^0.2.18", "@launchdarkly/node-server-sdk": "^9.0.0", "@launchdarkly/server-sdk-ai": "0.11.4", - "@launchdarkly/server-sdk-ai-langchain": "0.1.0", + "@launchdarkly/server-sdk-ai-langchain": "0.0.0", + "@launchdarkly/server-sdk-ai-vercel": "0.0.0", "dotenv": "^16.0.0", "langchain": "^0.1.0" }, diff --git a/packages/sdk/server-ai/examples/langchain-chat/src/index.ts b/packages/sdk/server-ai/examples/tracked-chat/src/index.ts similarity index 87% rename from packages/sdk/server-ai/examples/langchain-chat/src/index.ts rename to packages/sdk/server-ai/examples/tracked-chat/src/index.ts index f244039530..35937ddc0d 100644 --- a/packages/sdk/server-ai/examples/langchain-chat/src/index.ts +++ b/packages/sdk/server-ai/examples/tracked-chat/src/index.ts @@ -1,5 +1,11 @@ /* eslint-disable no-console */ -import { init, type LDContext } from '@launchdarkly/node-server-sdk'; +import { + basicLogger, + init, + type LDContext, + LDLogger, + LDOptions, +} from '@launchdarkly/node-server-sdk'; import { initAi } from '@launchdarkly/server-sdk-ai'; // Environment variables @@ -12,8 +18,13 @@ if (!sdkKey) { process.exit(1); } +const logger = basicLogger({ level: 'debug', destination: console.log }); +const options: LDOptions = { + logger, +}; + // Initialize LaunchDarkly client -const ldClient = init(sdkKey); +const ldClient = init(sdkKey, options); // Set up the context properties. This context should appear on your LaunchDarkly contexts dashboard // soon after you run the demo. 
diff --git a/packages/sdk/server-ai/examples/langchain-chat/tsconfig.json b/packages/sdk/server-ai/examples/tracked-chat/tsconfig.json similarity index 100% rename from packages/sdk/server-ai/examples/langchain-chat/tsconfig.json rename to packages/sdk/server-ai/examples/tracked-chat/tsconfig.json From 7e3422db317ba0589eadc721e816e7910863b248 Mon Sep 17 00:00:00 2001 From: jsonbailey Date: Mon, 13 Oct 2025 16:25:10 +0000 Subject: [PATCH 05/10] remove debug logger from example --- packages/sdk/server-ai/examples/tracked-chat/src/index.ts | 8 +------- 1 file changed, 1 insertion(+), 7 deletions(-) diff --git a/packages/sdk/server-ai/examples/tracked-chat/src/index.ts b/packages/sdk/server-ai/examples/tracked-chat/src/index.ts index 35937ddc0d..b29e868aff 100644 --- a/packages/sdk/server-ai/examples/tracked-chat/src/index.ts +++ b/packages/sdk/server-ai/examples/tracked-chat/src/index.ts @@ -3,7 +3,6 @@ import { basicLogger, init, type LDContext, - LDLogger, LDOptions, } from '@launchdarkly/node-server-sdk'; import { initAi } from '@launchdarkly/server-sdk-ai'; @@ -18,13 +17,8 @@ if (!sdkKey) { process.exit(1); } -const logger = basicLogger({ level: 'debug', destination: console.log }); -const options: LDOptions = { - logger, -}; - // Initialize LaunchDarkly client -const ldClient = init(sdkKey, options); +const ldClient = init(sdkKey); // Set up the context properties. This context should appear on your LaunchDarkly contexts dashboard // soon after you run the demo. From 5dc4ae9e6f3e190306078beece2052d25206f10f Mon Sep 17 00:00:00 2001 From: jsonbailey Date: Mon, 13 Oct 2025 16:27:20 +0000 Subject: [PATCH 06/10] remove unused imports --- packages/sdk/server-ai/examples/tracked-chat/src/index.ts | 2 -- 1 file changed, 2 deletions(-) diff --git a/packages/sdk/server-ai/examples/tracked-chat/src/index.ts b/packages/sdk/server-ai/examples/tracked-chat/src/index.ts index b29e868aff..63ec110723 100644 --- a/packages/sdk/server-ai/examples/tracked-chat/src/index.ts +++ b/packages/sdk/server-ai/examples/tracked-chat/src/index.ts @@ -1,9 +1,7 @@ /* eslint-disable no-console */ import { - basicLogger, init, type LDContext, - LDOptions, } from '@launchdarkly/node-server-sdk'; import { initAi } from '@launchdarkly/server-sdk-ai'; From 41af95de9038199ccbe6582315099952204a2132 Mon Sep 17 00:00:00 2001 From: jsonbailey Date: Tue, 14 Oct 2025 20:28:02 +0000 Subject: [PATCH 07/10] fix lint error and simplify instructions --- .../server-ai/examples/tracked-chat/README.md | 34 +++++-------------- .../examples/tracked-chat/src/index.ts | 7 ++-- 2 files changed, 10 insertions(+), 31 deletions(-) diff --git a/packages/sdk/server-ai/examples/tracked-chat/README.md b/packages/sdk/server-ai/examples/tracked-chat/README.md index cbfe6ef537..93817c981d 100644 --- a/packages/sdk/server-ai/examples/tracked-chat/README.md +++ b/packages/sdk/server-ai/examples/tracked-chat/README.md @@ -27,32 +27,14 @@ This example demonstrates how to use the LaunchDarkly AI SDK chat functionality LAUNCHDARKLY_AI_CONFIG_KEY=sample-ai-chat-config ``` -1. Create an AI Config in LaunchDarkly with the key `sample-ai-config`: - ```json - { - "_ldMeta": { - "variationKey": "1234", - "enabled": true, - "version": 1 - }, - "messages": [ - { - "content": "You are a helpful assistant for {{customerName}}. You should be friendly and informative.", - "role": "system" - } - ], - "model": { - "name": "gpt-3.5-turbo", - "parameters": { - "temperature": 0.7, - "maxTokens": 1000 - } - }, - "provider": { - "name": "langchain" - } - } - ``` +1. 
Create an AI Config in LaunchDarkly: + - Navigate to the AI Configs section in your LaunchDarkly dashboard + - Create a new AI Config with the key `sample-ai-config` + - Add a variation with the following settings: + - **Model Selection**: Select "OpenAI" as the provider and "gpt-3.5-turbo" as the model + - **Messages**: Add a system message with the content: "You are a helpful assistant for {{companyName}}. You should be friendly and informative." + - Save the variation + - Update the default target rule to use the newly created variation ## Running the Example diff --git a/packages/sdk/server-ai/examples/tracked-chat/src/index.ts b/packages/sdk/server-ai/examples/tracked-chat/src/index.ts index 63ec110723..cf2514cf34 100644 --- a/packages/sdk/server-ai/examples/tracked-chat/src/index.ts +++ b/packages/sdk/server-ai/examples/tracked-chat/src/index.ts @@ -1,8 +1,5 @@ /* eslint-disable no-console */ -import { - init, - type LDContext, -} from '@launchdarkly/node-server-sdk'; +import { init, type LDContext } from '@launchdarkly/node-server-sdk'; import { initAi } from '@launchdarkly/server-sdk-ai'; // Environment variables @@ -50,7 +47,7 @@ async function main(): Promise { // Get AI chat configuration from LaunchDarkly const chat = await aiClient.initChat(aiConfigKey, context, defaultValue, { - myVariable: 'My User Defined Variable', + companyName: 'LaunchDarkly', }); if (!chat) { From eb11847ba196e0355abd6a908a966b576d1b4b57 Mon Sep 17 00:00:00 2001 From: jsonbailey Date: Tue, 14 Oct 2025 20:48:33 +0000 Subject: [PATCH 08/10] simplify the example and align runtime variable in code and readme --- packages/sdk/server-ai/examples/tracked-chat/README.md | 9 --------- .../sdk/server-ai/examples/tracked-chat/package.json | 5 +++-- .../sdk/server-ai/examples/tracked-chat/src/index.ts | 2 +- 3 files changed, 4 insertions(+), 12 deletions(-) diff --git a/packages/sdk/server-ai/examples/tracked-chat/README.md b/packages/sdk/server-ai/examples/tracked-chat/README.md index 93817c981d..e366ef2fb8 100644 --- a/packages/sdk/server-ai/examples/tracked-chat/README.md +++ b/packages/sdk/server-ai/examples/tracked-chat/README.md @@ -46,13 +46,4 @@ This will: 1. Initialize the LaunchDarkly client 1. Create a chat configuration using the AI Config 1. Send a message to the AI and display the response -1. Continue the conversation with a follow-up question 1. 
Automatically track interaction metrics (duration, tokens, success/error) - -## Features Demonstrated - -- **AI Config Integration**: Using LaunchDarkly to configure AI models and prompts -- **Variable Interpolation**: Using Mustache templates with runtime variables -- **Chat Conversations**: Multi-turn conversations with message history -- **Provider Integration**: Using multiple AI providers through the LaunchDarkly AI SDK -- **Metrics Tracking**: Automatic tracking of token usage and performance diff --git a/packages/sdk/server-ai/examples/tracked-chat/package.json b/packages/sdk/server-ai/examples/tracked-chat/package.json index 30b3452b3e..c32bdb1ded 100644 --- a/packages/sdk/server-ai/examples/tracked-chat/package.json +++ b/packages/sdk/server-ai/examples/tracked-chat/package.json @@ -13,8 +13,9 @@ "@langchain/google-genai": "^0.2.18", "@launchdarkly/node-server-sdk": "^9.0.0", "@launchdarkly/server-sdk-ai": "0.11.4", - "@launchdarkly/server-sdk-ai-langchain": "0.0.0", - "@launchdarkly/server-sdk-ai-vercel": "0.0.0", + "@launchdarkly/server-sdk-ai-langchain": "0.1.1", + "@launchdarkly/server-sdk-ai-openai": "0.1.0", + "@launchdarkly/server-sdk-ai-vercel": "0.1.0", "dotenv": "^16.0.0", "langchain": "^0.1.0" }, diff --git a/packages/sdk/server-ai/examples/tracked-chat/src/index.ts b/packages/sdk/server-ai/examples/tracked-chat/src/index.ts index cf2514cf34..44c63f8faa 100644 --- a/packages/sdk/server-ai/examples/tracked-chat/src/index.ts +++ b/packages/sdk/server-ai/examples/tracked-chat/src/index.ts @@ -58,7 +58,7 @@ async function main(): Promise { // Example of using the chat functionality console.log('\n*** Starting chat conversation:'); try { - const userInput = 'Hello! Can you help me understand what LaunchDarkly is?'; + const userInput = 'Hello! 
Can you help me understand how your company can help me?'; console.log('User Input:', userInput); const response = await chat.invoke(userInput); From 69f90f7f59c18bd7fc5df67a228f62d2bfed90cb Mon Sep 17 00:00:00 2001 From: jsonbailey Date: Tue, 14 Oct 2025 21:25:27 +0000 Subject: [PATCH 09/10] auto update server-sdk-ai versions in examples --- .../sdk/server-ai/examples/tracked-chat/package.json | 2 +- release-please-config.json | 10 ++++++++++ 2 files changed, 11 insertions(+), 1 deletion(-) diff --git a/packages/sdk/server-ai/examples/tracked-chat/package.json b/packages/sdk/server-ai/examples/tracked-chat/package.json index c32bdb1ded..a113216f33 100644 --- a/packages/sdk/server-ai/examples/tracked-chat/package.json +++ b/packages/sdk/server-ai/examples/tracked-chat/package.json @@ -12,7 +12,7 @@ "@langchain/core": "^0.3.78", "@langchain/google-genai": "^0.2.18", "@launchdarkly/node-server-sdk": "^9.0.0", - "@launchdarkly/server-sdk-ai": "0.11.4", + "@launchdarkly/server-sdk-ai": "0.12.1", "@launchdarkly/server-sdk-ai-langchain": "0.1.1", "@launchdarkly/server-sdk-ai-openai": "0.1.0", "@launchdarkly/server-sdk-ai-vercel": "0.1.0", diff --git a/release-please-config.json b/release-please-config.json index 446afbcc86..eb417cd4d8 100644 --- a/release-please-config.json +++ b/release-please-config.json @@ -86,6 +86,16 @@ "type": "json", "path": "examples/openai/package.json", "jsonpath": "$.dependencies['@launchdarkly/server-sdk-ai']" + }, + { + "type": "json", + "path": "examples/tracked-chat/package.json", + "jsonpath": "$.dependencies['@launchdarkly/server-sdk-ai']" + }, + { + "type": "json", + "path": "examples/vercel-ai/package.json", + "jsonpath": "$.dependencies['@launchdarkly/server-sdk-ai']" } ] }, From 564fcce033a5ef93a52a1ce6142bc9636eea67f7 Mon Sep 17 00:00:00 2001 From: jsonbailey Date: Tue, 14 Oct 2025 21:35:57 +0000 Subject: [PATCH 10/10] allow more flexible updates for the AIProvider packages --- packages/sdk/server-ai/examples/tracked-chat/package.json | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/sdk/server-ai/examples/tracked-chat/package.json b/packages/sdk/server-ai/examples/tracked-chat/package.json index a113216f33..6f2992279a 100644 --- a/packages/sdk/server-ai/examples/tracked-chat/package.json +++ b/packages/sdk/server-ai/examples/tracked-chat/package.json @@ -13,9 +13,9 @@ "@langchain/google-genai": "^0.2.18", "@launchdarkly/node-server-sdk": "^9.0.0", "@launchdarkly/server-sdk-ai": "0.12.1", - "@launchdarkly/server-sdk-ai-langchain": "0.1.1", - "@launchdarkly/server-sdk-ai-openai": "0.1.0", - "@launchdarkly/server-sdk-ai-vercel": "0.1.0", + "@launchdarkly/server-sdk-ai-langchain": "^0.1.0", + "@launchdarkly/server-sdk-ai-openai": "^0.1.0", + "@launchdarkly/server-sdk-ai-vercel": "^0.1.0", "dotenv": "^16.0.0", "langchain": "^0.1.0" },
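
---

Taken together, the series leaves `examples/tracked-chat/src/index.ts` with the flow sketched below. This is a condensed sketch assembled from the hunks above, not new API surface; the only addition is the second `chat.invoke()` call, which assumes the chat object retains message history across calls (per the multi-turn behavior the earlier README text described) rather than being code present in the example.

```typescript
/* Condensed sketch of examples/tracked-chat/src/index.ts as it stands after this series. */
/* eslint-disable no-console */
import { init, type LDContext } from '@launchdarkly/node-server-sdk';
import { initAi } from '@launchdarkly/server-sdk-ai';

const sdkKey = process.env.LAUNCHDARKLY_SDK_KEY;
const aiConfigKey = process.env.LAUNCHDARKLY_AI_CONFIG_KEY || 'sample-ai-config';

if (!sdkKey) {
  console.error('*** Please set the LAUNCHDARKLY_SDK_KEY env first');
  process.exit(1);
}

const ldClient = init(sdkKey);

const context: LDContext = { kind: 'user', key: 'example-user-key', name: 'Sandy' };

async function main(): Promise<void> {
  await ldClient.waitForInitialization({ timeout: 10 });

  const aiClient = initAi(ldClient);

  // Fallback configuration used when the AI Config cannot be evaluated.
  const defaultValue = {
    enabled: true,
    model: { name: 'gpt-3.5-turbo' },
    messages: [{ role: 'system' as const, content: 'You are a helpful assistant.' }],
    provider: { name: 'openai' },
  };

  // Runtime variables are interpolated into the config's Mustache templates,
  // e.g. {{companyName}} in the system message.
  const chat = await aiClient.initChat(aiConfigKey, context, defaultValue, {
    companyName: 'LaunchDarkly',
  });
  if (!chat) {
    console.log('*** AI chat configuration is not enabled');
    return;
  }

  const response = await chat.invoke(
    'Hello! Can you help me understand how your company can help me?',
  );
  console.log('AI Response:', response.message.content);

  // Assumption: the chat keeps message history, so a second invoke continues the
  // same conversation and is tracked with the same interaction metrics.
  const followUp = await chat.invoke('Can you summarize that in one sentence?');
  console.log('Follow-up:', followUp.message.content);
}

main().catch((err) => {
  console.error('Error:', err);
  process.exit(1);
});
```

Running `yarn start` in the example directory with the `.env` values from the README exercises this flow end to end.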