Skip to content
Merged
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
23 changes: 23 additions & 0 deletions packages/sdk/server-ai/src/LDAIClientImpl.ts
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@ import * as Mustache from 'mustache';
import { LDContext } from '@launchdarkly/js-server-sdk-common';

import { LDAIAgent, LDAIAgentConfig, LDAIAgentDefaults } from './api/agents';
import { BaseTrackedChat, TrackedChatFactory } from './api/chat';
import {
LDAIConfig,
LDAIConfigTracker,
Expand Down Expand Up @@ -222,4 +223,26 @@ export class LDAIClientImpl implements LDAIClient {

return agents;
}

async initChat(
  key: string,
  context: LDContext,
  defaultValue: LDAIDefaults,
  variables?: Record<string, unknown>,
): Promise<BaseTrackedChat | undefined> {
  // Record that a chat was initialized for this configuration key.
  this._ldClient.track('$ld:ai:config:function:initChat', context, key, 1);

  // Evaluate the AI configuration for this context.
  const aiConfig = await this.config(key, context, defaultValue, variables);

  if (!aiConfig.enabled) {
    // A disabled configuration yields no chat instance.
    return undefined;
  }

  // Delegate provider selection to the factory; may resolve to undefined
  // when no supported provider implementation is available.
  return TrackedChatFactory.create(aiConfig, aiConfig.tracker);
}
}
44 changes: 44 additions & 0 deletions packages/sdk/server-ai/src/api/LDAIClient.ts
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
import { LDContext } from '@launchdarkly/js-server-sdk-common';

import { LDAIAgent, LDAIAgentConfig, LDAIAgentDefaults } from './agents';
import { BaseTrackedChat } from './chat';
import { LDAIConfig, LDAIDefaults } from './config/LDAIConfig';

/**
Expand Down Expand Up @@ -143,4 +144,47 @@ export interface LDAIClient {
agentConfigs: T,
context: LDContext,
): Promise<Record<T[number]['key'], LDAIAgent>>;

/**
 * Initializes and returns a new TrackedChat instance for chat interactions.
 * This method serves as the primary entry point for creating TrackedChat instances from configuration.
 *
 * @param key The key identifying the AI chat configuration to use.
 * @param context The standard LDContext used when evaluating flags.
 * @param defaultValue A default value representing a standard AI chat config result.
 * @param variables Dictionary of values for instruction interpolation.
 * @returns A promise that resolves to the TrackedChat instance, or undefined if the
 *   configuration is disabled or no supported provider implementation is available.
 *
 * @example
 * ```
 * const key = "customer_support_chat";
 * const context = {...};
 * const defaultValue = {
 *   config: {
 *     enabled: false,
 *     model: { name: "gpt-4" },
 *     messages: [
 *       { role: "system", content: "You are a helpful customer support agent." }
 *     ]
 *   }
 * };
 * const variables = { customerName: 'John' };
 *
 * const chat = await client.initChat(key, context, defaultValue, variables);
 * if (chat) {
 *   const response = await chat.invoke("I need help with my order");
 *   console.log(response.message.content);
 *
 *   // Access configuration and tracker if needed
 *   console.log('Model:', chat.getConfig().model?.name);
 *   chat.getTracker().trackSuccess();
 * }
 * ```
 */
initChat(
  key: string,
  context: LDContext,
  defaultValue: LDAIDefaults,
  variables?: Record<string, unknown>,
): Promise<BaseTrackedChat | undefined>;
}
82 changes: 82 additions & 0 deletions packages/sdk/server-ai/src/api/chat/BaseTrackedChat.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,82 @@
import { LDAIConfig, LDMessage } from '../config/LDAIConfig';
import { LDAIConfigTracker } from '../config/LDAIConfigTracker';
import { ChatResponse } from './TrackedChat';

/**
 * Base implementation of TrackedChat that provides common functionality.
 * This can be extended by provider-specific implementations.
 */
export abstract class BaseTrackedChat {
  // Conversation history for this chat session. Seeded from the configured
  // messages and appended to on every invoke() call.
  protected messages: LDMessage[];

  constructor(
    protected readonly aiConfig: LDAIConfig,
    protected readonly tracker: LDAIConfigTracker,
  ) {
    // Copy the configured messages so appending conversation turns does not
    // mutate the array owned by the shared AI configuration object.
    this.messages = [...(aiConfig.messages ?? [])];
  }

  /**
   * Invoke the chat model with a prompt string.
   * This method handles conversation management and tracking, delegating to the provider's invokeModel method.
   *
   * @param prompt The user prompt; appended to the conversation history as a 'user' message.
   * @returns A promise that resolves to the provider's chat response.
   */
  async invoke(prompt: string): Promise<ChatResponse> {
    // Convert prompt string to LDMessage with role 'user' and add to conversation history
    const userMessage: LDMessage = {
      role: 'user',
      content: prompt,
    };
    this.messages.push(userMessage);

    // Delegate to provider-specific implementation with tracking
    const response = await this.trackMetricsOf(() => this.invokeModel(this.messages));

    // Add the assistant response to the conversation history
    this.messages.push(response.message);

    return response;
  }

  /**
   * Abstract method that providers must implement to handle the actual model invocation.
   * This method should convert messages to provider format, invoke the model, and return a ChatResponse.
   *
   * @param messages The full conversation history to send to the model.
   */
  protected abstract invokeModel(messages: LDMessage[]): Promise<ChatResponse>;

  /**
   * Track metrics for a ChatResponse execution.
   * This method handles duration tracking, token usage tracking, and success/error tracking.
   *
   * @param callable The provider invocation to measure.
   * @returns The callable's result; rethrows its error after tracking it.
   */
  protected async trackMetricsOf(callable: () => Promise<ChatResponse>): Promise<ChatResponse> {
    return this.tracker.trackDurationOf(async () => {
      try {
        const result = await callable();

        // Track token usage if available
        if (result.usage) {
          this.tracker.trackTokens(result.usage);
        }

        this.tracker.trackSuccess();
        return result;
      } catch (error) {
        // Record the failure, then propagate so callers can handle it.
        this.tracker.trackError();
        throw error;
      }
    });
  }

  /**
   * Get the underlying AI configuration used to initialize this TrackedChat.
   */
  getConfig(): LDAIConfig {
    return this.aiConfig;
  }

  /**
   * Get the underlying AI configuration tracker used to initialize this TrackedChat.
   */
  getTracker(): LDAIConfigTracker {
    return this.tracker;
  }
}
60 changes: 60 additions & 0 deletions packages/sdk/server-ai/src/api/chat/TrackedChat.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,60 @@
import { LDAIConfig, LDMessage } from '../config/LDAIConfig';
import { LDAIConfigTracker } from '../config/LDAIConfigTracker';
import { LDTokenUsage } from '../metrics/LDTokenUsage';

/**
 * Structure of a single chat model response returned by TrackedChat
 * implementations.
 */
export interface ChatResponse {
  /**
   * The response message from the AI, including its role and content.
   */
  message: LDMessage;

  /**
   * Token usage information for this invocation, when the provider reports it.
   */
  usage?: LDTokenUsage;

  /**
   * Additional provider-specific metadata, if any.
   */
  metadata?: Record<string, unknown>;
}

/**
 * Interface for provider-specific tracked chat implementations.
 */
export interface ProviderTrackedChat {
  /**
   * Invoke the chat model with a prompt string.
   * This method provides a consistent interface for chat model execution while integrating
   * LaunchDarkly-specific functionality such as metrics tracking.
   *
   * @param prompt A prompt string that will be converted to a user message and added to the conversation history.
   * @returns A promise that resolves to the chat response.
   */
  invoke(prompt: string): Promise<ChatResponse>;

  /**
   * Get the underlying AI configuration used to initialize this TrackedChat.
   *
   * @returns The AI configuration.
   */
  getConfig(): LDAIConfig;

  /**
   * Get the underlying AI configuration tracker used to initialize this TrackedChat.
   *
   * @returns The AI configuration tracker.
   */
  getTracker(): LDAIConfigTracker;

  /**
   * Get the underlying provider-specific chat model instance.
   * This provides direct access to the underlying provider chat model for advanced use cases.
   *
   * @returns The configured provider-specific chat model instance.
   */
  getChatModel(): unknown;
}
68 changes: 68 additions & 0 deletions packages/sdk/server-ai/src/api/chat/TrackedChatFactory.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,68 @@
import { LDAIConfig } from '../config/LDAIConfig';
import { LDAIConfigTracker } from '../config/LDAIConfigTracker';
import { BaseTrackedChat } from './BaseTrackedChat';

/**
* Factory for creating TrackedChat instances based on the provider configuration.
*/
export class TrackedChatFactory {
/**
* Create a TrackedChat instance based on the AI configuration.
* This method attempts to load provider-specific implementations dynamically.
* Returns undefined if the provider is not supported.
*/
static async create(
aiConfig: LDAIConfig,
tracker: LDAIConfigTracker,
): Promise<BaseTrackedChat | undefined> {
const providerName = aiConfig.provider?.name?.toLowerCase();
let trackedChat: BaseTrackedChat | undefined;

// Try specific implementations for the provider
switch (providerName) {
case 'openai':
trackedChat = undefined;
break;
case 'bedrock':
trackedChat = undefined;
break;
default:
trackedChat = undefined;
}
Comment on lines 50 to 62
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I assume the plan is to fill this out later?

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Yes, we will add support for additional providers


// If no specific implementation worked, try LangChain as fallback
if (!trackedChat) {
trackedChat = await this._createLangChainTrackedChat(aiConfig, tracker);
}

// If LangChain didn't work, try Vercel as fallback
if (!trackedChat) {
// TODO: Return Vercel AI SDK implementation when available
// trackedChat = this._createVercelTrackedChat(aiConfig, tracker);
}

return trackedChat;
}

/**
* Create a LangChain TrackedChat instance if the LangChain provider is available.
*/
private static async _createLangChainTrackedChat(
aiConfig: LDAIConfig,
tracker: LDAIConfigTracker,
): Promise<BaseTrackedChat | undefined> {
try {
// Try to dynamically import the LangChain provider
// This will work if @launchdarkly/server-sdk-ai-langchain is installed
// eslint-disable-next-line @typescript-eslint/no-require-imports, import/no-extraneous-dependencies
const { LangChainTrackedChat, LangChainProvider } = require('@launchdarkly/server-sdk-ai-langchain');

Check failure on line 58 in packages/sdk/server-ai/src/api/chat/TrackedChatFactory.ts

View workflow job for this annotation

GitHub Actions / build-test-server-sdk-ai

Unexpected require()

Check failure on line 58 in packages/sdk/server-ai/src/api/chat/TrackedChatFactory.ts

View workflow job for this annotation

GitHub Actions / build-test-server-sdk-ai

Replace `·LangChainTrackedChat,·LangChainProvider` with `⏎········LangChainTrackedChat,⏎········LangChainProvider,⏎·····`

Check failure on line 58 in packages/sdk/server-ai/src/api/chat/TrackedChatFactory.ts

View workflow job for this annotation

GitHub Actions / build-test-server-sdk-ai

Unexpected require()

Check failure on line 58 in packages/sdk/server-ai/src/api/chat/TrackedChatFactory.ts

View workflow job for this annotation

GitHub Actions / build-test-server-sdk-ai

Replace `·LangChainTrackedChat,·LangChainProvider` with `⏎········LangChainTrackedChat,⏎········LangChainProvider,⏎·····`

// Build the LLM during factory creation to catch errors early
const llm = await LangChainProvider.createLangChainModel(aiConfig);
return new LangChainTrackedChat(aiConfig, tracker, llm);
} catch (error) {
// If the LangChain provider is not available or LLM creation fails, return undefined
return undefined;
}
}
}
3 changes: 3 additions & 0 deletions packages/sdk/server-ai/src/api/chat/index.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
// Barrel file: re-exports the chat API surface (BaseTrackedChat, the
// TrackedChat types, and TrackedChatFactory).
export * from './BaseTrackedChat';
export * from './TrackedChat';
export * from './TrackedChatFactory';
1 change: 1 addition & 0 deletions packages/sdk/server-ai/src/api/index.ts
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
export * from './config';
export * from './agents';
// Chat API: TrackedChat interfaces and factory.
export * from './chat';
export * from './metrics';
export * from './LDAIClient';
Loading