-
Notifications
You must be signed in to change notification settings - Fork 31
feat: Add support for TrackedChats in the AI SDK #939
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Changes from 1 commit
50aef7f
cb4a25e
cd534e3
0b9f894
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,82 @@ | ||
| import { LDAIConfig, LDMessage } from '../config/LDAIConfig'; | ||
| import { LDAIConfigTracker } from '../config/LDAIConfigTracker'; | ||
| import { ChatResponse } from './TrackedChat'; | ||
|
|
||
| /** | ||
| * Base implementation of TrackedChat that provides common functionality. | ||
| * This can be extended by provider-specific implementations. | ||
| */ | ||
| export abstract class BaseTrackedChat { | ||
| protected messages: LDMessage[]; | ||
|
|
||
| constructor( | ||
| protected readonly aiConfig: LDAIConfig, | ||
| protected readonly tracker: LDAIConfigTracker, | ||
| ) { | ||
| this.messages = aiConfig.messages || []; | ||
| } | ||
|
|
||
| /** | ||
| * Invoke the chat model with a prompt string. | ||
| * This method handles conversation management and tracking, delegating to the provider's invokeModel method. | ||
| */ | ||
| async invoke(prompt: string): Promise<ChatResponse> { | ||
| // Convert prompt string to LDMessage with role 'user' and add to conversation history | ||
| const userMessage: LDMessage = { | ||
| role: 'user', | ||
| content: prompt, | ||
| }; | ||
| this.messages.push(userMessage); | ||
|
|
||
| // Delegate to provider-specific implementation with tracking | ||
| const response = await this.trackMetricsOf(() => this.invokeModel(this.messages)); | ||
|
|
||
| // Add the assistant response to the conversation history | ||
| this.messages.push(response.message); | ||
|
|
||
| return response; | ||
| } | ||
|
|
||
| /** | ||
| * Abstract method that providers must implement to handle the actual model invocation. | ||
| * This method should convert messages to provider format, invoke the model, and return a ChatResponse. | ||
| */ | ||
| protected abstract invokeModel(messages: LDMessage[]): Promise<ChatResponse>; | ||
|
|
||
| /** | ||
| * Track metrics for a ChatResponse execution. | ||
| * This method handles duration tracking, token usage tracking, and success/error tracking. | ||
| */ | ||
| protected async trackMetricsOf(callable: () => Promise<ChatResponse>): Promise<ChatResponse> { | ||
| return this.tracker.trackDurationOf(async () => { | ||
| try { | ||
| const result = await callable(); | ||
|
|
||
| // Track token usage if available | ||
| if (result.usage) { | ||
| this.tracker.trackTokens(result.usage); | ||
| } | ||
|
|
||
| this.tracker.trackSuccess(); | ||
| return result; | ||
| } catch (error) { | ||
| this.tracker.trackError(); | ||
| throw error; | ||
| } | ||
| }); | ||
| } | ||
|
|
||
| /** | ||
| * Get the underlying AI configuration used to initialize this TrackedChat. | ||
| */ | ||
| getConfig(): LDAIConfig { | ||
| return this.aiConfig; | ||
| } | ||
|
|
||
| /** | ||
| * Get the underlying AI configuration tracker used to initialize this TrackedChat. | ||
| */ | ||
| getTracker(): LDAIConfigTracker { | ||
| return this.tracker; | ||
| } | ||
| } |
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,60 @@ | ||
| import { LDAIConfig, LDMessage } from '../config/LDAIConfig'; | ||
| import { LDAIConfigTracker } from '../config/LDAIConfigTracker'; | ||
| import { LDTokenUsage } from '../metrics/LDTokenUsage'; | ||
|
|
||
/**
 * Chat response structure returned from a TrackedChat invocation.
 */
export interface ChatResponse {
  /**
   * The response message from the AI.
   */
  message: LDMessage;

  /**
   * Token usage information, when reported by the provider.
   */
  usage?: LDTokenUsage;

  /**
   * Additional provider-specific metadata.
   */
  metadata?: Record<string, unknown>;
}
|
|
||
/**
 * Interface for provider-specific tracked chat implementations.
 */
export interface ProviderTrackedChat {
  /**
   * Invoke the chat model with the provided prompt.
   * This method provides a consistent interface for chat model execution while
   * integrating LaunchDarkly-specific functionality (conversation management
   * and metric tracking).
   *
   * @param prompt A prompt string that will be converted to a user message and
   *   added to the conversation history.
   * @returns A promise that resolves to the chat response.
   */
  invoke(prompt: string): Promise<ChatResponse>;

  /**
   * Get the underlying AI configuration used to initialize this TrackedChat.
   *
   * @returns The AI configuration.
   */
  getConfig(): LDAIConfig;

  /**
   * Get the underlying AI configuration tracker used to initialize this TrackedChat.
   *
   * @returns The AI configuration tracker.
   */
  getTracker(): LDAIConfigTracker;

  /**
   * Get the underlying provider-specific chat model instance.
   * This provides direct access to the underlying provider chat model for
   * advanced use cases; callers must narrow the returned value themselves.
   *
   * @returns The configured provider-specific chat model instance.
   */
  getChatModel(): unknown;
}
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,68 @@ | ||
| import { LDAIConfig } from '../config/LDAIConfig'; | ||
| import { LDAIConfigTracker } from '../config/LDAIConfigTracker'; | ||
| import { BaseTrackedChat } from './BaseTrackedChat'; | ||
|
|
||
| /** | ||
| * Factory for creating TrackedChat instances based on the provider configuration. | ||
| */ | ||
| export class TrackedChatFactory { | ||
| /** | ||
| * Create a TrackedChat instance based on the AI configuration. | ||
| * This method attempts to load provider-specific implementations dynamically. | ||
| * Returns undefined if the provider is not supported. | ||
| */ | ||
| static async create( | ||
| aiConfig: LDAIConfig, | ||
| tracker: LDAIConfigTracker, | ||
| ): Promise<BaseTrackedChat | undefined> { | ||
| const providerName = aiConfig.provider?.name?.toLowerCase(); | ||
| let trackedChat: BaseTrackedChat | undefined; | ||
|
|
||
| // Try specific implementations for the provider | ||
| switch (providerName) { | ||
| case 'openai': | ||
| trackedChat = undefined; | ||
| break; | ||
| case 'bedrock': | ||
| trackedChat = undefined; | ||
| break; | ||
| default: | ||
| trackedChat = undefined; | ||
| } | ||
|
Comment on lines
50
to
62
Member
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. I assume the plan is to fill this out later?
Contributor
Author
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. Yes, we will add support for additional providers |
||
|
|
||
| // If no specific implementation worked, try LangChain as fallback | ||
| if (!trackedChat) { | ||
| trackedChat = await this._createLangChainTrackedChat(aiConfig, tracker); | ||
| } | ||
|
|
||
| // If LangChain didn't work, try Vercel as fallback | ||
| if (!trackedChat) { | ||
| // TODO: Return Vercel AI SDK implementation when available | ||
| // trackedChat = this._createVercelTrackedChat(aiConfig, tracker); | ||
| } | ||
|
|
||
| return trackedChat; | ||
| } | ||
|
|
||
| /** | ||
| * Create a LangChain TrackedChat instance if the LangChain provider is available. | ||
| */ | ||
| private static async _createLangChainTrackedChat( | ||
| aiConfig: LDAIConfig, | ||
| tracker: LDAIConfigTracker, | ||
| ): Promise<BaseTrackedChat | undefined> { | ||
| try { | ||
| // Try to dynamically import the LangChain provider | ||
| // This will work if @launchdarkly/server-sdk-ai-langchain is installed | ||
| // eslint-disable-next-line @typescript-eslint/no-require-imports, import/no-extraneous-dependencies | ||
| const { LangChainTrackedChat, LangChainProvider } = require('@launchdarkly/server-sdk-ai-langchain'); | ||
|
Check failure on line 58 in packages/sdk/server-ai/src/api/chat/TrackedChatFactory.ts
|
||
|
|
||
| // Build the LLM during factory creation to catch errors early | ||
| const llm = await LangChainProvider.createLangChainModel(aiConfig); | ||
| return new LangChainTrackedChat(aiConfig, tracker, llm); | ||
| } catch (error) { | ||
| // If the LangChain provider is not available or LLM creation fails, return undefined | ||
| return undefined; | ||
| } | ||
| } | ||
| } | ||
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,3 @@ | ||
// Public chat API surface: base implementation, interfaces, and factory.
export * from './BaseTrackedChat';
export * from './TrackedChat';
export * from './TrackedChatFactory';
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -1,4 +1,5 @@ | ||
// Re-export the public server AI SDK API surface.
export * from './config';
export * from './agents';
export * from './chat';
export * from './metrics';
export * from './LDAIClient';
Uh oh!
There was an error while loading. Please reload this page.