From 9c8aff53fa1f409f186c7fc077540a27d57e6a97 Mon Sep 17 00:00:00 2001 From: jsonbailey Date: Tue, 9 Sep 2025 14:39:00 +0000 Subject: [PATCH 1/2] fix: Chat to preserve message history --- examples/chatbot/README.md | 9 +- examples/chatbot/aws-bedrock/hello_bedrock.rb | 96 +++++++------------ examples/chatbot/openai/hello_openai.rb | 78 +++++++-------- 3 files changed, 80 insertions(+), 103 deletions(-) diff --git a/examples/chatbot/README.md b/examples/chatbot/README.md index c86f8a8..8956166 100644 --- a/examples/chatbot/README.md +++ b/examples/chatbot/README.md @@ -13,15 +13,14 @@ This repository includes examples for `OpenAI` and `Bedrock`. Depending on your ### General setup 1. Install the required dependencies with `bundle install` in the appropriate example directory. -1. Set the environment variable `LAUNCHDARKLY_SDK_KEY` to your LaunchDarkly SDK key. If there is an existing an AI Config in your LaunchDarkly project that you want to evaluate, set `LAUNCHDARKLY_AI_CONFIG_KEY` to the flag key; otherwise, an AI Config of `sample-ai-config` will be assumed. +1. [Create an AI Config](https://launchdarkly.com/docs/home/ai-configs/create) using the key specified in the examples, or copy the key of an existing AI Config in your LaunchDarkly project that you want to use. +1. Set the environment variable `LAUNCHDARKLY_SDK_KEY` to your LaunchDarkly SDK key and `LAUNCHDARKLY_AI_CONFIG_KEY` to the AI Config key; otherwise, an AI Config of `sample-ai-config` will be assumed. ```bash export LAUNCHDARKLY_SDK_KEY="1234567890abcdef" export LAUNCHDARKLY_AI_CONFIG_KEY="sample-ai-config" ``` -1. Replace `my-default-model` with your preferred model if the application cannot connect to LaunchDarkly Services. - ### OpenAI setup 1. Set the environment variable `OPENAI_API_KEY` to your OpenAI key. @@ -39,6 +38,10 @@ This repository includes examples for `OpenAI` and `Bedrock`. 
Depending on your ```bash export AWS_ACCESS_KEY_ID="0987654321fedcba" export AWS_SECRET_ACCESS_KEY="0987654321fedcba" + + # or + + export AWS_PROFILE="aws-profile-name" ``` 1. Run the program `bundle exec ruby hello_bedrock.rb` diff --git a/examples/chatbot/aws-bedrock/hello_bedrock.rb b/examples/chatbot/aws-bedrock/hello_bedrock.rb index ffe6527..6c291cb 100644 --- a/examples/chatbot/aws-bedrock/hello_bedrock.rb +++ b/examples/chatbot/aws-bedrock/hello_bedrock.rb @@ -10,9 +10,6 @@ # Set config_key to the AI Config key you want to evaluate. ai_config_key = ENV['LAUNCHDARKLY_AI_CONFIG_KEY'] || 'sample-ai-config' -# Set aws_access_key_id and aws_secret_access_key for AWS credentials. -aws_access_key_id = ENV['AWS_ACCESS_KEY_ID'] -aws_secret_access_key = ENV['AWS_SECRET_ACCESS_KEY'] region = ENV['AWS_REGION'] || 'us-east-1' if sdk_key.nil? || sdk_key.empty? @@ -20,47 +17,20 @@ exit 1 end -if aws_access_key_id.nil? || aws_access_key_id.empty? - puts '*** Please set the AWS_ACCESS_KEY_ID env variable first' - exit 1 -end - -if aws_secret_access_key.nil? || aws_secret_access_key.empty? 
-  puts '*** Please set the AWS_SECRET_ACCESS_KEY env variable first'
-  exit 1
-end
-
 #
 # Chatbot class that interacts with LaunchDarkly AI and AWS Bedrock
 #
 class BedrockChatbot
-  attr_reader :aiclient, :ai_config_key, :bedrock_client
-
-  DEFAULT_VALUE = LaunchDarkly::Server::AI::AIConfig.new(
-    enabled: true,
-    model: LaunchDarkly::Server::AI::ModelConfig.new(name: 'my-default-model'),
-    messages: [
-      LaunchDarkly::Server::AI::Message.new('system',
-        'You are a default unhelpful assistant with the persona of HAL 9000 talking with {{ldctx.name}}'),
-      LaunchDarkly::Server::AI::Message.new('user', '{{user_question}}'),
-    ]
-  )
-
-  def initialize(aiclient, ai_config_key, bedrock_client, context)
-    @aiclient = aiclient
-    @ai_config_key = ai_config_key
+  attr_reader :ai_config, :bedrock_client, :messages
+
+  def initialize(ai_config, bedrock_client)
+    @ai_config = ai_config
+    @messages = ai_config.messages
     @bedrock_client = bedrock_client
-    @context = context
   end
 
   def ask_agent(question)
-    ai_config = aiclient.config(
-      @ai_config_key,
-      @context,
-      DEFAULT_VALUE,
-      { user_question: question }
-    )
-
+    @messages << LaunchDarkly::Server::AI::Message.new('user', question)
     begin
       response = ai_config.tracker.track_bedrock_converse_metrics do
         @bedrock_client.converse(
@@ -70,15 +40,16 @@ def ask_agent(question)
           )
         )
       end
-      [response.output.message.content[0].text, ai_config.tracker]
+      @messages << LaunchDarkly::Server::AI::Message.new('assistant', response.output.message.content[0].text)
+      response.output.message.content[0].text
     rescue StandardError => e
-      ["An error occured: #{e.message}", nil]
+      "An error occurred: #{e.message}"
     end
   end
 
-  def agent_was_helpful(tracker, helpful)
+  def agent_was_helpful(helpful)
     kind = helpful ? 
:positive : :negative - tracker.track_feedback(kind: kind) + ai_config.tracker.track_feedback(kind: kind) end def map_converse_arguments(model_id, messages) @@ -86,13 +57,8 @@ def map_converse_arguments(model_id, messages) model_id: model_id, } - mapped_messages = [] - user_messages = messages.select { |msg| msg.role == 'user' } - mapped_messages << { role: 'user', content: user_messages.map { |msg| { text: msg.content } } } unless user_messages.empty? - - assistant_messages = messages.select { |msg| msg.role == 'assistant' } - mapped_messages << { role: 'assistant', content: assistant_messages.map { |msg| { text: msg.content } } } unless assistant_messages.empty? - args[:messages] = mapped_messages unless mapped_messages.empty? + chat_messages = messages.select { |msg| msg.role != 'system' } + args[:messages] = chat_messages.map { |msg| { role: msg.role, content: [{ text: msg.content }] } } system_messages = messages.select { |msg| msg.role == 'system' } args[:system] = system_messages.map { |msg| { text: msg.content } } unless system_messages.empty? @@ -118,27 +84,39 @@ def map_converse_arguments(model_id, messages) }) bedrock_client = Aws::BedrockRuntime::Client.new( - aws_access_key_id: aws_access_key_id, - aws_secret_access_key: aws_secret_access_key, region: region ) -chatbot = BedrockChatbot.new(ai_client, ai_config_key, bedrock_client, context) + + +DEFAULT_VALUE = LaunchDarkly::Server::AI::AIConfig.new( + enabled: false +) + +ai_config = ai_client.config( + ai_config_key, + context, + DEFAULT_VALUE +) + +unless ai_config.enabled + puts '*** AI features are disabled' + exit 1 +end + +chatbot = BedrockChatbot.new(ai_config, bedrock_client) loop do - print "Ask a question: (or type 'exit'): " + print "Ask a question (or type 'exit'): " question = gets&.chomp break if question.nil? 
|| question.strip.downcase == 'exit' - response, tracker = chatbot.ask_agent(question) + response = chatbot.ask_agent(question) puts "AI Response: #{response}" +end - next if tracker.nil? # If tracker is nil, skip feedback collection - - print "Was the response helpful? [yes/no] (or type 'exit'): " - feedback = gets&.chomp - break if feedback.nil? || feedback.strip.downcase == 'exit' +print "Was the chat helpful? [yes/no]: " +feedback = gets&.chomp - chatbot.agent_was_helpful(tracker, feedback == 'yes') -end +chatbot.agent_was_helpful(feedback == 'yes') unless feedback.nil? ld_client.close \ No newline at end of file diff --git a/examples/chatbot/openai/hello_openai.rb b/examples/chatbot/openai/hello_openai.rb index 9fc6fe6..0efe582 100644 --- a/examples/chatbot/openai/hello_openai.rb +++ b/examples/chatbot/openai/hello_openai.rb @@ -27,52 +27,41 @@ # Chatbot class that interacts with LaunchDarkly AI and OpenAI # class Chatbot - attr_reader :aiclient, :ai_config_key, :openai_client, :context - - DEFAULT_VALUE = LaunchDarkly::Server::AI::AIConfig.new( - enabled: true, - model: LaunchDarkly::Server::AI::ModelConfig.new(name: 'my-default-model'), - messages: [ - LaunchDarkly::Server::AI::Message.new('system', - 'You are a default unhelpful assistant with the persona of HAL 9000 talking with {{ldctx.name}}'), - LaunchDarkly::Server::AI::Message.new('user', '{{user_question}}'), - ] - ) - - def initialize(aiclient, ai_config_key, openai_client, context) - @aiclient = aiclient - @ai_config_key = ai_config_key + attr_reader :ai_config, :openai_client, :messages + + def initialize(ai_config, openai_client) + @ai_config = ai_config + @messages = ai_config.messages @openai_client = openai_client - @context = context end def ask_agent(question) - ai_config = aiclient.config( - @ai_config_key, - @context, - DEFAULT_VALUE, - { user_question: question } - ) - + @messages << LaunchDarkly::Server::AI::Message.new('user', question) begin completion = 
ai_config.tracker.track_openai_metrics do @openai_client.chat.completions.create( model: ai_config.model.name, - messages: ai_config.messages.map(&:to_h) + messages: @messages.map(&:to_h) ) end - [completion[:choices][0][:message][:content], ai_config.tracker] + response_content = completion[:choices][0][:message][:content] + @messages << LaunchDarkly::Server::AI::Message.new('assistant', response_content) + response_content rescue StandardError => e - ["An error occurred: #{e.message}", nil] + "An error occurred: #{e.message}" end end - def agent_was_helpful(tracker, helpful) + def agent_was_helpful(helpful) kind = helpful ? :positive : :negative - tracker.track_feedback(kind: kind) + ai_config.tracker.track_feedback(kind: kind) end end +DEFAULT_VALUE = LaunchDarkly::Server::AI::AIConfig.new( + enabled: false +) + ld_client = LaunchDarkly::LDClient.new(sdk_key) ai_client = LaunchDarkly::Server::AI::Client.new(ld_client) @@ -90,24 +79,31 @@ def agent_was_helpful(tracker, helpful) name: 'Lucy', }) -chatbot = Chatbot.new(ai_client, ai_config_key, OpenAI::Client.new(api_key: openai_api_key), context) +ai_config = ai_client.config( + ai_config_key, + context, + DEFAULT_VALUE +) + +unless ai_config.enabled + puts '*** AI features are disabled' + exit 1 +end + +chatbot = Chatbot.new(ai_config, OpenAI::Client.new(api_key: openai_api_key)) loop do print "Ask a question (or type 'exit'): " - input = gets&.chomp - break if input.nil? || input.strip.downcase == 'exit' + question = gets&.chomp + break if question.nil? || question.strip.downcase == 'exit' - response, tracker = chatbot.ask_agent(input) + response = chatbot.ask_agent(question) puts "AI Response: #{response}" +end - next if tracker.nil? # If tracker is nil, skip feedback collection - - print "Was the response helpful? [yes/no] (or type 'exit'): " - feedback = gets&.chomp - break if feedback.nil? || feedback.strip.downcase == 'exit' +print "Was the chat helpful? 
[yes/no]: " +feedback = gets&.chomp - helpful = feedback.strip.downcase == 'yes' - chatbot.agent_was_helpful(tracker, helpful) -end +chatbot.agent_was_helpful(feedback == 'yes') unless feedback.nil? -ld_client.close \ No newline at end of file +ld_client.close From 4a188db6204568c6592d0bf4d1d164f4334f9bad Mon Sep 17 00:00:00 2001 From: jsonbailey Date: Wed, 17 Sep 2025 15:58:51 +0000 Subject: [PATCH 2/2] include a working default value for best practice --- examples/chatbot/README.md | 1 + examples/chatbot/aws-bedrock/hello_bedrock.rb | 14 ++++++++++++-- examples/chatbot/openai/hello_openai.rb | 12 +++++++++++- 3 files changed, 24 insertions(+), 3 deletions(-) diff --git a/examples/chatbot/README.md b/examples/chatbot/README.md index 8956166..7032c85 100644 --- a/examples/chatbot/README.md +++ b/examples/chatbot/README.md @@ -14,6 +14,7 @@ This repository includes examples for `OpenAI` and `Bedrock`. Depending on your 1. Install the required dependencies with `bundle install` in the appropriate example directory. 1. [Create an AI Config](https://launchdarkly.com/docs/home/ai-configs/create) using the key specified in the examples, or copy the key of an existing AI Config in your LaunchDarkly project that you want to use. +1. Update the default model (`replace-with-your-model`) in the example file. 1. Set the environment variable `LAUNCHDARKLY_SDK_KEY` to your LaunchDarkly SDK key and `LAUNCHDARKLY_AI_CONFIG_KEY` to the AI Config key; otherwise, an AI Config of `sample-ai-config` will be assumed. 
 ```bash
diff --git a/examples/chatbot/aws-bedrock/hello_bedrock.rb b/examples/chatbot/aws-bedrock/hello_bedrock.rb
index 6c291cb..256ee9a 100644
--- a/examples/chatbot/aws-bedrock/hello_bedrock.rb
+++ b/examples/chatbot/aws-bedrock/hello_bedrock.rb
@@ -89,9 +89,19 @@ def map_converse_arguments(model_id, messages)
 
 
 DEFAULT_VALUE = LaunchDarkly::Server::AI::AIConfig.new(
-  enabled: false
+  enabled: true,
+  model: LaunchDarkly::Server::AI::ModelConfig.new(name: 'replace-with-your-model'),
+  messages: [
+    LaunchDarkly::Server::AI::Message.new('system',
+      'You are the backup assistant when something prevents retrieving the LaunchDarkly-configured assistant. You have the persona of HAL 9000 talking with {{ldctx.name}}'),
+  ]
 )
 
+# You can also default to disabled if you are unable to connect to LaunchDarkly services.
+# DEFAULT_VALUE = LaunchDarkly::Server::AI::AIConfig.new(
+#   enabled: false
+# )
+
 ai_config = ai_client.config(
   ai_config_key,
   context,
@@ -119,4 +129,4 @@ def map_converse_arguments(model_id, messages)
 
 chatbot.agent_was_helpful(feedback == 'yes') unless feedback.nil?
 
-ld_client.close
\ No newline at end of file
+ld_client.close
diff --git a/examples/chatbot/openai/hello_openai.rb b/examples/chatbot/openai/hello_openai.rb
index 0efe582..c07768a 100644
--- a/examples/chatbot/openai/hello_openai.rb
+++ b/examples/chatbot/openai/hello_openai.rb
@@ -59,9 +59,19 @@ def agent_was_helpful(helpful)
 end
 
 DEFAULT_VALUE = LaunchDarkly::Server::AI::AIConfig.new(
-  enabled: false
+  enabled: true,
+  model: LaunchDarkly::Server::AI::ModelConfig.new(name: 'replace-with-your-model'),
+  messages: [
+    LaunchDarkly::Server::AI::Message.new('system',
+      'You are the backup assistant when something prevents retrieving the LaunchDarkly-configured assistant. You have the persona of HAL 9000 talking with {{ldctx.name}}'),
+  ]
 )
 
+# You can also default to disabled if you are unable to connect to LaunchDarkly services. 
+# DEFAULT_VALUE = LaunchDarkly::Server::AI::AIConfig.new( +# enabled: false +# ) + ld_client = LaunchDarkly::LDClient.new(sdk_key) ai_client = LaunchDarkly::Server::AI::Client.new(ld_client)