Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
10 changes: 7 additions & 3 deletions examples/chatbot/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -13,15 +13,15 @@ This repository includes examples for `OpenAI` and `Bedrock`. Depending on your
### General setup

1. Install the required dependencies with `bundle install` in the appropriate example directory.
1. Set the environment variable `LAUNCHDARKLY_SDK_KEY` to your LaunchDarkly SDK key. If there is an existing AI Config in your LaunchDarkly project that you want to evaluate, set `LAUNCHDARKLY_AI_CONFIG_KEY` to the flag key; otherwise, an AI Config of `sample-ai-config` will be assumed.
1. [Create an AI Config](https://launchdarkly.com/docs/home/ai-configs/create) using the key specified in the examples, or copy the key of an existing AI Config in your LaunchDarkly project that you want to use.
1. Update the default model (`replace-with-your-model`) in the example file.
1. Set the environment variables `LAUNCHDARKLY_SDK_KEY` to your LaunchDarkly SDK key and `LAUNCHDARKLY_AI_CONFIG_KEY` to the AI Config key; if the latter is not set, an AI Config key of `sample-ai-config` will be assumed.

```bash
export LAUNCHDARKLY_SDK_KEY="1234567890abcdef"
export LAUNCHDARKLY_AI_CONFIG_KEY="sample-ai-config"
```

1. Replace `my-default-model` with your preferred model if the application cannot connect to LaunchDarkly Services.

### OpenAI setup

1. Set the environment variable `OPENAI_API_KEY` to your OpenAI key.
Expand All @@ -39,6 +39,10 @@ This repository includes examples for `OpenAI` and `Bedrock`. Depending on your
```bash
export AWS_ACCESS_KEY_ID="0987654321fedcba"
export AWS_SECRET_ACCESS_KEY="0987654321fedcba"

# or

export AWS_PROFILE="aws-profile-name"
```

1. Run the program `bundle exec ruby hello_bedrock.rb`
Expand Down
108 changes: 48 additions & 60 deletions examples/chatbot/aws-bedrock/hello_bedrock.rb
Original file line number Diff line number Diff line change
Expand Up @@ -10,57 +10,27 @@
# Set config_key to the AI Config key you want to evaluate.
ai_config_key = ENV['LAUNCHDARKLY_AI_CONFIG_KEY'] || 'sample-ai-config'

# Set aws_access_key_id and aws_secret_access_key for AWS credentials.
aws_access_key_id = ENV['AWS_ACCESS_KEY_ID']
aws_secret_access_key = ENV['AWS_SECRET_ACCESS_KEY']
region = ENV['AWS_REGION'] || 'us-east-1'

if sdk_key.nil? || sdk_key.empty?
puts '*** Please set the LAUNCHDARKLY_SDK_KEY env first'
exit 1
end

if aws_access_key_id.nil? || aws_access_key_id.empty?
puts '*** Please set the AWS_ACCESS_KEY_ID env variable first'
exit 1
end

if aws_secret_access_key.nil? || aws_secret_access_key.empty?
puts '*** Please set the AWS_SECRET_ACCESS_KEY env variable first'
exit 1
end

#
# Chatbot class that interacts with LaunchDarkly AI and AWS Bedrock
#
class BedrockChatbot
attr_reader :aiclient, :ai_config_key, :bedrock_client

DEFAULT_VALUE = LaunchDarkly::Server::AI::AIConfig.new(
enabled: true,
model: LaunchDarkly::Server::AI::ModelConfig.new(name: 'my-default-model'),
messages: [
LaunchDarkly::Server::AI::Message.new('system',
'You are a default unhelpful assistant with the persona of HAL 9000 talking with {{ldctx.name}}'),
LaunchDarkly::Server::AI::Message.new('user', '{{user_question}}'),
]
)

def initialize(aiclient, ai_config_key, bedrock_client, context)
@aiclient = aiclient
@ai_config_key = ai_config_key
attr_reader :ai_config, :bedrock_client, :messages

def initialize(ai_config, bedrock_client)
@ai_config = ai_config
@messages = ai_config.messages
@bedrock_client = bedrock_client
@context = context
end

def ask_agent(question)
ai_config = aiclient.config(
@ai_config_key,
@context,
DEFAULT_VALUE,
{ user_question: question }
)

@messages << LaunchDarkly::Server::AI::Message.new('user', question)
begin
response = ai_config.tracker.track_bedrock_converse_metrics do
@bedrock_client.converse(
Expand All @@ -70,29 +40,25 @@ def ask_agent(question)
)
)
end
[response.output.message.content[0].text, ai_config.tracker]
@messages << LaunchDarkly::Server::AI::Message.new('assistant', response.output.message.content[0].text)
response.output.message.content[0].text
rescue StandardError => e
["An error occurred: #{e.message}", nil]
"An error occurred: #{e.message}"
end
end

def agent_was_helpful(tracker, helpful)
def agent_was_helpful(helpful)
kind = helpful ? :positive : :negative
tracker.track_feedback(kind: kind)
ai_config.tracker.track_feedback(kind: kind)
end

def map_converse_arguments(model_id, messages)
args = {
model_id: model_id,
}

mapped_messages = []
user_messages = messages.select { |msg| msg.role == 'user' }
mapped_messages << { role: 'user', content: user_messages.map { |msg| { text: msg.content } } } unless user_messages.empty?

assistant_messages = messages.select { |msg| msg.role == 'assistant' }
mapped_messages << { role: 'assistant', content: assistant_messages.map { |msg| { text: msg.content } } } unless assistant_messages.empty?
args[:messages] = mapped_messages unless mapped_messages.empty?
chat_messages = messages.select { |msg| msg.role != 'system' }
args[:messages] = chat_messages.map { |msg| { role: msg.role, content: [{ text: msg.content }] } }

system_messages = messages.select { |msg| msg.role == 'system' }
args[:system] = system_messages.map { |msg| { text: msg.content } } unless system_messages.empty?
Expand All @@ -118,27 +84,49 @@ def map_converse_arguments(model_id, messages)
})

bedrock_client = Aws::BedrockRuntime::Client.new(
aws_access_key_id: aws_access_key_id,
aws_secret_access_key: aws_secret_access_key,
region: region
)
chatbot = BedrockChatbot.new(ai_client, ai_config_key, bedrock_client, context)


DEFAULT_VALUE = LaunchDarkly::Server::AI::AIConfig.new(
enabled: true,
model: LaunchDarkly::Server::AI::ModelConfig.new(name: 'replace-with-your-model'),
messages: [
LaunchDarkly::Server::AI::Message.new('system',
'You are the backup assistant when something prevents retrieving LaunchDarkly configured assistant. You have the persona of HAL 9000 talking with {{ldctx.name}}'),
]
)

# You can also default to disabled if you are unable to connect to LaunchDarkly services.
# DEFAULT_VALUE = LaunchDarkly::Server::AI::AIConfig.new(
# enabled: false
# )

ai_config = ai_client.config(
ai_config_key,
context,
DEFAULT_VALUE
)

unless ai_config.enabled
puts '*** AI features are disabled'
exit 1
end

chatbot = BedrockChatbot.new(ai_config, bedrock_client)

loop do
print "Ask a question: (or type 'exit'): "
print "Ask a question (or type 'exit'): "
question = gets&.chomp
break if question.nil? || question.strip.downcase == 'exit'

response, tracker = chatbot.ask_agent(question)
response = chatbot.ask_agent(question)
puts "AI Response: #{response}"
end

next if tracker.nil? # If tracker is nil, skip feedback collection

print "Was the response helpful? [yes/no] (or type 'exit'): "
feedback = gets&.chomp
break if feedback.nil? || feedback.strip.downcase == 'exit'
print "Was the chat helpful? [yes/no]: "
feedback = gets&.chomp

chatbot.agent_was_helpful(tracker, feedback == 'yes')
end
chatbot.agent_was_helpful(feedback == 'yes') unless feedback.nil?

ld_client.close
ld_client.close
88 changes: 47 additions & 41 deletions examples/chatbot/openai/hello_openai.rb
Original file line number Diff line number Diff line change
Expand Up @@ -27,52 +27,51 @@
# Chatbot class that interacts with LaunchDarkly AI and OpenAI
#
class Chatbot
attr_reader :aiclient, :ai_config_key, :openai_client, :context

DEFAULT_VALUE = LaunchDarkly::Server::AI::AIConfig.new(
enabled: true,
model: LaunchDarkly::Server::AI::ModelConfig.new(name: 'my-default-model'),
messages: [
LaunchDarkly::Server::AI::Message.new('system',
'You are a default unhelpful assistant with the persona of HAL 9000 talking with {{ldctx.name}}'),
LaunchDarkly::Server::AI::Message.new('user', '{{user_question}}'),
]
)

def initialize(aiclient, ai_config_key, openai_client, context)
@aiclient = aiclient
@ai_config_key = ai_config_key
attr_reader :ai_config, :openai_client, :messages

def initialize(ai_config, openai_client)
@ai_config = ai_config
@messages = ai_config.messages
@openai_client = openai_client
@context = context
end

def ask_agent(question)
ai_config = aiclient.config(
@ai_config_key,
@context,
DEFAULT_VALUE,
{ user_question: question }
)

@messages << LaunchDarkly::Server::AI::Message.new('user', question)
begin
completion = ai_config.tracker.track_openai_metrics do
@openai_client.chat.completions.create(
model: ai_config.model.name,
messages: ai_config.messages.map(&:to_h)
messages: @messages.map(&:to_h)
)
end
[completion[:choices][0][:message][:content], ai_config.tracker]
response_content = completion[:choices][0][:message][:content]
@messages << LaunchDarkly::Server::AI::Message.new('assistant', response_content)
response_content
rescue StandardError => e
["An error occurred: #{e.message}", nil]
"An error occurred: #{e.message}"
end
end

def agent_was_helpful(tracker, helpful)
def agent_was_helpful(helpful)
kind = helpful ? :positive : :negative
tracker.track_feedback(kind: kind)
ai_config.tracker.track_feedback(kind: kind)
end
end

DEFAULT_VALUE = LaunchDarkly::Server::AI::AIConfig.new(
enabled: true,
model: LaunchDarkly::Server::AI::ModelConfig.new(name: 'replace-with-your-model'),
messages: [
LaunchDarkly::Server::AI::Message.new('system',
'You are the backup assistant when something prevents retrieving LaunchDarkly configured assistant. You have the persona of HAL 9000 talking with {{ldctx.name}}'),
]
)

# You can also default to disabled if you are unable to connect to LaunchDarkly services.
# DEFAULT_VALUE = LaunchDarkly::Server::AI::AIConfig.new(
# enabled: false
# )

ld_client = LaunchDarkly::LDClient.new(sdk_key)
ai_client = LaunchDarkly::Server::AI::Client.new(ld_client)

Expand All @@ -90,24 +89,31 @@ def agent_was_helpful(tracker, helpful)
name: 'Lucy',
})

chatbot = Chatbot.new(ai_client, ai_config_key, OpenAI::Client.new(api_key: openai_api_key), context)
ai_config = ai_client.config(
ai_config_key,
context,
DEFAULT_VALUE
)

unless ai_config.enabled
puts '*** AI features are disabled'
exit 1
end

chatbot = Chatbot.new(ai_config, OpenAI::Client.new(api_key: openai_api_key))

loop do
print "Ask a question (or type 'exit'): "
input = gets&.chomp
break if input.nil? || input.strip.downcase == 'exit'
question = gets&.chomp
break if question.nil? || question.strip.downcase == 'exit'

response, tracker = chatbot.ask_agent(input)
response = chatbot.ask_agent(question)
puts "AI Response: #{response}"
end

next if tracker.nil? # If tracker is nil, skip feedback collection

print "Was the response helpful? [yes/no] (or type 'exit'): "
feedback = gets&.chomp
break if feedback.nil? || feedback.strip.downcase == 'exit'
print "Was the chat helpful? [yes/no]: "
feedback = gets&.chomp

helpful = feedback.strip.downcase == 'yes'
chatbot.agent_was_helpful(tracker, helpful)
end
chatbot.agent_was_helpful(feedback == 'yes') unless feedback.nil?

ld_client.close
ld_client.close
Loading