@@ -96,13 +96,18 @@ def lambda_handler(event, context):


    content = bedrock.invoke_model(
        modelId=modelId, body=body, accept=accept, contentType=contentType
    )

    response = json.loads(content.get("body").read())
    print(response)

    answer = get_llm_answer(model_id, response)

    # Anthropic Claude V3 responses come back as a Python list, not a string,
    # so convert before handing the answer to the UI.
    if not isinstance(answer, str):
        answer = '.'.join(str(x) for x in answer)

    body = {
        "source_page": source_page_info if should_source_be_included(answer) else [],
        "answer": answer,
@@ -139,10 +144,15 @@ def get_llm_answer(model_id, response):
        return response.get('results')[0].get('outputText')
    elif model_id == "Anthropic-Claude-V2":
        return response.get("completion")
    elif model_id in ("Anthropic-Claude-V3-Sonnet", "Anthropic-Claude-V3-Haiku", "Anthropic-Claude-V3-Opus"):
        return response.get("content")[0].get('text')
    elif model_id == "AI21-Jurassic-2-Ultra":
        return response['completions'][0]['data']['text']
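For reference, a minimal sketch of the response shape this parser expects from the Claude V3 models (the sample payload below is hypothetical; field names follow Bedrock's "bedrock-2023-05-31" Messages API, where "content" is a list of blocks rather than a plain string):

# Hypothetical parsed Claude V3 response body; only the fields read above are shown.
sample_response = {
    "content": [{"type": "text", "text": "Kendra is a managed search service."}],
    "stop_reason": "end_turn",
}
# get_llm_answer pulls the text of the first content block:
assert get_llm_answer("Anthropic-Claude-V3-Sonnet", sample_response) == "Kendra is a managed search service."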


def should_source_be_included(ans):

    include = True
@@ -158,6 +168,7 @@ def should_source_be_included(ans):
            return False

    return include


def get_relevant_doc_names(relevant_documents):
    sources = []
@@ -176,6 +187,7 @@ def get_relevant_doc_names(relevant_documents):
    most_relevant_docs = most_relevant_docs[:int(NO_OF_SOURCES_TO_LIST)]
    for doc_path in most_relevant_docs:
        doc_names.append({"file_name": get_source_file_name(doc_path), "file": get_presigned_url(doc_path)})
        # Use this instead if the Kendra index uses a web crawler rather than docs on S3:
        # doc_names.append({"file_name": get_source_file_name(doc_path), "file": doc_path})

    return doc_names
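The presigned-URL helper itself is outside this hunk; below is a minimal sketch of what it might look like, assuming the source documents live in an S3 bucket named by a DOCUMENTS_BUCKET environment variable (the bucket variable, expiry, and key handling are assumptions, not the repo's actual code):

import os
import boto3

s3 = boto3.client("s3")

def get_presigned_url(doc_path, expires_in=3600):
    # doc_path is treated here as the S3 object key; the real helper may
    # parse a full s3:// URI before building the URL.
    return s3.generate_presigned_url(
        "get_object",
        Params={"Bucket": os.environ["DOCUMENTS_BUCKET"], "Key": doc_path},
        ExpiresIn=expires_in,
    )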

76 changes: 76 additions & 0 deletions Kendra-Foundational-LLM-Chatbot/api/chat-handler/llm_factory.py
@@ -43,6 +43,76 @@ def get_model_args(model_id, prompt):
"temperature": 0.0,
"top_p": 0.9
}

elif model_id == "Anthropic-Claude-V3-Sonnet":
question_llm_model_args = {
"anthropic_version": "bedrock-2023-05-31",
"max_tokens": 1024,
"messages": [
{
"role": "user",
"content": [{"type": "text", "text": prompt}],
}
],
}

qa_llm_model_args = {
"anthropic_version": "bedrock-2023-05-31",
"max_tokens": 1024,
"messages": [
{
"role": "user",
"content": [{"type": "text", "text": prompt}],
}
],
}

elif model_id == "Anthropic-Claude-V3-Haiku":
question_llm_model_args = {
"anthropic_version": "bedrock-2023-05-31",
"max_tokens": 1024,
"messages": [
{
"role": "user",
"content": [{"type": "text", "text": prompt}],
}
],
}

qa_llm_model_args = {
"anthropic_version": "bedrock-2023-05-31",
"max_tokens": 1024,
"messages": [
{
"role": "user",
"content": [{"type": "text", "text": prompt}],
}
],
}

elif model_id == "Anthropic-Claude-V3-Opus":
question_llm_model_args = {
"anthropic_version": "bedrock-2023-05-31",
"max_tokens": 1024,
"messages": [
{
"role": "user",
"content": [{"type": "text", "text": prompt}],
}
],
}

qa_llm_model_args = {
"anthropic_version": "bedrock-2023-05-31",
"max_tokens": 1024,
"messages": [
{
"role": "user",
"content": [{"type": "text", "text": prompt}],
}
],
}

elif model_id == "AI21-Jurassic-2-Ultra":
question_llm_model_args = {
"prompt": prompt,
@@ -79,6 +149,12 @@ def get_model_id(model_id):
return "amazon.titan-text-express-v1"
elif model_id == "Anthropic-Claude-V2":
return "anthropic.claude-v2"
elif model_id == "Anthropic-Claude-V3-Sonnet":
return "anthropic.claude-3-sonnet-20240229-v1:0"
elif model_id == "Anthropic-Claude-V3-Haiku":
return "anthropic.claude-3-haiku-20240307-v1:0"
elif model_id == "Anthropic-Claude-V3-Opus":
return "anthropic.claude-3-opus-20240229-v1:0"
elif model_id == "AI21-Jurassic-2-Ultra":
return "ai21.j2-ultra-v1"
else:
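Putting the two helpers above together, here is a minimal end-to-end sketch of invoking one of the new Claude V3 models through Bedrock (client setup, region, and the question text are assumptions; the payload mirrors get_model_args and the modelId mirrors get_model_id):

import json
import boto3

bedrock = boto3.client("bedrock-runtime")  # region/credentials assumed

payload = {
    "anthropic_version": "bedrock-2023-05-31",
    "max_tokens": 1024,
    "messages": [
        {"role": "user", "content": [{"type": "text", "text": "What is Amazon Kendra?"}]}
    ],
}
result = bedrock.invoke_model(
    modelId="anthropic.claude-3-haiku-20240307-v1:0",  # value returned by get_model_id
    body=json.dumps(payload),
    accept="application/json",
    contentType="application/json",
)
# The response body is a stream; parse it and take the first content block.
answer = json.loads(result["body"].read())["content"][0]["text"]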
@@ -12,6 +12,12 @@ def get_prompts(model_id, question, context):
        return get_titan_document_prompt(question, context)
    elif model_id == "Anthropic-Claude-V2":
        return get_claude_document_prompt(question, context)
    elif model_id in ("Anthropic-Claude-V3-Sonnet", "Anthropic-Claude-V3-Haiku", "Anthropic-Claude-V3-Opus"):
        return get_claude_document_prompt(question, context)
    elif model_id == "AI21-Jurassic-2-Ultra":
        return get_jurassic_document_prompt(question, context)
    else:
50 changes: 41 additions & 9 deletions Kendra-Foundational-LLM-Chatbot/deploy/package-lock.json

Some generated files are not rendered by default.