
Commit c011da0

fix(ask-baml): ask-baml-client is not correctly pointed at sage-backend (#2267)
> [!IMPORTANT]
> Fix environment variables and endpoints in ask-baml-client to correctly point to sage-backend.
>
> - **Environment Variables**:
>   - Change `TARGET_ENV` from `prod` to `production` in `.github/workflows/publish-docs.yml`.
>   - Change `NODE_ENV` to `PINECONE_ENV` in `.github/workflows/publish-docs.yml`.
> - **Endpoints and Constants**:
>   - Replace hardcoded `API_ENDPOINT` with `ALGOLIA_SEARCH_CREDENTIALS_ENDPOINT` in `AlgoliaSearch.tsx`.
>   - Replace hardcoded `API_ENDPOINT` with `CHAT_ENDPOINT` in `ChatBot.tsx`.
>   - Replace hardcoded `FEEDBACK_API_ENDPOINT` with `FEEDBACK_ENDPOINT` in `AssistantResponseFeedback.tsx`.
>   - Define `SAGE_URL`, `CHAT_ENDPOINT`, `FEEDBACK_ENDPOINT`, `ALGOLIA_SEARCH_CREDENTIALS_ENDPOINT`, and `ALGOLIA_SEARCH_INDEX_NAME` in `constants.ts`.
> - **File Renames**:
>   - Rename `typescript/apps/sage-backend/app/api/ask-baml-chat/route.ts` to `typescript/apps/sage-backend/app/api/ask-baml/chat/route.ts`.
>   - Rename `typescript/apps/sage-backend/app/api/ask-baml-feedback/route.ts` to `typescript/apps/sage-backend/app/api/ask-baml/feedback/route.ts`.
> - **Miscellaneous**:
>   - Update `PINECONE_INDEX_NAME` logic in `pinecone-api.ts` to use `PINECONE_ENV`.

This description was created by [Ellipsis](https://www.ellipsis.dev) for 927081e and will automatically update as commits are pushed.
1 parent 71b2c5a commit c011da0
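
For orientation, here is a minimal sketch (not part of this commit) of what a client call against the relocated chat route looks like once the new constants are in place. The `askDocs` helper name and the sample body are hypothetical; the URL resolution and request shape mirror the new `constants.ts` and the zod schemas shown in the diffs below.

```ts
// Hypothetical sketch: calling the relocated sage-backend chat route.
// SAGE_URL resolution mirrors the new constants.ts; askDocs is an invented helper name.
type ChatMessage = { role: 'user' | 'assistant'; text: string };

const SAGE_URL =
  process.env.NODE_ENV === 'development'
    ? 'http://localhost:4000'
    : 'https://boundary-sage-backend.vercel.app';

async function askDocs(query: string, prevMessages: ChatMessage[] = []): Promise<unknown> {
  const response = await fetch(`${SAGE_URL}/api/ask-baml/chat`, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ query, prev_messages: prevMessages }),
  });
  if (!response.ok) {
    throw new Error(`Chat request failed: ${response.status}`);
  }
  return response.json();
}
```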

File tree

7 files changed (+45 / -17 lines)


typescript/apps/ask-baml-client/src/AlgoliaSearch.tsx

Lines changed: 3 additions & 4 deletions
@@ -6,9 +6,8 @@ import { useEffect, useRef, useState } from 'react';
 import { Configure, InstantSearch, useHits, useSearchBox } from 'react-instantsearch';
 import { z } from 'zod';
 import BamlLambWhite from './baml-lamb-white.svg';
+import { ALGOLIA_SEARCH_CREDENTIALS_ENDPOINT, ALGOLIA_SEARCH_INDEX_NAME } from './constants';

-const SEARCH_INDEX_NAME = 'fern_docs_search';
-const API_ENDPOINT = 'https://docs.boundaryml.com/api/fern-docs/search/v2/key';

 // Zod schema for API response validation
 const SearchCredentialsSchema = z.object({
@@ -21,7 +20,7 @@ type SearchCredentials = z.infer<typeof SearchCredentialsSchema>;
 // Function to fetch Algolia search credentials from API
 async function fetchSearchCredentials(): Promise<SearchCredentials> {
   try {
-    const response = await fetch(API_ENDPOINT);
+    const response = await fetch(ALGOLIA_SEARCH_CREDENTIALS_ENDPOINT);
     if (!response.ok) {
       throw new Error(`Failed to fetch search credentials: ${response.status}`);
     }
@@ -954,7 +953,7 @@ export default function AlgoliaSearch({
   return (
     <div ref={containerRef} style={{ position: 'relative', width: '100%' }}>
       <InstantSearch
-        indexName={SEARCH_INDEX_NAME}
+        indexName={ALGOLIA_SEARCH_INDEX_NAME}
         searchClient={searchClient}
         future={{
           preserveSharedStateOnUnmount: true,

typescript/apps/ask-baml-client/src/ChatBot.tsx

Lines changed: 2 additions & 6 deletions
@@ -12,6 +12,7 @@ import {
   resetSessionAtom,
   sessionIdAtom,
 } from './store';
+import { CHAT_ENDPOINT } from './constants';

 const OPEN_BY_DEFAULT = true;
 const SESSION_STORAGE_KEY = 'baml-ai-context';
@@ -63,7 +64,7 @@ const serializeError = (
 };

 const postDocChat = async (req: QueryRequest) => {
-  const response = await fetch(API_ENDPOINT, {
+  const response = await fetch(CHAT_ENDPOINT, {
     method: 'POST',
     headers: {
       'Content-Type': 'application/json',
@@ -79,11 +80,6 @@ const postDocChat = async (req: QueryRequest) => {
   return QueryResponseSchema.parse(data);
 };

-const API_ENDPOINT =
-  process.env.NODE_ENV === 'development'
-    ? 'http://localhost:4000/api/doc-chat'
-    : 'https://boundary-sage-backend.vercel.app/api/ask-baml-chat';
-
 const ChatBot: React.FC<ChatBotProps> = ({ isOpen = OPEN_BY_DEFAULT, onClose }) => {
   const [messages, setMessages] = useAtom(messagesAtom);
   const sessionId = useAtomValue(sessionIdAtom);

Lines changed: 27 additions & 0 deletions
@@ -0,0 +1,27 @@
+import { z } from 'zod';
+// IfChange
+export const QueryRequestSchema = z.object({
+  query: z.string(),
+  language_preference: z.string().optional(),
+  prev_messages: z.array(
+    z.object({
+      role: z.enum(['user', 'assistant']),
+      text: z.string(),
+    }),
+  ),
+});
+export type QueryRequest = z.infer<typeof QueryRequestSchema>;
+
+export const QueryResponseSchema = z.object({
+  ranked_docs: z.array(
+    z.object({
+      title: z.string(),
+      url: z.string(),
+      relevance: z.enum(['very-relevant', 'relevant', 'not-relevant']),
+    }),
+  ),
+  answer: z.string().optional().or(z.null()),
+  suggested_messages: z.array(z.string()).optional(),
+});
+export type QueryResponse = z.infer<typeof QueryResponseSchema>;
+// ThenChange baml/sage-backend/app/types.ts

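The schemas above are plain zod objects, so consumers can validate request and response payloads at the boundary. A short usage sketch follows; the `@baml/sage-interface` import path is inferred from the backend route below (not confirmed by this diff), and the sample payload is made up.

```ts
import { QueryResponseSchema } from '@baml/sage-interface'; // import path inferred, not confirmed by this diff

// Made-up payload standing in for a backend response.
const rawPayload: unknown = {
  ranked_docs: [
    { title: 'BAML docs', url: 'https://docs.boundaryml.com/', relevance: 'relevant' },
  ],
  answer: null,
};

// safeParse reports schema drift instead of throwing.
const parsed = QueryResponseSchema.safeParse(rawPayload);
if (parsed.success) {
  console.log(parsed.data.ranked_docs.map((doc) => doc.url));
} else {
  console.error(parsed.error.issues);
}
```
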
typescript/apps/ask-baml-client/src/constants.ts

Lines changed: 10 additions & 0 deletions
@@ -0,0 +1,10 @@
+const SAGE_URL =
+  process.env.NODE_ENV === 'development'
+    ? 'http://localhost:4000'
+    : 'https://boundary-sage-backend.vercel.app';
+export const CHAT_ENDPOINT = `${SAGE_URL}/api/ask-baml/chat`;
+export const FEEDBACK_ENDPOINT = `${SAGE_URL}/api/ask-baml/feedback`;
+
+export const ALGOLIA_SEARCH_CREDENTIALS_ENDPOINT =
+  'https://docs.boundaryml.com/api/fern-docs/search/v2/key';
+export const ALGOLIA_SEARCH_INDEX_NAME = 'fern_docs_search';

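As a quick illustration (not part of the commit), the exported endpoints resolve as follows depending on `NODE_ENV`; the relative import assumes a module sitting next to `constants.ts`, as the updated components do.

```ts
import { CHAT_ENDPOINT, FEEDBACK_ENDPOINT } from './constants';

// NODE_ENV === 'development':
//   CHAT_ENDPOINT     -> http://localhost:4000/api/ask-baml/chat
//   FEEDBACK_ENDPOINT -> http://localhost:4000/api/ask-baml/feedback
// any other NODE_ENV:
//   CHAT_ENDPOINT     -> https://boundary-sage-backend.vercel.app/api/ask-baml/chat
//   FEEDBACK_ENDPOINT -> https://boundary-sage-backend.vercel.app/api/ask-baml/feedback
console.log({ CHAT_ENDPOINT, FEEDBACK_ENDPOINT });
```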

typescript/apps/ask-baml-client/src/lib/AssistantResponseFeedback.tsx

Lines changed: 2 additions & 6 deletions
@@ -4,16 +4,12 @@ import { ThumbsDown, ThumbsUp } from 'lucide-react';
 import type React from 'react';
 import { useState } from 'react';
 import { messagesAtom, sessionIdAtom } from '../store';
+import { FEEDBACK_ENDPOINT } from '../constants';

 interface AssistantResponseFeedbackProps {
   messageId: string;
 }

-const FEEDBACK_API_ENDPOINT =
-  process.env.NODE_ENV === 'development'
-    ? 'http://localhost:4000/api/feedback'
-    : 'https://boundary-sage-backend.vercel.app/api/ask-baml-feedback';
-
 export const AssistantResponseFeedback: React.FC<AssistantResponseFeedbackProps> = ({
   messageId,
 }) => {
@@ -96,7 +92,7 @@ export const AssistantResponseFeedback: React.FC<AssistantResponseFeedbackProps>
       messages: filteredMessages,
     };

-    const response = await fetch(FEEDBACK_API_ENDPOINT, {
+    const response = await fetch(FEEDBACK_ENDPOINT, {
       method: 'POST',
       headers: {
         'Content-Type': 'application/json',

typescript/apps/sage-backend/app/api/ask-baml-chat/route.ts renamed to typescript/apps/sage-backend/app/api/ask-baml/chat/route.ts

Lines changed: 1 addition & 1 deletion
@@ -2,7 +2,7 @@ import { NotionLogger } from '@/lib/notion-api';
 import { QueryRequestSchema } from '@baml/sage-interface';
 import type { NextRequest } from 'next/server';
 import { NextResponse } from 'next/server';
-import { submitQuery } from '../../actions/query';
+import { submitQuery } from '@/app/actions/query';

 const notionLogger = new NotionLogger();

typescript/apps/sage-backend/app/api/ask-baml-feedback/route.ts renamed to typescript/apps/sage-backend/app/api/ask-baml/feedback/route.ts

File renamed without changes.
