Skip to content

Commit 6e0b79e

Browse files
committed
feat: Add chat completion support for Meilisearch v1.6+
- Implement ChatManager class with chat workspace and completion methods
- Add 6 new MCP tools for chat functionality:
  - chat-completion: Generate responses with RAG using indexed documents
  - create-chat-workspace: Create workspaces with default settings
  - update-chat-workspace: Modify existing workspace configurations
  - list-chat-workspaces: List all available workspaces
  - get-chat-workspace: Get specific workspace details
  - delete-chat-workspace: Remove workspaces
- Add streaming support for chat completions
- Include comprehensive tests for all chat features
- Update README with chat feature documentation and examples

This enables Meilisearch MCP users to leverage the new chat completions feature introduced in Meilisearch v1.6.0, allowing LLMs to generate contextual responses using indexed documents as knowledge base.
1 parent 2821835 commit 6e0b79e

File tree

6 files changed

+917
-3
lines changed

6 files changed

+917
-3
lines changed

README.md

Lines changed: 22 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -110,6 +110,20 @@ AI: Searching across all indices... Found 47 results from 3 indices:
110110
- 'tutorials': 9 hands-on tutorials
111111
```
112112

113+
### 🤖 Chat Completions with RAG (v1.6+):
114+
115+
```
116+
You: "Create a chat workspace for customer support with my products and FAQs indices"
117+
AI: I'll create that workspace... ✓ Chat workspace 'support-chat' created!
118+
119+
You: "Using the support workspace, how do I return a defective product?"
120+
AI: Based on your FAQs and product policies, here's the return process...
121+
[Generates contextual response using indexed documents]
122+
123+
You: "Generate a response about our warranty policy"
124+
AI: [Streams response] According to your documentation, the warranty covers...
125+
```
126+
113127
## 🔧 Installation
114128

115129
### Prerequisites
@@ -352,6 +366,14 @@ This project is licensed under the MIT License - see the [LICENSE](LICENSE) file
352366
#### Search
353367
- `search`: Flexible search across single or multiple indices with filtering and sorting options
354368

369+
#### Chat Completions (Meilisearch v1.6+)
370+
- `chat-completion`: Generate chat responses with RAG using indexed documents as context (supports streaming)
371+
- `create-chat-workspace`: Create a chat workspace with default settings for consistent interactions
372+
- `update-chat-workspace`: Modify existing chat workspace configurations
373+
- `list-chat-workspaces`: List all available chat workspaces
374+
- `get-chat-workspace`: Get details of a specific chat workspace
375+
- `delete-chat-workspace`: Remove a chat workspace
376+
355377
#### Settings Management
356378
- `get-settings`: View current settings for an index
357379
- `update-settings`: Update index settings (ranking, faceting, etc.)

src/meilisearch_mcp/chat.py

Lines changed: 278 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,278 @@
1+
from typing import Dict, Any, List, Optional, AsyncIterator
2+
from meilisearch import Client
3+
import httpx
4+
import json
5+
6+
7+
class ChatManager:
    """Manage Meilisearch chat completions and chat workspaces.

    The official Meilisearch Python client does not expose the chat
    endpoints, so this class issues raw HTTP requests with ``httpx``
    against ``/chat/completions`` and ``/chat/workspaces``, reusing the
    URL and API key of the wrapped :class:`meilisearch.Client`.
    """

    def __init__(self, client: "Client") -> None:
        """Capture connection defaults from an existing Meilisearch client.

        Args:
            client: Configured ``meilisearch.Client``; its base URL and API
                key are reused for the raw HTTP calls made by this manager.
        """
        self.client = client
        # Normalize so endpoint strings can always append "/path".
        self.base_url = client.config.url.rstrip("/")
        self.headers = {"Content-Type": "application/json"}
        # Only attach Authorization when a non-empty key is configured;
        # an empty key would otherwise produce a bogus "Bearer " header.
        if client.config.api_key:
            self.headers["Authorization"] = f"Bearer {client.config.api_key}"

    @staticmethod
    def _completion_payload(
        query: str,
        stream: bool,
        model: Optional[str],
        temperature: Optional[float],
        max_tokens: Optional[int],
        index_uids: Optional[List[str]],
        workspace_uid: Optional[str],
    ) -> Dict[str, Any]:
        """Assemble the JSON body shared by streaming and blocking completions.

        Optional fields are included only when explicitly provided
        (``is not None``), so legitimate falsy values are not dropped.
        """
        payload: Dict[str, Any] = {"query": query, "stream": stream}
        optional = {
            "model": model,
            "temperature": temperature,
            "maxTokens": max_tokens,
            "indexUids": index_uids,
            "workspaceUid": workspace_uid,
        }
        payload.update({k: v for k, v in optional.items() if v is not None})
        return payload

    @staticmethod
    def _workspace_settings(
        description: Optional[str],
        model: Optional[str],
        temperature: Optional[float],
        max_tokens: Optional[int],
        index_uids: Optional[List[str]],
    ) -> Dict[str, Any]:
        """Assemble the optional workspace fields shared by create/update."""
        fields = {
            "description": description,
            "model": model,
            "temperature": temperature,
            "maxTokens": max_tokens,
            "indexUids": index_uids,
        }
        return {k: v for k, v in fields.items() if v is not None}

    async def chat_completion_stream(
        self,
        query: str,
        model: Optional[str] = None,
        temperature: Optional[float] = None,
        max_tokens: Optional[int] = None,
        index_uids: Optional[List[str]] = None,
        workspace_uid: Optional[str] = None,
    ) -> AsyncIterator[str]:
        """
        Stream chat completion responses from Meilisearch.

        Args:
            query: The user's query/prompt
            model: The model to use for chat completion (e.g., "gpt-4", "gpt-3.5-turbo")
            temperature: Controls randomness (0-1)
            max_tokens: Maximum tokens in response
            index_uids: List of index UIDs to search for context
            workspace_uid: Chat workspace UID to use

        Yields:
            Content fragments parsed from the server-sent-event stream.

        Raises:
            httpx.HTTPStatusError: If the server responds with an error status.
        """
        endpoint = f"{self.base_url}/chat/completions"
        payload = self._completion_payload(
            query, True, model, temperature, max_tokens, index_uids, workspace_uid
        )

        async with httpx.AsyncClient() as http:
            async with http.stream(
                "POST", endpoint, headers=self.headers, json=payload, timeout=60.0
            ) as response:
                response.raise_for_status()
                async for line in response.aiter_lines():
                    # SSE frames are "data: <json>"; ignore everything else.
                    if not line.startswith("data: "):
                        continue
                    data = line[len("data: "):]
                    if data == "[DONE]":
                        break
                    try:
                        chunk = json.loads(data)
                    except json.JSONDecodeError:
                        # Skip keep-alive or malformed frames.
                        continue
                    choices = chunk.get("choices")
                    if choices:
                        content = choices[0].get("delta", {}).get("content", "")
                        if content:
                            yield content

    def chat_completion(
        self,
        query: str,
        model: Optional[str] = None,
        temperature: Optional[float] = None,
        max_tokens: Optional[int] = None,
        index_uids: Optional[List[str]] = None,
        workspace_uid: Optional[str] = None,
    ) -> Dict[str, Any]:
        """
        Get a non-streaming chat completion response.

        Args:
            query: The user's query/prompt
            model: The model to use for chat completion
            temperature: Controls randomness (0-1)
            max_tokens: Maximum tokens in response
            index_uids: List of index UIDs to search for context
            workspace_uid: Chat workspace UID to use

        Returns:
            Chat completion response as decoded JSON.

        Raises:
            httpx.HTTPStatusError: If the server responds with an error status.
        """
        endpoint = f"{self.base_url}/chat/completions"
        payload = self._completion_payload(
            query, False, model, temperature, max_tokens, index_uids, workspace_uid
        )

        with httpx.Client() as http:
            response = http.post(
                endpoint, headers=self.headers, json=payload, timeout=60.0
            )
            response.raise_for_status()
            return response.json()

    def create_chat_workspace(
        self,
        uid: str,
        name: str,
        description: Optional[str] = None,
        model: Optional[str] = None,
        temperature: Optional[float] = None,
        max_tokens: Optional[int] = None,
        index_uids: Optional[List[str]] = None,
    ) -> Dict[str, Any]:
        """
        Create a new chat workspace.

        Args:
            uid: Unique identifier for the workspace
            name: Name of the workspace
            description: Description of the workspace
            model: Default model for this workspace
            temperature: Default temperature for this workspace
            max_tokens: Default max tokens for this workspace
            index_uids: Default index UIDs for this workspace

        Returns:
            Created workspace information.

        Raises:
            httpx.HTTPStatusError: If the server responds with an error status.
        """
        endpoint = f"{self.base_url}/chat/workspaces"

        payload: Dict[str, Any] = {"uid": uid, "name": name}
        payload.update(
            self._workspace_settings(
                description, model, temperature, max_tokens, index_uids
            )
        )

        with httpx.Client() as http:
            response = http.post(endpoint, headers=self.headers, json=payload)
            response.raise_for_status()
            return response.json()

    def update_chat_workspace(
        self,
        uid: str,
        name: Optional[str] = None,
        description: Optional[str] = None,
        model: Optional[str] = None,
        temperature: Optional[float] = None,
        max_tokens: Optional[int] = None,
        index_uids: Optional[List[str]] = None,
    ) -> Dict[str, Any]:
        """
        Update an existing chat workspace.

        Only fields explicitly provided are sent, so unspecified settings
        are left untouched on the server.

        Args:
            uid: Unique identifier of the workspace to update
            name: New name for the workspace
            description: New description for the workspace
            model: New default model for this workspace
            temperature: New default temperature for this workspace
            max_tokens: New default max tokens for this workspace
            index_uids: New default index UIDs for this workspace

        Returns:
            Updated workspace information.

        Raises:
            httpx.HTTPStatusError: If the server responds with an error status.
        """
        endpoint = f"{self.base_url}/chat/workspaces/{uid}"

        payload: Dict[str, Any] = {}
        if name is not None:
            payload["name"] = name
        payload.update(
            self._workspace_settings(
                description, model, temperature, max_tokens, index_uids
            )
        )

        with httpx.Client() as http:
            response = http.patch(endpoint, headers=self.headers, json=payload)
            response.raise_for_status()
            return response.json()

    def list_chat_workspaces(
        self, limit: Optional[int] = None, offset: Optional[int] = None
    ) -> Dict[str, Any]:
        """
        List all chat workspaces.

        Args:
            limit: Maximum number of workspaces to return
            offset: Number of workspaces to skip

        Returns:
            List of chat workspaces.

        Raises:
            httpx.HTTPStatusError: If the server responds with an error status.
        """
        endpoint = f"{self.base_url}/chat/workspaces"

        # "is not None" so offset=0 / limit=0 are forwarded, not dropped.
        params = {
            k: v for k, v in {"limit": limit, "offset": offset}.items()
            if v is not None
        }

        with httpx.Client() as http:
            response = http.get(endpoint, headers=self.headers, params=params)
            response.raise_for_status()
            return response.json()

    def get_chat_workspace(self, uid: str) -> Dict[str, Any]:
        """
        Get details of a specific chat workspace.

        Args:
            uid: Unique identifier of the workspace

        Returns:
            Workspace details.

        Raises:
            httpx.HTTPStatusError: If the server responds with an error status.
        """
        endpoint = f"{self.base_url}/chat/workspaces/{uid}"

        with httpx.Client() as http:
            response = http.get(endpoint, headers=self.headers)
            response.raise_for_status()
            return response.json()

    def delete_chat_workspace(self, uid: str) -> Dict[str, Any]:
        """
        Delete a chat workspace.

        Args:
            uid: Unique identifier of the workspace to delete

        Returns:
            Deletion confirmation, or an empty dict when the server
            returns no body (e.g. HTTP 204).

        Raises:
            httpx.HTTPStatusError: If the server responds with an error status.
        """
        endpoint = f"{self.base_url}/chat/workspaces/{uid}"

        with httpx.Client() as http:
            response = http.delete(endpoint, headers=self.headers)
            response.raise_for_status()
            # DELETE commonly returns an empty body; avoid a JSON decode error.
            return response.json() if response.content else {}

src/meilisearch_mcp/client.py

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -9,6 +9,7 @@
99
from .keys import KeyManager
1010
from .logging import MCPLogger
1111
from .monitoring import MonitoringManager
12+
from .chat import ChatManager
1213
from .__version__ import __version__
1314

1415
logger = MCPLogger()
@@ -31,6 +32,7 @@ def __init__(
3132
self.tasks = TaskManager(self.client)
3233
self.keys = KeyManager(self.client)
3334
self.monitoring = MonitoringManager(self.client)
35+
self.chat = ChatManager(self.client)
3436

3537
def health_check(self) -> bool:
3638
"""Check if Meilisearch is healthy"""

0 commit comments

Comments
 (0)