
Commit 03d0116

feat: Add OpenAI Conversations API
Signed-off-by: Francisco Javier Arceo <[email protected]>
1 parent 28bbbcf commit 03d0116

18 files changed, +5304 -3106 lines


docs/static/llama-stack-spec.html

Lines changed: 2571 additions & 1881 deletions
Large diffs are not rendered by default.

docs/static/llama-stack-spec.yaml

Lines changed: 1789 additions & 1216 deletions
Large diffs are not rendered by default.
Lines changed: 31 additions & 0 deletions
@@ -0,0 +1,31 @@

# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under the terms described in the LICENSE file in
# the root directory of this source tree.

from .conversations import (
    Conversation,
    ConversationCreateRequest,
    ConversationDeletedResource,
    ConversationItem,
    ConversationItemCreateRequest,
    ConversationItemDeletedResource,
    ConversationItemList,
    Conversations,
    ConversationUpdateRequest,
    Metadata,
)

__all__ = [
    "Conversation",
    "ConversationCreateRequest",
    "ConversationDeletedResource",
    "ConversationItem",
    "ConversationItemCreateRequest",
    "ConversationItemDeletedResource",
    "ConversationItemList",
    "Conversations",
    "ConversationUpdateRequest",
    "Metadata",
]
Lines changed: 260 additions & 0 deletions
@@ -0,0 +1,260 @@

# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under the terms described in the LICENSE file in
# the root directory of this source tree.

from typing import Annotated, Literal, Protocol, runtime_checkable

from openai import NOT_GIVEN
from openai._types import NotGiven
from openai.types.responses.response_includable import ResponseIncludable
from pydantic import BaseModel, Field

from llama_stack.apis.agents.openai_responses import (
    OpenAIResponseMessage,
    OpenAIResponseOutputMessageFileSearchToolCall,
    OpenAIResponseOutputMessageFunctionToolCall,
    OpenAIResponseOutputMessageMCPCall,
    OpenAIResponseOutputMessageMCPListTools,
    OpenAIResponseOutputMessageWebSearchToolCall,
)
from llama_stack.apis.version import LLAMA_STACK_API_V1
from llama_stack.providers.utils.telemetry.trace_protocol import trace_protocol
from llama_stack.schema_utils import json_schema_type, register_schema, webmethod

Metadata = dict[str, str]


@json_schema_type
class Conversation(BaseModel):
    """OpenAI-compatible conversation object."""

    id: str = Field(..., description="The unique ID of the conversation.")
    object: Literal["conversation"] = Field(
        default="conversation", description="The object type, which is always conversation."
    )
    created_at: int = Field(
        ..., description="The time at which the conversation was created, measured in seconds since the Unix epoch."
    )
    metadata: Metadata | None = Field(
        default=None,
        description="Set of 16 key-value pairs that can be attached to an object. This can be useful for storing additional information about the object in a structured format, and querying for objects via API or the dashboard.",
    )
    items: list[dict] | None = Field(
        default=None,
        description="Initial items to include in the conversation context. You may add up to 20 items at a time.",
    )
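As a quick aside, the Conversation model above is a plain Pydantic model, so it can be constructed and serialized directly; a minimal sketch with made-up values:

    conv = Conversation(
        id="conv_123",            # illustrative ID; real IDs are assigned by the server
        created_at=1_700_000_000,
        metadata={"topic": "support"},
    )
    print(conv.model_dump_json(exclude_none=True))
    # {"id":"conv_123","object":"conversation","created_at":1700000000,"metadata":{"topic":"support"}}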
@json_schema_type
class ConversationMessage(BaseModel):
    """OpenAI-compatible message item for conversations."""

    id: str = Field(..., description="unique identifier for this message")
    content: list[dict] = Field(..., description="message content")
    role: str = Field(..., description="message role")
    status: str = Field(..., description="message status")
    type: Literal["message"] = "message"
    object: Literal["message"] = "message"


ConversationItem = Annotated[
    OpenAIResponseMessage
    | OpenAIResponseOutputMessageFunctionToolCall
    | OpenAIResponseOutputMessageFileSearchToolCall
    | OpenAIResponseOutputMessageWebSearchToolCall
    | OpenAIResponseOutputMessageMCPCall
    | OpenAIResponseOutputMessageMCPListTools,
    Field(discriminator="type"),
]
register_schema(ConversationItem, name="ConversationItem")

# Using OpenAI types directly caused issues, but some notes for reference:
# Note that ConversationItem is an Annotated Union of the types below:
# from openai.types.responses import *
# from openai.types.responses.response_item import *
# from openai.types.conversations import ConversationItem
# f = [
#     ResponseFunctionToolCallItem,
#     ResponseFunctionToolCallOutputItem,
#     ResponseFileSearchToolCall,
#     ResponseFunctionWebSearch,
#     ImageGenerationCall,
#     ResponseComputerToolCall,
#     ResponseComputerToolCallOutputItem,
#     ResponseReasoningItem,
#     ResponseCodeInterpreterToolCall,
#     LocalShellCall,
#     LocalShellCallOutput,
#     McpListTools,
#     McpApprovalRequest,
#     McpApprovalResponse,
#     McpCall,
#     ResponseCustomToolCall,
#     ResponseCustomToolCallOutput
# ]
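Since ConversationItem is a discriminated union keyed on the "type" field, a raw payload can be validated into the matching concrete model with a Pydantic TypeAdapter. A minimal sketch, assuming a payload shaped like an OpenAIResponseMessage:

    from pydantic import TypeAdapter

    # The "type" discriminator picks the concrete model from the union above.
    adapter = TypeAdapter(ConversationItem)
    item = adapter.validate_python(
        {"type": "message", "role": "user", "content": "Hello!"}  # illustrative payload
    )
    assert isinstance(item, OpenAIResponseMessage)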
@json_schema_type
class ConversationCreateRequest(BaseModel):
    """Request body for creating a conversation."""

    items: list[ConversationItem] | None = Field(
        default=[],
        description="Initial items to include in the conversation context. You may add up to 20 items at a time.",
        max_length=20,
    )
    metadata: Metadata | None = Field(
        default={},
        description="Set of 16 key-value pairs that can be attached to an object. Useful for storing additional information",
        max_length=16,
    )


@json_schema_type
class ConversationUpdateRequest(BaseModel):
    """Request body for updating a conversation."""

    metadata: Metadata = Field(
        ...,
        description="Set of 16 key-value pairs that can be attached to an object. This can be useful for storing additional information about the object in a structured format, and querying for objects via API or the dashboard. Keys are strings with a maximum length of 64 characters. Values are strings with a maximum length of 512 characters.",
    )


@json_schema_type
class ConversationDeletedResource(BaseModel):
    """Response for deleted conversation."""

    id: str = Field(..., description="The deleted conversation identifier")
    object: str = Field(default="conversation.deleted", description="Object type")
    deleted: bool = Field(default=True, description="Whether the object was deleted")


@json_schema_type
class ConversationItemCreateRequest(BaseModel):
    """Request body for creating conversation items."""

    items: list[ConversationItem] = Field(
        ...,
        description="Items to include in the conversation context. You may add up to 20 items at a time.",
        max_length=20,
    )


@json_schema_type
class ConversationItemList(BaseModel):
    """List of conversation items with pagination."""

    object: str = Field(default="list", description="Object type")
    data: list[ConversationItem] = Field(..., description="List of conversation items")
    first_id: str | None = Field(default=None, description="The ID of the first item in the list")
    last_id: str | None = Field(default=None, description="The ID of the last item in the list")
    has_more: bool = Field(default=False, description="Whether there are more items available")


@json_schema_type
class ConversationItemDeletedResource(BaseModel):
    """Response for deleted conversation item."""

    id: str = Field(..., description="The deleted item identifier")
    object: str = Field(default="conversation.item.deleted", description="Object type")
    deleted: bool = Field(default=True, description="Whether the object was deleted")


@runtime_checkable
@trace_protocol
class Conversations(Protocol):
    """Protocol for conversation management operations."""

    @webmethod(route="/conversations", method="POST", level=LLAMA_STACK_API_V1)
    async def create_conversation(
        self, items: list[ConversationItem] | None = None, metadata: Metadata | None = None
    ) -> Conversation:
        """Create a conversation.

        :param items: Initial items to include in the conversation context.
        :param metadata: Set of key-value pairs that can be attached to an object.
        :returns: The created conversation object.
        """
        ...

    @webmethod(route="/conversations/{conversation_id}", method="GET", level=LLAMA_STACK_API_V1)
    async def get_conversation(self, conversation_id: str) -> Conversation:
        """Get a conversation with the given ID.

        :param conversation_id: The conversation identifier.
        :returns: The conversation object.
        """
        ...

    @webmethod(route="/conversations/{conversation_id}", method="POST", level=LLAMA_STACK_API_V1)
    async def update_conversation(self, conversation_id: str, metadata: Metadata) -> Conversation:
        """Update a conversation's metadata with the given ID.

        :param conversation_id: The conversation identifier.
        :param metadata: Set of key-value pairs that can be attached to an object.
        :returns: The updated conversation object.
        """
        ...

    @webmethod(route="/conversations/{conversation_id}", method="DELETE", level=LLAMA_STACK_API_V1)
    async def openai_delete_conversation(self, conversation_id: str) -> ConversationDeletedResource:
        """Delete a conversation with the given ID.

        :param conversation_id: The conversation identifier.
        :returns: The deleted conversation resource.
        """
        ...

    @webmethod(route="/conversations/{conversation_id}/items", method="POST", level=LLAMA_STACK_API_V1)
    async def create(self, conversation_id: str, items: list[ConversationItem]) -> ConversationItemList:
        """Create items in the conversation.

        :param conversation_id: The conversation identifier.
        :param items: Items to include in the conversation context.
        :returns: List of created items.
        """
        ...

    @webmethod(route="/conversations/{conversation_id}/items/{item_id}", method="GET", level=LLAMA_STACK_API_V1)
    async def retrieve(self, conversation_id: str, item_id: str) -> ConversationItem:
        """Retrieve a conversation item.

        :param conversation_id: The conversation identifier.
        :param item_id: The item identifier.
        :returns: The conversation item.
        """
        ...

    @webmethod(route="/conversations/{conversation_id}/items", method="GET", level=LLAMA_STACK_API_V1)
    async def list(
        self,
        conversation_id: str,
        after: str | NotGiven = NOT_GIVEN,
        include: list[ResponseIncludable] | NotGiven = NOT_GIVEN,
        limit: int | NotGiven = NOT_GIVEN,
        order: Literal["asc", "desc"] | NotGiven = NOT_GIVEN,
    ) -> ConversationItemList:
        """List items in the conversation.

        :param conversation_id: The conversation identifier.
        :param after: An item ID to list items after, used in pagination.
        :param include: Specify additional output data to include in the response.
        :param limit: A limit on the number of objects to be returned (1-100, default 20).
        :param order: The order to return items in (asc or desc, default desc).
        :returns: List of conversation items.
        """
        ...

    @webmethod(route="/conversations/{conversation_id}/items/{item_id}", method="DELETE", level=LLAMA_STACK_API_V1)
    async def openai_delete_conversation_item(
        self, conversation_id: str, item_id: str
    ) -> ConversationItemDeletedResource:
        """Delete a conversation item.

        :param conversation_id: The conversation identifier.
        :param item_id: The item identifier.
        :returns: The deleted item resource.
        """
        ...
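For orientation, a rough client-side sketch of the routes declared by the protocol above, issued as plain HTTP calls with httpx. The base URL, port, and /v1 prefix are assumptions (a locally running Llama Stack server and LLAMA_STACK_API_V1), not something this diff pins down:

    import httpx

    # Assumed local server and version prefix; adjust for your deployment.
    BASE = "http://localhost:8321/v1"

    with httpx.Client(base_url=BASE) as client:
        # POST /conversations -> Conversation
        conv = client.post("/conversations", json={"metadata": {"topic": "demo"}}).json()

        # POST /conversations/{conversation_id}/items -> ConversationItemList
        client.post(
            f"/conversations/{conv['id']}/items",
            json={"items": [{"type": "message", "role": "user", "content": "Hello!"}]},
        )

        # GET /conversations/{conversation_id}/items -> ConversationItemList
        items = client.get(f"/conversations/{conv['id']}/items").json()

        # DELETE /conversations/{conversation_id} -> ConversationDeletedResource
        client.delete(f"/conversations/{conv['id']}")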

llama_stack/apis/datatypes.py

Lines changed: 1 addition & 0 deletions
@@ -129,6 +129,7 @@ class Api(Enum, metaclass=DynamicApiMeta):
     tool_groups = "tool_groups"
     files = "files"
     prompts = "prompts"
+    conversations = "conversations"

     # built-in API
     inspect = "inspect"
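The added line registers the new conversations API in the Api enum, which is how the stack identifies APIs when wiring routes and providers. A trivial check, assuming the module path shown in the diff:

    from llama_stack.apis.datatypes import Api

    # The member's value is the plain string identifier for the API.
    assert Api.conversations.value == "conversations"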
Lines changed: 5 additions & 0 deletions
@@ -0,0 +1,5 @@

# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under the terms described in the LICENSE file in
# the root directory of this source tree.
