-
Notifications
You must be signed in to change notification settings - Fork 1.4k
Add support for GPT-5 Free-Form Function Calling and Context Free Grammar constraints over tools #2572
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Add support for GPT-5 Free-Form Function Calling and Context Free Grammar constraints over tools #2572
Changes from 8 commits
c713a6e
de3bc18
e7ca5ca
f0a5cbe
68fc7cf
5d9af16
3687580
61f7291
7a869b9
5e8cef9
79b519a
991c01d
c70bc1d
9586e6c
0b47135
92db07a
3e605e0
ab45262
3982f32
f3595b3
bef5dec
21e1a0b
cfcf7cf
1c5c500
e0017b4
307b011
131ab91
b386eb6
01988a5
88b8b28
3a46eea
c084523
ef1a696
e3f514d
0dbcdaf
4533df1
fc477bb
4cacf9b
185c929
a81e7b9
4a6e540
8714253
b55c9ab
b347edc
997d2ac
afdd6ef
175eee9
19eb167
6e259c8
742fb91
d69daad
d78a5c2
dc1c182
d1fb3a4
97b4d82
c54f26e
e206e3e
c483fda
5265211
f337820
2c7367f
f3a4afd
c4665a2
b86d2b1
d713c29
d0c346c
3037e6e
4e2264d
e28836b
b49cd81
a7112f4
7c96803
5d2b372
ec057c8
c949c83
36a0759
ae64d6b
f5ca42e
e7bea60
637774f
9cf0931
8c6c976
febe88d
3106219
673ef1e
2581873
9ec5b69
3927cf0
5990d8b
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -313,7 +313,7 @@ async def request( | |
| response = await self._completions_create( | ||
| messages, False, cast(OpenAIModelSettings, model_settings or {}), model_request_parameters | ||
| ) | ||
| model_response = self._process_response(response) | ||
| model_response = self._process_response(response, model_request_parameters) | ||
| return model_response | ||
|
|
||
| @asynccontextmanager | ||
|
|
@@ -762,7 +762,7 @@ async def request( | |
| response = await self._responses_create( | ||
| messages, False, cast(OpenAIResponsesModelSettings, model_settings or {}), model_request_parameters | ||
| ) | ||
| return self._process_response(response) | ||
| return self._process_response(response, model_request_parameters) | ||
|
|
||
| @asynccontextmanager | ||
| async def request_stream( | ||
|
|
@@ -779,7 +779,11 @@ async def request_stream( | |
| async with response: | ||
| yield await self._process_streamed_response(response, model_request_parameters) | ||
|
|
||
| def _process_response(self, response: responses.Response) -> ModelResponse: | ||
| def _process_response( | ||
| self, | ||
| response: responses.Response, | ||
| model_request_parameters: ModelRequestParameters, | ||
| ) -> ModelResponse: | ||
| """Process a non-streamed response, and prepare a message to return.""" | ||
| timestamp = number_to_datetime(response.created_at) | ||
| items: list[ModelResponsePart] = [] | ||
|
|
@@ -795,6 +799,12 @@ def _process_response(self, response: responses.Response) -> ModelResponse: | |
| items.append(TextPart(content.text)) | ||
| elif item.type == 'function_call': | ||
| items.append(ToolCallPart(item.name, item.arguments, tool_call_id=item.call_id)) | ||
| elif item.type == 'custom_tool_call': | ||
| if item.name not in model_request_parameters.tool_defs: | ||
| raise UnexpectedModelBehavior(f'Unknown tool called: {item.name}') | ||
|
||
| tool = model_request_parameters.tool_defs[item.name] | ||
| argument_name = tool.single_string_argument_name | ||
| items.append(ToolCallPart(item.name, {argument_name: item.input}, tool_call_id=item.call_id)) | ||
| return ModelResponse( | ||
| items, | ||
| usage=_map_usage(response), | ||
|
|
@@ -893,11 +903,14 @@ async def _responses_create( | |
| try: | ||
| extra_headers = model_settings.get('extra_headers', {}) | ||
| extra_headers.setdefault('User-Agent', get_user_agent()) | ||
| parallel_tool_calls = self._get_parallel_tool_calling( | ||
| model_settings=model_settings, model_request_parameters=model_request_parameters | ||
| ) | ||
| return await self.client.responses.create( | ||
| input=openai_messages, | ||
| model=self._model_name, | ||
| instructions=instructions, | ||
| parallel_tool_calls=model_settings.get('parallel_tool_calls', NOT_GIVEN), | ||
| parallel_tool_calls=parallel_tool_calls, | ||
| tools=tools or NOT_GIVEN, | ||
| tool_choice=tool_choice or NOT_GIVEN, | ||
| max_output_tokens=model_settings.get('max_tokens', NOT_GIVEN), | ||
|
|
@@ -937,7 +950,18 @@ def _get_reasoning(self, model_settings: OpenAIResponsesModelSettings) -> Reason | |
| return NOT_GIVEN | ||
| return Reasoning(effort=reasoning_effort, summary=reasoning_summary) | ||
|
|
||
| def _get_tools(self, model_request_parameters: ModelRequestParameters) -> list[responses.FunctionToolParam]: | ||
| def _get_parallel_tool_calling( | ||
| self, model_settings: OpenAIResponsesModelSettings, model_request_parameters: ModelRequestParameters | ||
| ) -> bool | NotGiven: | ||
| if any(tool_definition.free_form for tool_definition in model_request_parameters.tool_defs.values()): | ||
| return False | ||
| if any(tool_definition.free_form for tool_definition in model_request_parameters.output_tools): | ||
| return False | ||
| return model_settings.get('parallel_tool_calls', NOT_GIVEN) | ||
|
|
||
| def _get_tools( | ||
| self, model_request_parameters: ModelRequestParameters | ||
| ) -> list[responses.FunctionToolParam | responses.CustomToolParam]: | ||
| return [self._map_tool_definition(r) for r in model_request_parameters.tool_defs.values()] | ||
|
|
||
| def _get_builtin_tools(self, model_request_parameters: ModelRequestParameters) -> list[responses.ToolParam]: | ||
|
|
@@ -960,15 +984,33 @@ def _get_builtin_tools(self, model_request_parameters: ModelRequestParameters) - | |
| ) | ||
| return tools | ||
|
|
||
| def _map_tool_definition(self, f: ToolDefinition) -> responses.FunctionToolParam: | ||
| def _map_tool_definition(self, f: ToolDefinition) -> responses.FunctionToolParam | responses.CustomToolParam: | ||
| model_profile = OpenAIModelProfile.from_profile(self.profile) | ||
| if f.free_form: | ||
| if not model_profile.openai_supports_freeform_function_calling: | ||
| raise UserError( | ||
| f'`{f.name}` is set as free_form but {model_profile.name} does not support free form function calling.' | ||
| ) | ||
| if not f.only_takes_string_argument: | ||
|
Collaborator
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. Could we enforce this at the tool definition level, in the |
||
| raise UserError(f'`{f.name}` is set as free_form but does not take a single string argument.') | ||
| if f.grammar_syntax is not None: | ||
| format = {'type': 'grammar', 'syntax': f.grammar_syntax, 'definition': f.grammar_definition} | ||
| else: | ||
| format = {'type': 'text'} | ||
| tool_param: responses.CustomToolParam = { | ||
| 'name': f.name, | ||
| 'type': 'custom', | ||
| 'description': f.description or '', | ||
| 'format': format, | ||
| } | ||
| return tool_param | ||
|
|
||
| return { | ||
| 'name': f.name, | ||
| 'parameters': f.parameters_json_schema, | ||
| 'type': 'function', | ||
| 'description': f.description, | ||
| 'strict': bool( | ||
| f.strict and OpenAIModelProfile.from_profile(self.profile).openai_supports_strict_tool_definition | ||
| ), | ||
| 'strict': bool(f.strict and model_profile.openai_supports_strict_tool_definition), | ||
| } | ||
|
|
||
| async def _map_messages( | ||
|
|
||
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
@matthewfranglen I know you still have some work planned on this PR before it's really ready for review, but please consider the API I proposed in #2513 (comment). I'd prefer one argument taking an object over three that need to be used together.
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Sorry I missed your intent with that part. It's a very good idea I will certainly do that, and it will clean up what I have done so far. Thanks for the reminder.
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
I believe I have addressed this now