diff --git a/.release-please-manifest.json b/.release-please-manifest.json
index 8615e587..89464d20 100644
--- a/.release-please-manifest.json
+++ b/.release-please-manifest.json
@@ -1,3 +1,3 @@
 {
-  ".": "0.4.0-alpha.10"
+  ".": "0.4.0-alpha.11"
 }
diff --git a/.stats.yml b/.stats.yml
index db94672e..c9e7f120 100644
--- a/.stats.yml
+++ b/.stats.yml
@@ -1,4 +1,4 @@
 configured_endpoints: 103
-openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/llamastack%2Fllama-stack-client-aab1b331382f758fc255f765e73b62fedf463cf0748bc11b2b08974de9ac816a.yml
-openapi_spec_hash: f717a21f47419aa51e4d9298aa68cc45
-config_hash: 0017f6c419cbbf7b949f9b2842917a79
+openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/llamastack%2Fllama-stack-client-185ec058880381a5526ac91a488af1833f55656e36cd10b3795eb8fd4d75026f.yml
+openapi_spec_hash: fa935c08e25d23cff624e5e150f8e6ca
+config_hash: 39578cfdeb4a10121f2cb3fa3e4d5e20
diff --git a/CHANGELOG.md b/CHANGELOG.md
index f902c89d..d718553c 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,13 @@
 # Changelog
 
+## 0.4.0-alpha.11 (2025-11-18)
+
+Full Changelog: [v0.4.0-alpha.10...v0.4.0-alpha.11](https://github.com/llamastack/llama-stack-client-python/compare/v0.4.0-alpha.10...v0.4.0-alpha.11)
+
+### Features
+
+* Wire through parallel_tool_calls to Responses API ([2888bed](https://github.com/llamastack/llama-stack-client-python/commit/2888bed06c11f2eb5c7e71d7555ff0c1a19eb696))
+
 ## 0.4.0-alpha.10 (2025-11-17)
 
 Full Changelog: [v0.4.0-alpha.9...v0.4.0-alpha.10](https://github.com/llamastack/llama-stack-client-python/compare/v0.4.0-alpha.9...v0.4.0-alpha.10)
diff --git a/pyproject.toml b/pyproject.toml
index e4ed9244..334311cb 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
 [project]
 name = "llama_stack_client"
-version = "0.4.0-alpha.10"
+version = "0.4.0-alpha.11"
 description = "The official Python library for the llama-stack-client API"
 dynamic = ["readme"]
 license = "MIT"
diff --git a/src/llama_stack_client/resources/responses/responses.py b/src/llama_stack_client/resources/responses/responses.py
index 1813f94b..a008ea0f 100644
--- a/src/llama_stack_client/resources/responses/responses.py
+++ b/src/llama_stack_client/resources/responses/responses.py
@@ -83,6 +83,7 @@ def create(
         instructions: Optional[str] | Omit = omit,
         max_infer_iters: Optional[int] | Omit = omit,
         max_tool_calls: Optional[int] | Omit = omit,
+        parallel_tool_calls: Optional[bool] | Omit = omit,
         previous_response_id: Optional[str] | Omit = omit,
         prompt: Optional[response_create_params.Prompt] | Omit = omit,
         store: Optional[bool] | Omit = omit,
@@ -132,6 +133,7 @@ def create(
         instructions: Optional[str] | Omit = omit,
         max_infer_iters: Optional[int] | Omit = omit,
         max_tool_calls: Optional[int] | Omit = omit,
+        parallel_tool_calls: Optional[bool] | Omit = omit,
         previous_response_id: Optional[str] | Omit = omit,
         prompt: Optional[response_create_params.Prompt] | Omit = omit,
         store: Optional[bool] | Omit = omit,
@@ -180,6 +182,7 @@ def create(
         instructions: Optional[str] | Omit = omit,
         max_infer_iters: Optional[int] | Omit = omit,
         max_tool_calls: Optional[int] | Omit = omit,
+        parallel_tool_calls: Optional[bool] | Omit = omit,
         previous_response_id: Optional[str] | Omit = omit,
         prompt: Optional[response_create_params.Prompt] | Omit = omit,
         store: Optional[bool] | Omit = omit,
@@ -227,6 +230,7 @@ def create(
         instructions: Optional[str] | Omit = omit,
         max_infer_iters: Optional[int] | Omit = omit,
         max_tool_calls: Optional[int] | Omit = omit,
+        parallel_tool_calls: Optional[bool] | Omit = omit,
         previous_response_id: Optional[str] | Omit = omit,
         prompt: Optional[response_create_params.Prompt] | Omit = omit,
         store: Optional[bool] | Omit = omit,
@@ -252,6 +256,7 @@ def create(
                     "instructions": instructions,
                     "max_infer_iters": max_infer_iters,
                     "max_tool_calls": max_tool_calls,
+                    "parallel_tool_calls": parallel_tool_calls,
                     "previous_response_id": previous_response_id,
                     "prompt": prompt,
                     "store": store,
@@ -428,6 +433,7 @@ async def create(
         instructions: Optional[str] | Omit = omit,
         max_infer_iters: Optional[int] | Omit = omit,
         max_tool_calls: Optional[int] | Omit = omit,
+        parallel_tool_calls: Optional[bool] | Omit = omit,
         previous_response_id: Optional[str] | Omit = omit,
         prompt: Optional[response_create_params.Prompt] | Omit = omit,
         store: Optional[bool] | Omit = omit,
@@ -477,6 +483,7 @@ async def create(
         instructions: Optional[str] | Omit = omit,
         max_infer_iters: Optional[int] | Omit = omit,
         max_tool_calls: Optional[int] | Omit = omit,
+        parallel_tool_calls: Optional[bool] | Omit = omit,
         previous_response_id: Optional[str] | Omit = omit,
         prompt: Optional[response_create_params.Prompt] | Omit = omit,
         store: Optional[bool] | Omit = omit,
@@ -525,6 +532,7 @@ async def create(
         instructions: Optional[str] | Omit = omit,
         max_infer_iters: Optional[int] | Omit = omit,
         max_tool_calls: Optional[int] | Omit = omit,
+        parallel_tool_calls: Optional[bool] | Omit = omit,
         previous_response_id: Optional[str] | Omit = omit,
         prompt: Optional[response_create_params.Prompt] | Omit = omit,
         store: Optional[bool] | Omit = omit,
@@ -572,6 +580,7 @@ async def create(
         instructions: Optional[str] | Omit = omit,
         max_infer_iters: Optional[int] | Omit = omit,
         max_tool_calls: Optional[int] | Omit = omit,
+        parallel_tool_calls: Optional[bool] | Omit = omit,
         previous_response_id: Optional[str] | Omit = omit,
         prompt: Optional[response_create_params.Prompt] | Omit = omit,
         store: Optional[bool] | Omit = omit,
@@ -597,6 +606,7 @@ async def create(
                     "instructions": instructions,
                     "max_infer_iters": max_infer_iters,
                     "max_tool_calls": max_tool_calls,
+                    "parallel_tool_calls": parallel_tool_calls,
                     "previous_response_id": previous_response_id,
                     "prompt": prompt,
                     "store": store,
diff --git a/src/llama_stack_client/types/response_create_params.py b/src/llama_stack_client/types/response_create_params.py
index ef8ed6fe..b9adbffe 100644
--- a/src/llama_stack_client/types/response_create_params.py
+++ b/src/llama_stack_client/types/response_create_params.py
@@ -76,6 +76,8 @@ class ResponseCreateParamsBase(TypedDict, total=False):
 
     max_tool_calls: Optional[int]
 
+    parallel_tool_calls: Optional[bool]
+
     previous_response_id: Optional[str]
 
     prompt: Optional[Prompt]
diff --git a/tests/api_resources/test_responses.py b/tests/api_resources/test_responses.py
index f2497504..53ad1282 100644
--- a/tests/api_resources/test_responses.py
+++ b/tests/api_resources/test_responses.py
@@ -46,6 +46,7 @@ def test_method_create_with_all_params_overload_1(self, client: LlamaStackClient
             instructions="instructions",
             max_infer_iters=0,
             max_tool_calls=0,
+            parallel_tool_calls=True,
             previous_response_id="previous_response_id",
             prompt={
                 "id": "id",
@@ -124,6 +125,7 @@ def test_method_create_with_all_params_overload_2(self, client: LlamaStackClient
             instructions="instructions",
             max_infer_iters=0,
             max_tool_calls=0,
+            parallel_tool_calls=True,
             previous_response_id="previous_response_id",
             prompt={
                 "id": "id",
@@ -317,6 +319,7 @@ async def test_method_create_with_all_params_overload_1(self, async_client: Asyn
             instructions="instructions",
             max_infer_iters=0,
             max_tool_calls=0,
+            parallel_tool_calls=True,
             previous_response_id="previous_response_id",
             prompt={
                 "id": "id",
@@ -395,6 +398,7 @@ async def test_method_create_with_all_params_overload_2(self, async_client: Asyn
             instructions="instructions",
             max_infer_iters=0,
             max_tool_calls=0,
+            parallel_tool_calls=True,
             previous_response_id="previous_response_id",
             prompt={
                 "id": "id",
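
For reviewers, a minimal usage sketch of the new flag follows. It is not part of the patch: only `parallel_tool_calls=True` is taken from the diff above, while the top-level `LlamaStackClient` import, the base URL, the model id, and the presence of `model`/`input` arguments are assumptions about the surrounding SDK.

# Hypothetical usage sketch (not part of the patch); assumes a Llama Stack
# server is reachable at the base_url below.
from llama_stack_client import LlamaStackClient

client = LlamaStackClient(base_url="http://localhost:8321")  # assumed local server

response = client.responses.create(
    model="meta-llama/Llama-3.3-70B-Instruct",  # placeholder model id
    input="Compare the weather in Paris and Tokyo.",
    # New flag wired through by this change; in practice you would also pass
    # tools=[...] (tool definitions are outside the scope of this diff).
    parallel_tool_calls=True,
    max_tool_calls=4,
)
print(response)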