
Commit 254c646

release: 0.4.0-alpha.11 (#304)
Automated Release PR

---

## 0.4.0-alpha.11 (2025-11-18)

Full Changelog: [v0.4.0-alpha.10...v0.4.0-alpha.11](v0.4.0-alpha.10...v0.4.0-alpha.11)

### Features

* Wire through parallel_tool_calls to Responses API ([2888bed](2888bed))

---

This pull request is managed by Stainless's [GitHub App](https://github.com/apps/stainless-app). The [semver version number](https://semver.org/#semantic-versioning-specification-semver) is based on included [commit messages](https://www.conventionalcommits.org/en/v1.0.0/). Alternatively, you can manually set the version number in the title of this pull request.

For a better experience, it is recommended to use either rebase-merge or squash-merge when merging this pull request.

🔗 Stainless [website](https://www.stainlessapi.com)
📚 Read the [docs](https://app.stainlessapi.com/docs)
🙋 [Reach out](mailto:[email protected]) for help or questions

---------

Co-authored-by: stainless-app[bot] <142633134+stainless-app[bot]@users.noreply.github.com>
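To illustrate the single feature in this release, here is a minimal sketch of passing the new flag from client code. Only `parallel_tool_calls` and the `client.responses.create(...)` entry point come from the diff below; the `model` and `input` values are illustrative placeholders, not values from this commit.

```python
from llama_stack_client import LlamaStackClient

client = LlamaStackClient()

# Sketch only: model id and input are placeholders.
response = client.responses.create(
    model="my-model-id",
    input="Look up the weather in Paris and in Tokyo.",
    parallel_tool_calls=True,  # new in 0.4.0-alpha.11: allow multiple tool calls per turn
)
print(response)
```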
1 parent 0f30a25 · commit 254c646

File tree

7 files changed, +29 -5 lines changed


.release-please-manifest.json

Lines changed: 1 addition & 1 deletion
@@ -1,3 +1,3 @@
 {
-  ".": "0.4.0-alpha.10"
+  ".": "0.4.0-alpha.11"
 }

.stats.yml

Lines changed: 3 additions & 3 deletions
@@ -1,4 +1,4 @@
 configured_endpoints: 103
-openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/llamastack%2Fllama-stack-client-aab1b331382f758fc255f765e73b62fedf463cf0748bc11b2b08974de9ac816a.yml
-openapi_spec_hash: f717a21f47419aa51e4d9298aa68cc45
-config_hash: 0017f6c419cbbf7b949f9b2842917a79
+openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/llamastack%2Fllama-stack-client-185ec058880381a5526ac91a488af1833f55656e36cd10b3795eb8fd4d75026f.yml
+openapi_spec_hash: fa935c08e25d23cff624e5e150f8e6ca
+config_hash: 39578cfdeb4a10121f2cb3fa3e4d5e20

CHANGELOG.md

Lines changed: 8 additions & 0 deletions
@@ -1,5 +1,13 @@
 # Changelog
 
+## 0.4.0-alpha.11 (2025-11-18)
+
+Full Changelog: [v0.4.0-alpha.10...v0.4.0-alpha.11](https://github.com/llamastack/llama-stack-client-python/compare/v0.4.0-alpha.10...v0.4.0-alpha.11)
+
+### Features
+
+* Wire through parallel_tool_calls to Responses API ([2888bed](https://github.com/llamastack/llama-stack-client-python/commit/2888bed06c11f2eb5c7e71d7555ff0c1a19eb696))
+
 ## 0.4.0-alpha.10 (2025-11-17)
 
 Full Changelog: [v0.4.0-alpha.9...v0.4.0-alpha.10](https://github.com/llamastack/llama-stack-client-python/compare/v0.4.0-alpha.9...v0.4.0-alpha.10)

pyproject.toml

Lines changed: 1 addition & 1 deletion
@@ -1,6 +1,6 @@
 [project]
 name = "llama_stack_client"
-version = "0.4.0-alpha.10"
+version = "0.4.0-alpha.11"
 description = "The official Python library for the llama-stack-client API"
 dynamic = ["readme"]
 license = "MIT"

src/llama_stack_client/resources/responses/responses.py

Lines changed: 10 additions & 0 deletions
@@ -83,6 +83,7 @@ def create(
         instructions: Optional[str] | Omit = omit,
         max_infer_iters: Optional[int] | Omit = omit,
         max_tool_calls: Optional[int] | Omit = omit,
+        parallel_tool_calls: Optional[bool] | Omit = omit,
         previous_response_id: Optional[str] | Omit = omit,
         prompt: Optional[response_create_params.Prompt] | Omit = omit,
         store: Optional[bool] | Omit = omit,
@@ -132,6 +133,7 @@ def create(
         instructions: Optional[str] | Omit = omit,
         max_infer_iters: Optional[int] | Omit = omit,
         max_tool_calls: Optional[int] | Omit = omit,
+        parallel_tool_calls: Optional[bool] | Omit = omit,
         previous_response_id: Optional[str] | Omit = omit,
         prompt: Optional[response_create_params.Prompt] | Omit = omit,
         store: Optional[bool] | Omit = omit,
@@ -180,6 +182,7 @@ def create(
         instructions: Optional[str] | Omit = omit,
         max_infer_iters: Optional[int] | Omit = omit,
         max_tool_calls: Optional[int] | Omit = omit,
+        parallel_tool_calls: Optional[bool] | Omit = omit,
         previous_response_id: Optional[str] | Omit = omit,
         prompt: Optional[response_create_params.Prompt] | Omit = omit,
         store: Optional[bool] | Omit = omit,
@@ -227,6 +230,7 @@ def create(
         instructions: Optional[str] | Omit = omit,
         max_infer_iters: Optional[int] | Omit = omit,
         max_tool_calls: Optional[int] | Omit = omit,
+        parallel_tool_calls: Optional[bool] | Omit = omit,
         previous_response_id: Optional[str] | Omit = omit,
         prompt: Optional[response_create_params.Prompt] | Omit = omit,
         store: Optional[bool] | Omit = omit,
@@ -252,6 +256,7 @@ def create(
                 "instructions": instructions,
                 "max_infer_iters": max_infer_iters,
                 "max_tool_calls": max_tool_calls,
+                "parallel_tool_calls": parallel_tool_calls,
                 "previous_response_id": previous_response_id,
                 "prompt": prompt,
                 "store": store,
@@ -428,6 +433,7 @@ async def create(
         instructions: Optional[str] | Omit = omit,
         max_infer_iters: Optional[int] | Omit = omit,
         max_tool_calls: Optional[int] | Omit = omit,
+        parallel_tool_calls: Optional[bool] | Omit = omit,
         previous_response_id: Optional[str] | Omit = omit,
         prompt: Optional[response_create_params.Prompt] | Omit = omit,
         store: Optional[bool] | Omit = omit,
@@ -477,6 +483,7 @@ async def create(
         instructions: Optional[str] | Omit = omit,
         max_infer_iters: Optional[int] | Omit = omit,
         max_tool_calls: Optional[int] | Omit = omit,
+        parallel_tool_calls: Optional[bool] | Omit = omit,
         previous_response_id: Optional[str] | Omit = omit,
         prompt: Optional[response_create_params.Prompt] | Omit = omit,
         store: Optional[bool] | Omit = omit,
@@ -525,6 +532,7 @@ async def create(
         instructions: Optional[str] | Omit = omit,
         max_infer_iters: Optional[int] | Omit = omit,
         max_tool_calls: Optional[int] | Omit = omit,
+        parallel_tool_calls: Optional[bool] | Omit = omit,
         previous_response_id: Optional[str] | Omit = omit,
         prompt: Optional[response_create_params.Prompt] | Omit = omit,
         store: Optional[bool] | Omit = omit,
@@ -572,6 +580,7 @@ async def create(
         instructions: Optional[str] | Omit = omit,
         max_infer_iters: Optional[int] | Omit = omit,
         max_tool_calls: Optional[int] | Omit = omit,
+        parallel_tool_calls: Optional[bool] | Omit = omit,
         previous_response_id: Optional[str] | Omit = omit,
         prompt: Optional[response_create_params.Prompt] | Omit = omit,
         store: Optional[bool] | Omit = omit,
@@ -597,6 +606,7 @@ async def create(
                 "instructions": instructions,
                 "max_infer_iters": max_infer_iters,
                 "max_tool_calls": max_tool_calls,
+                "parallel_tool_calls": parallel_tool_calls,
                 "previous_response_id": previous_response_id,
                 "prompt": prompt,
                 "store": store,

src/llama_stack_client/types/response_create_params.py

Lines changed: 2 additions & 0 deletions
@@ -76,6 +76,8 @@ class ResponseCreateParamsBase(TypedDict, total=False):
 
     max_tool_calls: Optional[int]
 
+    parallel_tool_calls: Optional[bool]
+
     previous_response_id: Optional[str]
 
     prompt: Optional[Prompt]
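Because `ResponseCreateParamsBase` is a `TypedDict` declared with `total=False`, every key, including the new one, is optional for callers who build request params as plain dicts. A trimmed-down stand-in (the real class declares many more fields):

```python
from typing import Optional
from typing_extensions import TypedDict

# Simplified stand-in for ResponseCreateParamsBase; total=False makes
# every declared key optional.
class ResponseCreateParamsBase(TypedDict, total=False):
    max_tool_calls: Optional[int]
    parallel_tool_calls: Optional[bool]
    previous_response_id: Optional[str]

# Callers may supply any subset of keys:
params: ResponseCreateParamsBase = {"parallel_tool_calls": True}
```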

tests/api_resources/test_responses.py

Lines changed: 4 additions & 0 deletions
@@ -46,6 +46,7 @@ def test_method_create_with_all_params_overload_1(self, client: LlamaStackClient
             instructions="instructions",
             max_infer_iters=0,
             max_tool_calls=0,
+            parallel_tool_calls=True,
             previous_response_id="previous_response_id",
             prompt={
                 "id": "id",
@@ -124,6 +125,7 @@ def test_method_create_with_all_params_overload_2(self, client: LlamaStackClient
             instructions="instructions",
             max_infer_iters=0,
             max_tool_calls=0,
+            parallel_tool_calls=True,
             previous_response_id="previous_response_id",
             prompt={
                 "id": "id",
@@ -317,6 +319,7 @@ async def test_method_create_with_all_params_overload_1(self, async_client: Asyn
             instructions="instructions",
             max_infer_iters=0,
             max_tool_calls=0,
+            parallel_tool_calls=True,
             previous_response_id="previous_response_id",
             prompt={
                 "id": "id",
@@ -395,6 +398,7 @@ async def test_method_create_with_all_params_overload_2(self, async_client: Asyn
             instructions="instructions",
             max_infer_iters=0,
             max_tool_calls=0,
+            parallel_tool_calls=True,
             previous_response_id="previous_response_id",
             prompt={
                 "id": "id",
