2 changes: 1 addition & 1 deletion .release-please-manifest.json
@@ -1,3 +1,3 @@
{
".": "0.4.0-alpha.10"
".": "0.4.0-alpha.11"
}
6 changes: 3 additions & 3 deletions .stats.yml
@@ -1,4 +1,4 @@
configured_endpoints: 103
openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/llamastack%2Fllama-stack-client-aab1b331382f758fc255f765e73b62fedf463cf0748bc11b2b08974de9ac816a.yml
openapi_spec_hash: f717a21f47419aa51e4d9298aa68cc45
config_hash: 0017f6c419cbbf7b949f9b2842917a79
openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/llamastack%2Fllama-stack-client-185ec058880381a5526ac91a488af1833f55656e36cd10b3795eb8fd4d75026f.yml
openapi_spec_hash: fa935c08e25d23cff624e5e150f8e6ca
config_hash: 39578cfdeb4a10121f2cb3fa3e4d5e20
8 changes: 8 additions & 0 deletions CHANGELOG.md
@@ -1,5 +1,13 @@
# Changelog

## 0.4.0-alpha.11 (2025-11-18)

Full Changelog: [v0.4.0-alpha.10...v0.4.0-alpha.11](https://github.com/llamastack/llama-stack-client-python/compare/v0.4.0-alpha.10...v0.4.0-alpha.11)

### Features

* Wire through parallel_tool_calls to Responses API ([2888bed](https://github.com/llamastack/llama-stack-client-python/commit/2888bed06c11f2eb5c7e71d7555ff0c1a19eb696))

## 0.4.0-alpha.10 (2025-11-17)

Full Changelog: [v0.4.0-alpha.9...v0.4.0-alpha.10](https://github.com/llamastack/llama-stack-client-python/compare/v0.4.0-alpha.9...v0.4.0-alpha.10)
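For context on the feature entry above: `parallel_tool_calls` is now accepted by `responses.create`. A minimal usage sketch follows; the base URL, model id, and tool spec are placeholders (not taken from this PR), and the surrounding parameter names assume the OpenAI-style Responses request shape the client already exposes.

```python
from llama_stack_client import LlamaStackClient

# Placeholder endpoint; substitute values for your own deployment.
client = LlamaStackClient(base_url="http://localhost:8321")

response = client.responses.create(
    model="llama3.2:3b",  # hypothetical model id
    input="What is the weather in Paris and in Tokyo?",
    tools=[{"type": "web_search"}],  # illustrative tool spec
    parallel_tool_calls=True,  # the flag wired through by this release
)
print(response.id)
```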
2 changes: 1 addition & 1 deletion pyproject.toml
@@ -1,6 +1,6 @@
[project]
name = "llama_stack_client"
version = "0.4.0-alpha.10"
version = "0.4.0-alpha.11"
description = "The official Python library for the llama-stack-client API"
dynamic = ["readme"]
license = "MIT"
10 changes: 10 additions & 0 deletions src/llama_stack_client/resources/responses/responses.py
@@ -83,6 +83,7 @@ def create(
instructions: Optional[str] | Omit = omit,
max_infer_iters: Optional[int] | Omit = omit,
max_tool_calls: Optional[int] | Omit = omit,
parallel_tool_calls: Optional[bool] | Omit = omit,
previous_response_id: Optional[str] | Omit = omit,
prompt: Optional[response_create_params.Prompt] | Omit = omit,
store: Optional[bool] | Omit = omit,
@@ -132,6 +133,7 @@ def create(
instructions: Optional[str] | Omit = omit,
max_infer_iters: Optional[int] | Omit = omit,
max_tool_calls: Optional[int] | Omit = omit,
parallel_tool_calls: Optional[bool] | Omit = omit,
previous_response_id: Optional[str] | Omit = omit,
prompt: Optional[response_create_params.Prompt] | Omit = omit,
store: Optional[bool] | Omit = omit,
@@ -180,6 +182,7 @@ def create(
instructions: Optional[str] | Omit = omit,
max_infer_iters: Optional[int] | Omit = omit,
max_tool_calls: Optional[int] | Omit = omit,
parallel_tool_calls: Optional[bool] | Omit = omit,
previous_response_id: Optional[str] | Omit = omit,
prompt: Optional[response_create_params.Prompt] | Omit = omit,
store: Optional[bool] | Omit = omit,
@@ -227,6 +230,7 @@ def create(
instructions: Optional[str] | Omit = omit,
max_infer_iters: Optional[int] | Omit = omit,
max_tool_calls: Optional[int] | Omit = omit,
parallel_tool_calls: Optional[bool] | Omit = omit,
previous_response_id: Optional[str] | Omit = omit,
prompt: Optional[response_create_params.Prompt] | Omit = omit,
store: Optional[bool] | Omit = omit,
@@ -252,6 +256,7 @@
"instructions": instructions,
"max_infer_iters": max_infer_iters,
"max_tool_calls": max_tool_calls,
"parallel_tool_calls": parallel_tool_calls,
"previous_response_id": previous_response_id,
"prompt": prompt,
"store": store,
@@ -428,6 +433,7 @@ async def create(
instructions: Optional[str] | Omit = omit,
max_infer_iters: Optional[int] | Omit = omit,
max_tool_calls: Optional[int] | Omit = omit,
parallel_tool_calls: Optional[bool] | Omit = omit,
previous_response_id: Optional[str] | Omit = omit,
prompt: Optional[response_create_params.Prompt] | Omit = omit,
store: Optional[bool] | Omit = omit,
@@ -477,6 +483,7 @@ async def create(
instructions: Optional[str] | Omit = omit,
max_infer_iters: Optional[int] | Omit = omit,
max_tool_calls: Optional[int] | Omit = omit,
parallel_tool_calls: Optional[bool] | Omit = omit,
previous_response_id: Optional[str] | Omit = omit,
prompt: Optional[response_create_params.Prompt] | Omit = omit,
store: Optional[bool] | Omit = omit,
@@ -525,6 +532,7 @@ async def create(
instructions: Optional[str] | Omit = omit,
max_infer_iters: Optional[int] | Omit = omit,
max_tool_calls: Optional[int] | Omit = omit,
parallel_tool_calls: Optional[bool] | Omit = omit,
previous_response_id: Optional[str] | Omit = omit,
prompt: Optional[response_create_params.Prompt] | Omit = omit,
store: Optional[bool] | Omit = omit,
@@ -572,6 +580,7 @@ async def create(
instructions: Optional[str] | Omit = omit,
max_infer_iters: Optional[int] | Omit = omit,
max_tool_calls: Optional[int] | Omit = omit,
parallel_tool_calls: Optional[bool] | Omit = omit,
previous_response_id: Optional[str] | Omit = omit,
prompt: Optional[response_create_params.Prompt] | Omit = omit,
store: Optional[bool] | Omit = omit,
@@ -597,6 +606,7 @@
"instructions": instructions,
"max_infer_iters": max_infer_iters,
"max_tool_calls": max_tool_calls,
"parallel_tool_calls": parallel_tool_calls,
"previous_response_id": previous_response_id,
"prompt": prompt,
"store": store,
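In the signatures above, `parallel_tool_calls: Optional[bool] | Omit = omit` lets the client distinguish "not provided" from an explicit `None` or `False`, so the field is only serialized when the caller actually sets it. A simplified sketch of that sentinel idea (not the library's actual `Omit` implementation):

```python
from typing import Any, Dict, Optional, Union


class Omit:
    """Sentinel: the parameter was not supplied at all."""

    def __bool__(self) -> bool:
        return False


omit = Omit()


def build_body(parallel_tool_calls: Union[Optional[bool], Omit] = omit) -> Dict[str, Any]:
    # Only keys that were actually provided end up in the request body, so the
    # server never sees a parallel_tool_calls field unless the caller set one.
    body: Dict[str, Any] = {}
    if not isinstance(parallel_tool_calls, Omit):
        body["parallel_tool_calls"] = parallel_tool_calls
    return body


assert build_body() == {}
assert build_body(parallel_tool_calls=True) == {"parallel_tool_calls": True}
assert build_body(parallel_tool_calls=None) == {"parallel_tool_calls": None}
```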
2 changes: 2 additions & 0 deletions src/llama_stack_client/types/response_create_params.py
@@ -76,6 +76,8 @@ class ResponseCreateParamsBase(TypedDict, total=False):

max_tool_calls: Optional[int]

parallel_tool_calls: Optional[bool]

previous_response_id: Optional[str]

prompt: Optional[Prompt]
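The matching change in `response_create_params.py` adds the key to a `total=False` TypedDict, so it is optional at the type level as well. A trimmed sketch (the generated class defines many more keys; only a few are shown here):

```python
from typing import Optional

from typing_extensions import TypedDict


class ResponseCreateParams(TypedDict, total=False):
    # Trimmed stand-in for ResponseCreateParamsBase; the real class has many more keys.
    max_tool_calls: Optional[int]
    parallel_tool_calls: Optional[bool]
    previous_response_id: Optional[str]


params: ResponseCreateParams = {
    "max_tool_calls": 4,
    "parallel_tool_calls": True,
}
# With total=False, omitted keys (e.g. previous_response_id) are simply absent,
# mirroring the Omit-sentinel behaviour on the resource methods.
```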
4 changes: 4 additions & 0 deletions tests/api_resources/test_responses.py
@@ -46,6 +46,7 @@ def test_method_create_with_all_params_overload_1(self, client: LlamaStackClient
instructions="instructions",
max_infer_iters=0,
max_tool_calls=0,
parallel_tool_calls=True,
previous_response_id="previous_response_id",
prompt={
"id": "id",
@@ -124,6 +125,7 @@ def test_method_create_with_all_params_overload_2(self, client: LlamaStackClient
instructions="instructions",
max_infer_iters=0,
max_tool_calls=0,
parallel_tool_calls=True,
previous_response_id="previous_response_id",
prompt={
"id": "id",
@@ -317,6 +319,7 @@ async def test_method_create_with_all_params_overload_1(self, async_client: Asyn
instructions="instructions",
max_infer_iters=0,
max_tool_calls=0,
parallel_tool_calls=True,
previous_response_id="previous_response_id",
prompt={
"id": "id",
@@ -395,6 +398,7 @@ async def test_method_create_with_all_params_overload_2(self, async_client: Asyn
instructions="instructions",
max_infer_iters=0,
max_tool_calls=0,
parallel_tool_calls=True,
previous_response_id="previous_response_id",
prompt={
"id": "id",
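The async tests mirror the sync ones. For completeness, a hedged sketch of the same call through the async client (endpoint and model id are placeholders, not taken from this PR):

```python
import asyncio

from llama_stack_client import AsyncLlamaStackClient


async def main() -> None:
    # Placeholder endpoint; point this at your own Llama Stack server.
    client = AsyncLlamaStackClient(base_url="http://localhost:8321")
    response = await client.responses.create(
        model="llama3.2:3b",  # hypothetical model id
        input="Summarize the latest run logs.",
        parallel_tool_calls=False,  # explicitly opt out of parallel tool execution
    )
    print(response.id)


asyncio.run(main())
```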