65 changes: 65 additions & 0 deletions examples/aio/server_side_tools.py
@@ -0,0 +1,65 @@
import asyncio

from xai_sdk import AsyncClient
from xai_sdk.chat import user
from xai_sdk.tools import code_execution, web_search, x_search


async def agentic_search(client: AsyncClient, model: str, query: str) -> None:
    chat = client.chat.create(
        model=model,
        # All three server-side tools are active; add or remove them as needed.
        tools=[
            web_search(),
            x_search(),
            code_execution(),
        ],
    )
    chat.append(user(query))

    is_thinking = True
    # stream() yields (response-so-far, chunk); the response object aggregates as chunks arrive.
    async for response, chunk in chat.stream():
        if response.usage.reasoning_tokens and is_thinking:
            print(f"\rThinking... ({response.usage.reasoning_tokens} tokens)", end="", flush=True)
        if chunk.content and is_thinking:
            print("\n\nFinal Response:")
            is_thinking = False
        if chunk.content and not is_thinking:
            print(chunk.content, end="", flush=True)

    print("\n\nCitations:")
    print(response.citations)
    print("\n\nUsage:")
    print(response.usage.server_side_tools_used)
    print(response.server_side_tool_usage)


async def main() -> None:
    client = AsyncClient()

    # Trigger web/X search.
    await agentic_search(
        client,
        model="grok-4-fast",
        query=(
            "What was the result of Arsenal's most recent game? Where did they play, who scored and in which minutes?"
        ),
    )

    # Trigger code execution.
    # await agentic_search(
    #     client,
    #     model="grok-4-fast",
    #     query="What is the 102nd number in the Fibonacci sequence? Show me the code.",
    # )

    # Trigger X search/web search.
    # await agentic_search(
    #     client,
    #     model="grok-4-fast-non-reasoning",
    #     query="What can you tell me about the X user 0xPromar and his recent activity?",
    # )


if __name__ == "__main__":
    asyncio.run(main())
63 changes: 63 additions & 0 deletions examples/sync/server_side_tools.py
@@ -0,0 +1,63 @@
from xai_sdk import Client
from xai_sdk.chat import user
from xai_sdk.tools import code_execution, web_search, x_search


def agentic_search(client: Client, model: str, query: str) -> None:
    chat = client.chat.create(
        model=model,
        # All three server-side tools are active; add or remove them as needed.
        tools=[
            web_search(),
            x_search(),
            code_execution(),
        ],
    )
    chat.append(user(query))

    is_thinking = True
    # stream() yields (response-so-far, chunk); the response object aggregates as chunks arrive.
    for response, chunk in chat.stream():
        if response.usage.reasoning_tokens and is_thinking:
            print(f"\rThinking... ({response.usage.reasoning_tokens} tokens)", end="", flush=True)
        if chunk.content and is_thinking:
            print("\n\nFinal Response:")
            is_thinking = False
        if chunk.content and not is_thinking:
            print(chunk.content, end="", flush=True)

    print("\n\nCitations:")
    print(response.citations)
    print("\n\nUsage:")
    print(response.usage)
    print(response.server_side_tool_usage)


def main() -> None:
    client = Client()

    # Trigger web/X search.
    agentic_search(
        client,
        model="grok-4-fast",
        query=(
            "What was the result of Arsenal's most recent game? Where did they play, who scored and in which minutes?"
        ),
    )

    # Trigger code execution.
    # agentic_search(
    #     client,
    #     model="grok-4-fast",
    #     query="What is the 102nd number in the Fibonacci sequence? Show me the code.",
    # )

    # Trigger X search/web search.
    # agentic_search(
    #     client,
    #     model="grok-4-fast-non-reasoning",
    #     query="What can you tell me about the X user 0xPromar and his recent activity?",
    # )


if __name__ == "__main__":
    main()
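
If streaming output is not needed, the same server-side tools work with a non-streaming call. A minimal sketch, assuming the SDK's sample() method is the non-streaming counterpart of chat.stream() and that the returned response exposes content alongside the new server_side_tool_usage property added in this PR:

```python
from xai_sdk import Client
from xai_sdk.chat import user
from xai_sdk.tools import web_search, x_search

client = Client()
chat = client.chat.create(model="grok-4-fast", tools=[web_search(), x_search()])
chat.append(user("What was the result of Arsenal's most recent game?"))

# Assumed non-streaming counterpart of chat.stream(); returns a single aggregated response.
response = chat.sample()
print(response.content)
print(response.server_side_tool_usage)  # e.g. {"SERVER_SIDE_TOOL_WEB_SEARCH": 2}
```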
13 changes: 13 additions & 0 deletions src/xai_sdk/chat.py
@@ -2,6 +2,7 @@
import datetime
import json
import time
from collections import Counter
from typing import Any, Generic, Literal, Optional, Sequence, TypeVar, Union

import grpc
@@ -713,6 +714,12 @@ def reasoning_content(self) -> str:
"""Concatenates all reasoning chunks into a single string."""
return "".join(c.reasoning_content for c in self.choices)

@property
def server_side_tool_usage(self) -> dict[str, int]:
"""Returns the server side tools used for this chunk."""
tools_used = [usage_pb2.ServerSideTool.Name(tool) for tool in self.proto.usage.server_side_tools_used]
return dict(Counter(tools_used))

@property
def citations(self) -> Sequence[str]:
"""Returns the citations of this chunk."""
@@ -854,6 +861,12 @@ def citations(self) -> Sequence[str]:
"""Returns the citations of this response."""
return self.proto.citations

@property
def server_side_tool_usage(self) -> dict[str, int]:
"""Returns the server side tools used for this response."""
tools_used = [usage_pb2.ServerSideTool.Name(tool) for tool in self.proto.usage.server_side_tools_used]
return dict(Counter(tools_used))

@property
def request_settings(self) -> chat_pb2.RequestSettings:
"""Returns the request settings, i.e. the model parameters set on the request used to generate this response."""
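
For reference, the new server_side_tool_usage property simply counts enum names, so its return value is a plain dict. A small illustration of the shape (the tool names and counts here are made up):

```python
from collections import Counter

# Hypothetical enum names as they would appear in usage.server_side_tools_used.
tools_used = [
    "SERVER_SIDE_TOOL_WEB_SEARCH",
    "SERVER_SIDE_TOOL_WEB_SEARCH",
    "SERVER_SIDE_TOOL_X_SEARCH",
]
print(dict(Counter(tools_used)))
# {'SERVER_SIDE_TOOL_WEB_SEARCH': 2, 'SERVER_SIDE_TOOL_X_SEARCH': 1}
```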
94 changes: 48 additions & 46 deletions src/xai_sdk/proto/v5/chat_pb2.py

Large diffs are not rendered by default.

32 changes: 28 additions & 4 deletions src/xai_sdk/proto/v5/chat_pb2.pyi
@@ -269,20 +269,44 @@ class ToolChoice(_message.Message):
    def __init__(self, mode: _Optional[_Union[ToolMode, str]] = ..., function_name: _Optional[str] = ...) -> None: ...

class Tool(_message.Message):
    __slots__ = ("function", "web_search", "x_search")
    __slots__ = ("function", "web_search", "x_search", "code_execution")
    FUNCTION_FIELD_NUMBER: _ClassVar[int]
    WEB_SEARCH_FIELD_NUMBER: _ClassVar[int]
    X_SEARCH_FIELD_NUMBER: _ClassVar[int]
    CODE_EXECUTION_FIELD_NUMBER: _ClassVar[int]
    function: Function
    web_search: WebSearch
    x_search: XSearch
    def __init__(self, function: _Optional[_Union[Function, _Mapping]] = ..., web_search: _Optional[_Union[WebSearch, _Mapping]] = ..., x_search: _Optional[_Union[XSearch, _Mapping]] = ...) -> None: ...
    code_execution: CodeExecution
    def __init__(self, function: _Optional[_Union[Function, _Mapping]] = ..., web_search: _Optional[_Union[WebSearch, _Mapping]] = ..., x_search: _Optional[_Union[XSearch, _Mapping]] = ..., code_execution: _Optional[_Union[CodeExecution, _Mapping]] = ...) -> None: ...

class WebSearch(_message.Message):
    __slots__ = ()
    def __init__(self) -> None: ...
    __slots__ = ("excluded_domains", "allowed_domains", "enable_image_understanding")
    EXCLUDED_DOMAINS_FIELD_NUMBER: _ClassVar[int]
    ALLOWED_DOMAINS_FIELD_NUMBER: _ClassVar[int]
    ENABLE_IMAGE_UNDERSTANDING_FIELD_NUMBER: _ClassVar[int]
    excluded_domains: _containers.RepeatedScalarFieldContainer[str]
    allowed_domains: _containers.RepeatedScalarFieldContainer[str]
    enable_image_understanding: bool
    def __init__(self, excluded_domains: _Optional[_Iterable[str]] = ..., allowed_domains: _Optional[_Iterable[str]] = ..., enable_image_understanding: bool = ...) -> None: ...

class XSearch(_message.Message):
    __slots__ = ("from_date", "to_date", "allowed_x_handles", "excluded_x_handles", "enable_image_understanding", "enable_video_understanding")
    FROM_DATE_FIELD_NUMBER: _ClassVar[int]
    TO_DATE_FIELD_NUMBER: _ClassVar[int]
    ALLOWED_X_HANDLES_FIELD_NUMBER: _ClassVar[int]
    EXCLUDED_X_HANDLES_FIELD_NUMBER: _ClassVar[int]
    ENABLE_IMAGE_UNDERSTANDING_FIELD_NUMBER: _ClassVar[int]
    ENABLE_VIDEO_UNDERSTANDING_FIELD_NUMBER: _ClassVar[int]
    from_date: _timestamp_pb2.Timestamp
    to_date: _timestamp_pb2.Timestamp
    allowed_x_handles: _containers.RepeatedScalarFieldContainer[str]
    excluded_x_handles: _containers.RepeatedScalarFieldContainer[str]
    enable_image_understanding: bool
    enable_video_understanding: bool
    def __init__(self, from_date: _Optional[_Union[_timestamp_pb2.Timestamp, _Mapping]] = ..., to_date: _Optional[_Union[_timestamp_pb2.Timestamp, _Mapping]] = ..., allowed_x_handles: _Optional[_Iterable[str]] = ..., excluded_x_handles: _Optional[_Iterable[str]] = ..., enable_image_understanding: bool = ..., enable_video_understanding: bool = ...) -> None: ...

class CodeExecution(_message.Message):
    __slots__ = ()
    def __init__(self) -> None: ...

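
The new tool variants can also be constructed directly from the generated protos. A minimal sketch based only on the fields declared above; the domain, handle, and timestamp are made-up values, and whether the higher-level web_search()/x_search() helpers in xai_sdk.tools accept matching keyword arguments is not shown in this diff:

```python
from google.protobuf import timestamp_pb2

from xai_sdk.proto.v5 import chat_pb2

# Web search restricted to a single domain, with image understanding enabled.
web_tool = chat_pb2.Tool(
    web_search=chat_pb2.WebSearch(
        allowed_domains=["example.com"],
        enable_image_understanding=True,
    )
)

# X search limited to one handle, starting from 2025-01-01 UTC, with video understanding.
x_tool = chat_pb2.Tool(
    x_search=chat_pb2.XSearch(
        allowed_x_handles=["some_handle"],
        from_date=timestamp_pb2.Timestamp(seconds=1735689600),
        enable_video_understanding=True,
    )
)

# Code execution carries no configuration.
code_tool = chat_pb2.Tool(code_execution=chat_pb2.CodeExecution())
```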
10 changes: 6 additions & 4 deletions src/xai_sdk/proto/v5/usage_pb2.py

Some generated files are not rendered by default.

25 changes: 22 additions & 3 deletions src/xai_sdk/proto/v5/usage_pb2.pyi
@@ -1,11 +1,28 @@
from google.protobuf.internal import containers as _containers
from google.protobuf.internal import enum_type_wrapper as _enum_type_wrapper
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from typing import ClassVar as _ClassVar, Optional as _Optional
from typing import ClassVar as _ClassVar, Iterable as _Iterable, Optional as _Optional, Union as _Union

DESCRIPTOR: _descriptor.FileDescriptor

class ServerSideTool(int, metaclass=_enum_type_wrapper.EnumTypeWrapper):
    __slots__ = ()
    SERVER_SIDE_TOOL_INVALID: _ClassVar[ServerSideTool]
    SERVER_SIDE_TOOL_WEB_SEARCH: _ClassVar[ServerSideTool]
    SERVER_SIDE_TOOL_X_SEARCH: _ClassVar[ServerSideTool]
    SERVER_SIDE_TOOL_CODE_EXECUTION: _ClassVar[ServerSideTool]
    SERVER_SIDE_TOOL_VIEW_IMAGE: _ClassVar[ServerSideTool]
    SERVER_SIDE_TOOL_VIEW_X_VIDEO: _ClassVar[ServerSideTool]
SERVER_SIDE_TOOL_INVALID: ServerSideTool
SERVER_SIDE_TOOL_WEB_SEARCH: ServerSideTool
SERVER_SIDE_TOOL_X_SEARCH: ServerSideTool
SERVER_SIDE_TOOL_CODE_EXECUTION: ServerSideTool
SERVER_SIDE_TOOL_VIEW_IMAGE: ServerSideTool
SERVER_SIDE_TOOL_VIEW_X_VIDEO: ServerSideTool

class SamplingUsage(_message.Message):
__slots__ = ("completion_tokens", "reasoning_tokens", "prompt_tokens", "total_tokens", "prompt_text_tokens", "cached_prompt_text_tokens", "prompt_image_tokens", "num_sources_used")
__slots__ = ("completion_tokens", "reasoning_tokens", "prompt_tokens", "total_tokens", "prompt_text_tokens", "cached_prompt_text_tokens", "prompt_image_tokens", "num_sources_used", "server_side_tools_used")
COMPLETION_TOKENS_FIELD_NUMBER: _ClassVar[int]
REASONING_TOKENS_FIELD_NUMBER: _ClassVar[int]
PROMPT_TOKENS_FIELD_NUMBER: _ClassVar[int]
Expand All @@ -14,6 +31,7 @@ class SamplingUsage(_message.Message):
CACHED_PROMPT_TEXT_TOKENS_FIELD_NUMBER: _ClassVar[int]
PROMPT_IMAGE_TOKENS_FIELD_NUMBER: _ClassVar[int]
NUM_SOURCES_USED_FIELD_NUMBER: _ClassVar[int]
SERVER_SIDE_TOOLS_USED_FIELD_NUMBER: _ClassVar[int]
completion_tokens: int
reasoning_tokens: int
prompt_tokens: int
Expand All @@ -22,7 +40,8 @@ class SamplingUsage(_message.Message):
cached_prompt_text_tokens: int
prompt_image_tokens: int
num_sources_used: int
def __init__(self, completion_tokens: _Optional[int] = ..., reasoning_tokens: _Optional[int] = ..., prompt_tokens: _Optional[int] = ..., total_tokens: _Optional[int] = ..., prompt_text_tokens: _Optional[int] = ..., cached_prompt_text_tokens: _Optional[int] = ..., prompt_image_tokens: _Optional[int] = ..., num_sources_used: _Optional[int] = ...) -> None: ...
server_side_tools_used: _containers.RepeatedScalarFieldContainer[ServerSideTool]
def __init__(self, completion_tokens: _Optional[int] = ..., reasoning_tokens: _Optional[int] = ..., prompt_tokens: _Optional[int] = ..., total_tokens: _Optional[int] = ..., prompt_text_tokens: _Optional[int] = ..., cached_prompt_text_tokens: _Optional[int] = ..., prompt_image_tokens: _Optional[int] = ..., num_sources_used: _Optional[int] = ..., server_side_tools_used: _Optional[_Iterable[_Union[ServerSideTool, str]]] = ...) -> None: ...

class EmbeddingUsage(_message.Message):
__slots__ = ("num_text_embeddings", "num_image_embeddings")