Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
21 changes: 21 additions & 0 deletions aikido_zen/sinks/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -114,3 +114,24 @@ def decorator(func, instance, args, kwargs):
return return_value

return decorator


def after_async(wrapper):
    """
    Wrap an async patch with try-except: awaits the original function first,
    then hands its return value to the patch. Patch failures never break the
    original call; only AikidoException is allowed to propagate.
    """

    async def decorator(func, instance, args, kwargs):
        # Run the original coroutine before invoking the patch
        result = await func(*args, **kwargs)
        try:
            # Give the patch a chance to inspect the call and its result
            await wrapper(func, instance, args, kwargs, result)
        except AikidoException as e:
            # Security-relevant exceptions must reach the caller
            raise e
        except Exception as e:
            # Any other patch failure is logged and swallowed on purpose
            logger.debug(
                "%s:%s wrapping-after error: %s", func.__module__, func.__name__, e
            )
        return result

    return decorator
16 changes: 15 additions & 1 deletion aikido_zen/sinks/anthropic.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
from aikido_zen.helpers.on_ai_call import on_ai_call
from aikido_zen.helpers.register_call import register_call
from aikido_zen.sinks import on_import, patch_function, after
from aikido_zen.sinks import on_import, patch_function, after, after_async


@after
Expand All @@ -16,6 +16,20 @@ def _messages_create(func, instance, args, kwargs, return_value):
)


@after_async
async def _messages_create_async(func, instance, args, kwargs, return_value):
    """
    After-patch for anthropic AsyncMessages.create: registers the operation as
    an AI call and records provider, model, and token usage from the response.
    """
    # NOTE(review): op mirrors the sync sink's operation name even though this
    # wraps AsyncMessages.create — confirm stats for both should aggregate here.
    # (Was an f-string with no placeholders; plain literal is equivalent.)
    op = "anthropic.resources.messages.messages.Messages.create"
    register_call(op, "ai_op")

    on_ai_call(
        provider="anthropic",
        model=return_value.model,
        input_tokens=return_value.usage.input_tokens,
        output_tokens=return_value.usage.output_tokens,
    )


@on_import("anthropic.resources.messages")
def patch(m):
    """Install after-patches on the sync and async Anthropic message-create entry points."""
    hooks = (
        ("messages.Messages.create", _messages_create),
        ("messages.AsyncMessages.create", _messages_create_async),
    )
    for path, hook in hooks:
        patch_function(m, path, hook)
29 changes: 27 additions & 2 deletions aikido_zen/sinks/tests/anthropic_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@
import pytest
import aikido_zen.sinks.anthropic
import anthropic
import asyncio

from aikido_zen.thread.thread_cache import get_cache

Expand All @@ -27,7 +28,7 @@ def get_ai_stats():
def test_anthropic_messages_create():
client = anthropic.Anthropic()
response = client.messages.create(
model="claude-3-opus-20240229",
model="claude-sonnet-4-20250514",
max_tokens=20,
messages=[
{
Expand All @@ -38,7 +39,31 @@ def test_anthropic_messages_create():
)
print(response)

assert get_ai_stats()[0]["model"] == "claude-3-opus-20240229"
assert get_ai_stats()[0]["model"] == "claude-sonnet-4-20250514"
assert get_ai_stats()[0]["calls"] == 1
assert get_ai_stats()[0]["provider"] == "anthropic"
assert get_ai_stats()[0]["tokens"]["input"] == 21
assert get_ai_stats()[0]["tokens"]["output"] == 20
assert get_ai_stats()[0]["tokens"]["total"] == 41


@skip_no_api_key
@pytest.mark.asyncio
async def test_anthropic_messages_create_async():
client = anthropic.AsyncAnthropic()
response = await client.messages.create(
model="claude-sonnet-4-20250514",
max_tokens=20,
messages=[
{
"role": "user",
"content": "Write the longest response possible, just as I am writing a long content",
}
],
)
print(response)

assert get_ai_stats()[0]["model"] == "claude-sonnet-4-20250514"
assert get_ai_stats()[0]["calls"] == 1
assert get_ai_stats()[0]["provider"] == "anthropic"
assert get_ai_stats()[0]["tokens"]["input"] == 21
Expand Down
10 changes: 5 additions & 5 deletions aikido_zen/sinks/tests/mistralai_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -61,16 +61,16 @@ def test_mistralai_agents_complete(mistral):
"role": "user",
},
],
agent_id="ag:e1521cc4:20250618:untitled-agent:cb1e1742",
agent_id="ag:e1521cc4:20250805:untitled-agent:498e0dd8",
)
print(res)

assert get_ai_stats()[0]["model"] == "mistral-large-2411"
assert get_ai_stats()[0]["model"] == "mistral-medium-latest"
assert get_ai_stats()[0]["calls"] == 1
assert get_ai_stats()[0]["provider"] == "mistralai"
assert get_ai_stats()[0]["tokens"]["input"] == 20
assert get_ai_stats()[0]["tokens"]["input"] == 16
assert get_ai_stats()[0]["tokens"]["output"] == 11
assert get_ai_stats()[0]["tokens"]["total"] == 31
assert get_ai_stats()[0]["tokens"]["total"] == 27


@skip_no_api_key
Expand All @@ -95,7 +95,7 @@ def test_mistralai_embeddings_create(mistral):
@skip_no_api_key
def test_mistralai_fim_complete(mistral):
res = mistral.fim.complete(
model="codestral-2405", prompt="def", suffix="return a+b"
model="codestral-2405", prompt="def", suffix="return a+b", max_tokens=6
)
print(res)

Expand Down
99 changes: 99 additions & 0 deletions aikido_zen/sinks/tests/openai_test.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,99 @@
import os

import pytest
import aikido_zen.sinks.openai

from aikido_zen.thread.thread_cache import get_cache

# Marker that skips live-API tests when no OpenAI key is configured
skip_no_api_key = pytest.mark.skipif(
    "OPENAI_API_KEY" not in os.environ,
    reason="OPENAI_API_KEY environment variable not set",
)


@pytest.fixture(autouse=True)
def setup():
    """Reset the thread cache before and after each test so AI stats don't leak between tests."""
    get_cache().reset()
    yield
    get_cache().reset()


@pytest.fixture
def client():
    """Return a fresh OpenAI client; openai is imported lazily inside the fixture."""
    # NOTE(review): importing here (after aikido_zen.sinks.openai was imported
    # at module level) presumably ensures the sink patches apply — confirm.
    import openai

    return openai.OpenAI()


def get_ai_stats():
    """Return the AI call statistics accumulated in the thread cache."""
    cache = get_cache()
    return cache.ai_stats.get_stats()


@skip_no_api_key
def test_openai_responses_create_with_vision(client):
    """A vision request via the Responses API is tracked with model and token usage."""
    question = "What is in this image?"
    image_url = "https://upload.wikimedia.org/wikipedia/commons/thumb/d/d5/2023_06_08_Raccoon1.jpg/1599px-2023_06_08_Raccoon1.jpg"
    user_message = {
        "role": "user",
        "content": [
            {"type": "input_text", "text": question},
            {"type": "input_image", "image_url": image_url},
        ],
    }

    response = client.responses.create(
        model="gpt-4o-mini",
        input=[user_message],
        max_output_tokens=25,
    )
    print(response)

    stats = get_ai_stats()[0]
    assert stats["model"] == "gpt-4o-mini-2024-07-18"
    assert stats["calls"] == 1
    assert stats["provider"] == "openai"
    assert stats["tokens"]["input"] == 36848
    assert stats["tokens"]["output"] == 25
    assert stats["tokens"]["total"] == 36873


@skip_no_api_key
def test_openai_chat_complete(client):
    """A chat completion call is tracked with provider, model, and token usage."""
    messages = [
        {"role": "developer", "content": "Talk like a pirate."},
        {
            "role": "user",
            "content": "Who is the best French painter? Answer in one short sentence.",
        },
    ]
    completion = client.chat.completions.create(
        model="gpt-4o",
        max_tokens=15,
        messages=messages,
    )
    print(completion.choices[0].message.content)

    stats = get_ai_stats()[0]
    assert stats["model"] == "gpt-4o-2024-08-06"
    assert stats["calls"] == 1
    assert stats["provider"] == "openai"
    assert stats["tokens"]["input"] == 29
    assert stats["tokens"]["output"] == 15
    assert stats["tokens"]["total"] == 44


@skip_no_api_key
def test_openai_responses_create(client):
    """A plain Responses API call is tracked with provider, model, and token usage."""
    response = client.responses.create(
        model="gpt-4o",
        instructions="You are a coding assistant that talks like a pirate.",
        input="How do I check if a Python object is an instance of a class?",
        max_output_tokens=18,
    )
    print(response.output_text)

    stats = get_ai_stats()[0]
    assert stats["model"] == "gpt-4o-2024-08-06"
    assert stats["calls"] == 1
    assert stats["provider"] == "openai"
    assert stats["tokens"]["input"] == 37
    assert stats["tokens"]["output"] == 18
    assert stats["tokens"]["total"] == 55
Loading