Skip to content

Commit 4a40240

Browse files
author
Loïc POISOT
committed
Update tests: fix breakage from the Mistral dependency update, and add tests for URL-based and binary PDF inputs
1 parent a31feec commit 4a40240

File tree

1 file changed

+80
-6
lines changed

1 file changed

+80
-6
lines changed

tests/models/test_mistral.py

Lines changed: 80 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -16,6 +16,7 @@
1616
from pydantic_ai.exceptions import ModelHTTPError, ModelRetry
1717
from pydantic_ai.messages import (
1818
BinaryContent,
19+
DocumentUrl,
1920
ImageUrl,
2021
ModelRequest,
2122
ModelResponse,
@@ -118,12 +119,12 @@ async def chat_completions_create( # pragma: lax no cover
118119

119120

120121
def completion_message(
121-
message: MistralAssistantMessage, *, usage: MistralUsageInfo | None = None, with_created: bool = True
122+
message: MistralAssistantMessage, *, usage: MistralUsageInfo | None = None, created: int = 1704067200
122123
) -> MistralChatCompletionResponse:
123124
return MistralChatCompletionResponse(
124125
id='123',
125126
choices=[MistralChatCompletionChoice(finish_reason='stop', index=0, message=message)],
126-
created=1704067200 if with_created else None, # 2024-01-01
127+
created=created,
127128
model='mistral-large-123',
128129
object='chat.completion',
129130
usage=usage or MistralUsageInfo(prompt_tokens=1, completion_tokens=1, total_tokens=1),
@@ -142,7 +143,7 @@ def chunk(
142143
MistralCompletionResponseStreamChoice(index=index, delta=delta, finish_reason=finish_reason)
143144
for index, delta in enumerate(delta)
144145
],
145-
created=1704067200 if with_created else None, # 2024-01-01
146+
created=1704067200, # 2024-01-01
146147
model='gpt-4',
147148
object='chat.completion.chunk',
148149
usage=MistralUsageInfo(prompt_tokens=1, completion_tokens=1, total_tokens=1),
@@ -187,13 +188,20 @@ def test_init():
187188

188189

189190
async def test_multiple_completions(allow_model_requests: None):
191+
from datetime import datetime, timezone
192+
190193
completions = [
194+
# First completion: created is "now" (simulate IsNow)
191195
completion_message(
192196
MistralAssistantMessage(content='world'),
193197
usage=MistralUsageInfo(prompt_tokens=1, completion_tokens=1, total_tokens=1),
194-
with_created=False,
198+
created=int(datetime.now(tz=timezone.utc).timestamp()),
199+
),
200+
# Second completion: created is fixed 2024-01-01 00:00:00 UTC
201+
completion_message(
202+
MistralAssistantMessage(content='hello again'),
203+
created=int(datetime(2024, 1, 1, 0, 0, tzinfo=timezone.utc).timestamp()),
195204
),
196-
completion_message(MistralAssistantMessage(content='hello again')),
197205
]
198206
mock_client = MockMistralAI.create_mock(completions)
199207
model = MistralModel('mistral-large-latest', provider=MistralProvider(mistral_client=mock_client))
@@ -1909,6 +1917,72 @@ async def test_image_as_binary_content_input(allow_model_requests: None):
19091917
)
19101918

19111919

1920+
async def test_pdf_url_input(allow_model_requests: None):
1921+
c = completion_message(MistralAssistantMessage(content='world', role='assistant'))
1922+
mock_client = MockMistralAI.create_mock(c)
1923+
m = MistralModel('mistral-large-latest', provider=MistralProvider(mistral_client=mock_client))
1924+
agent = Agent(m)
1925+
1926+
result = await agent.run(
1927+
[
1928+
'hello',
1929+
DocumentUrl(url='https://www.w3.org/WAI/ER/tests/xhtml/testfiles/resources/pdf/dummy.pdf'),
1930+
]
1931+
)
1932+
assert result.all_messages() == snapshot(
1933+
[
1934+
ModelRequest(
1935+
parts=[
1936+
UserPromptPart(
1937+
content=[
1938+
'hello',
1939+
DocumentUrl(url='https://www.w3.org/WAI/ER/tests/xhtml/testfiles/resources/pdf/dummy.pdf'),
1940+
],
1941+
timestamp=IsDatetime(),
1942+
)
1943+
]
1944+
),
1945+
ModelResponse(
1946+
parts=[TextPart(content='world')],
1947+
usage=Usage(requests=1, request_tokens=1, response_tokens=1, total_tokens=1),
1948+
model_name='mistral-large-123',
1949+
timestamp=IsDatetime(),
1950+
vendor_id='123',
1951+
),
1952+
]
1953+
)
1954+
1955+
1956+
async def test_pdf_as_binary_content_input(allow_model_requests: None):
1957+
c = completion_message(MistralAssistantMessage(content='world', role='assistant'))
1958+
mock_client = MockMistralAI.create_mock(c)
1959+
m = MistralModel('mistral-large-latest', provider=MistralProvider(mistral_client=mock_client))
1960+
agent = Agent(m)
1961+
1962+
base64_content = b'%PDF-1.\rtrailer<</Root<</Pages<</Kids[<</MediaBox[0 0 3 3]>>>>>>>>>'
1963+
1964+
result = await agent.run(['hello', BinaryContent(data=base64_content, media_type='application/pdf')])
1965+
assert result.all_messages() == snapshot(
1966+
[
1967+
ModelRequest(
1968+
parts=[
1969+
UserPromptPart(
1970+
content=['hello', BinaryContent(data=base64_content, media_type='application/pdf')],
1971+
timestamp=IsDatetime(),
1972+
)
1973+
]
1974+
),
1975+
ModelResponse(
1976+
parts=[TextPart(content='world')],
1977+
usage=Usage(requests=1, request_tokens=1, response_tokens=1, total_tokens=1),
1978+
model_name='mistral-large-123',
1979+
timestamp=IsDatetime(),
1980+
vendor_id='123',
1981+
),
1982+
]
1983+
)
1984+
1985+
19121986
async def test_audio_as_binary_content_input(allow_model_requests: None):
19131987
c = completion_message(MistralAssistantMessage(content='world', role='assistant'))
19141988
mock_client = MockMistralAI.create_mock(c)
@@ -1917,7 +1991,7 @@ async def test_audio_as_binary_content_input(allow_model_requests: None):
19171991

19181992
base64_content = b'//uQZ'
19191993

1920-
with pytest.raises(RuntimeError, match='Only image binary content is supported for Mistral.'):
1994+
with pytest.raises(RuntimeError, match='BinaryContent other than image or PDF is not supported in Mistral.'):
19211995
await agent.run(['hello', BinaryContent(data=base64_content, media_type='audio/wav')])
19221996

19231997

0 commit comments

Comments (0)