 from unittest.mock import patch
 
 import pytest
-from openai import OpenAI
 
 from llama_stack.core.datatypes import User
-from llama_stack.core.library_client import LlamaStackAsLibraryClient
 
 
-def test_openai_client_basic_operations(compat_client, client_with_models):
+# a fixture to skip all these tests if a files provider is not available
+@pytest.fixture(autouse=True)
+def skip_if_no_files_provider(llama_stack_client):
+    if not [provider for provider in llama_stack_client.providers.list() if provider.api == "files"]:
+        pytest.skip("No files providers found")
+
+
+def test_openai_client_basic_operations(openai_client):
     """Test basic file operations through OpenAI client."""
-    if isinstance(client_with_models, LlamaStackAsLibraryClient) and isinstance(compat_client, OpenAI):
-        pytest.skip("OpenAI files are not supported when testing with LlamaStackAsLibraryClient")
-    client = compat_client
+    from openai import NotFoundError
+
+    client = openai_client
 
     test_content = b"files test content"
 
+    uploaded_file = None
+
     try:
         # Upload file using OpenAI client
         with BytesIO(test_content) as file_buffer:
@@ -31,6 +38,7 @@ def test_openai_client_basic_operations(compat_client, client_with_models):
         # Verify basic response structure
         assert uploaded_file.id.startswith("file-")
         assert hasattr(uploaded_file, "filename")
+        assert uploaded_file.filename == "openai_test.txt"
 
         # List files
         files_list = client.files.list()
@@ -43,37 +51,41 @@ def test_openai_client_basic_operations(compat_client, client_with_models):
 
         # Retrieve file content - OpenAI client returns httpx Response object
         content_response = client.files.content(uploaded_file.id)
-        # The response is an httpx Response object with .content attribute containing bytes
-        if isinstance(content_response, str):
-            # Llama Stack Client returns a str
-            # TODO: fix Llama Stack Client
-            content = bytes(content_response, "utf-8")
-        else:
-            content = content_response.content
-        assert content == test_content
+        assert content_response.content == test_content
 
         # Delete file
         delete_response = client.files.delete(uploaded_file.id)
         assert delete_response.deleted is True
 
-    except Exception as e:
-        # Cleanup in case of failure
-        try:
+        # Retrieve file should fail
+        with pytest.raises(NotFoundError, match="not found"):
+            client.files.retrieve(uploaded_file.id)
+
+        # File should not be found in listing
+        files_list = client.files.list()
+        file_ids = [f.id for f in files_list.data]
+        assert uploaded_file.id not in file_ids
+
+        # Double delete should fail
+        with pytest.raises(NotFoundError, match="not found"):
             client.files.delete(uploaded_file.id)
-        except Exception:
-            pass
-        raise e
 
+    finally:
+        # Cleanup in case of failure
+        if uploaded_file is not None:
+            try:
+                client.files.delete(uploaded_file.id)
+            except NotFoundError:
+                pass  # ignore 404
 
+
+@pytest.mark.xfail(reason="User isolation broken for current providers, must be fixed.")
 @patch("llama_stack.providers.utils.sqlstore.authorized_sqlstore.get_authenticated_user")
-def test_files_authentication_isolation(mock_get_authenticated_user, compat_client, client_with_models):
+def test_files_authentication_isolation(mock_get_authenticated_user, llama_stack_client):
     """Test that users can only access their own files."""
-    if isinstance(client_with_models, LlamaStackAsLibraryClient) and isinstance(compat_client, OpenAI):
-        pytest.skip("OpenAI files are not supported when testing with LlamaStackAsLibraryClient")
-    if not isinstance(client_with_models, LlamaStackAsLibraryClient):
-        pytest.skip("Authentication tests require LlamaStackAsLibraryClient (library mode)")
+    from llama_stack_client import NotFoundError
 
-    client = compat_client
+    client = llama_stack_client
 
     # Create two test users
     user1 = User("user1", {"roles": ["user"], "teams": ["team-a"]})
@@ -117,7 +129,7 @@ def test_files_authentication_isolation(mock_get_authenticated_user, compat_clie
 
     # User 1 cannot retrieve user2's file
     mock_get_authenticated_user.return_value = user1
-    with pytest.raises(ValueError, match="not found"):
+    with pytest.raises(NotFoundError, match="not found"):
         client.files.retrieve(user2_file.id)
 
     # User 1 can access their file content
@@ -131,7 +143,7 @@ def test_files_authentication_isolation(mock_get_authenticated_user, compat_clie
 
     # User 1 cannot access user2's file content
     mock_get_authenticated_user.return_value = user1
-    with pytest.raises(ValueError, match="not found"):
+    with pytest.raises(NotFoundError, match="not found"):
         client.files.content(user2_file.id)
 
     # User 1 can delete their own file
@@ -141,7 +153,7 @@ def test_files_authentication_isolation(mock_get_authenticated_user, compat_clie
 
     # User 1 cannot delete user2's file
     mock_get_authenticated_user.return_value = user1
-    with pytest.raises(ValueError, match="not found"):
+    with pytest.raises(NotFoundError, match="not found"):
         client.files.delete(user2_file.id)
 
     # User 2 can still access their file after user1's file is deleted
@@ -169,14 +181,9 @@ def test_files_authentication_isolation(mock_get_authenticated_user, compat_clie
 
 
 @patch("llama_stack.providers.utils.sqlstore.authorized_sqlstore.get_authenticated_user")
-def test_files_authentication_shared_attributes(mock_get_authenticated_user, compat_client, client_with_models):
+def test_files_authentication_shared_attributes(mock_get_authenticated_user, llama_stack_client):
     """Test access control with users having identical attributes."""
-    if isinstance(client_with_models, LlamaStackAsLibraryClient) and isinstance(compat_client, OpenAI):
-        pytest.skip("OpenAI files are not supported when testing with LlamaStackAsLibraryClient")
-    if not isinstance(client_with_models, LlamaStackAsLibraryClient):
-        pytest.skip("Authentication tests require LlamaStackAsLibraryClient (library mode)")
-
-    client = compat_client
+    client = llama_stack_client
 
     # Create users with identical attributes (required for default policy)
     user_a = User("user-a", {"roles": ["user"], "teams": ["shared-team"]})
@@ -231,14 +238,8 @@ def test_files_authentication_shared_attributes(mock_get_authenticated_user, com
 
 
 @patch("llama_stack.providers.utils.sqlstore.authorized_sqlstore.get_authenticated_user")
-def test_files_authentication_anonymous_access(mock_get_authenticated_user, compat_client, client_with_models):
-    """Test anonymous user behavior when no authentication is present."""
-    if isinstance(client_with_models, LlamaStackAsLibraryClient) and isinstance(compat_client, OpenAI):
-        pytest.skip("OpenAI files are not supported when testing with LlamaStackAsLibraryClient")
-    if not isinstance(client_with_models, LlamaStackAsLibraryClient):
-        pytest.skip("Authentication tests require LlamaStackAsLibraryClient (library mode)")
-
-    client = compat_client
+def test_files_authentication_anonymous_access(mock_get_authenticated_user, llama_stack_client):
+    client = llama_stack_client
 
     # Simulate anonymous user (no authentication)
     mock_get_authenticated_user.return_value = None
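
Note: the rewritten tests rely on an `openai_client` fixture defined elsewhere in the suite's conftest. Purely as an illustrative sketch (not the suite's actual fixture), such a fixture could point the stock OpenAI client at a running Llama Stack server; the server address, port, API key, and `/v1/openai/v1` path below are assumptions:

```python
# Hypothetical sketch only -- the real openai_client fixture lives in the shared
# conftest. The base URL path and port here are assumptions, not confirmed.
import pytest
from openai import OpenAI


@pytest.fixture
def openai_client():
    # Llama Stack is assumed to expose an OpenAI-compatible API at this base URL;
    # the Files endpoints exercised above (files.create/list/retrieve/content/delete)
    # are part of that surface.
    return OpenAI(base_url="http://localhost:8321/v1/openai/v1", api_key="none")
```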