
Commit 6b978ad

mjvogelsong and Auto-format Bot authored
Add IQ metadata (#132)
Co-authored-by: Auto-format Bot <[email protected]>
1 parent b79c307 commit 6b978ad

11 files changed: 171 additions, 6 deletions

Makefile

Lines changed: 1 addition & 0 deletions
@@ -94,6 +94,7 @@ docs-comprehensive: apidocs
	cd docs && npm run build

apidocs:
+	cd docs && npm install
	poetry run make html

html:

generated/docs/ImageQueriesApi.md

Lines changed: 3 additions & 1 deletion
@@ -208,6 +208,7 @@ with openapi_client.ApiClient(configuration) as api_client:
    human_review = "human_review_example" # str | If set to `DEFAULT`, use the regular escalation logic (i.e., send the image query for human review if the ML model is not confident). If set to `ALWAYS`, always send the image query for human review even if the ML model is confident. If set to `NEVER`, never send the image query for human review even if the ML model is not confident. (optional)
    patience_time = 3.14 # float | How long to wait for a confident response. (optional)
    want_async = "want_async_example" # str | If \"true\" then submitting an image query returns immediately without a result. The result will be computed asynchronously and can be retrieved later. (optional)
+    metadata = "metadata_example" # str | A dictionary of custom key/value metadata to associate with the image query (limited to 1KB). (optional)
    body = open('@path/to/image.jpeg', 'rb') # file_type | (optional)

    # example passing only required values which don't have defaults set
@@ -220,7 +221,7 @@ with openapi_client.ApiClient(configuration) as api_client:
    # example passing only required values which don't have defaults set
    # and optional values
    try:
-        api_response = api_instance.submit_image_query(detector_id, human_review=human_review, patience_time=patience_time, want_async=want_async, body=body)
+        api_response = api_instance.submit_image_query(detector_id, human_review=human_review, patience_time=patience_time, want_async=want_async, metadata=metadata, body=body)
        pprint(api_response)
    except openapi_client.ApiException as e:
        print("Exception when calling ImageQueriesApi->submit_image_query: %s\n" % e)
@@ -235,6 +236,7 @@ Name | Type | Description | Notes
**human_review** | **str**| If set to &#x60;DEFAULT&#x60;, use the regular escalation logic (i.e., send the image query for human review if the ML model is not confident). If set to &#x60;ALWAYS&#x60;, always send the image query for human review even if the ML model is confident. If set to &#x60;NEVER&#x60;, never send the image query for human review even if the ML model is not confident. | [optional]
**patience_time** | **float**| How long to wait for a confident response. | [optional]
**want_async** | **str**| If \&quot;true\&quot; then submitting an image query returns immediately without a result. The result will be computed asynchronously and can be retrieved later. | [optional]
+**metadata** | **str**| A dictionary of custom key/value metadata to associate with the image query (limited to 1KB). | [optional]
**body** | **file_type**| | [optional]

### Return type
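At this generated-client layer, `metadata` is just another string query parameter. Based on the SDK changes elsewhere in this commit (`src/groundlight/client.py` and `src/groundlight/encodings.py`), that string carries a URL-safe, base64-encoded JSON dictionary rather than free-form text. A minimal sketch of producing such a value, where `{"camera": "front-door"}` is an arbitrary example payload:

```python
import base64
import json
from urllib.parse import urlencode

payload = {"camera": "front-door"}  # arbitrary example metadata

# Encode the dict the same way the high-level SDK does before handing it to this client:
# JSON-serialize, then URL-safe base64-encode.
metadata = base64.urlsafe_b64encode(json.dumps(payload).encode("utf-8")).decode("utf-8")

# This is roughly what lands in the request's query string alongside detector_id, want_async, etc.
print(urlencode({"metadata": metadata}))
```

The resulting string can be passed as the `metadata` argument in the `submit_image_query` example above.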

generated/docs/ImageQuery.md

Lines changed: 1 addition & 0 deletions
@@ -12,6 +12,7 @@ Name | Type | Description | Notes
**detector_id** | **str** | Which detector was used on this image query? | [readonly]
**result_type** | **bool, date, datetime, dict, float, int, list, str, none_type** | What type of result are we returning? | [readonly]
**result** | **bool, date, datetime, dict, float, int, list, str, none_type** | | [optional] [readonly]
+**metadata** | **{str: (bool, date, datetime, dict, float, int, list, str, none_type)}, none_type** | A dictionary of custom key/value metadata to associate with the image query (limited to 1KB). | [optional] [readonly]
**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional]

[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)

generated/model.py

Lines changed: 6 additions & 2 deletions
@@ -1,12 +1,12 @@
# generated by datamodel-codegen:
#   filename: public-api.yaml
-#   timestamp: 2023-10-16T23:29:00+00:00
+#   timestamp: 2023-11-09T05:00:29+00:00

from __future__ import annotations

from datetime import datetime
from enum import Enum
-from typing import List, Optional
+from typing import Any, Dict, List, Optional

from pydantic import AnyUrl, BaseModel, Field, confloat, constr

@@ -70,6 +70,10 @@ class ImageQuery(BaseModel):
    detector_id: str = Field(..., description="Which detector was used on this image query?")
    result_type: ResultTypeEnum = Field(..., description="What type of result are we returning?")
    result: Optional[ClassificationResult] = None
+    metadata: Optional[Dict[str, Any]] = Field(
+        None,
+        description="A dictionary of custom key/value metadata to associate with the image query (limited to 1KB).",
+    )


class PaginatedDetectorList(BaseModel):

generated/openapi_client/api/image_queries_api.py

Lines changed: 5 additions & 0 deletions
@@ -134,6 +134,7 @@ def __init__(self, api_client=None):
                    "human_review",
                    "patience_time",
                    "want_async",
+                    "metadata",
                    "body",
                ],
                "required": [
@@ -151,19 +152,22 @@
                    "human_review": (str,),
                    "patience_time": (float,),
                    "want_async": (str,),
+                    "metadata": (str,),
                    "body": (file_type,),
                },
                "attribute_map": {
                    "detector_id": "detector_id",
                    "human_review": "human_review",
                    "patience_time": "patience_time",
                    "want_async": "want_async",
+                    "metadata": "metadata",
                },
                "location_map": {
                    "detector_id": "query",
                    "human_review": "query",
                    "patience_time": "query",
                    "want_async": "query",
+                    "metadata": "query",
                    "body": "body",
                },
                "collection_format_map": {},
@@ -304,6 +308,7 @@ def submit_image_query(self, detector_id, **kwargs):
            human_review (str): If set to `DEFAULT`, use the regular escalation logic (i.e., send the image query for human review if the ML model is not confident). If set to `ALWAYS`, always send the image query for human review even if the ML model is confident. If set to `NEVER`, never send the image query for human review even if the ML model is not confident. . [optional]
            patience_time (float): How long to wait for a confident response.. [optional]
            want_async (str): If \"true\" then submitting an image query returns immediately without a result. The result will be computed asynchronously and can be retrieved later.. [optional]
+            metadata (str): A dictionary of custom key/value metadata to associate with the image query (limited to 1KB).. [optional]
            body (file_type): [optional]
            _return_http_data_only (bool): response data without head status
                code and headers. Default is True.

generated/openapi_client/model/image_query.py

Lines changed: 8 additions & 0 deletions
@@ -138,6 +138,10 @@ def openapi_types():
                str,
                none_type,
            ),  # noqa: E501
+            "metadata": (
+                {str: (bool, date, datetime, dict, float, int, list, str, none_type)},
+                none_type,
+            ),  # noqa: E501
        }

    @cached_property
@@ -152,6 +156,7 @@ def discriminator():
        "detector_id": "detector_id",  # noqa: E501
        "result_type": "result_type",  # noqa: E501
        "result": "result",  # noqa: E501
+        "metadata": "metadata",  # noqa: E501
    }

    read_only_vars = {
@@ -162,6 +167,7 @@ def discriminator():
        "detector_id",  # noqa: E501
        "result_type",  # noqa: E501
        "result",  # noqa: E501
+        "metadata",  # noqa: E501
    }

    _composed_schemas = {}
@@ -211,6 +217,7 @@ def _from_openapi_data(cls, id, type, created_at, query, detector_id, result_typ
                through its discriminator because we passed in
                _visited_composed_classes = (Animal,)
            result (bool, date, datetime, dict, float, int, list, str, none_type): [optional]  # noqa: E501
+            metadata ({str: (bool, date, datetime, dict, float, int, list, str, none_type)}, none_type): A dictionary of custom key/value metadata to associate with the image query (limited to 1KB).. [optional]  # noqa: E501
        """

        _check_type = kwargs.pop("_check_type", True)
@@ -304,6 +311,7 @@ def __init__(self, *args, **kwargs):  # noqa: E501
                through its discriminator because we passed in
                _visited_composed_classes = (Animal,)
            result (bool, date, datetime, dict, float, int, list, str, none_type): [optional]  # noqa: E501
+            metadata ({str: (bool, date, datetime, dict, float, int, list, str, none_type)}, none_type): A dictionary of custom key/value metadata to associate with the image query (limited to 1KB).. [optional]  # noqa: E501
        """

        _check_type = kwargs.pop("_check_type", True)

pyproject.toml

Lines changed: 1 addition & 1 deletion
@@ -9,7 +9,7 @@ packages = [
    {include = "**/*.py", from = "src"},
]
readme = "README.md"
-version = "0.12.1"
+version = "0.13.0"

[tool.poetry.dependencies]
# For certifi, use ">=" instead of "^" since it upgrades its "major version" every year, not really following semver

spec/public-api.yaml

Lines changed: 15 additions & 0 deletions
@@ -153,6 +153,14 @@ paths:
          schema:
            type: string
          description: If "true" then submitting an image query returns immediately without a result. The result will be computed asynchronously and can be retrieved later.
+        - in: query
+          name: metadata
+          schema:
+            type: string
+          required: false
+          description:
+            A dictionary of custom key/value metadata to associate with the image
+            query (limited to 1KB).
      tags:
        - image-queries
      requestBody:
@@ -339,6 +347,13 @@ components:
          allOf:
            - $ref: "#/components/schemas/ClassificationResult"
          readOnly: true
+        metadata:
+          type: object
+          readOnly: true
+          nullable: true
+          description:
+            A dictionary of custom key/value metadata to associate with the image
+            query (limited to 1KB).
      required:
        - created_at
        - detector_id

src/groundlight/client.py

Lines changed: 35 additions & 1 deletion
@@ -13,6 +13,7 @@

from groundlight.binary_labels import Label, convert_display_label_to_internal, convert_internal_label_to_display
from groundlight.config import API_TOKEN_HELP_MESSAGE, API_TOKEN_VARIABLE_NAME
+from groundlight.encodings import url_encode_dict
from groundlight.images import ByteStreamWrapper, parse_supported_image_types
from groundlight.internalapi import (
    GroundlightApiClient,
@@ -289,6 +290,7 @@ def submit_image_query(  # noqa: PLR0913 # pylint: disable=too-many-arguments, t
        human_review: Optional[str] = None,
        want_async: bool = False,
        inspection_id: Optional[str] = None,
+        metadata: Union[dict, str, None] = None,
    ) -> ImageQuery:
        """
        Evaluates an image with Groundlight.
@@ -334,6 +336,11 @@ def submit_image_query(  # noqa: PLR0913 # pylint: disable=too-many-arguments, t
            this is the ID of the inspection to associate with the image query.
        :type inspection_id: str

+        :param metadata: A dictionary or JSON string of custom key/value metadata to associate with
+            the image query (limited to 1KB). You can retrieve this metadata later by calling
+            `get_image_query()`.
+        :type metadata: dict or str
+
        :return: ImageQuery
        :rtype: ImageQuery
        """
@@ -360,6 +367,12 @@ def submit_image_query(  # noqa: PLR0913 # pylint: disable=too-many-arguments, t
            )
        params["want_async"] = str(bool(want_async))

+        if metadata is not None:
+            # Currently, our backend server puts the image in the body data of the API request,
+            # which means we need to put the metadata in the query string. To do that safely, we
+            # url- and base64-encode the metadata.
+            params["metadata"] = url_encode_dict(metadata, name="metadata", size_limit_bytes=1024)
+
        # If no inspection_id is provided, we submit the image query using image_queries_api (autogenerated via OpenAPI)
        # However, our autogenerated code does not currently support inspection_id, so if an inspection_id was
        # provided, we use the private API client instead.
@@ -380,12 +393,13 @@ def submit_image_query(  # noqa: PLR0913 # pylint: disable=too-many-arguments, t

        return self._fixup_image_query(image_query)

-    def ask_confident(
+    def ask_confident(  # noqa: PLR0913 # pylint: disable=too-many-arguments
        self,
        detector: Union[Detector, str],
        image: Union[str, bytes, Image.Image, BytesIO, BufferedReader, np.ndarray],
        confidence_threshold: Optional[float] = None,
        wait: Optional[float] = None,
+        metadata: Union[dict, str, None] = None,
    ) -> ImageQuery:
        """
        Evaluates an image with Groundlight waiting until an answer above the confidence threshold
@@ -411,6 +425,11 @@ def ask_confident(
        :param wait: How long to wait (in seconds) for a confident answer.
        :type wait: float

+        :param metadata: A dictionary or JSON string of custom key/value metadata to associate with
+            the image query (limited to 1KB). You can retrieve this metadata later by calling
+            `get_image_query()`.
+        :type metadata: dict or str
+
        :return: ImageQuery
        :rtype: ImageQuery
        """
@@ -421,13 +440,15 @@
            wait=wait,
            patience_time=wait,
            human_review=None,
+            metadata=metadata,
        )

    def ask_ml(
        self,
        detector: Union[Detector, str],
        image: Union[str, bytes, Image.Image, BytesIO, BufferedReader, np.ndarray],
        wait: Optional[float] = None,
+        metadata: Union[dict, str, None] = None,
    ) -> ImageQuery:
        """
        Evaluates an image with Groundlight, getting the first answer Groundlight can provide.
@@ -448,13 +469,19 @@ def ask_ml(
        :param wait: How long to wait (in seconds) for any answer.
        :type wait: float

+        :param metadata: A dictionary or JSON string of custom key/value metadata to associate with
+            the image query (limited to 1KB). You can retrieve this metadata later by calling
+            `get_image_query()`.
+        :type metadata: dict or str
+
        :return: ImageQuery
        :rtype: ImageQuery
        """
        iq = self.submit_image_query(
            detector,
            image,
            wait=0,
+            metadata=metadata,
        )
        if iq_is_answered(iq):
            return iq
@@ -468,6 +495,7 @@ def ask_async(  # noqa: PLR0913 # pylint: disable=too-many-arguments
        patience_time: Optional[float] = None,
        confidence_threshold: Optional[float] = None,
        human_review: Optional[str] = None,
+        metadata: Union[dict, str, None] = None,
    ) -> ImageQuery:
        """
        Convenience method for submitting an `ImageQuery` asynchronously. This is equivalent to calling
@@ -509,6 +537,11 @@ def ask_async(  # noqa: PLR0913 # pylint: disable=too-many-arguments
            this is the ID of the inspection to associate with the image query.
        :type inspection_id: str

+        :param metadata: A dictionary or JSON string of custom key/value metadata to associate with
+            the image query (limited to 1KB). You can retrieve this metadata later by calling
+            `get_image_query()`.
+        :type metadata: dict or str
+
        :return: ImageQuery
        :rtype: ImageQuery

@@ -552,6 +585,7 @@ def ask_async(  # noqa: PLR0913 # pylint: disable=too-many-arguments
            confidence_threshold=confidence_threshold,
            human_review=human_review,
            want_async=True,
+            metadata=metadata,
        )

    def wait_for_confident_result(
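For context, here is a minimal usage sketch of the new parameter as exposed by the SDK. The detector ID and image path are placeholders, and a Groundlight API token is assumed to be configured in the environment:

```python
from groundlight import Groundlight

gl = Groundlight()  # picks up the API token from the environment

iq = gl.submit_image_query(
    detector="det_abc123",    # placeholder detector ID
    image="front-door.jpg",   # placeholder image path
    metadata={"camera": "front-door", "shift": "night"},  # dict or JSON string, limited to 1KB
)

# The metadata is stored with the image query and can be read back later, per the docstrings above.
retrieved = gl.get_image_query(iq.id)
print(retrieved.metadata)
```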

src/groundlight/encodings.py

Lines changed: 45 additions & 0 deletions
@@ -0,0 +1,45 @@
+import base64
+import json
+import sys
+from typing import Dict, Optional, Union
+
+
+def url_encode_dict(maybe_dict: Union[Dict, str], name: str, size_limit_bytes: Optional[int] = None) -> str:
+    """Encode a dictionary as a URL-safe, base64-encoded JSON string.
+
+    :param maybe_dict: The dictionary or JSON string to encode.
+    :type maybe_dict: dict or str
+
+    :param name: The name of the dictionary, for use in the error message.
+    :type name: str
+
+    :param size_limit_bytes: The maximum size of the dictionary, in bytes.
+        If `None`, no size limit is enforced.
+    :type size_limit_bytes: int or None
+
+    :raises TypeError: If `maybe_dict` is not a dictionary or JSON string.
+    :raises ValueError: If `maybe_dict` is too large.
+
+    :return: The URL-safe, base64-encoded JSON string.
+    :rtype: str
+    """
+    original_type = type(maybe_dict)
+    if isinstance(maybe_dict, str):
+        try:
+            # It's a little inefficient to parse the JSON string, just to re-encode it later. But it
+            # allows us to check that we get a valid dictionary, and we remove any whitespace.
+            maybe_dict = json.loads(maybe_dict)
+        except json.JSONDecodeError as e:
+            raise TypeError(f"`{name}` must be a dictionary or JSON string: got {original_type}") from e
+
+    if not isinstance(maybe_dict, dict):
+        raise TypeError(f"`{name}` must be a dictionary or JSON string: got {original_type}")
+
+    data_json = json.dumps(maybe_dict)
+
+    if size_limit_bytes is not None:
+        size_bytes = sys.getsizeof(data_json)
+        if size_bytes > size_limit_bytes:
+            raise ValueError(f"`{name}` is too large: {size_bytes} bytes > {size_limit_bytes} bytes limit.")
+
+    return base64.urlsafe_b64encode(data_json.encode("utf-8")).decode("utf-8")
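A quick behavioral sketch of `url_encode_dict`, mirroring how `client.py` calls it (name="metadata", size_limit_bytes=1024). The decode step is only to illustrate what the server receives:

```python
import base64
import json

from groundlight.encodings import url_encode_dict

# Dicts and JSON strings are both accepted; the result is URL-safe base64-encoded JSON.
encoded = url_encode_dict({"camera": "front-door"}, name="metadata", size_limit_bytes=1024)
assert json.loads(base64.urlsafe_b64decode(encoded)) == {"camera": "front-door"}
assert url_encode_dict('{"camera": "front-door"}', name="metadata") == encoded

# Anything that isn't a dict or valid JSON string raises TypeError; oversized payloads raise ValueError.
try:
    url_encode_dict({"blob": "x" * 2000}, name="metadata", size_limit_bytes=1024)
except ValueError as err:
    print(err)
```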
