Skip to content

Update Radiology Insights SDK from local TypeSpec #41963

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Closed
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -48,6 +48,7 @@
"azure.healthinsights.radiologyinsights.models.QualityMeasureOptions": "AzureHealthInsights.QualityMeasureOptions",
"azure.healthinsights.radiologyinsights.models.Quantity": "Fhir.R4.Quantity",
"azure.healthinsights.radiologyinsights.models.RadiologyCodeWithTypes": "AzureHealthInsights.RadiologyCodeWithTypes",
"azure.healthinsights.radiologyinsights.models.RadiologyInsightsCustomInferenceResult": "AzureHealthInsights.RadiologyInsightsCustomInferenceResponse",
"azure.healthinsights.radiologyinsights.models.RadiologyInsightsData": "AzureHealthInsights.RadiologyInsightsData",
"azure.healthinsights.radiologyinsights.models.RadiologyInsightsInferenceOptions": "AzureHealthInsights.RadiologyInsightsInferenceOptions",
"azure.healthinsights.radiologyinsights.models.RadiologyInsightsInferenceResult": "AzureHealthInsights.RadiologyInsightsInferenceResult",
Expand Down Expand Up @@ -83,6 +84,8 @@
"azure.healthinsights.radiologyinsights.models.ScoringAndAssessmentCategoryType": "AzureHealthInsights.ScoringAndAssessmentCategoryType",
"azure.healthinsights.radiologyinsights.models.JobStatus": "AzureHealthInsights.JobStatus",
"azure.healthinsights.radiologyinsights.RadiologyInsightsClient.begin_infer_radiology_insights": "ClientForAzureHealthInsights.RadiologyInsightsClient.inferRadiologyInsights",
"azure.healthinsights.radiologyinsights.aio.RadiologyInsightsClient.begin_infer_radiology_insights": "ClientForAzureHealthInsights.RadiologyInsightsClient.inferRadiologyInsights"
"azure.healthinsights.radiologyinsights.aio.RadiologyInsightsClient.begin_infer_radiology_insights": "ClientForAzureHealthInsights.RadiologyInsightsClient.inferRadiologyInsights",
"azure.healthinsights.radiologyinsights.RadiologyInsightsClient.begin_custom_inference": "ClientForAzureHealthInsights.RadiologyInsightsClient.inferFromCustomModelId",
"azure.healthinsights.radiologyinsights.aio.RadiologyInsightsClient.begin_custom_inference": "ClientForAzureHealthInsights.RadiologyInsightsClient.inferFromCustomModelId"
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -16,14 +16,14 @@
from azure.core.rest import HttpRequest, HttpResponse

from ._configuration import RadiologyInsightsClientConfiguration
from ._operations import RadiologyInsightsClientOperationsMixin
from ._operations._operations import _RadiologyInsightsClientOperationsMixin
from ._utils.serialization import Deserializer, Serializer

if TYPE_CHECKING:
from azure.core.credentials import TokenCredential


class RadiologyInsightsClient(RadiologyInsightsClientOperationsMixin):
class RadiologyInsightsClient(_RadiologyInsightsClientOperationsMixin):
"""RadiologyInsightsClient.

:param endpoint: Supported Cognitive Services endpoints (protocol and hostname, for example:
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -12,14 +12,11 @@
if TYPE_CHECKING:
from ._patch import * # pylint: disable=unused-wildcard-import

from ._operations import RadiologyInsightsClientOperationsMixin # type: ignore

from ._patch import __all__ as _patch_all
from ._patch import *
from ._patch import patch_sdk as _patch_sdk

__all__ = [
"RadiologyInsightsClientOperationsMixin",
]
__all__ = []
__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore
_patch_sdk()
Original file line number Diff line number Diff line change
@@ -1,11 +1,15 @@
# pylint: disable=line-too-long,useless-suppression
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) Python Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from collections.abc import MutableMapping
try:
from collections.abc import MutableMapping
except ImportError:
from collections import MutableMapping # pylint: disable=deprecated-class
from io import IOBase
import json
from typing import Any, Callable, Dict, IO, Iterator, List, Optional, TypeVar, Union, cast, overload
Expand Down Expand Up @@ -35,6 +39,7 @@
from .._utils.utils import ClientMixinABC

JSON = MutableMapping[str, Any]
_Unset: Any = object()
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]

Expand Down Expand Up @@ -73,7 +78,31 @@ def build_radiology_insights_infer_radiology_insights_request( # pylint: disabl
return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs)


class RadiologyInsightsClientOperationsMixin(
def build_radiology_insights_begin_custom_inference_request(  # pylint: disable=name-too-long
    **kwargs: Any,
) -> HttpRequest:
    """Build the POST request for the custom-model inference operation.

    Pops ``headers``, ``params``, ``content_type`` and ``api_version`` out of
    *kwargs* (remaining kwargs are forwarded to :class:`HttpRequest`), fills in
    the ``api-version`` query parameter and the ``Content-Type``/``Accept``
    headers, and targets the fixed ``/radiology-insights/custom`` path.
    """
    headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    # Caller-supplied values win; otherwise fall back to the defaults below.
    content_type: Optional[str] = kwargs.pop("content_type", headers.pop("Content-Type", None))
    api_version: str = kwargs.pop("api_version", params.pop("api-version", "2024-10-01"))
    accept = headers.pop("Accept", "application/json")

    # Query string
    params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")

    # Headers — Content-Type is only set when a body content type is known.
    if content_type is not None:
        headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
    headers["Accept"] = _SERIALIZER.header("accept", accept, "str")

    return HttpRequest(
        method="POST",
        url="/radiology-insights/custom",
        params=params,
        headers=headers,
        **kwargs,
    )


class _RadiologyInsightsClientOperationsMixin(
ClientMixinABC[PipelineClient[HttpRequest, HttpResponse], RadiologyInsightsClientConfiguration]
):

Expand All @@ -83,7 +112,7 @@ def _infer_radiology_insights_initial(
resource: Union[_models.RadiologyInsightsJob, JSON, IO[bytes]],
*,
expand: Optional[List[str]] = None,
**kwargs: Any
**kwargs: Any,
) -> Iterator[bytes]:
error_map: MutableMapping = {
401: ClientAuthenticationError,
Expand Down Expand Up @@ -155,7 +184,7 @@ def begin_infer_radiology_insights(
*,
expand: Optional[List[str]] = None,
content_type: str = "application/json",
**kwargs: Any
**kwargs: Any,
) -> LROPoller[_models.RadiologyInsightsJob]:
"""Create Radiology Insights job.

Expand Down Expand Up @@ -185,7 +214,7 @@ def begin_infer_radiology_insights(
*,
expand: Optional[List[str]] = None,
content_type: str = "application/json",
**kwargs: Any
**kwargs: Any,
) -> LROPoller[_models.RadiologyInsightsJob]:
"""Create Radiology Insights job.

Expand Down Expand Up @@ -215,7 +244,7 @@ def begin_infer_radiology_insights(
*,
expand: Optional[List[str]] = None,
content_type: str = "application/json",
**kwargs: Any
**kwargs: Any,
) -> LROPoller[_models.RadiologyInsightsJob]:
"""Create Radiology Insights job.

Expand Down Expand Up @@ -244,7 +273,7 @@ def begin_infer_radiology_insights(
resource: Union[_models.RadiologyInsightsJob, JSON, IO[bytes]],
*,
expand: Optional[List[str]] = None,
**kwargs: Any
**kwargs: Any,
) -> LROPoller[_models.RadiologyInsightsJob]:
"""Create Radiology Insights job.

Expand Down Expand Up @@ -281,7 +310,7 @@ def begin_infer_radiology_insights(
cls=lambda x, y, z: x,
headers=_headers,
params=_params,
**kwargs
**kwargs,
)
raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)
Expand Down Expand Up @@ -321,3 +350,219 @@ def get_long_running_output(pipeline_response):
return LROPoller[_models.RadiologyInsightsJob](
self._client, raw_result, get_long_running_output, polling_method # type: ignore
)

def _begin_custom_inference_initial(
    self,
    body: Union[JSON, IO[bytes]] = _Unset,
    *,
    inference_data: _models.RadiologyInsightsData = _Unset,
    custom_model_ids: Optional[List[str]] = None,
    **kwargs: Any,
) -> Iterator[bytes]:
    """Send the initial POST that starts the custom-inference long-running operation.

    Accepts either a pre-built ``body`` (JSON mapping or raw bytes/stream) or the
    keyword form (``inference_data`` + optional ``custom_model_ids``) from which a
    JSON body is assembled. Expects a 202 Accepted response; the response body is
    returned as a byte iterator and the ``Operation-Location`` header is surfaced
    through the optional ``cls`` callback for the poller to use.

    :raises TypeError: if neither ``body`` nor ``inference_data`` was provided.
    :raises ~azure.core.exceptions.HttpResponseError: on any non-202 status.
    """
    error_map: MutableMapping = {
        401: ClientAuthenticationError,
        404: ResourceNotFoundError,
        409: ResourceExistsError,
        304: ResourceNotModifiedError,
    }
    # Per-call overrides of the status-code -> exception mapping.
    error_map.update(kwargs.pop("error_map", {}) or {})

    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = kwargs.pop("params", {}) or {}

    content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
    cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None)

    if body is _Unset:
        # Keyword form: inference_data is mandatory when no explicit body is given.
        if inference_data is _Unset:
            raise TypeError("missing required argument: inference_data")
        # NOTE(review): the wire key for the model-id list is "id" — looks odd for a
        # list of model ids; confirm against the TypeSpec / service contract.
        body = {"id": custom_model_ids, "inferenceData": inference_data}
        # Drop optional keys that were left as None so they are omitted on the wire.
        body = {k: v for k, v in body.items() if v is not None}
    content_type = content_type or "application/json"
    _content = None
    # Raw streams/bytes pass through untouched; mappings are JSON-serialized.
    if isinstance(body, (IOBase, bytes)):
        _content = body
    else:
        _content = json.dumps(body, cls=SdkJSONEncoder, exclude_readonly=True)  # type: ignore

    _request = build_radiology_insights_begin_custom_inference_request(
        content_type=content_type,
        api_version=self._config.api_version,
        content=_content,
        headers=_headers,
        params=_params,
    )
    # Expand the client endpoint into the request's relative URL.
    path_format_arguments = {
        "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
    }
    _request.url = self._client.format_url(_request.url, **path_format_arguments)

    # Stream the response so the LRO machinery can read it on demand.
    _stream = True
    pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
        _request, stream=_stream, **kwargs
    )

    response = pipeline_response.http_response

    if response.status_code not in [202]:
        try:
            response.read()  # Load the body in memory and close the socket
        except (StreamConsumedError, StreamClosedError):
            pass
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise HttpResponseError(response=response)

    # Operation-Location points the poller at the status-monitor URL.
    response_headers = {}
    response_headers["Operation-Location"] = self._deserialize("str", response.headers.get("Operation-Location"))

    deserialized = response.iter_bytes()

    if cls:
        return cls(pipeline_response, deserialized, response_headers)  # type: ignore

    return deserialized  # type: ignore

# Overload: keyword form — pass the model input as inference_data (+ optional model ids).
@overload
def begin_custom_inference(
    self,
    *,
    inference_data: _models.RadiologyInsightsData,
    content_type: str = "application/json",
    custom_model_ids: Optional[List[str]] = None,
    **kwargs: Any,
) -> LROPoller[_models.RadiologyInsightsCustomInferenceResult]:
    """Infer radiology insights using a custom model.

    :keyword inference_data: Contains the list of patients, and configuration data. Required.
    :paramtype inference_data: ~azure.healthinsights.radiologyinsights.models.RadiologyInsightsData
    :keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
     Default value is "application/json".
    :paramtype content_type: str
    :keyword custom_model_ids: Models to be used for inference. If this is not specified, the model
     will use the default model for inference. Default value is None.
    :paramtype custom_model_ids: list[str]
    :return: An instance of LROPoller that returns RadiologyInsightsCustomInferenceResult. The
     RadiologyInsightsCustomInferenceResult is compatible with MutableMapping
    :rtype:
     ~azure.core.polling.LROPoller[~azure.healthinsights.radiologyinsights.models.RadiologyInsightsCustomInferenceResult]
    :raises ~azure.core.exceptions.HttpResponseError:
    """

# Overload: positional JSON-mapping body form.
@overload
def begin_custom_inference(
    self, body: JSON, *, content_type: str = "application/json", **kwargs: Any
) -> LROPoller[_models.RadiologyInsightsCustomInferenceResult]:
    """Infer radiology insights using a custom model.

    :param body: Required.
    :type body: JSON
    :keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
     Default value is "application/json".
    :paramtype content_type: str
    :return: An instance of LROPoller that returns RadiologyInsightsCustomInferenceResult. The
     RadiologyInsightsCustomInferenceResult is compatible with MutableMapping
    :rtype:
     ~azure.core.polling.LROPoller[~azure.healthinsights.radiologyinsights.models.RadiologyInsightsCustomInferenceResult]
    :raises ~azure.core.exceptions.HttpResponseError:
    """

# Overload: positional binary/stream body form (sent as-is).
@overload
def begin_custom_inference(
    self, body: IO[bytes], *, content_type: str = "application/json", **kwargs: Any
) -> LROPoller[_models.RadiologyInsightsCustomInferenceResult]:
    """Infer radiology insights using a custom model.

    :param body: Required.
    :type body: IO[bytes]
    :keyword content_type: Body Parameter content-type. Content type parameter for binary body.
     Default value is "application/json".
    :paramtype content_type: str
    :return: An instance of LROPoller that returns RadiologyInsightsCustomInferenceResult. The
     RadiologyInsightsCustomInferenceResult is compatible with MutableMapping
    :rtype:
     ~azure.core.polling.LROPoller[~azure.healthinsights.radiologyinsights.models.RadiologyInsightsCustomInferenceResult]
    :raises ~azure.core.exceptions.HttpResponseError:
    """

@distributed_trace
def begin_custom_inference(
    self,
    body: Union[JSON, IO[bytes]] = _Unset,
    *,
    inference_data: _models.RadiologyInsightsData = _Unset,
    custom_model_ids: Optional[List[str]] = None,
    **kwargs: Any,
) -> LROPoller[_models.RadiologyInsightsCustomInferenceResult]:
    """Infer radiology insights using a custom model.

    :param body: Is either a JSON type or a IO[bytes] type. Required.
    :type body: JSON or IO[bytes]
    :keyword inference_data: Contains the list of patients, and configuration data. Required.
    :paramtype inference_data: ~azure.healthinsights.radiologyinsights.models.RadiologyInsightsData
    :keyword custom_model_ids: Models to be used for inference. If this is not specified, the model
     will use the default model for inference. Default value is None.
    :paramtype custom_model_ids: list[str]
    :return: An instance of LROPoller that returns RadiologyInsightsCustomInferenceResult. The
     RadiologyInsightsCustomInferenceResult is compatible with MutableMapping
    :rtype:
     ~azure.core.polling.LROPoller[~azure.healthinsights.radiologyinsights.models.RadiologyInsightsCustomInferenceResult]
    :raises ~azure.core.exceptions.HttpResponseError:
    """
    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = kwargs.pop("params", {}) or {}

    content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
    cls: ClsType[_models.RadiologyInsightsCustomInferenceResult] = kwargs.pop("cls", None)
    # polling: True -> LROBasePolling, False -> NoPolling, or a user-supplied PollingMethod.
    polling: Union[bool, PollingMethod] = kwargs.pop("polling", True)
    lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
    cont_token: Optional[str] = kwargs.pop("continuation_token", None)
    if cont_token is None:
        # Fresh operation: issue the initial POST (cls hook captures the raw response).
        raw_result = self._begin_custom_inference_initial(
            body=body,
            inference_data=inference_data,
            custom_model_ids=custom_model_ids,
            content_type=content_type,
            cls=lambda x, y, z: x,
            headers=_headers,
            params=_params,
            **kwargs,
        )
        # Drain the streamed initial response before handing it to the poller.
        raw_result.http_response.read()  # type: ignore
        kwargs.pop("error_map", None)

    def get_long_running_output(pipeline_response):
        # Deserialization callback: turn the final status-monitor response into the result model.
        response_headers = {}
        response = pipeline_response.http_response
        response_headers["Operation-Location"] = self._deserialize(
            "str", response.headers.get("Operation-Location")
        )

        # The final payload wraps the result under a "result" key.
        deserialized = _deserialize(
            _models.RadiologyInsightsCustomInferenceResult, response.json().get("result", {})
        )
        if cls:
            return cls(pipeline_response, deserialized, response_headers)  # type: ignore
        return deserialized

    path_format_arguments = {
        "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
    }

    if polling is True:
        polling_method: PollingMethod = cast(
            PollingMethod, LROBasePolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
        )
    elif polling is False:
        polling_method = cast(PollingMethod, NoPolling())
    else:
        polling_method = polling
    if cont_token:
        # Rehydrate a poller for an operation started elsewhere; no initial call is made.
        return LROPoller[_models.RadiologyInsightsCustomInferenceResult].from_continuation_token(
            polling_method=polling_method,
            continuation_token=cont_token,
            client=self._client,
            deserialization_callback=get_long_running_output,
        )
    return LROPoller[_models.RadiologyInsightsCustomInferenceResult](
        self._client, raw_result, get_long_running_output, polling_method  # type: ignore
    )
Loading
Loading