Update azure-healthinsights-radiologyinsights SDK from local TypeSpec #41951

Open · wants to merge 1 commit into main
@@ -83,6 +83,8 @@
"azure.healthinsights.radiologyinsights.models.ScoringAndAssessmentCategoryType": "AzureHealthInsights.ScoringAndAssessmentCategoryType",
"azure.healthinsights.radiologyinsights.models.JobStatus": "AzureHealthInsights.JobStatus",
"azure.healthinsights.radiologyinsights.RadiologyInsightsClient.begin_infer_radiology_insights": "ClientForAzureHealthInsights.RadiologyInsightsClient.inferRadiologyInsights",
"azure.healthinsights.radiologyinsights.aio.RadiologyInsightsClient.begin_infer_radiology_insights": "ClientForAzureHealthInsights.RadiologyInsightsClient.inferRadiologyInsights"
"azure.healthinsights.radiologyinsights.aio.RadiologyInsightsClient.begin_infer_radiology_insights": "ClientForAzureHealthInsights.RadiologyInsightsClient.inferRadiologyInsights",
"azure.healthinsights.radiologyinsights.RadiologyInsightsClient.begin_custom_inference": "ClientForAzureHealthInsights.RadiologyInsightsClient.customInference",
"azure.healthinsights.radiologyinsights.aio.RadiologyInsightsClient.begin_custom_inference": "ClientForAzureHealthInsights.RadiologyInsightsClient.customInference"
}
}
@@ -16,14 +16,14 @@
from azure.core.rest import HttpRequest, HttpResponse

from ._configuration import RadiologyInsightsClientConfiguration
from ._operations import RadiologyInsightsClientOperationsMixin
from ._operations._operations import _RadiologyInsightsClientOperationsMixin
from ._utils.serialization import Deserializer, Serializer

if TYPE_CHECKING:
from azure.core.credentials import TokenCredential


class RadiologyInsightsClient(RadiologyInsightsClientOperationsMixin):
class RadiologyInsightsClient(_RadiologyInsightsClientOperationsMixin):
"""RadiologyInsightsClient.

:param endpoint: Supported Cognitive Services endpoints (protocol and hostname, for example:
@@ -12,14 +12,11 @@
if TYPE_CHECKING:
from ._patch import * # pylint: disable=unused-wildcard-import

from ._operations import RadiologyInsightsClientOperationsMixin # type: ignore

from ._patch import __all__ as _patch_all
from ._patch import *
from ._patch import patch_sdk as _patch_sdk

__all__ = [
"RadiologyInsightsClientOperationsMixin",
]
__all__ = []
__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore
_patch_sdk()
@@ -5,7 +5,7 @@
# Code generated by Microsoft (R) Python Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from collections.abc import MutableMapping
from collections.abc import MutableMapping # pylint: disable=import-error
from io import IOBase
import json
from typing import Any, Callable, Dict, IO, Iterator, List, Optional, TypeVar, Union, cast, overload
@@ -35,6 +35,7 @@
from .._utils.utils import ClientMixinABC

JSON = MutableMapping[str, Any]
_Unset: Any = object()
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]

@@ -73,7 +74,29 @@ def build_radiology_insights_infer_radiology_insights_request( # pylint: disable=name-too-long
return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs)


class RadiologyInsightsClientOperationsMixin(
def build_radiology_insights_custom_inference_request(**kwargs: Any) -> HttpRequest: # pylint: disable=name-too-long
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})

content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-10-01"))
accept = _headers.pop("Accept", "application/json")

# Construct URL
_url = "/radiology-insights/custom"

# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")

# Construct headers
if content_type is not None:
_headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")

return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs)


class _RadiologyInsightsClientOperationsMixin(
ClientMixinABC[PipelineClient[HttpRequest, HttpResponse], RadiologyInsightsClientConfiguration]
):

@@ -321,3 +344,217 @@ def get_long_running_output(pipeline_response):
return LROPoller[_models.RadiologyInsightsJob](
self._client, raw_result, get_long_running_output, polling_method # type: ignore
)

def _custom_inference_initial(
self,
body: Union[JSON, IO[bytes]] = _Unset,
*,
inference_data: _models.RadiologyInsightsData = _Unset,
model_id: Optional[List[str]] = None,
**kwargs: Any
) -> Iterator[bytes]:
error_map: MutableMapping = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})

_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = kwargs.pop("params", {}) or {}

content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None)

if body is _Unset:
if inference_data is _Unset:
raise TypeError("missing required argument: inference_data")
body = {"inferenceData": inference_data, "modelId": model_id}
body = {k: v for k, v in body.items() if v is not None}
content_type = content_type or "application/json"
_content = None
if isinstance(body, (IOBase, bytes)):
_content = body
else:
_content = json.dumps(body, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore

_request = build_radiology_insights_custom_inference_request(
content_type=content_type,
api_version=self._config.api_version,
content=_content,
headers=_headers,
params=_params,
)
path_format_arguments = {
"endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
}
_request.url = self._client.format_url(_request.url, **path_format_arguments)

_stream = True
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
_request, stream=_stream, **kwargs
)

response = pipeline_response.http_response

if response.status_code not in [202]:
try:
response.read() # Load the body in memory and close the socket
except (StreamConsumedError, StreamClosedError):
pass
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response)

response_headers = {}
response_headers["Operation-Location"] = self._deserialize("str", response.headers.get("Operation-Location"))

deserialized = response.iter_bytes()

if cls:
return cls(pipeline_response, deserialized, response_headers) # type: ignore

return deserialized # type: ignore

@overload
def begin_custom_inference(
self,
*,
inference_data: _models.RadiologyInsightsData,
content_type: str = "application/json",
model_id: Optional[List[str]] = None,
**kwargs: Any
) -> LROPoller[_models.RadiologyInsightsInferenceResult]:
"""Infer radiology insights using a custom model.

:keyword inference_data: Contains the list of patients, and configuration data. Required.
:paramtype inference_data: ~azure.healthinsights.radiologyinsights.models.RadiologyInsightsData
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
:keyword model_id: Models to be used for inference. If this is not specified, the model will
use the default model for inference. Default value is None.
:paramtype model_id: list[str]
:return: An instance of LROPoller that returns RadiologyInsightsInferenceResult. The
RadiologyInsightsInferenceResult is compatible with MutableMapping
:rtype:
~azure.core.polling.LROPoller[~azure.healthinsights.radiologyinsights.models.RadiologyInsightsInferenceResult]
:raises ~azure.core.exceptions.HttpResponseError:
"""

@overload
def begin_custom_inference(
self, body: JSON, *, content_type: str = "application/json", **kwargs: Any
) -> LROPoller[_models.RadiologyInsightsInferenceResult]:
"""Infer radiology insights using a custom model.

:param body: Required.
:type body: JSON
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
:return: An instance of LROPoller that returns RadiologyInsightsInferenceResult. The
RadiologyInsightsInferenceResult is compatible with MutableMapping
:rtype:
~azure.core.polling.LROPoller[~azure.healthinsights.radiologyinsights.models.RadiologyInsightsInferenceResult]
:raises ~azure.core.exceptions.HttpResponseError:
"""

@overload
def begin_custom_inference(
self, body: IO[bytes], *, content_type: str = "application/json", **kwargs: Any
) -> LROPoller[_models.RadiologyInsightsInferenceResult]:
"""Infer radiology insights using a custom model.

:param body: Required.
:type body: IO[bytes]
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
:return: An instance of LROPoller that returns RadiologyInsightsInferenceResult. The
RadiologyInsightsInferenceResult is compatible with MutableMapping
:rtype:
~azure.core.polling.LROPoller[~azure.healthinsights.radiologyinsights.models.RadiologyInsightsInferenceResult]
:raises ~azure.core.exceptions.HttpResponseError:
"""

@distributed_trace
def begin_custom_inference(
self,
body: Union[JSON, IO[bytes]] = _Unset,
*,
inference_data: _models.RadiologyInsightsData = _Unset,
model_id: Optional[List[str]] = None,
**kwargs: Any
) -> LROPoller[_models.RadiologyInsightsInferenceResult]:
"""Infer radiology insights using a custom model.

:param body: Is either a JSON type or a IO[bytes] type. Required.
:type body: JSON or IO[bytes]
:keyword inference_data: Contains the list of patients, and configuration data. Required.
:paramtype inference_data: ~azure.healthinsights.radiologyinsights.models.RadiologyInsightsData
:keyword model_id: Models to be used for inference. If this is not specified, the model will
use the default model for inference. Default value is None.
:paramtype model_id: list[str]
:return: An instance of LROPoller that returns RadiologyInsightsInferenceResult. The
RadiologyInsightsInferenceResult is compatible with MutableMapping
:rtype:
~azure.core.polling.LROPoller[~azure.healthinsights.radiologyinsights.models.RadiologyInsightsInferenceResult]
:raises ~azure.core.exceptions.HttpResponseError:
"""
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = kwargs.pop("params", {}) or {}

content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
cls: ClsType[_models.RadiologyInsightsInferenceResult] = kwargs.pop("cls", None)
polling: Union[bool, PollingMethod] = kwargs.pop("polling", True)
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
raw_result = self._custom_inference_initial(
body=body,
inference_data=inference_data,
model_id=model_id,
content_type=content_type,
cls=lambda x, y, z: x,
headers=_headers,
params=_params,
**kwargs
)
raw_result.http_response.read() # type: ignore
kwargs.pop("error_map", None)

def get_long_running_output(pipeline_response):
response_headers = {}
response = pipeline_response.http_response
response_headers["Operation-Location"] = self._deserialize(
"str", response.headers.get("Operation-Location")
)

deserialized = _deserialize(_models.RadiologyInsightsInferenceResult, response.json().get("result", {}))
if cls:
return cls(pipeline_response, deserialized, response_headers) # type: ignore
return deserialized

path_format_arguments = {
"endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
}

if polling is True:
polling_method: PollingMethod = cast(
PollingMethod, LROBasePolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
)
elif polling is False:
polling_method = cast(PollingMethod, NoPolling())
else:
polling_method = polling
if cont_token:
return LROPoller[_models.RadiologyInsightsInferenceResult].from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
return LROPoller[_models.RadiologyInsightsInferenceResult](
self._client, raw_result, get_long_running_output, polling_method # type: ignore
)
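
For orientation, here is a minimal usage sketch of the synchronous begin_custom_inference operation added in this hunk. It is illustrative only: the endpoint, credential, and payload values are placeholders and not part of this diff; only the {"inferenceData": ..., "modelId": ...} body shape, the POST to /radiology-insights/custom, and polling via the Operation-Location header come from the generated code above.

# Illustrative usage sketch for the new begin_custom_inference LRO.
# Endpoint, credential, and payload values are placeholders, not part of this PR.
from azure.identity import DefaultAzureCredential
from azure.healthinsights.radiologyinsights import RadiologyInsightsClient

client = RadiologyInsightsClient(
    endpoint="https://<your-resource>.cognitiveservices.azure.com",  # placeholder endpoint
    credential=DefaultAzureCredential(),
)

# The JSON overload mirrors what the mixin builds internally: the body
# {"inferenceData": ..., "modelId": ...} is POSTed to /radiology-insights/custom,
# and the poller follows the Operation-Location header until the job finishes.
poller = client.begin_custom_inference(
    body={
        "inferenceData": {"patients": []},   # RadiologyInsightsData payload; inner fields assumed for illustration
        "modelId": ["<custom-model-id>"],    # optional; omit to use the service's default model
    },
)
result = poller.result()  # deserialized as RadiologyInsightsInferenceResult
print(result)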
@@ -22,7 +22,7 @@
from datetime import datetime, date, time, timedelta, timezone
from json import JSONEncoder
import xml.etree.ElementTree as ET
from collections.abc import MutableMapping
from collections.abc import MutableMapping # pylint: disable=import-error
from typing_extensions import Self
import isodate
from azure.core.exceptions import DeserializationError
@@ -641,7 +641,7 @@ def __new__(cls, *args: typing.Any, **kwargs: typing.Any) -> Self:
cls._attr_to_rest_field: typing.Dict[str, _RestField] = dict(attr_to_rest_field.items())
cls._calculated.add(f"{cls.__module__}.{cls.__qualname__}")

return super().__new__(cls)
return super().__new__(cls) # pylint: disable=no-value-for-parameter

def __init_subclass__(cls, discriminator: typing.Optional[str] = None) -> None:
for base in cls.__bases__:
@@ -17,13 +17,13 @@

from .._utils.serialization import Deserializer, Serializer
from ._configuration import RadiologyInsightsClientConfiguration
from ._operations import RadiologyInsightsClientOperationsMixin
from ._operations._operations import _RadiologyInsightsClientOperationsMixin

if TYPE_CHECKING:
from azure.core.credentials_async import AsyncTokenCredential


class RadiologyInsightsClient(RadiologyInsightsClientOperationsMixin):
class RadiologyInsightsClient(_RadiologyInsightsClientOperationsMixin):
"""RadiologyInsightsClient.

:param endpoint: Supported Cognitive Services endpoints (protocol and hostname, for example:
@@ -12,14 +12,11 @@
if TYPE_CHECKING:
from ._patch import * # pylint: disable=unused-wildcard-import

from ._operations import RadiologyInsightsClientOperationsMixin # type: ignore

from ._patch import __all__ as _patch_all
from ._patch import *
from ._patch import patch_sdk as _patch_sdk

__all__ = [
"RadiologyInsightsClientOperationsMixin",
]
__all__ = []
__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore
_patch_sdk()
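
The asynchronous client exposes the same operation (see the aio mapping entries added earlier in this diff); below is a minimal async sketch under the same placeholder assumptions.

# Illustrative async sketch; endpoint, credential, and payload are placeholders, not part of this diff.
import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.healthinsights.radiologyinsights.aio import RadiologyInsightsClient


async def main() -> None:
    async with DefaultAzureCredential() as credential:
        async with RadiologyInsightsClient(
            endpoint="https://<your-resource>.cognitiveservices.azure.com",  # placeholder endpoint
            credential=credential,
        ) as client:
            poller = await client.begin_custom_inference(
                body={
                    "inferenceData": {"patients": []},   # payload fields assumed for illustration
                    "modelId": ["<custom-model-id>"],    # optional custom model selection
                },
            )
            result = await poller.result()
            print(result)


asyncio.run(main())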