Skip to content

Commit 83dc9fc

Browse files
committed
added test cases and PR review comments
1 parent ba43d62 commit 83dc9fc

File tree

4 files changed

+79
-114
lines changed

4 files changed

+79
-114
lines changed

ads/aqua/modeldeployment/deployment.py

Lines changed: 9 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -763,7 +763,6 @@ def _create_deployment(
763763
target=self.get_deployment_status,
764764
args=(
765765
deployment,
766-
deployment_id,
767766
deployment.dsc_model_deployment.workflow_req_id,
768767
model_type,
769768
model_name,
@@ -1269,8 +1268,7 @@ def list_shapes(self, **kwargs) -> List[ComputeShapeSummary]:
12691268

12701269
def get_deployment_status(
12711270
self,
1272-
deployment,
1273-
model_deployment_id: str,
1271+
deployment: ModelDeployment,
12741272
work_request_id: str,
12751273
model_type: str,
12761274
model_name: str,
@@ -1292,27 +1290,23 @@ def get_deployment_status(
12921290
AquaDeployment
12931291
An Aqua deployment instance.
12941292
"""
1295-
ocid = get_ocid_substring(model_deployment_id, key_len=8)
1296-
1293+
ocid = get_ocid_substring(deployment.id, key_len=8)
12971294
data_science_work_request: DataScienceWorkRequest = DataScienceWorkRequest(
12981295
work_request_id
12991296
)
1300-
13011297
try:
13021298
data_science_work_request.wait_work_request(
13031299
progress_bar_description="Creating model deployment",
13041300
max_wait_time=DEFAULT_WAIT_TIME,
13051301
poll_interval=DEFAULT_POLL_INTERVAL,
13061302
)
1307-
except Exception as e:
1303+
except Exception:
13081304
status = ""
1309-
predict_logs = deployment.tail_logs("predict")
1310-
access_logs = deployment.tail_logs("access")
1311-
if access_logs and len(access_logs) > 0:
1312-
status = access_logs[0]["message"]
1305+
logs = deployment.show_logs().sort_values(by="time", ascending=False)
1306+
1307+
if logs and len(logs) > 0:
1308+
status = logs.iloc[0]["message"]
13131309

1314-
if predict_logs and len(predict_logs) > 0:
1315-
status += predict_logs[0]["message"]
13161310
status = re.sub(r"[^a-zA-Z0-9]", " ", status)
13171311

13181312
if data_science_work_request._error_message:
@@ -1324,7 +1318,8 @@ def get_deployment_status(
13241318
telemetry_kwargs = {
13251319
"ocid": ocid,
13261320
"model_name": model_name,
1327-
"status": error_str + " " + status,
1321+
"work_request_error": error_str,
1322+
"status": status,
13281323
}
13291324

13301325
self.telemetry.record_event(
@@ -1333,7 +1328,6 @@ def get_deployment_status(
13331328
**telemetry_kwargs,
13341329
)
13351330
else:
1336-
status = +" " + str(e)
13371331
telemetry_kwargs = {
13381332
"ocid": ocid,
13391333
"model_name": model_name,

ads/common/oci_logging.py

Lines changed: 0 additions & 41 deletions
Original file line numberDiff line numberDiff line change
@@ -863,47 +863,6 @@ def tail(
863863
)
864864
self._print(sorted(tail_logs, key=lambda log: log["time"]))
865865

866-
def get_tail_logs(
867-
self,
868-
source: str = None,
869-
limit: int = LOG_RECORDS_LIMIT,
870-
time_start: datetime.datetime = None,
871-
log_filter: str = None,
872-
) -> List[Union[oci.loggingsearch.models.SearchResult, dict]]:
873-
"""Returns the most recent consolidated log records.
874-
875-
Parameters
876-
----------
877-
source : str, optional
878-
Expression or OCID to filter the "source" field of the OCI log record.
879-
Defaults to None.
880-
limit : int, optional.
881-
Maximum number of records to be returned.
882-
If limit is set to None, all logs from time_start to now will be returned.
883-
Defaults to 100.
884-
time_start : datetime.datetime, optional
885-
Starting time for the log query.
886-
Defaults to None.
887-
log_filter : str, optional
888-
Expression for filtering the logs. This will be the WHERE clause of the query.
889-
Defaults to None.
890-
891-
Returns
892-
-------
893-
list
894-
A list of oci.loggingsearch.models.SearchResult objects or log records sorted in descending order by time
895-
Each log record is a dictionary with the following keys: `annotation`, `id`, `time`,
896-
`message` and `datetime`.
897-
"""
898-
tail_logs = self._search_and_format(
899-
source=source,
900-
limit=limit,
901-
sort_order=SortOrder.DESC,
902-
time_start=time_start,
903-
log_filter=log_filter,
904-
)
905-
return sorted(tail_logs, key=lambda log: log["time"])
906-
907866
def head(
908867
self,
909868
source: str = None,

ads/model/deployment/model_deployment.py

Lines changed: 0 additions & 38 deletions
Original file line numberDiff line numberDiff line change
@@ -729,44 +729,6 @@ def update(
729729

730730
return self._update_from_oci_model(response)
731731

732-
def tail_logs(
733-
self, log_type: str = None, time_start: datetime = None, log_filter: str = None
734-
) -> List[Union[oci.loggingsearch.models.SearchResult, dict]]:
735-
"""Returns the most recent consolidated log records for the model deployment
736-
737-
Parameters
738-
----------
739-
source : str, optional
740-
Expression or OCID to filter the "source" field of the OCI log record.
741-
Defaults to None.
742-
limit : int, optional.
743-
Maximum number of records to be returned.
744-
If limit is set to None, all logs from time_start to now will be returned.
745-
Defaults to 100.
746-
time_start : datetime.datetime, optional
747-
Starting time for the log query.
748-
Defaults to None.
749-
log_filter : str, optional
750-
Expression for filtering the logs. This will be the WHERE clause of the query.
751-
Defaults to None.
752-
753-
Returns
754-
-------
755-
list
756-
A list of oci.loggingsearch.models.SearchResult objects or log records sorted in descending order by time
757-
Each log record is a dictionary with the following keys: `annotation`, `id`, `time`,
758-
`message` and `datetime`.
759-
"""
760-
try:
761-
logs = self.logs(log_type).get_tail_logs(
762-
source=self.model_deployment_id,
763-
time_start=time_start,
764-
log_filter=log_filter,
765-
)
766-
return logs
767-
except LogNotConfiguredError:
768-
return []
769-
770732
def watch(
771733
self,
772734
log_type: str = None,

tests/unitary/with_extras/aqua/test_deployment.py

Lines changed: 70 additions & 20 deletions
Original file line numberDiff line numberDiff line change
@@ -2368,34 +2368,84 @@ def test_validate_multimodel_deployment_feasibility_positive_single(
23682368
"test_data/deployment/aqua_summary_multi_model_single.json",
23692369
)
23702370

2371-
def test_get_deployment_status(self):
2371+
def test_get_deployment_status_success(self):
23722372
model_deployment = copy.deepcopy(TestDataset.model_deployment_object[0])
23732373
deployment_id = "fakeid.datasciencemodeldeployment.oc1.iad.xxx"
23742374
work_request_id = "fakeid.workrequest.oc1.iad.xxx"
23752375
model_type = "custom"
23762376
model_name = "model_name"
23772377

23782378
with patch(
2379-
"ads.model.service.oci_datascience_model_deployment.DataScienceWorkRequest.__init__"
2380-
) as mock_ds_work_request:
2381-
mock_ds_work_request.return_value = None
2382-
with patch(
2383-
"ads.model.service.oci_datascience_model_deployment.DataScienceWorkRequest.wait_work_request"
2384-
) as mock_wait:
2385-
self.app.get_deployment_status(
2386-
model_deployment,
2387-
deployment_id,
2388-
work_request_id,
2389-
model_type,
2390-
model_name,
2391-
)
2379+
"ads.model.service.oci_datascience_model_deployment.DataScienceWorkRequest.__init__",
2380+
return_value=None,
2381+
) as mock_ds_work_request, patch(
2382+
"ads.model.service.oci_datascience_model_deployment.DataScienceWorkRequest.wait_work_request"
2383+
) as mock_wait:
2384+
self.app.get_deployment_status(
2385+
oci.data_science.models.ModelDeploymentSummary(**model_deployment),
2386+
work_request_id,
2387+
model_type,
2388+
model_name,
2389+
)
23922390

2393-
mock_ds_work_request.assert_called_with(work_request_id)
2394-
mock_wait.assert_called_with(
2395-
progress_bar_description="Creating model deployment",
2396-
max_wait_time=DEFAULT_WAIT_TIME,
2397-
poll_interval=DEFAULT_POLL_INTERVAL,
2398-
)
2391+
mock_ds_work_request.assert_called_once_with(work_request_id)
2392+
mock_wait.assert_called_once_with(
2393+
progress_bar_description="Creating model deployment",
2394+
max_wait_time=DEFAULT_WAIT_TIME,
2395+
poll_interval=DEFAULT_POLL_INTERVAL,
2396+
)
2397+
2398+
def raise_exception(*args, **kwargs):
2399+
raise Exception("Work request failed")
2400+
2401+
def test_get_deployment_status_failed(self):
2402+
model_deployment = copy.deepcopy(TestDataset.model_deployment_object[0])
2403+
deployment_id = "fakeid.datasciencemodeldeployment.oc1.iad.xxx"
2404+
work_request_id = "fakeid.workrequest.oc1.iad.xxx"
2405+
model_type = "custom"
2406+
model_name = "model_name"
2407+
with patch(
2408+
"ads.telemetry.client.TelemetryClient.record_event"
2409+
) as mock_record_event, patch(
2410+
"ads.aqua.modeldeployment.deployment.DataScienceWorkRequest"
2411+
) as mock_ds_work_request_class, patch(
2412+
"ads.model.deployment.model_deployment.ModelDeployment.show_logs"
2413+
) as mock_show_log:
2414+
mock_ds_work_request_instance = MagicMock()
2415+
mock_ds_work_request_class.return_value = mock_ds_work_request_instance
2416+
2417+
mock_ds_work_request_instance._error_message = [
2418+
MagicMock(message="Some error occurred")
2419+
]
2420+
2421+
mock_ds_work_request_instance.wait_work_request.side_effect = (
2422+
self.raise_exception
2423+
)
2424+
2425+
logs_df = MagicMock()
2426+
logs_df.sort_values.return_value = logs_df
2427+
logs_df.empty = False
2428+
logs_df.iloc.__getitem__.return_value = {
2429+
"message": "Error: deployment failed!"
2430+
}
2431+
mock_show_log.return_value = logs_df
2432+
2433+
self.app.get_deployment_status(
2434+
ModelDeployment(),
2435+
work_request_id,
2436+
model_type,
2437+
model_name,
2438+
)
2439+
mock_record_event.assert_called_once()
2440+
args, kwargs = mock_record_event.call_args
2441+
self.assertEqual(kwargs["category"], f"aqua/{model_type}/deployment/status")
2442+
self.assertEqual(kwargs["action"], "FAILED")
2443+
self.assertIn("work_request_error", kwargs)
2444+
self.assertIn("status", kwargs)
2445+
self.assertIn("ocid", kwargs)
2446+
self.assertIn("model_name", kwargs)
2447+
2448+
mock_ds_work_request_class.assert_called_once_with(work_request_id)
23992449

24002450

24012451
class TestBaseModelSpec:

0 commit comments

Comments (0)