Commit 0c54a62

Merge remote-tracking branch 'origin/main' into feature/feature-store-int-fix
2 parents e62b61e + 34105b5 commit 0c54a62

Some content is hidden: large commits have some content hidden by default, so only a subset of the changed files is shown below.

47 files changed, +487 / -339 lines changed

.github/workflows/run-unittests-py38-cov-report.yml

Lines changed: 2 additions & 10 deletions
@@ -57,17 +57,9 @@ jobs:
       - uses: ./.github/workflows/set-dummy-conf
         name: "Test config setup"

-      - name: "Test env setup"
+      - uses: ./.github/workflows/test-env-setup
+        name: "Test env setup"
         timeout-minutes: 20
-        shell: bash
-        run: |
-          set -x # print commands that are executed
-
-          sudo apt-get install libkrb5-dev graphviz
-          $CONDA/bin/conda init
-          source /home/runner/.bashrc
-
-          pip install -r dev-requirements.txt

       - name: "Run unitary tests folder with maximum ADS dependencies"
         timeout-minutes: 60

.github/workflows/run-unittests-py39-py310.yml

Lines changed: 2 additions & 10 deletions
@@ -58,17 +58,9 @@ jobs:
       - uses: ./.github/workflows/set-dummy-conf
         name: "Test config setup"

-      - name: "Test env setup"
+      - uses: ./.github/workflows/test-env-setup
+        name: "Test env setup"
         timeout-minutes: 20
-        shell: bash
-        run: |
-          set -x # print commands that are executed
-
-          sudo apt-get install libkrb5-dev graphviz
-          $CONDA/bin/conda init
-          source /home/runner/.bashrc
-
-          pip install -r dev-requirements.txt

       - name: "Run unitary tests folder with maximum ADS dependencies"
         timeout-minutes: 60

Lines changed: 18 additions & 0 deletions
@@ -0,0 +1,18 @@
+# This composite action is to avoid duplicating code across test actions.
+# Ref: https://docs.github.com/en/actions/creating-actions/creating-a-composite-action
+
+name: "Test env setup"
+description: "Creates conda test env with optional dependencies to run all unit tests"
+runs:
+  using: composite
+  steps:
+    - shell: bash
+      run: |
+        set -x # print commands that are executed
+
+        sudo apt update
+        sudo apt-get install libkrb5-dev graphviz
+        $CONDA/bin/conda init
+        source /home/runner/.bashrc
+
+        pip install -r dev-requirements.txt

THIRD_PARTY_LICENSES.txt

Lines changed: 6 additions & 0 deletions
@@ -247,6 +247,12 @@ psutil
 * Source code: https://github.com/giampaolo/psutil
 * Project home: https://github.com/giampaolo/psutil

+py-cpuinfo
+* Copyright (c) 2014-2022 Matthew Brennan Jones <matthew.brennan.jones@gmail.com>
+* License: The MIT License (MIT)
+* Source code: https://github.com/workhorsy/py-cpuinfo
+* Project home: https://github.com/workhorsy/py-cpuinfo
+
 pyspark
 * Copyright 2014 and onwards The Apache Software Foundation.
 * License: Apache-2.0 LICENSE

ads/common/oci_logging.py

Lines changed: 13 additions & 19 deletions
@@ -533,7 +533,7 @@ def _search_and_format(
         time_end: datetime.datetime = None,
         limit: int = LOG_RECORDS_LIMIT,
         sort_by: str = "datetime",
-        sort_order: str = SortOrder.DESC,
+        sort_order: str = SortOrder.ASC,
         log_filter: str = None,
     ):
         """Returns the formatted log records.
@@ -550,7 +550,7 @@ def _search_and_format(
             Maximum number of records to be returned.
         sort_by: (str, optional). Defaults to "datetime"
             The field for sorting the logs.
-        sort_order: (str, optional). Defaults to "DESC".
+        sort_order: (str, optional). Defaults to "ASC".
             The sort order for the log records. Can be "ASC" or "DESC".
         log_filter : (str, optional). Defaults to None.
             Expression for filtering the logs.
@@ -574,8 +574,8 @@ def _search_and_format(
             sort_order=sort_order,
             log_filter=log_filter,
         )
-        logs = sorted((log.data for log in logs), key=lambda x: x.get("datetime"))
-        logs = [log.get("logContent", {}) for log in logs]
+
+        logs = [log.data.get("logContent", {}) for log in logs]
         return [
             {
                 "id": log.get("id"),
@@ -617,7 +617,7 @@ def tail(
         return self._search_and_format(
             source=source,
             limit=limit,
-            sort_order=SortOrder.DESC,
+            sort_order=SortOrder.ASC,
             time_start=time_start,
             log_filter=log_filter,
         )
@@ -808,7 +808,7 @@ def stream(
         logs = self._search_and_format(
             source=source,
             limit=None,
-            sort_order=SortOrder.DESC,
+            sort_order=SortOrder.ASC,
             time_start=time_start,
             log_filter=log_filter,
         )
@@ -858,7 +858,7 @@ def tail(
         self._search_and_format(
             source=source,
             limit=limit,
-            sort_order=SortOrder.DESC,
+            sort_order=SortOrder.ASC,
             time_start=time_start,
             log_filter=log_filter,
         )
@@ -927,7 +927,7 @@ def search(
         time_end: datetime.datetime = None,
         limit: int = None,
         sort_by: str = "datetime",
-        sort_order: str = SortOrder.DESC,
+        sort_order: str = SortOrder.ASC,
         log_filter: str = None,
     ) -> List[oci.loggingsearch.models.SearchResult]:
         """Searches raw logs.
@@ -951,7 +951,7 @@ def search(
             Defaults to "datetime"
         sort_order : str, optional.
             The sort order for the log records. Can be "ASC" or "DESC".
-            Defaults to "DESC".
+            Defaults to "ASC".
         log_filter : str, optional
             Expression for filtering the logs. This will be the WHERE clause of the query.
             Defaults to None.
@@ -979,7 +979,7 @@ def _search_and_format(
         time_end: datetime.datetime = None,
         limit: int = LOG_RECORDS_LIMIT,
         sort_by: str = "datetime",
-        sort_order: str = SortOrder.DESC,
+        sort_order: str = SortOrder.ASC,
         log_filter: str = None,
         need_format: bool = True,
     ) -> List[Union[oci.loggingsearch.models.SearchResult, dict]]:
@@ -1005,7 +1005,7 @@ def _search_and_format(
             Defaults to "datetime"
         sort_order : str, optional.
             The sort order for the log records. Can be "ASC" or "DESC".
-            Defaults to "DESC".
+            Defaults to "ASC".
         log_filter : str, optional
             Expression for filtering the logs. This will be the WHERE clause of the query.
             Defaults to None.
@@ -1039,12 +1039,6 @@ def _search_and_format(
             )
         )

-        # _collect_logs returns a list of either dict or oci.loggingsearch.models.SearchResult
-        # objects based on `need_format` parameter, so below there are two cases for log sorting.
-        if need_format:
-            batch_logs.sort(key=lambda x: x.get("datetime"))
-        else:
-            batch_logs.sort(key=lambda x: x.data.get("datetime"))
         if limit and len(batch_logs) > limit:
             batch_logs = batch_logs[:limit]
         return batch_logs
@@ -1057,7 +1051,7 @@ def _collect_logs(
         time_end: datetime.datetime = None,
         limit: int = LOG_RECORDS_LIMIT,
         sort_by: str = "datetime",
-        sort_order: str = SortOrder.DESC,
+        sort_order: str = SortOrder.ASC,
         log_filter: str = None,
         need_format: bool = True,
     ) -> List[Union[oci.loggingsearch.models.SearchResult, dict]]:
@@ -1085,7 +1079,7 @@ def _collect_logs(
             Defaults to "datetime"
         sort_order : str, optional.
             The sort order for the log records. Can be "ASC" or "DESC".
-            Defaults to "DESC".
+            Defaults to "ASC".
         log_filter : str, optional
             Expression for filtering the logs. This will be the WHERE clause of the query.
             Defaults to None.

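The hunks above flip the default sort_order from SortOrder.DESC to SortOrder.ASC and drop the client-side re-sort in _search_and_format and _collect_logs: records now come back from the Logging search service already in chronological order, so tail() and stream() can format them in a single pass. Below is a minimal usage sketch of the ascending default; it assumes the OCILog interface shown in this diff, and the OCIDs are placeholders.

import datetime

from ads.common.oci_logging import OCILog, SortOrder

# Placeholder OCIDs; substitute a real compartment, log group and log.
log = OCILog(
    compartment_id="ocid1.compartment.oc1..<unique_id>",
    log_group_id="ocid1.loggroup.oc1..<unique_id>",
    id="ocid1.log.oc1..<unique_id>",
)

# With the new default, results arrive oldest-first, so no extra sorting is needed.
records = log.search(
    time_start=datetime.datetime.now(datetime.timezone.utc) - datetime.timedelta(hours=1),
    limit=100,
    sort_order=SortOrder.ASC,
)
for record in records:
    print(record.data.get("logContent", {}).get("message"))
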
ads/common/utils.py

Lines changed: 1 addition & 1 deletion
@@ -71,7 +71,7 @@
 MAX_DISPLAY_VALUES = 10

 # par link of the index json file.
-PAR_LINK = "https://objectstorage.us-ashburn-1.oraclecloud.com/p/Ri7zFc_h91sxMdgnza9Qnqw3Ina8hf8wzDvEpAnUXMDOnUR1U1fpsaBUjUfgPgIq/n/ociodscdev/b/service-conda-packs/o/service_pack/index.json"
+PAR_LINK = "https://objectstorage.us-ashburn-1.oraclecloud.com/p/WyjtfVIG0uda-P3-2FmAfwaLlXYQZbvPZmfX1qg0-sbkwEQO6jpwabGr2hMDBmBp/n/ociodscdev/b/service-conda-packs/o/service_pack/index.json"

 random_state = 42
 test_size = 0.3

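PAR_LINK is a pre-authenticated request (PAR) URL, so the index of service conda packs can be downloaded without OCI credentials; the change above only rotates the PAR token, not the bucket or object path. A minimal sketch of fetching the index with the requests package follows; it is illustrative and not necessarily how ADS itself loads the file.

import requests

from ads.common.utils import PAR_LINK

# The PAR embeds its own authorization, so no signer or config is required.
response = requests.get(PAR_LINK, timeout=30)
response.raise_for_status()
index = response.json()
print(type(index))
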
ads/jobs/serializer.py

Lines changed: 10 additions & 4 deletions
@@ -12,11 +12,9 @@
 import yaml
 from ads.common.auth import default_signer

+# Special type to represent the current enclosed class.
+# This type is used by factory class method or when a method returns ``self``.
 Self = TypeVar("Self", bound="Serializable")
-"""Special type to represent the current enclosed class.
-
-This type is used by factory class method or when a method returns ``self``.
-"""


 class Serializable(ABC):
@@ -72,6 +70,14 @@ def _write_to_file(s: str, uri: str, **kwargs) -> None:
                 "if you wish to overwrite."
             )

+        # Add default signer if the uri is an object storage uri, and
+        # the user does not specify config or signer.
+        if (
+            uri.startswith("oci://")
+            and "config" not in kwargs
+            and "signer" not in kwargs
+        ):
+            kwargs.update(default_signer())
         with fsspec.open(uri, "w", **kwargs) as f:
             f.write(s)

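The second hunk makes writes through Serializable._write_to_file work against Object Storage out of the box: when the target URI uses the oci:// scheme and the caller passed neither config nor signer, the default ADS signer is merged into the fsspec storage options. A standalone sketch of that guard follows, using the hypothetical helper name write_text; default_signer() returns a dict of auth keyword arguments.

import fsspec

from ads.common.auth import default_signer


def write_text(s: str, uri: str, **kwargs) -> None:
    # Mirror of the guard added above: inject the default ADS signer only for
    # Object Storage URIs and only when the caller did not pass explicit auth.
    if (
        uri.startswith("oci://")
        and "config" not in kwargs
        and "signer" not in kwargs
    ):
        kwargs.update(default_signer())
    with fsspec.open(uri, "w", **kwargs) as f:
        f.write(s)


# Local paths are unaffected; only oci://bucket@namespace/... picks up the signer.
write_text("key: value\n", "/tmp/example.yaml")
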
ads/model/deployment/model_deployment.py

Lines changed: 9 additions & 12 deletions
@@ -85,7 +85,7 @@ class LogNotConfiguredError(Exception): # pragma: no cover
     pass


-class ModelDeploymentFailedError(Exception):  # pragma: no cover
+class ModelDeploymentPredictError(Exception):  # pragma: no cover
     pass


@@ -607,11 +607,6 @@ def deploy(
         -------
         ModelDeployment
             The instance of ModelDeployment.
-
-        Raises
-        ------
-        ModelDeploymentFailedError
-            If model deployment fails to deploy
         """
         create_model_deployment_details = (
             self._build_model_deployment_details()
@@ -626,11 +621,6 @@
             poll_interval=poll_interval,
         )

-        if response.lifecycle_state == State.FAILED.name:
-            raise ModelDeploymentFailedError(
-                f"Model deployment {response.id} failed to deploy: {response.lifecycle_details}"
-            )
-
         return self._update_from_oci_model(response)

     def delete(
@@ -662,6 +652,7 @@ def delete(
             max_wait_time=max_wait_time,
             poll_interval=poll_interval,
         )
+
         return self._update_from_oci_model(response)

     def update(
@@ -890,6 +881,12 @@ def predict(
             Prediction results.

         """
+        current_state = self.sync().lifecycle_state
+        if current_state != State.ACTIVE.name:
+            raise ModelDeploymentPredictError(
+                "This model deployment is not in active state, you will not be able to use predict end point. "
+                f"Current model deployment state: {current_state} "
+            )
         endpoint = f"{self.url}/predict"
         signer = authutil.default_signer()["signer"]
         header = {
@@ -953,7 +950,7 @@ def predict(
         except oci.exceptions.ServiceError as ex:
             # When bandwidth exceeds the allocated value, TooManyRequests error (429) will be raised by oci backend.
             if ex.status == 429:
-                bandwidth_mbps = self.infrastructure.bandwidth_mbps or MODEL_DEPLOYMENT_BANDWIDTH_MBPS
+                bandwidth_mbps = self.infrastructure.bandwidth_mbps or DEFAULT_BANDWIDTH_MBPS
                 utils.get_logger().warning(
                     f"Load balancer bandwidth exceeds the allocated {bandwidth_mbps} Mbps."
                     "To estimate the actual bandwidth, use formula: (payload size in KB) * (estimated requests per second) * 8 / 1024."

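With this change, deploy() no longer raises when the deployment ends up in a FAILED lifecycle state; instead, predict() syncs the deployment first and raises the renamed ModelDeploymentPredictError whenever the state is not ACTIVE. A minimal usage sketch of the new behavior follows; the from_id lookup, the json_input argument name, and the OCID are assumptions or placeholders rather than part of this diff.

from ads.model.deployment import ModelDeployment
from ads.model.deployment.model_deployment import ModelDeploymentPredictError

# Placeholder OCID for an existing model deployment.
deployment = ModelDeployment.from_id(
    "ocid1.datasciencemodeldeployment.oc1..<unique_id>"
)

try:
    # predict() now checks self.sync().lifecycle_state and refuses to call the
    # endpoint unless the deployment is ACTIVE.
    print(deployment.predict(json_input={"input": [1, 2, 3]}))
except ModelDeploymentPredictError as err:
    print(f"Deployment not ready for inference: {err}")
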
ads/model/generic_model.py

Lines changed: 4 additions & 1 deletion
@@ -1575,7 +1575,10 @@ def from_model_deployment(

         current_state = model_deployment.state.name.upper()
         if current_state != ModelDeploymentState.ACTIVE.name:
-            raise NotActiveDeploymentError(current_state)
+            logger.warning(
+                "This model deployment is not in active state, you will not be able to use predict end point. "
+                f"Current model deployment state: `{current_state}`"
+            )

         model = cls.from_model_catalog(
             model_id=model_deployment.properties.model_id,

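The matching change in GenericModel.from_model_deployment downgrades the inactive-deployment check from a NotActiveDeploymentError to a warning, so a model can still be reconstructed from a stopped or failed deployment; only the predict path is gated, as shown in the model_deployment.py hunks above. A hedged sketch follows; the keyword arguments are assumptions about the public GenericModel API, and the OCID and directory are placeholders.

from ads.model.generic_model import GenericModel

model = GenericModel.from_model_deployment(
    model_deployment_id="ocid1.datasciencemodeldeployment.oc1..<unique_id>",
    artifact_dir="./downloaded_artifact",
    force_overwrite=True,
)
# If the deployment is not ACTIVE, the call above now only logs a warning;
# invoking the predict endpoint later is what would fail.
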
ads/model/runtime/utils.py

Lines changed: 2 additions & 2 deletions
@@ -111,7 +111,7 @@ def _get_index_json_through_bucket(
     bucketname: str
         The Object Storage bucketname.
     auth: (Dict, optional). Defaults to None.
-        The default authetication is set using `ads.set_auth` API. If you need to override the
+        The default authentication is set using `ads.set_auth` API. If you need to override the
         default, use the `ads.common.auth.api_keys` or `ads.common.auth.resource_principal` to create appropriate
         authentication signer and kwargs required to instantiate IdentityClient object.
@@ -147,7 +147,7 @@ def get_service_packs(
     bucketname: str
         bucketname of the service pack.
     auth: (Dict, optional). Defaults to None.
-        The default authetication is set using `ads.set_auth` API. If you need to override the
+        The default authentication is set using `ads.set_auth` API. If you need to override the
         default, use the `ads.common.auth.api_keys` or `ads.common.auth.resource_principal` to create appropriate
         authentication signer and kwargs required to instantiate IdentityClient object.

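Both docstrings describe the same override pattern: either set a library-wide default with ads.set_auth, or build an explicit auth dict and pass it through the auth parameter of functions such as get_service_packs. A minimal sketch of the two options, assuming the default API key location and profile:

import ads
from ads.common import auth as authutil

# Option 1: set the process-wide default used when no auth argument is passed.
ads.set_auth("resource_principal")

# Option 2: build an explicit config/signer dict and pass it as `auth=`.
auth = authutil.api_keys()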