Commit 7d6f475

Merge branch 'develop' into jobs_tags
2 parents: 03cef2b + a34675e

21 files changed (+957, -121 lines)

.github/workflows/run-unittests-default_setup.yml

Lines changed: 5 additions & 4 deletions
@@ -4,12 +4,14 @@ on:
   pull_request:
     branches:
       - main
-      - 'release/**'
+      - "release/**"
       - develop
     paths:
-      - 'ads/**'
+      - "ads/**"
       - setup.py
-      - '**requirements.txt'
+      - "**requirements.txt"
+      - .github/workflows/run-unittests.yml
+      - .github/workflows/run-unittests-default_setup.yml

 # Cancel in progress workflows on pull_requests.
 # https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value
@@ -74,7 +76,6 @@ jobs:
         timeout-minutes: 15
         shell: bash
         env:
-          NB_SESSION_COMPARTMENT_OCID: ocid1.compartment.oc1.<unique_ocid>
           NoDependency: True
         run: |
           set -x # print commands that are executed

.github/workflows/run-unittests.yml

Lines changed: 16 additions & 15 deletions
@@ -4,12 +4,14 @@ on:
   pull_request:
     branches:
       - main
-      - 'release/**'
+      - "release/**"
       - develop
     paths:
-      - 'ads/**'
+      - "ads/**"
       - setup.py
-      - '**requirements.txt'
+      - "**requirements.txt"
+      - .github/workflows/run-unittests.yml
+      - .github/workflows/run-unittests-default_setup.yml

 # Cancel in progress workflows on pull_requests.
 # https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value
@@ -85,22 +87,21 @@ jobs:
         shell: bash
         run: |
           set -x # print commands that are executed
-
+
           sudo apt-get install libkrb5-dev graphviz
           $CONDA/bin/conda init
           source /home/runner/.bashrc
-
+
           pip install -r dev-requirements.txt

       - name: "Run unitary tests folder with maximum ADS dependencies"
         timeout-minutes: 60
         shell: bash
         env:
-          NB_SESSION_COMPARTMENT_OCID: ocid1.compartment.oc1.<unique_ocid>
           CONDA_PREFIX: /usr/share/miniconda
         run: |
           set -x # print commands that are executed
-
+
           # Setup project and tests folder for cov reports to not be overwritten by another parallel step
           if [[ ! -z "${{ matrix.cov-reports }}" ]]; then
             mkdir -p cov-${{ matrix.name }}
@@ -109,7 +110,7 @@ jobs:
             ln -s ../ads ads
             ln -s ../.coveragerc .coveragerc
           fi
-
+
           # Run tests
           python -m pytest -v -p no:warnings --durations=5 \
             -n auto --dist loadfile ${{ matrix.cov-reports }} \
@@ -145,18 +146,18 @@ jobs:
       - name: "Calculate overall coverage"
         run: |
           set -x # print commands that are executed
-
+
           # Prepare default cov body text
           COV_BODY_INTRO="📌 Overall coverage:\n\n"
           echo COV_BODY="$COV_BODY_INTRO No success to gather report. 😿" >> $GITHUB_ENV
-
+
           # Combine coverage files
           pip install coverage
           coverage combine cov-reports-unitary/.coverage cov-reports-model/.coverage
-
+
           # Make html report
           coverage html
-
+
           # Calculate overall coverage and update body message
           COV=$(grep -E 'pc_cov' htmlcov/index.html | cut -d'>' -f 2 | cut -d'%' -f 1)
           if [[ ! -z $COV ]]; then
@@ -168,18 +169,18 @@ jobs:
         if: always()
         run: |
           set -x # print commands that are executed
-
+
           # Prepare default diff body text
           DIFF_BODY_INTRO="📌 Cov diff with **${{ env.COMPARE_BRANCH }}**:\n\n"
           echo DIFF_BODY="$DIFF_BODY_INTRO No success to gather report. 😿" >> $GITHUB_ENV
-
+
           # Prepare file paths to coverage xml files
           # Filenames taken from job.test last step with name - "Save coverage files"
           FILE1="cov-reports-unitary/coverage.xml"; [[ ! -f $FILE1 ]] && FILE1=""
           FILE2="cov-reports-model/coverage.xml"; [[ ! -f $FILE2 ]] && FILE2=""
           echo "FILE1=$FILE1" >> $GITHUB_ENV
           echo "FILE2=$FILE2" >> $GITHUB_ENV
-
+
           # Calculate coverage diff and update body message
           pip install diff_cover
           diff-cover $FILE1 $FILE2 --compare-branch=origin/${{ env.COMPARE_BRANCH }}

ads/model/deployment/model_deployment.py

Lines changed: 2 additions & 2 deletions
@@ -863,9 +863,9 @@ def predict(
             and `json_input` required to be json serializable. If `auto_serialize_data` set
             to True, data will be serialized before sending to model deployment endpoint.
         model_name: str
-            Defaults to None. When the `Inference_server="triton"`, the name of the model to invoke.
+            Defaults to None. When the `inference_server="triton"`, the name of the model to invoke.
         model_version: str
-            Defaults to None. When the `Inference_server="triton"`, the version of the model to invoke.
+            Defaults to None. When the `inference_server="triton"`, the version of the model to invoke.
         kwargs:
             content_type: str
                 Used to indicate the media type of the resource.
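
For context, a minimal usage sketch of the corrected parameters. It assumes an existing Triton-backed model deployment; the OCID and payload are placeholders, not values from this commit.

from ads.model.deployment import ModelDeployment

# Hypothetical deployment OCID and input payload.
deployment = ModelDeployment.from_id(
    "ocid1.datasciencemodeldeployment.oc1..<unique_ocid>"
)
response = deployment.predict(
    json_input={"inputs": [[1.0, 2.0, 3.0]]},  # must be JSON serializable
    model_name="my_model",   # consulted when inference_server="triton"
    model_version="1",       # consulted when inference_server="triton"
)
print(response)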

ads/opctl/backend/ads_model_deployment.py

Lines changed: 20 additions & 2 deletions
@@ -4,11 +4,16 @@
 # Copyright (c) 2022, 2023 Oracle and/or its affiliates.
 # Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl/

+import json
 from typing import Dict
-from ads.common.auth import create_signer, AuthContext
+
+from oci.data_science.models import ModelDeployment as OCIModelDeployment
+
+import ads
+from ads.common.auth import AuthContext, create_signer
 from ads.common.oci_client import OCIClientFactory
-from ads.opctl.backend.base import Backend
 from ads.model.deployment import ModelDeployment
+from ads.opctl.backend.base import Backend


 class ModelDeploymentBackend(Backend):
@@ -117,3 +122,16 @@ def watch(self) -> None:
         model_deployment.watch(
             log_type=log_type, interval=interval, log_filter=log_filter
         )
+
+    def predict(self) -> None:
+        ocid = self.config["execution"].get("ocid")
+        data = self.config["execution"].get("payload")
+        model_name = self.config["execution"].get("model_name")
+        model_version = self.config["execution"].get("model_version")
+        with AuthContext(auth=self.auth_type, profile=self.profile):
+            model_deployment = ModelDeployment.from_id(ocid)
+            try:
+                data = json.loads(data)
+            except:
+                pass
+            print(model_deployment.predict(data=data, model_name=model_name, model_version=model_version))
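
A rough usage sketch of the new backend method, assuming the backend is constructed from an opctl config dict as in `Backend.__init__`; every value below is a hypothetical placeholder.

import json

from ads.opctl.backend.ads_model_deployment import ModelDeploymentBackend

# Hypothetical execution config; the keys mirror what predict() reads above.
config = {
    "execution": {
        "auth": "api_key",              # stored as self.auth_type
        "oci_profile": "DEFAULT",       # stored as self.profile
        "oci_config": "~/.oci/config",  # stored as self.oci_config
        "ocid": "ocid1.datasciencemodeldeployment.oc1..<unique_ocid>",
        "payload": json.dumps({"inputs": [[1.0, 2.0, 3.0]]}),  # JSON string; predict() json.loads it
        "model_name": "my_model",       # only needed when the deployment runs Triton
        "model_version": "1",
    }
}

ModelDeploymentBackend(config).predict()  # prints the deployment's prediction response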

ads/opctl/backend/base.py

Lines changed: 15 additions & 5 deletions
@@ -7,19 +7,19 @@
 from abc import abstractmethod
 from typing import Dict

-from ads.common.auth import get_signer
+from ads.common.auth import create_signer
+


 class Backend:
     """Interface for backend"""

     def __init__(self, config: Dict) -> None:
         self.config = config
-        self.oci_auth = get_signer(
-            config["execution"].get("oci_config", None),
-            config["execution"].get("oci_profile", None),
-        )
+        self.auth_type = config["execution"].get("auth")
         self.profile = config["execution"].get("oci_profile", None)
+        self.oci_config = config["execution"].get("oci_config", None)
+

     @abstractmethod
     def run(self) -> Dict:
@@ -91,3 +91,13 @@ def run_diagnostics(self):
         """
         Implement Diagnostics check appropriate for the backend
         """
+
+    def predict(self) -> None:
+        """
+        Run model predict.
+
+        Returns
+        -------
+        None
+        """
+        raise NotImplementedError("`predict` has not been implemented yet.")
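
A minimal sketch of the contract this stub establishes; the subclass and config values below are hypothetical, not part of the commit. A backend that does not override predict() raises NotImplementedError, while a concrete backend overrides it and reads what it needs from the stored execution config.

from typing import Dict

from ads.opctl.backend.base import Backend


class EchoBackend(Backend):
    """Hypothetical backend used only to illustrate the predict() hook."""

    def run(self) -> Dict:
        return {}

    def predict(self) -> None:
        # self.config, self.auth_type, self.profile and self.oci_config
        # are populated by Backend.__init__ from config["execution"].
        print(self.config["execution"].get("payload"))


EchoBackend({"execution": {"payload": "hello"}}).predict()  # prints "hello"

try:
    Backend({"execution": {}}).predict()  # base class default
except NotImplementedError as err:
    print(err)  # `predict` has not been implemented yet.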
