
Commit 0ebbef3

Merge branch 'feature/forecast_v2'
2 parents: 87c31bd + f17b49b

Note: large commits have some content hidden by default, so not every changed file is shown below.

76 files changed: +6232 / -2403 lines

.github/workflows/run-operators-unit-tests.yml

Lines changed: 2 additions & 2 deletions

@@ -26,7 +26,7 @@ jobs:
   test:
     name: python ${{ matrix.python-version }}
     runs-on: ubuntu-latest
-    timeout-minutes: 60
+    timeout-minutes: 180

     strategy:
       fail-fast: false
@@ -49,7 +49,7 @@ jobs:
         name: "Test config setup"

       - name: "Run Operators Tests"
-        timeout-minutes: 60
+        timeout-minutes: 180
         shell: bash
         run: |
           set -x # print commands that are executed

ads/common/auth.py

Lines changed: 7 additions & 0 deletions

@@ -687,6 +687,13 @@ def create_signer(self) -> Dict:
         logger.info(f"Using 'resource_principal' authentication.")
         return signer_dict

+    @staticmethod
+    def supported():
+        return any(
+            os.environ.get(var)
+            for var in ['JOB_RUN_OCID', 'NB_SESSION_OCID', 'DATAFLOW_RUN_ID', 'PIPELINE_RUN_OCID']
+        )
+

 class InstancePrincipal(AuthSignerGenerator):
     """

ads/opctl/config/merger.py

Lines changed: 2 additions & 2 deletions

@@ -11,7 +11,7 @@

 import yaml

-from ads.common.auth import AuthType
+from ads.common.auth import AuthType, ResourcePrincipal
 from ads.opctl import logger
 from ads.opctl.config.base import ConfigProcessor
 from ads.opctl.config.utils import read_from_ini, _DefaultNoneDict
@@ -115,7 +115,7 @@ def _fill_config_with_defaults(self, ads_config_path: str) -> None:
         )
         # set default auth
         if not self.config["execution"].get("auth", None):
-            if is_in_notebook_session():
+            if ResourcePrincipal.supported():
                 self.config["execution"]["auth"] = (
                     exec_config.get("auth") or AuthType.RESOURCE_PRINCIPAL
                 )
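
With this change the default auth mode is derived from the runtime environment (any OCI job,
notebook, Data Flow, or pipeline run) instead of only from a notebook-session check. A rough
sketch of that selection logic; the non-OCI fallback to api_key is an assumption here, since the
else branch is not part of the diff:

from ads.common.auth import AuthType, ResourcePrincipal

def pick_default_auth(explicit_auth: str = None) -> str:
    """Hypothetical helper mirroring the merger's default-auth fallback."""
    if explicit_auth:                      # a value already present in the config wins
        return explicit_auth
    if ResourcePrincipal.supported():      # inside an OCI job/notebook/dataflow/pipeline run
        return AuthType.RESOURCE_PRINCIPAL
    return AuthType.API_KEY                # assumed local default

print(pick_default_auth())                 # "api_key" locally, "resource_principal" in a job run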

ads/opctl/operator/__init__.py

Lines changed: 3 additions & 1 deletion

@@ -14,7 +14,9 @@ def __registered_operators():
     return [
         f
         for f in os.listdir(target_dir)
-        if os.path.isdir(os.path.join(target_dir, f)) and not f.startswith("__")
+        if os.path.isdir(os.path.join(target_dir, f))
+        and not f.startswith("__")
+        and f != "common"
     ]

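
The operator registry is built by listing the operator package directory; the filter now also
skips the shared "common" folder in addition to dunder packages. A standalone illustration of the
same filtering (the directory name in the usage comment is illustrative):

import os

def registered_operators(target_dir: str) -> list:
    """List operator sub-packages, skipping dunder folders and the shared 'common' package."""
    return [
        f
        for f in os.listdir(target_dir)
        if os.path.isdir(os.path.join(target_dir, f))
        and not f.startswith("__")
        and f != "common"
    ]

# e.g. registered_operators("ads/opctl/operator") might return ["forecast", "anomaly"],
# while "__pycache__" and "common" are excluded.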

ads/opctl/operator/cli.py

Lines changed: 7 additions & 1 deletion

@@ -9,12 +9,14 @@
 import click
 import fsspec
 import yaml
+import logging
 from ads.opctl.operator.common.utils import default_signer
 from ads.common.auth import AuthType
 from ads.common.object_storage_details import ObjectStorageDetails
 from ads.opctl.constants import BACKEND_NAME, RUNTIME_TYPE
 from ads.opctl.decorator.common import click_options, with_auth, with_click_unknown_args
 from ads.opctl.utils import suppress_traceback
+from ads.opctl import logger

 from .__init__ import __operators__
 from .cmd import run as cmd_run
@@ -311,10 +313,14 @@ def publish_conda(debug: bool, **kwargs: Dict[str, Any]) -> None:
 @click.pass_context
 @with_click_unknown_args
 @with_auth
-def run(ctx: click.core.Context, debug: bool, **kwargs: Dict[str, Any]) -> None:
+def run(ctx: click.core.Context, debug: bool = False, **kwargs: Dict[str, Any]) -> None:
     """
     Runs the operator with the given specification on the targeted backend.
     """
+    if debug:
+        logger.setLevel(logging.DEBUG)
+    else:
+        logger.setLevel(logging.CRITICAL)
     operator_spec = {}
     backend = kwargs.pop("backend")

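
The --debug flag now also drives the opctl logger verbosity: DEBUG when set, CRITICAL (effectively
silent) otherwise. A minimal click sketch of the same pattern, using a throwaway logger name
instead of the real ads.opctl logger:

import logging

import click

logger = logging.getLogger("operator-cli-sketch")  # stand-in for the ads.opctl logger
logging.basicConfig()

@click.command()
@click.option("--debug", is_flag=True, default=False)
def run(debug: bool = False) -> None:
    """Toggle log verbosity the same way the operator `run` command now does."""
    logger.setLevel(logging.DEBUG if debug else logging.CRITICAL)
    logger.debug("visible only with --debug")
    click.echo(f"effective level: {logging.getLevelName(logger.getEffectiveLevel())}")

if __name__ == "__main__":
    run()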

ads/opctl/operator/cmd.py

Lines changed: 3 additions & 3 deletions

@@ -48,7 +48,7 @@
 from .common.errors import (
     OperatorCondaNotFoundError,
     OperatorImageNotFoundError,
-    OperatorSchemaYamlError,
+    InvalidParameterError,
 )
 from .common.operator_loader import _operator_info_list

@@ -167,7 +167,7 @@ def init(
         )
     else:
         overwrite = True
-        output = os.path.join(tempfile.TemporaryDirectory().name, "")
+        output = operator_utils.create_output_folder(name=type + "/")

     # generating operator specification
     operator_config = {}
@@ -422,7 +422,7 @@ def verify(
             run_name="verify",
         )
         operator_module.get("verify")(config, **kwargs)
-    except OperatorSchemaYamlError as ex:
+    except InvalidParameterError as ex:
         logger.debug(ex)
         raise ValueError(
             f"The operator's specification is not valid for the `{operator_info.type}` operator. "

ads/opctl/operator/common/errors.py

Lines changed: 2 additions & 1 deletion

@@ -7,8 +7,9 @@
 from ads.opctl.operator import __operators__


-class OperatorSchemaYamlError(Exception):
+class InvalidParameterError(Exception):
     """Exception raised when there is an issue with the schema."""
+
     def __init__(self, error: str):
         super().__init__(
             "Invalid operator specification. Check the YAML structure and ensure it "

ads/opctl/operator/common/operator_config.py

Lines changed: 22 additions & 3 deletions

@@ -8,12 +8,31 @@
 import json
 from abc import abstractmethod
 from dataclasses import dataclass
-from typing import Any, Dict
+from typing import Any, Dict, List

 from ads.common.serializer import DataClassSerializable

 from ads.opctl.operator.common.utils import OperatorValidator
-from ads.opctl.operator.common.errors import OperatorSchemaYamlError
+from ads.opctl.operator.common.errors import InvalidParameterError
+
+@dataclass(repr=True)
+class InputData(DataClassSerializable):
+    """Class representing operator specification input data details."""
+
+    connect_args: Dict = None
+    format: str = None
+    columns: List[str] = None
+    url: str = None
+    filters: List[str] = None
+    options: Dict = None
+    limit: int = None
+    sql: str = None
+    table_name: str = None
+
+
+@dataclass(repr=True)
+class OutputDirectory(InputData):
+    """Class representing operator specification output directory details."""


 @dataclass(repr=True)
@@ -65,7 +84,7 @@ def _validate_dict(cls, obj_dict: Dict) -> bool:
         result = validator.validate(obj_dict)

         if not result:
-            raise OperatorSchemaYamlError(json.dumps(validator.errors, indent=2))
+            raise InvalidParameterError(json.dumps(validator.errors, indent=2))
         return True

     @classmethod
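
InputData and OutputDirectory describe the input_data / output_directory blocks of an operator
spec YAML. A standalone mirror of the new dataclass (without the DataClassSerializable base),
just to show how such a block maps onto fields; the spec values are illustrative:

from dataclasses import dataclass
from typing import Dict, List

@dataclass(repr=True)
class InputData:
    """Mirror of the operator spec input data section from the diff above."""
    connect_args: Dict = None
    format: str = None
    columns: List[str] = None
    url: str = None
    filters: List[str] = None
    options: Dict = None
    limit: int = None
    sql: str = None
    table_name: str = None

# e.g. an object-storage CSV source restricted to two columns:
spec = {"url": "oci://bucket@namespace/prefix/data.csv", "format": "csv", "columns": ["ds", "y"]}
print(InputData(**spec))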

ads/opctl/operator/common/utils.py

Lines changed: 16 additions & 0 deletions

@@ -29,6 +29,22 @@ class OperatorValidator(Validator):
     pass


+def create_output_folder(name):
+    output_folder = name
+    protocol = fsspec.utils.get_protocol(output_folder)
+    storage_options = {}
+    if protocol != "file":
+        storage_options = auth or default_signer()
+
+    fs = fsspec.filesystem(protocol, **storage_options)
+    name_suffix = 1
+    while fs.exists(output_folder):
+        name_suffix = name_suffix + 1
+        output_folder = f"{name}_{name_suffix}"
+    fs.mkdirs(output_folder)
+    return output_folder
+
+
 def _build_image(
     dockerfile: str,
     image_name: str,
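
create_output_folder() probes the target filesystem through fsspec and appends a numeric suffix
until it finds a folder name that does not exist yet; for non-local protocols it also passes
storage options for authentication. A local-filesystem sketch of the same naming logic (no
auth/object-storage handling here):

import fsspec

def create_local_output_folder(name: str) -> str:
    """Create `name`, or the first `name_<n>` variant that does not exist yet, and return it."""
    fs = fsspec.filesystem("file")
    output_folder = name
    suffix = 1
    while fs.exists(output_folder):
        suffix += 1
        output_folder = f"{name}_{suffix}"
    fs.mkdirs(output_folder)
    return output_folder

# create_local_output_folder("results")  # -> "results", or "results_2" if "results" already exists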
(new file; path hidden in this commit view)

Lines changed: 15 additions & 0 deletions

@@ -0,0 +1,15 @@
+type: anomaly
+version: v1
+conda_type: service
+name: Anomaly Detection Operator
+gpu: no
+keywords:
+  - Anomaly Detection
+backends:
+  - job
+  - operator.local
+description: |
+  Anomaly Detection is the identification of rare items, events, or observations in data that
+  differ significantly from the expectation. This can be used for several scenarios like asset
+  monitoring, maintenance and prognostic surveillance in industries such as utility,
+  aviation and manufacturing.
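
The new metadata registers the Anomaly Detection operator (service conda pack, no GPU, runnable on
the job and operator.local backends). A short sketch of reading such metadata with PyYAML; the
filename is a hypothetical placeholder, since the real path is hidden in this commit view:

import yaml

# Hypothetical local copy of the metadata shown above.
with open("anomaly_operator_metadata.yaml", "r") as f:
    meta = yaml.safe_load(f)

print(meta["type"])      # "anomaly"
print(meta["backends"])  # ["job", "operator.local"]
print(meta["gpu"])       # False -- PyYAML reads the bare `no` as a boolean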
