
Commit 3b6a692

Updated docs.
1 parent 19f10bc commit 3b6a692

3 files changed: 48 additions, 16 deletions

ads/jobs/builders/infrastructure/dataflow.py

Lines changed: 1 addition & 11 deletions
@@ -802,18 +802,8 @@ def with_defined_tag(self, **kwargs) -> "DataFlow":
         """
         return self.set_spec(self.CONST_DEFINED_TAGS, kwargs)
 
-    @property
-    def freeform_tags(self) -> dict:
-        """Freeform tags"""
-        return self.get_spec(self.CONST_FREEFORM_TAGS, {})
-
-    @property
-    def defined_tags(self) -> dict:
-        """Defined tags"""
-        return self.get_spec(self.CONST_DEFINED_TAGS, {})
-
     def __getattr__(self, item):
-        if f"with_{item}" in self.__dir__():
+        if f"with_{item}" in self.__dir__() or item in (self.CONST_DEFINED_TAGS, self.CONST_FREEFORM_TAGS):
             return self.get_spec(item)
         raise AttributeError(f"Attribute {item} not found.")
 
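With the explicit ``freeform_tags`` and ``defined_tags`` properties removed, those reads are now handled by ``__getattr__``. A minimal sketch of the intended behaviour, assuming ``DataFlow`` is imported from ``ads.jobs`` and using the placeholder tag values from the docs below:

from ads.jobs import DataFlow

df = (
    DataFlow()
    .with_defined_tag(**{"Oracle-Tags": {"CreatedBy": "test_name@oracle.com"}})
    .with_freeform_tag(test_freeform_key="test_freeform_value")
)
# Both reads now fall through __getattr__ to get_spec() instead of the removed properties.
print(df.defined_tags)   # {'Oracle-Tags': {'CreatedBy': 'test_name@oracle.com'}}
print(df.freeform_tags)  # {'test_freeform_key': 'test_freeform_value'}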

docs/source/user_guide/apachespark/dataflow.rst

Lines changed: 29 additions & 5 deletions
@@ -207,6 +207,8 @@ You can set them using the ``with_{property}`` functions:
 - ``with_spark_version``
 - ``with_warehouse_bucket_uri``
 - ``with_private_endpoint_id`` (`doc <https://docs.oracle.com/en-us/iaas/data-flow/using/pe-allowing.htm#pe-allowing>`__)
+- ``with_defined_tag``
+- ``with_freeform_tag``
 
 For more details, see `Data Flow class documentation <https://docs.oracle.com/en-us/iaas/tools/ads-sdk/latest/ads.jobs.html#module-ads.jobs.builders.infrastructure.dataflow>`__.
 
@@ -229,10 +231,10 @@ create applications.
 
 In the following "hello-world" example, ``DataFlow`` is populated with ``compartment_id``,
 ``driver_shape``, ``driver_shape_config``, ``executor_shape``, ``executor_shape_config``
-and ``spark_version``. ``DataFlowRuntime`` is populated with ``script_uri`` and
-``script_bucket``. The ``script_uri`` specifies the path to the script. It can be
-local or remote (an Object Storage path). If the path is local, then
-``script_bucket`` must be specified additionally because Data Flow
+, ``spark_version``, ``defined_tags`` and ``freeform_tags``. ``DataFlowRuntime`` is
+populated with ``script_uri`` and ``script_bucket``. The ``script_uri`` specifies the
+path to the script. It can be local or remote (an Object Storage path). If the path
+is local, then ``script_bucket`` must be specified additionally because Data Flow
 requires a script to be available in Object Storage. ADS
 performs the upload step for you, as long as you give the bucket name
 or the Object Storage path prefix to upload the script. Either can be
@@ -272,6 +274,10 @@ accepted. In the next example, the prefix is given for ``script_bucket``.
     .with_executor_shape("VM.Standard.E4.Flex")
     .with_executor_shape_config(ocpus=4, memory_in_gbs=64)
     .with_spark_version("3.0.2")
+    .with_defined_tag(
+        **{"Oracle-Tags": {"CreatedBy": "test_name@oracle.com"}}
+    )
+    .with_freeform_tag(test_freeform_key="test_freeform_value")
 )
 runtime_config = (
     DataFlowRuntime()
@@ -393,6 +399,10 @@ In the next example, ``archive_uri`` is given as an Object Storage location.
         "spark.driverEnv.myEnvVariable": "value1",
         "spark.executorEnv.myEnvVariable": "value2",
     })
+    .with_defined_tag(
+        **{"Oracle-Tags": {"CreatedBy": "test_name@oracle.com"}}
+    )
+    .with_freeform_tag(test_freeform_key="test_freeform_value")
 )
 runtime_config = (
     DataFlowRuntime()
@@ -566,6 +576,11 @@ into the ``Job.from_yaml()`` function to build a Data Flow job:
       numExecutors: 1
       sparkVersion: 3.2.1
       privateEndpointId: <private_endpoint_ocid>
+      definedTags:
+        Oracle-Tags:
+          CreatedBy: test_name@oracle.com
+      freeformTags:
+        test_freeform_key: test_freeform_value
     type: dataFlow
   name: dataflow_app_name
   runtime:
@@ -647,6 +662,12 @@ into the ``Job.from_yaml()`` function to build a Data Flow job:
     configuration:
       required: false
      type: dict
+    definedTags:
+      required: false
+      type: dict
+    freeformTags:
+      required: false
+      type: dict
     type:
       allowed:
         - dataFlow
@@ -694,7 +715,10 @@ into the ``Job.from_yaml()`` function to build a Data Flow job:
     configuration:
       required: false
       type: dict
-    freeform_tag:
+    definedTags:
+      required: false
+      type: dict
+    freeformTags:
       required: false
       type: dict
     scriptBucket:
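The new ``definedTags`` and ``freeformTags`` YAML keys mirror the ``with_defined_tag`` / ``with_freeform_tag`` builder calls. Below is a minimal sketch of loading such a spec with ``Job.from_yaml()`` and reading the tags back; the runtime keys (``scriptPathURI``, ``scriptBucket``), the OCIDs, bucket names, driver shape sizes and tag values are placeholders assumed for illustration, not something this commit adds:

from ads.jobs import Job

yaml_spec = """
kind: job
spec:
  name: dataflow_app_name
  infrastructure:
    kind: infrastructure
    type: dataFlow
    spec:
      compartmentId: <compartment_ocid>
      driverShape: VM.Standard.E4.Flex
      driverShapeConfig:
        ocpus: 2
        memory_in_gbs: 32
      executorShape: VM.Standard.E4.Flex
      executorShapeConfig:
        ocpus: 4
        memory_in_gbs: 64
      numExecutors: 1
      sparkVersion: 3.2.1
      definedTags:
        Oracle-Tags:
          CreatedBy: test_name@oracle.com
      freeformTags:
        test_freeform_key: test_freeform_value
  runtime:
    kind: runtime
    type: dataFlow
    spec:
      scriptPathURI: oci://<bucket_name>@<namespace>/<prefix>/script.py
      scriptBucket: <bucket_name>
"""

job = Job.from_yaml(yaml_spec)
# The tags land on the DataFlow infrastructure spec and can be read back
# through the __getattr__ dispatch shown in dataflow.py above.
print(job.infrastructure.defined_tags)
print(job.infrastructure.freeform_tags)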

docs/source/user_guide/apachespark/quickstart.rst

Lines changed: 18 additions & 0 deletions
@@ -45,6 +45,10 @@ followed by the spark version, 3.2.1.
     .with_executor_shape("VM.Standard.E4.Flex")
     .with_executor_shape_config(ocpus=4, memory_in_gbs=64)
     .with_spark_version("3.2.1")
+    .with_defined_tag(
+        **{"Oracle-Tags": {"CreatedBy": "test_name@oracle.com"}}
+    )
+    .with_freeform_tag(test_freeform_key="test_freeform_value")
 )
 runtime_config = (
     DataFlowRuntime()
@@ -95,6 +99,11 @@ Assuming you have the following two files written in your current directory as `
         memory_in_gbs: 64
       sparkVersion: 3.2.1
       numExecutors: 1
+      definedTags:
+        Oracle-Tags:
+          CreatedBy: test_name@oracle.com
+      freeformTags:
+        test_freeform_key: test_freeform_value
     type: dataFlow
   runtime:
     kind: runtime
@@ -185,6 +194,10 @@ From a Python Environment
     .with_executor_shape("VM.Standard.E4.Flex")
     .with_executor_shape_config(ocpus=4, memory_in_gbs=64)
     .with_spark_version("3.2.1")
+    .with_defined_tag(
+        **{"Oracle-Tags": {"CreatedBy": "test_name@oracle.com"}}
+    )
+    .with_freeform_tag(test_freeform_key="test_freeform_value")
 )
 runtime_config = (
     DataFlowRuntime()
@@ -275,6 +288,11 @@ Again, assume you have the following two files written in your current directory
         memory_in_gbs: 64
       sparkVersion: 3.2.1
       numExecutors: 1
+      definedTags:
+        Oracle-Tags:
+          CreatedBy: test_name@oracle.com
+      freeformTags:
+        test_freeform_key: test_freeform_value
     type: dataFlow
   runtime:
     kind: runtime
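For completeness, a sketch of the end-to-end flow the quickstart builds toward, reusing the tagged configuration from the diff above. The compartment OCID, driver shape sizes, script path and application name are placeholders, and the ``Job(...).create()`` / ``job.run()`` calls follow the usual ADS job pattern rather than anything introduced by this commit:

from ads.jobs import DataFlow, DataFlowRuntime, Job

dataflow_configs = (
    DataFlow()
    .with_compartment_id("<compartment_ocid>")
    .with_driver_shape("VM.Standard.E4.Flex")
    .with_driver_shape_config(ocpus=2, memory_in_gbs=32)
    .with_executor_shape("VM.Standard.E4.Flex")
    .with_executor_shape_config(ocpus=4, memory_in_gbs=64)
    .with_spark_version("3.2.1")
    .with_defined_tag(**{"Oracle-Tags": {"CreatedBy": "test_name@oracle.com"}})
    .with_freeform_tag(test_freeform_key="test_freeform_value")
)
runtime_config = (
    DataFlowRuntime()
    # A remote script URI, so no script_bucket is needed (see dataflow.rst above).
    .with_script_uri("oci://<bucket_name>@<namespace>/<prefix>/script.py")
)

job = Job(name="dataflow_app_name", infrastructure=dataflow_configs, runtime=runtime_config)
job.create()    # the defined and freeform tags are applied to the Data Flow application
run = job.run()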
