Commit f545886

chore(release): 0.5.1 (#189)
2 parents: 5865e74 + 7b1ae60

21 files changed: +341 / -36 lines

README.md

Lines changed: 1 addition & 0 deletions
@@ -115,6 +115,7 @@ If you want to test this package on examples from this repo:
 ```bash
 git clone git@github.com:artefactory/vertex-pipelines-deployer.git
 poetry install
+poetry shell # if you want to activate the virtual environment
 cd example
 ```
 <!-- --8<-- [end:installation] -->

deployer/_templates/deployer.env.jinja

Lines changed: 10 additions & 5 deletions
@@ -3,12 +3,17 @@ GCP_REGION=
 
 TAG=latest
 
-# Google Artifact Registry
-GAR_LOCATION= # Google Artifact Registry repo location
+# GOOGLE ARTIFACT REGISTRY
+# Google Artifact Registry repo location
+GAR_LOCATION=
+
 GAR_DOCKER_REPO_ID=
 GAR_PIPELINES_REPO_ID=
 GAR_VERTEX_BASE_IMAGE_NAME=
 
-# Vertex AI
-VERTEX_STAGING_BUCKET_NAME= # without gs://
-VERTEX_SERVICE_ACCOUNT= # full service account email
+# VERTEX AI
+# without gs://
+VERTEX_STAGING_BUCKET_NAME=
+
+# full service account email
+VERTEX_SERVICE_ACCOUNT=

deployer/_templates/deployment/Dockerfile.jinja

Lines changed: 3 additions & 2 deletions
@@ -16,9 +16,10 @@ ENV VERTEX_SERVICE_ACCOUNT=${VERTEX_SERVICE_ACCOUNT}
 
 WORKDIR /app
 
-COPY deployer-requirements.txt .
+COPY requirements.txt .
+COPY requirements-vertex.txt .
 RUN python3 -m pip install --upgrade pip
-RUN python3 -m pip install -r deployer-requirements.txt
+RUN python3 -m pip install -r requirements-vertex.txt
 
 ENV PYTHONPATH "${PYTHONPATH}:."
 

deployer/_templates/requirements-vertex.txt.jinja

Lines changed: 1 addition & 1 deletion
@@ -5,4 +5,4 @@
 # deploy
 kfp
 google-cloud-aiplatform
-vertex-deployer={{ deployer_version }}
+vertex-deployer=={{ deployer_version }}

deployer/cli.py

Lines changed: 10 additions & 0 deletions
@@ -227,6 +227,15 @@ def deploy( # noqa: C901
             "Defaults to '{pipeline_name}-experiment'.",
         ),
     ] = None,
+    run_name: Annotated[
+        Optional[str],
+        typer.Option(
+            "--run-name",
+            "-rn",
+            help="The pipeline's run name. Displayed in the UI."
+            "Defaults to '{pipeline_name}-{tags}-%Y%m%d%H%M%S'.",
+        ),
+    ] = None,
     skip_validation: Annotated[
         bool,
         typer.Option(
@@ -276,6 +285,7 @@ def deploy( # noqa: C901
         staging_bucket_name=vertex_settings.VERTEX_STAGING_BUCKET_NAME,
         service_account=vertex_settings.VERTEX_SERVICE_ACCOUNT,
         pipeline_name=pipeline_name,
+        run_name=run_name,
         pipeline_func=pipeline_func,
         gar_location=vertex_settings.GAR_LOCATION,
         gar_repo_id=vertex_settings.GAR_PIPELINES_REPO_ID,
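
The new `--run-name` flag reuses the `Annotated` + `typer.Option` pattern of the surrounding options. A minimal, self-contained sketch of that pattern, as a hypothetical one-command CLI rather than the project's actual `deploy` command:

```python
# Hypothetical stand-alone example of the Annotated/typer.Option pattern used above.
from typing import Optional

import typer
from typing_extensions import Annotated

app = typer.Typer()


@app.command()
def deploy(
    run_name: Annotated[
        Optional[str],
        typer.Option(
            "--run-name",
            "-rn",
            help="The pipeline's run name. Displayed in the UI.",
        ),
    ] = None,
):
    # The real command forwards run_name to the pipeline deployer; here we just print it.
    typer.echo(f"run_name={run_name}")


if __name__ == "__main__":
    app()
```

Running this sketch with `--run-name my-run` would simply print `run_name=my-run`; in the real CLI the value is passed through to the deployer, as shown in the second hunk above.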

deployer/constants.py

Lines changed: 3 additions & 0 deletions
@@ -1,3 +1,4 @@
+import re
 from pathlib import Path
 
 TEMPLATES_PATH = Path(__file__).parent / "_templates"
@@ -64,3 +65,5 @@
     "you can add the following flags to the deploy command if not set in your config:\n"
     "--schedule --cron=cron_expression --scheduler-timezone=IANA_time_zone\n"
 )
+
+VALID_RUN_NAME_PATTERN = re.compile("^[a-z][-a-z0-9]{0,127}$", re.IGNORECASE)

deployer/init_deployer.py

Lines changed: 8 additions & 12 deletions
@@ -4,8 +4,8 @@
 from jinja2 import Environment, FileSystemLoader, meta
 from rich.tree import Tree
 
-from deployer import constants
 from deployer.__init__ import __version__ as deployer_version
+from deployer.constants import INSTRUCTIONS, TEMPLATES_DEFAULT_STRUCTURE, TEMPLATES_PATH
 from deployer.settings import (
     DeployerSettings,
     find_pyproject_toml,
@@ -72,7 +72,7 @@ def _create_file_from_template(path: Path, template_path: Path, **kwargs):
             )
         else:
             path.write_text(content)
-    except (FileNotFoundError, KeyError, jinja2.TemplateError) as e:
+    except (KeyError, jinja2.TemplateError, jinja2.TemplateNotFound) as e:
         raise TemplateFileCreationError(
             f"An error occurred while creating the file from template: {e}"
         ) from e
@@ -85,9 +85,9 @@ def _generate_templates_mapping(
 ):
     """Generate the mapping of a list of templates to create and their variables."""
     templates_mapping = {}
-    env = Environment(loader=FileSystemLoader(str(constants.TEMPLATES_PATH)), autoescape=True)
+    env = Environment(loader=FileSystemLoader(str(TEMPLATES_PATH)), autoescape=True)
     for template, template_path in templates_dict.items():
-        template_name = str(template_path.relative_to(constants.TEMPLATES_PATH))
+        template_name = str(template_path.relative_to(TEMPLATES_PATH))
         template_source = env.loader.get_source(env, template_name)[0]
         parsed_content = env.parse(template_source)
         variables = meta.find_undeclared_variables(parsed_content)
@@ -110,12 +110,10 @@ def build_default_folder_structure(deployer_settings: DeployerSettings):
     """Create the default folder structure for the Vertex Pipelines project."""
     vertex_folder_path = deployer_settings.vertex_folder_path
     dockerfile_path = vertex_folder_path / str(
-        constants.TEMPLATES_DEFAULT_STRUCTURE["dockerfile"].relative_to(constants.TEMPLATES_PATH)
+        TEMPLATES_DEFAULT_STRUCTURE["dockerfile"].relative_to(TEMPLATES_PATH)
     ).replace(".jinja", "")
     cloud_build_path = vertex_folder_path / str(
-        constants.TEMPLATES_DEFAULT_STRUCTURE["cloudbuild_local"].relative_to(
-            constants.TEMPLATES_PATH
-        )
+        TEMPLATES_DEFAULT_STRUCTURE["cloudbuild_local"].relative_to(TEMPLATES_PATH)
     ).replace(".jinja", "")
 
     # Create the folder structure
@@ -130,7 +128,7 @@ def build_default_folder_structure(deployer_settings: DeployerSettings):
     }
 
     templates_mapping = _generate_templates_mapping(
-        constants.TEMPLATES_DEFAULT_STRUCTURE, mapping_variables, vertex_folder_path
+        TEMPLATES_DEFAULT_STRUCTURE, mapping_variables, vertex_folder_path
     )
 
     # Create the files
@@ -177,6 +175,4 @@ def show_commands(deployer_settings: DeployerSettings):
     vertex_folder_path = deployer_settings.vertex_folder_path
     build_base_image_path = vertex_folder_path / "deployment" / "build_base_image.sh"
 
-    console.print(
-        constants.INSTRUCTIONS.format(build_base_image_path=build_base_image_path), style="blue"
-    )
+    console.print(INSTRUCTIONS.format(build_base_image_path=build_base_image_path), style="blue")
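
For reference, `_generate_templates_mapping` discovers each template's variables with `jinja2.meta.find_undeclared_variables`, as the unchanged lines above show. A minimal sketch of that call, using an inline template string instead of the deployer's real `.jinja` files (the template text below is invented for illustration):

```python
from jinja2 import Environment, meta

env = Environment(autoescape=True)

# Illustrative template text; the real code loads .jinja files from TEMPLATES_PATH.
template_source = "FROM {{ base_image }}\nENV DEPLOYER_VERSION={{ deployer_version }}\n"

parsed_content = env.parse(template_source)
variables = meta.find_undeclared_variables(parsed_content)
print(variables)  # e.g. {'base_image', 'deployer_version'}
```

The resulting set of undeclared names is what `_generate_templates_mapping` collects per template.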

deployer/pipeline_deployer.py

Lines changed: 27 additions & 2 deletions
@@ -1,6 +1,7 @@
 from __future__ import annotations
 
 import os
+from datetime import datetime
 from pathlib import Path
 from typing import Callable, List, Optional
 
@@ -11,6 +12,7 @@
 from loguru import logger
 from requests import HTTPError
 
+from deployer import constants
 from deployer.utils.exceptions import (
     MissingGoogleArtifactRegistryHostError,
     TagNotFoundError,
@@ -24,6 +26,7 @@ def __init__(
         self,
         pipeline_name: str,
         pipeline_func: Callable,
+        run_name: Optional[str] = None,
         project_id: Optional[str] = None,
         region: Optional[str] = None,
         staging_bucket_name: Optional[str] = None,
@@ -39,6 +42,7 @@ def __init__(
         self.service_account = service_account
 
         self.pipeline_name = pipeline_name
+        self.run_name = run_name
         self.pipeline_func = pipeline_func
 
         self.gar_location = gar_location
@@ -106,6 +110,26 @@ def _check_experiment_name(self, experiment_name: Optional[str] = None) -> str:
 
         return experiment_name
 
+    def _check_run_name(self, tag: Optional[str] = None) -> None:
+        """Each run name (job_id) must be unique.
+        We thus always add a timestamp to ensure uniqueness.
+        """
+        now_str = datetime.now().strftime("%Y%m%d-%H%M%S")
+        if self.run_name is None:
+            self.run_name = f"{self.pipeline_name}"
+            if tag:
+                self.run_name += f"-{tag}"
+
+        self.run_name = self.run_name.replace("_", "-")
+        self.run_name += f"-{now_str}"
+
+        if not constants.VALID_RUN_NAME_PATTERN.match(self.run_name):
+            raise ValueError(
+                f"Run name {self.run_name} does not match the pattern"
+                f" {constants.VALID_RUN_NAME_PATTERN.pattern}"
+            )
+        logger.debug(f"run_name is: {self.run_name}")
+
     def _create_pipeline_job(
         self,
         template_path: str,
@@ -139,6 +163,7 @@ def _create_pipeline_job(
         """  # noqa: E501
         job = aiplatform.PipelineJob(
             display_name=self.pipeline_name,
+            job_id=self.run_name,
             template_path=template_path,
             pipeline_root=self.staging_bucket_uri,
             location=self.region,
@@ -210,7 +235,7 @@ def run(
             tag (str, optional): Tag of the pipeline template. Defaults to None.
         """  # noqa: E501
         experiment_name = self._check_experiment_name(experiment_name)
-
+        self._check_run_name(tag=tag)
         template_path = self._get_template_path(tag)
 
         logger.debug(
@@ -238,7 +263,7 @@
                     f"Encountered an error while linking your job {job.job_id}"
                     f" with experiment {experiment_name}."
                     " This is likely due to a bug in the AI Platform Pipelines client."
-                    " You job should be running anyway. Try to link it manually."
+                    " Your job should be running anyway. Try to link it manually."
                 )
             else:
                 raise e
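
Taken together with `VALID_RUN_NAME_PATTERN` from `constants.py`, the new `_check_run_name` method builds a default run name from the pipeline name and tag, swaps underscores for hyphens, appends a timestamp, and validates the result before it becomes the `PipelineJob` `job_id`. A rough stand-alone sketch of that logic (the helper name and the exact nesting of the tag check are assumptions, not the package's API):

```python
import re
from datetime import datetime
from typing import Optional

# Same pattern as deployer/constants.py in this commit.
VALID_RUN_NAME_PATTERN = re.compile("^[a-z][-a-z0-9]{0,127}$", re.IGNORECASE)


def build_run_name(
    pipeline_name: str, run_name: Optional[str] = None, tag: Optional[str] = None
) -> str:
    # Default to the pipeline name (plus tag, if any), then normalize and timestamp it.
    now_str = datetime.now().strftime("%Y%m%d-%H%M%S")
    if run_name is None:
        run_name = pipeline_name
        if tag:
            run_name += f"-{tag}"
    run_name = run_name.replace("_", "-") + f"-{now_str}"

    if not VALID_RUN_NAME_PATTERN.match(run_name):
        raise ValueError(
            f"Run name {run_name} does not match the pattern {VALID_RUN_NAME_PATTERN.pattern}"
        )
    return run_name


print(build_run_name("dummy_pipeline", tag="latest"))  # e.g. dummy-pipeline-latest-20240101-120000
```

Because the timestamp is always appended, repeated runs of the same pipeline get distinct `job_id` values, which is what Vertex AI requires.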

deployer/settings.py

Lines changed: 1 addition & 0 deletions
@@ -30,6 +30,7 @@ class _DeployerDeploySettings(CustomBaseModel):
     config_name: Optional[str] = None
     enable_caching: Optional[bool] = None
     experiment_name: Optional[str] = None
+    run_name: Optional[str] = None
     skip_validation: bool = True
 
 
docs/CLI_REFERENCE.md

Lines changed: 1 addition & 0 deletions
@@ -123,6 +123,7 @@ $ vertex-deployer deploy [OPTIONS] PIPELINE_NAMES...
 * `-cn, --config-name TEXT`: Name of the json/py file with parameter values and input artifacts to use when running the pipeline. It must be in the pipeline config dir. e.g. `config_dev.json` for `./vertex/configs/{pipeline-name}/config_dev.json`.
 * `-ec, --enable-caching / -nec, --no-cache`: Whether to turn on caching for the run.If this is not set, defaults to the compile time settings, which are True for alltasks by default, while users may specify different caching options for individualtasks. If this is set, the setting applies to all tasks in the pipeline.Overrides the compile time settings. Defaults to None.
 * `-en, --experiment-name TEXT`: The name of the experiment to run the pipeline in.Defaults to '{pipeline_name}-experiment'.
+* `-rn, --run-name TEXT`: The pipeline's run name. Displayed in the UI.Defaults to '{pipeline_name}-{tags}-%Y%m%d%H%M%S'.
 * `-y, --skip-validation / -n, --no-skip`: Whether to continue without user validation of the settings. [default: skip-validation]
 * `--help`: Show this message and exit.
 

example/README.md

Lines changed: 56 additions & 0 deletions
@@ -16,3 +16,59 @@ git commit -m "first commit"
 git remote add origin "your_repo_url"
 git push -u origin master
 ```
+
+# Running the example
+
+In this section we detail how to run basic commands in the example folder.
+
+* Before the start, add this environment variable, so the pipelines are found: `export PYTHONPATH=.`
+
+* You must also add the required environment variables in the [example.env](example.env) file.
+
+## Check pipeline validity
+
+The following command will check if your pipeline is valid (notably, that the pipeline can be compiled and the config files are correctly defined).
+
+```bash
+vertex-deployer check dummy_pipeline
+```
+
+## Build the custom image
+
+To build and upload the custom image to Artifact Registry, you can use the following make command:
+
+```bash
+export $(cat example.env | xargs)
+make build-base-image
+```
+
+## Deploy the dummy pipeline via Cloud Build
+
+For the `vertex-deployer deploy` command to work within cloudbuild (and not simply locally), you will need to give additional IAM rights, to the service account used in Cloud Build Jobs.
+\
+\
+By default, the service account used is the following:
+* `[PROJECT_NUMBER]@cloudbuild.gserviceaccount.com`
+
+```bash
+export CLOUDBUILD_SERVICE_ACCOUNT = [PROJECT_NUMBER]@cloudbuild.gserviceaccount.com
+
+gcloud projects add-iam-policy-binding ${PROJECT_ID} \
+    --member="serviceAccount:${CLOUDBUILD_SERVICE_ACCOUNT}" \
+    --role="roles/aiplatform.user"
+
+gcloud projects add-iam-policy-binding ${PROJECT_ID} \
+    --member="serviceAccount:${CLOUDBUILD_SERVICE_ACCOUNT}" \
+    --role="roles/iam.serviceAccountUser"
+```
+
+Once this is done, you can launch the make command.
+
+If you do not modify the [cloudbuild_cd.yaml](cloudbuild.yaml) file, it should:
+- rebuild the base image
+- deploy a scheduled Vertex AI pipeline
+
+```bash
+export $(cat example.env | xargs)
+make deploy-pipeline
+```

example/example.env

File mode changed: 100644 → 100755
Lines changed: 7 additions & 6 deletions
@@ -3,12 +3,13 @@ GCP_REGION=europe-west1
 
 TAG=latest
 
-# Google Artifact Registry
-GAR_LOCATION=europe-west1 # Google Artifact Registry repo location
-GAR_DOCKER_REPO_ID=demo_docker_repo
-GAR_PIPELINES_REPO_ID=demo_pipelines_repo
+# Google Artifact Registry - GAR
+GAR_LOCATION=europe-west1
+GAR_DOCKER_REPO_ID=demo-docker-repo
+GAR_PIPELINES_REPO_ID=demo-pipelines-repo
 GAR_VERTEX_BASE_IMAGE_NAME=demo_base_image
 
 # Vertex AI
-VERTEX_STAGING_BUCKET_NAME=YOUR_VERTEX_STAGING_BUCKET_NAME # without gs://
-VERTEX_SERVICE_ACCOUNT=YOUR_VERTEX_SERVICE_ACCOUNT # full service account email
+VERTEX_STAGING_BUCKET_NAME=demo-vertex-staging-bucket
+VERTEX_SERVICE_ACCOUNT_NAME=demo-vertex-ai-sa
+VERTEX_SERVICE_ACCOUNT=demo-vertex-ai-sa@PROJECT_ID.iam.gserviceaccount.com

example/vertex/deployment/cloudbuild_cd.yaml

Lines changed: 12 additions & 4 deletions
@@ -18,10 +18,18 @@ steps:
   # schedule pipeline: compile, upload, schedule
   - name: '${_GAR_IMAGE_PATH}'
     entrypoint: 'bash'
-    args: [
-      '-c',
-      'vertex-deployer -log DEBUG deploy dummy_pipeline --compile --upload --run -ec --tags ${_TAG} --schedule --delete-last-schedule --cron *-*-19-*-* --config-name config_test.json'
-    ]
+    args:
+      - '-c'
+      - |
+        vertex-deployer -log DEBUG deploy dummy_pipeline \
+          --compile \
+          --upload \
+          --run \
+          --enable-caching \
+          --config-name config_test.json \
+          --tags ${_TAG} \
+          --schedule --delete-last-schedule --cron '*-*-19-*-*'
+
     dir: '.'
     id: schedule-dummy-pipeline
     waitFor: ['build-base-image']

mkdocs.yml

Lines changed: 1 addition & 1 deletion
@@ -49,7 +49,7 @@ nav:
   - Basic Usage: usage.md
   - Advanced User Guide:
     - Vertex DevOps: advanced_user_guide.md
-    - Undestand settings and configurations: configuration.md
+    - Understand settings and configurations: configuration.md
   - CLI Reference: CLI_REFERENCE.md
   - Contributing: contributing.md
   - Changelog: changelog.md

tests/conftest.py

Lines changed: 7 additions & 0 deletions
@@ -1,3 +1,5 @@
+from pathlib import Path
+
 import kfp.dsl
 import pytest
 from kfp.dsl import Artifact, Input
@@ -20,3 +22,8 @@ def dummy_pipeline(name: str, artifact: Input[Artifact]) -> None:
         raise Exception("This is an exception.")
     except Exception as e:
         exception_traceback = e.__traceback__
+
+
+@pytest.fixture(scope="session")
+def templates_path_fixture():
+    return Path("tests/unit_tests/input_files")
