Skip to content

Commit 3392e87

Browse files
committed
Add support for Minio, refactor for clarity
1 parent 63ced37 commit 3392e87

File tree

2 files changed

+138
-44
lines changed

2 files changed

+138
-44
lines changed

README.md

Lines changed: 11 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,16 +1,25 @@
11
# Cloud archive
22

33
This `WORKSPACE` rule for Google Bazel lets you securely download private
4-
dependencies from S3.
4+
workspace dependencies from S3 or Minio.
55

66
## Requirements
77

88
This currently only works on Linux, although adapting it to macOS and Windows
9-
shouldn't be difficult. AWS CLI is required to be in the path, and must be set
9+
shouldn't be difficult.
10+
11+
### S3
12+
13+
AWS CLI is required to be in the path for S3 support, and must be set
1014
up such that you can download files from the buckets referenced in the rules
1115
with `aws s3 cp`. `--profile` flag is also supported, for people who use
1216
multiple profiles.
1317

18+
### Minio
19+
20+
Likewise for Minio, the `mc` command must be in the path, and Minio must be set
21+
up such that `mc cp` is able to download the referenced files.
22+
1423
## Usage
1524

1625
Please refer to `WORKSPACE` file in this repository for an example of how to

cloud_archive.bzl

Lines changed: 127 additions & 42 deletions
Original file line numberDiff line numberDiff line change
@@ -1,57 +1,142 @@
1-
# This rule will download an archive from S3, check sha256, extract it, and
2-
# symlink the provided BUILD file inside.
1+
""" This rule will download an archive from S3, check sha256, extract it, and
2+
symlink the provided BUILD file inside. """
33

44
# License: Apache 2.0
5+
# Provenance: https://github.com/1e100/cloud_archive
56

7+
def validate_checksum(repo_ctx, url, local_path, expected_sha256):
    """Verifies that a downloaded file matches its expected SHA256 checksum.

    Args:
      repo_ctx: repository_ctx of the calling repository rule.
      url: source URL of the file; used only in error messages.
      local_path: path of the downloaded file, relative to the repo root.
      expected_sha256: hex-encoded SHA256 string the file must hash to.

    Fails the build if `sha256sum` is not on the path, the checksum cannot
    be computed, or the checksum does not match.
    """
    sha256_path = repo_ctx.which("sha256sum")
    if not sha256_path:
        # which() returns None when the tool is missing; fail with a clear
        # message instead of passing None to execute().
        fail("`sha256sum` was not found on the path; it is required to " +
             "verify downloaded archives.")
    repo_ctx.report_progress("Checksumming {}.".format(local_path))
    sha256_result = repo_ctx.execute([sha256_path, local_path])
    if sha256_result.return_code != 0:
        fail("Failed to verify checksum: {}".format(sha256_result.stderr))

    # `sha256sum` prints "<hash>  <filename>"; keep only the hash.
    sha256 = sha256_result.stdout.split(" ")[0]
    if sha256 != expected_sha256:
        fail("Checksum mismatch for {}, expected {}, got {}.".format(
            url,
            expected_sha256,
            sha256,
        ))
621

7-
def _s3_archive_impl(ctx):
8-
url = "s3://{}/{}".format(ctx.attr.bucket, ctx.attr.file_path)
9-
filename = ctx.path(url).basename
22+
def extract_archive(repo_ctx, local_path, strip_prefix, build_file, build_file_contents):
    """Unpacks a downloaded archive and installs a BUILD file for it.

    Inline `build_file_contents` takes priority over the `build_file` label.
    If neither is provided, any BUILD file shipped inside the archive is
    left as-is.
    """
    repo_ctx.extract(local_path, stripPrefix = strip_prefix)

    bash_path = repo_ctx.os.environ.get("BAZEL_SH", "bash")
    if build_file_contents or build_file:
        # Remove any BUILD file that came with the archive before
        # installing the caller-supplied one.
        repo_ctx.execute([bash_path, "-c", "rm -f BUILD BUILD.bazel"])
        if build_file_contents:
            repo_ctx.file("BUILD.bazel", build_file_contents, executable = False)
        else:
            repo_ctx.symlink(build_file, "BUILD.bazel")
36+
def s3_archive_download(
        repo_ctx,
        s3_bucket,
        s3_file_path,
        expected_sha256,
        strip_prefix = "",
        build_file = "",
        build_file_contents = "",
        aws_profile = None):
    """Securely downloads and unpacks an archive from S3, then places a
    BUILD file inside.

    Args:
      repo_ctx: repository_ctx of the calling repository rule.
      s3_bucket: name of the S3 bucket to download from.
      s3_file_path: path of the archive within the bucket.
      expected_sha256: hex-encoded SHA256 checksum the archive must match.
      strip_prefix: directory prefix to strip when extracting.
      build_file: label of a BUILD file to symlink into the repo.
      build_file_contents: inline BUILD file text; takes priority over
        `build_file`.
      aws_profile: optional AWS CLI profile name to authenticate with.
    """
    url = "s3://{}/{}".format(s3_bucket, s3_file_path)
    filename = repo_ctx.path(url).basename

    # Download via the AWS CLI, which must already be set up so that
    # `aws s3 cp` can read the bucket.
    aws_cli_path = repo_ctx.which("aws")
    if not aws_cli_path:
        # which() returns None when the tool is missing; fail early with a
        # clear message instead of passing None to execute().
        fail("The `aws` CLI was not found on the path; it is required to " +
             "download {}.".format(url))
    profile_flags = []
    if aws_profile:
        profile_flags = ["--profile", aws_profile]
    aws_cli_cmd = [aws_cli_path] + profile_flags + ["s3", "cp", url, "."]
    repo_ctx.report_progress("Downloading {}.".format(url))
    s3_result = repo_ctx.execute(aws_cli_cmd, timeout = 1800)
    if s3_result.return_code != 0:
        fail("Failed to download {} from S3: {}".format(url, s3_result.stderr))

    validate_checksum(repo_ctx, url, filename, expected_sha256)
    extract_archive(repo_ctx, filename, strip_prefix, build_file, build_file_contents)
3063

31-
# Extract the downloaded archive.
32-
ctx.extract(filename, stripPrefix=ctx.attr.strip_prefix)
64+
def _s3_archive_impl(ctx):
    # Thin adapter: forward this rule's attributes to the shared downloader.
    attrs = ctx.attr
    s3_archive_download(
        ctx,
        attrs.bucket,
        attrs.file_path,
        attrs.sha256,
        strip_prefix = attrs.strip_prefix,
        build_file = attrs.build_file,
        build_file_contents = attrs.build_file_contents,
        aws_profile = attrs.aws_profile,
    )
3375

34-
# Provide external BUILD file if requested.
35-
bash_path = ctx.os.environ.get("BAZEL_SH", "bash")
36-
if ctx.attr.build_file:
37-
ctx.execute([bash_path, "-c", "rm -f BUILD BUILD.bazel"])
38-
ctx.symlink(ctx.attr.build_file, "BUILD.bazel")
76+
# Repository rule that fetches a dependency archive from a private S3
# bucket, verifies its SHA256, extracts it, and installs a BUILD file.
s3_archive = repository_rule(
    implementation = _s3_archive_impl,
    attrs = {
        # Where the archive lives.
        "bucket": attr.string(mandatory = True, doc = "S3 bucket name"),
        "file_path": attr.string(
            mandatory = True,
            doc = "Relative path to the archive file within the bucket",
        ),
        # Authentication.
        "aws_profile": attr.string(doc = "AWS profile to use for authentication"),
        # Integrity check.
        "sha256": attr.string(mandatory = True, doc = "SHA256 checksum of the archive"),
        # How to expose the unpacked contents to Bazel.
        "build_file": attr.label(
            allow_single_file = True,
            doc = "BUILD file for the unpacked archive",
        ),
        "build_file_contents": attr.string(doc = "The contents of the build file for the target"),
        "strip_prefix": attr.string(doc = "Prefix to strip when archive is unpacked"),
    },
)
3994

95+
def minio_archive_download(
        repo_ctx,
        file_path,
        expected_sha256,
        strip_prefix = "",
        build_file = "",
        build_file_contents = ""):
    """Securely downloads and unpacks an archive from Minio, then places a
    BUILD file inside.

    Args:
      repo_ctx: repository_ctx of the calling repository rule.
      file_path: path of the archive on Minio, including the configured
        alias (the `mc` backend must already be set up locally).
      expected_sha256: hex-encoded SHA256 checksum the archive must match.
      strip_prefix: directory prefix to strip when extracting.
      build_file: label of a BUILD file to symlink into the repo.
      build_file_contents: inline BUILD file text; takes priority over
        `build_file`.
    """
    filename = repo_ctx.path(file_path).basename

    # Download via the Minio client, which must already be configured so
    # that `mc cp` can read the referenced file.
    minio_cli_path = repo_ctx.which("mc")
    if not minio_cli_path:
        # which() returns None when the tool is missing; fail early with a
        # clear message instead of passing None to execute().
        fail("The `mc` (Minio client) CLI was not found on the path; it is " +
             "required to download {}.".format(file_path))
    minio_cli_cmd = [minio_cli_path] + ["cp", "-q", file_path, "."]
    repo_ctx.report_progress("Downloading {}.".format(file_path))
    minio_result = repo_ctx.execute(minio_cli_cmd, timeout = 1800)
    if minio_result.return_code != 0:
        fail("Failed to download {} from Minio: {}".format(file_path, minio_result.stderr))

    validate_checksum(repo_ctx, file_path, filename, expected_sha256)
    extract_archive(repo_ctx, filename, strip_prefix, build_file, build_file_contents)
116+
117+
def _minio_archive_impl(ctx):
    # Thin adapter: forward this rule's attributes to the shared downloader.
    attrs = ctx.attr
    minio_archive_download(
        ctx,
        attrs.file_path,
        attrs.sha256,
        strip_prefix = attrs.strip_prefix,
        build_file = attrs.build_file,
        build_file_contents = attrs.build_file_contents,
    )
126+
127+
# Repository rule that fetches a dependency archive from a locally
# configured Minio backend, verifies its SHA256, extracts it, and
# installs a BUILD file.
minio_archive = repository_rule(
    implementation = _minio_archive_impl,
    attrs = {
        # Where the archive lives.
        "file_path": attr.string(
            mandatory = True,
            doc = "Path to the file on minio. Backend needs to be set up locally for this to work.",
        ),
        # Integrity check.
        "sha256": attr.string(mandatory = True, doc = "SHA256 checksum of the archive"),
        # How to expose the unpacked contents to Bazel.
        "build_file": attr.label(
            allow_single_file = True,
            doc = "BUILD file for the unpacked archive",
        ),
        "build_file_contents": attr.string(doc = "The contents of the build file for the target"),
        "strip_prefix": attr.string(doc = "Prefix to strip when archive is unpacked"),
    },
)

0 commit comments

Comments
 (0)