
Commit 093e79c (parent: cbb1255)

chore: Generated commit to update templated files since the last template run up to stackabletech/operator-templating@1789cc2

Reference-to: stackabletech/operator-templating@1789cc2 (Change UID of docker user)

File tree: 5 files changed, 107 additions and 31 deletions

.github/actionlint.yaml

Lines changed: 5 additions & 0 deletions

@@ -0,0 +1,5 @@
+---
+self-hosted-runner:
+  # Ubicloud machines we are using
+  labels:
+    - ubicloud-standard-8-arm
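
This new file configures actionlint: it declares the custom Ubicloud runner label so that "runs-on: ubicloud-standard-8-arm" in the workflows is not reported as an unknown runner. As a rough illustration (the exact invocation is up to the reader), actionlint picks this configuration up automatically when run from the repository root:

# Illustrative only: actionlint reads .github/actionlint.yaml on its own.
actionlint .github/workflows/build.yml .github/workflows/pr_pre-commit.yaml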

.github/workflows/build.yml

Lines changed: 9 additions & 9 deletions

@@ -88,18 +88,18 @@ jobs:
           TRIGGER: ${{ github.event_name }}
           GITHUB_REF: ${{ github.ref }}
         run: |
-          if [[ $TRIGGER == "pull_request" ]]; then
+          if [[ "$TRIGGER" == "pull_request" ]]; then
             echo "exporting test as target helm repo: ${{ env.TEST_REPO_HELM_URL }}"
-            echo "helm_repo=${{ env.TEST_REPO_HELM_URL }}" >> $GITHUB_OUTPUT
-          elif [[ ( $TRIGGER == "push" || $TRIGGER == "schedule" || $TRIGGER == "workflow_dispatch" ) && $GITHUB_REF == "refs/heads/main" ]]; then
+            echo "helm_repo=${{ env.TEST_REPO_HELM_URL }}" >> "$GITHUB_OUTPUT"
+          elif [[ ( "$TRIGGER" == "push" || "$TRIGGER" == "schedule" || "$TRIGGER" == "workflow_dispatch" ) && "$GITHUB_REF" == "refs/heads/main" ]]; then
             echo "exporting dev as target helm repo: ${{ env.DEV_REPO_HELM_URL }}"
-            echo "helm_repo=${{ env.DEV_REPO_HELM_URL }}" >> $GITHUB_OUTPUT
-          elif [[ $TRIGGER == "push" && $GITHUB_REF == refs/tags/* ]]; then
+            echo "helm_repo=${{ env.DEV_REPO_HELM_URL }}" >> "$GITHUB_OUTPUT"
+          elif [[ "$TRIGGER" == "push" && $GITHUB_REF == refs/tags/* ]]; then
             echo "exporting stable as target helm repo: ${{ env.STABLE_REPO_HELM_URL }}"
-            echo "helm_repo=${{ env.STABLE_REPO_HELM_URL }}" >> $GITHUB_OUTPUT
+            echo "helm_repo=${{ env.STABLE_REPO_HELM_URL }}" >> "$GITHUB_OUTPUT"
           else
             echo "Unknown trigger and ref combination encountered, skipping publish step: $TRIGGER $GITHUB_REF"
-            echo "helm_repo=skip" >> $GITHUB_OUTPUT
+            echo "helm_repo=skip" >> "$GITHUB_OUTPUT"
           fi

   run_cargodeny:
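
These are shellcheck-style quoting fixes: quoting $GITHUB_OUTPUT in the redirects avoids an ambiguous-redirect failure should that path ever contain whitespace, and the variables compared inside [[ ]] are quoted so their values are taken literally. The pattern test against refs/tags/* keeps its right-hand side unquoted on purpose, because bash only performs glob matching on an unquoted pattern. A minimal sketch of that distinction (the value of GITHUB_REF is made up for illustration):

#!/usr/bin/env bash
# Hypothetical value, only to demonstrate [[ ]] pattern matching.
GITHUB_REF="refs/tags/0.1.0"

if [[ "$GITHUB_REF" == refs/tags/* ]]; then
  echo "unquoted right-hand side: matched as a glob pattern"   # this branch runs
fi

if [[ "$GITHUB_REF" == "refs/tags/*" ]]; then
  echo "quoted right-hand side: only matches the literal string refs/tags/*"   # this does not run
fi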
@@ -379,7 +379,7 @@ jobs:
       - id: printtag
         name: Output image name and tag
         if: ${{ !github.event.pull_request.head.repo.fork }}
-        run: echo "IMAGE_TAG=$(make -e print-docker-tag)" >> $GITHUB_OUTPUT
+        run: echo "IMAGE_TAG=$(make -e print-docker-tag)" >> "$GITHUB_OUTPUT"

   create_manifest_list:
     name: Build and publish manifest list

@@ -437,4 +437,4 @@ jobs:
           ARCH_FOR_PREFLIGHT="$(arch | sed -e 's#x86_64#amd64#' | sed -e 's#aarch64#arm64#')"
           ./preflight-linux-amd64 check container "$IMAGE_TAG" --platform "${ARCH_FOR_PREFLIGHT}" > preflight.out
       - name: "Passed?"
-        run: '[ "$(cat preflight.out | jq -r .passed)" == true ]'
+        run: '[ "$(jq -r .passed < preflight.out)" == true ]'

.github/workflows/pr_pre-commit.yaml

Lines changed: 2 additions & 0 deletions

@@ -16,6 +16,7 @@ jobs:
       - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
         with:
           fetch-depth: 0
+          submodules: recursive
      - uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3 # v5.2.0
         with:
           python-version: '3.12'

@@ -39,6 +40,7 @@ jobs:
           chmod 700 "${LOCATION_BIN}"

           echo "$LOCATION_DIR" >> "$GITHUB_PATH"
+      - uses: cachix/install-nix-action@8887e596b4ee1134dae06b98d573bd674693f47c # v26
       - uses: pre-commit/action@2c7b3805fd2a0fd8c1884dcaebf91fc102a13ecd # v3.0.1
         with:
           extra_args: "--from-ref ${{ github.event.pull_request.base.sha }} --to-ref ${{ github.event.pull_request.head.sha }}"

.pre-commit-config.yaml

Lines changed: 2 additions & 2 deletions

@@ -66,13 +66,13 @@ repos:
       - id: regenerate-charts
         name: regenerate-charts
         language: system
-        entry: make regenerate-charts
+        entry: nix-shell --run 'make regenerate-charts'
         stages: [commit, merge-commit, manual]
         pass_filenames: false

       - id: cargo-test
         name: cargo-test
         language: system
-        entry: cargo test
+        entry: nix-shell --run 'cargo test'
         stages: [commit, merge-commit, manual]
         pass_filenames: false
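
Wrapping the hook entries in nix-shell means the chart-regeneration and cargo-test hooks now resolve their toolchain from the repository's Nix shell rather than from whatever happens to be on the PATH. Assuming a shell.nix at the repository root (not shown in this commit), running one of the wrapped hooks by hand would look roughly like this:

# What pre-commit now executes for the regenerate-charts hook:
nix-shell --run 'make regenerate-charts'

# Or drive it through pre-commit itself, including the manual-stage hooks:
pre-commit run regenerate-charts --hook-stage manual --all-files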

docker/Dockerfile

Lines changed: 89 additions & 20 deletions

@@ -1,40 +1,109 @@
+# syntax=docker/dockerfile:1.10.0@sha256:865e5dd094beca432e8c0a1d5e1c465db5f998dca4e439981029b3b81fb39ed5
+# NOTE: The syntax directive needs to be the first line in a Dockerfile
+
 # =============
 # This file is automatically generated from the templates in stackabletech/operator-templating
 # DON'T MANUALLY EDIT THIS FILE
 # =============
-FROM oci.stackable.tech/sdp/ubi9-rust-builder AS builder
 
-FROM registry.access.redhat.com/ubi9/ubi-minimal AS operator
+# https://docs.docker.com/build/checks/#fail-build-on-check-violations
+# check=error=true
+
+# We want to automatically use the latest. We also don't tag our images with a version.
+# hadolint ignore=DL3007
+FROM oci.stackable.tech/sdp/ubi9-rust-builder:latest AS builder
+
+
+# We want to automatically use the latest.
+# hadolint ignore=DL3007
+FROM registry.access.redhat.com/ubi9/ubi-minimal:latest AS operator
 
 ARG VERSION
 ARG RELEASE="1"
 
-LABEL name="Stackable Operator for Apache HDFS" \
-    maintainer="info@stackable.tech" \
-    vendor="Stackable GmbH" \
-    version="${VERSION}" \
-    release="${RELEASE}" \
-    summary="Deploy and manage Apache HDFS clusters." \
-    description="Deploy and manage Apache HDFS clusters."
+# These are chosen at random and are this high on purpose to have very little chance to clash with an existing user or group on the host system
+ARG STACKABLE_USER_GID="574654813"
+ARG STACKABLE_USER_UID="782252253"
+
+# These labels have mostly been superceded by the OpenContainer spec annotations below but it doesn't hurt to include them
+# http://label-schema.org/rc1/
+LABEL name="Stackable Operator for Apache HDFS"
+LABEL maintainer="info@stackable.tech"
+LABEL vendor="Stackable GmbH"
+LABEL version="${VERSION}"
+LABEL release="${RELEASE}"
+LABEL summary="Deploy and manage Apache HDFS clusters."
+LABEL description="Deploy and manage Apache HDFS clusters."
+
+# Overwriting/Pinning UBI labels
+# https://github.com/projectatomic/ContainerApplicationGenericLabels
+LABEL vcs-ref=""
+LABEL distribution-scope="public"
+LABEL url="https://stackable.tech"
+ARG TARGETARCH
+LABEL architecture="${TARGETARCH}"
+LABEL com.redhat.component=""
+# It complains about it being an invalid label but RedHat uses it and we want to override it and it works....
+# hadolint ignore=DL3048
+LABEL com.redhat.license_terms=""
+LABEL io.buildah.version=""
+LABEL io.openshift.expose-services=""
 
+# https://github.com/opencontainers/image-spec/blob/036563a4a268d7c08b51a08f05a02a0fe74c7268/annotations.md#annotations
+LABEL org.opencontainers.image.authors="info@stackable.tech"
+LABEL org.opencontainers.image.url="https://stackable.tech"
+LABEL org.opencontainers.image.vendor="Stackable GmbH"
+LABEL org.opencontainers.image.licenses="OSL-3.0"
+LABEL org.opencontainers.image.documentation="https://docs.stackable.tech/home/stable/hdfs/"
+LABEL org.opencontainers.image.version="${VERSION}"
+LABEL org.opencontainers.image.revision="${RELEASE}"
+LABEL org.opencontainers.image.title="Stackable Operator for Apache HDFS"
+LABEL org.opencontainers.image.description="Deploy and manage Apache HDFS clusters."
+
+# https://docs.openshift.com/container-platform/4.16/openshift_images/create-images.html#defining-image-metadata
+# https://github.com/projectatomic/ContainerApplicationGenericLabels/blob/master/vendor/redhat/labels.md
+LABEL io.openshift.tags="ubi9,stackable,sdp,hdfs"
+LABEL io.k8s.description="Deploy and manage Apache HDFS clusters."
+LABEL io.k8s.display-name="Stackable Operator for Apache HDFS"
+
+RUN <<EOF
 # Update image and install kerberos client libraries
 # install_weak_deps in microdnf does not support the literal "False" as dnf does
 # https://github.com/rpm-software-management/microdnf/blob/a600c62f29262d71a6259b70dc220df65a2ab9b5/dnf/dnf-main.c#L176-L189
-RUN microdnf update -y --setopt=install_weak_deps=0 \
-    && microdnf install -y --setopt=install_weak_deps=0 \
-    krb5-libs \
-    libkadm5 \
-    && microdnf clean all \
-    && rm -rf /var/cache/yum
+microdnf update
+# NOTE (@NickLarsenNZ): Maybe we should consider pinning package versions?
+# hadolint ignore=DL3041
+microdnf install -y \
+  krb5-libs \
+  libkadm5 \
+  shadow-utils
+
+groupadd --gid ${STACKABLE_USER_GID} --system ${STACKABLE_USER_NAME}
+# The --no-log-init is required to work around a bug/problem in Go/Docker when very large UIDs are used
+# See https://github.com/moby/moby/issues/5419#issuecomment-41478290 for more context
+# Making this a system user prevents a mail dir from being created, expiry of passwords etc. but it will warn:
+# useradd warning: stackable's uid 1000 is greater than SYS_UID_MAX 999
+# We can safely ignore this warning, to get rid of the warning we could change /etc/login.defs but that does not seem worth it
+# We'll leave the home directory hardcoded to /stackable because I don't want to deal with which chars might be valid and which might not in user name vs. directory
+useradd \
+  --no-log-init \
+  --gid ${STACKABLE_USER_GID} \
+  --uid ${STACKABLE_USER_UID} \
+  --system \
+  --create-home \
+  --home-dir /stackable \
+  stackable
+microdnf remove shadow-utils
+microdnf clean all
+rm -rf /var/cache/yum
+EOF
 
 COPY LICENSE /licenses/LICENSE
 
 COPY --from=builder /app/* /usr/local/bin/
-COPY deploy/config-spec/properties.yaml /etc/stackable/hdfs-operator/config-spec/properties.yaml
-
-RUN groupadd -g 1000 stackable && adduser -u 1000 -g stackable -c 'Stackable Operator' stackable
-
-USER stackable:stackable
+# COPY deploy/config-spec/properties.yaml /etc/stackable/hdfs-operator/config-spec/properties.yaml
+#
+USER ${STACKABLE_USER_UID}
 
 ENTRYPOINT ["stackable-hdfs-operator"]
 CMD ["run"]
