# Build-time arguments consumed by this builder stage.
ARG JMX_EXPORTER
# TARGETARCH is populated automatically by BuildKit for multi-arch builds.
ARG TARGETARCH
ARG TINI
# Numeric UID that owns /stackable content; lets the final image run as an
# arbitrary non-root user (Kubernetes runAsNonRoot) while keeping group 0 access.
ARG STACKABLE_USER_UID

WORKDIR /stackable/spark-${PRODUCT}
summary="The Stackable image for Apache Spark with PySpark support." \
description="This image is deployed by the Stackable Operator for Apache Spark on Kubernetes."

ENV HOME=/stackable
ENV SPARK_HOME=/stackable/spark
ENV PATH=$SPARK_HOME:$PATH:/bin:$JAVA_HOME/bin:$JAVA_HOME/jre/bin:$HOME/.local/bin
ENV PYSPARK_PYTHON=/usr/bin/python
ENV PYTHONPATH=$SPARK_HOME/python

COPY spark-k8s/stackable /stackable
COPY spark-k8s/licenses /licenses

# Artifacts from the builder stage, owned by the stackable user with group root (0)
# so the chown/chmod pass below stays cheap.
COPY --chown=${STACKABLE_USER_UID}:0 --from=spark-builder /stackable/spark-${PRODUCT}/dist /stackable/spark
COPY --chown=${STACKABLE_USER_UID}:0 --from=spark-builder /stackable/spark-${PRODUCT}/assembly/target/bom.json /stackable/spark/spark-${PRODUCT}.cdx.json
COPY --chown=${STACKABLE_USER_UID}:0 --from=spark-builder /stackable/jmx /stackable/jmx
COPY --from=spark-builder /usr/bin/tini /usr/bin/tini

RUN <<EOF
# Heredoc lines are NOT implicitly &&-chained like the previous "cmd && \"
# form was: without `set -e` a failing microdnf/ln/chown would be silently
# ignored and the build would still succeed. Fail fast instead.
set -e

microdnf update
# procps: required for spark startup scripts
# java-*-openjdk-devel: This is needed by the Spark UI to display process information using jps and jmap
# Copying just the binaries from the builder stage failed.
microdnf install \
  gzip \
  hostname \
  procps \
  "python${PYTHON}" \
  "python${PYTHON}-pip" \
  zip \
  "java-${JAVA_VERSION}-openjdk-devel"
microdnf clean all
rm -rf /var/cache/yum

# Make the versioned interpreter the default `python`/`pip`
# (matches ENV PYSPARK_PYTHON=/usr/bin/python above).
ln -s /usr/bin/python${PYTHON} /usr/bin/python
ln -s /usr/bin/pip-${PYTHON} /usr/bin/pip

# Version-independent agent path so configuration does not need to know the
# exporter version.
ln -s "/stackable/jmx/jmx_prometheus_javaagent-${JMX_EXPORTER}.jar" /stackable/jmx/jmx_prometheus_javaagent.jar
# Symlink example jar, so that we can easily use it in tests
ln -s /stackable/spark/examples/jars/spark-examples_*.jar /stackable/spark/examples/jars/spark-examples.jar

# All files and folders owned by root to support running as arbitrary users
# This is best practice as all container users will belong to the root group (0)
chown -R ${STACKABLE_USER_UID}:0 /stackable
chmod -R g=u /stackable
EOF

# Drop root for the runtime container; numeric UID so runAsNonRoot can verify it.
USER ${STACKABLE_USER_UID}

WORKDIR /stackable/spark
ENTRYPOINT [ "/stackable/run-spark.sh" ]
0 commit comments