@@ -12,6 +12,7 @@ FROM stackable/image/hbase AS hbase-builder
 FROM stackable/image/java-devel AS spark-source-builder
 
 ARG PRODUCT
+ARG STACKABLE_USER_UID
 
 RUN <<EOF
 microdnf update
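
Note for readers of this diff: ARG STACKABLE_USER_UID is declared once per build stage below because Docker scopes build arguments to the stage that declares them (each FROM starts a fresh scope), so every stage that uses the UID must redeclare it. A minimal sketch of that scoping rule, with made-up stage names, not part of this change:

    FROM alpine AS stage-a
    ARG STACKABLE_USER_UID                 # visible only in stage-a
    RUN echo "stage-a sees UID ${STACKABLE_USER_UID}"

    FROM alpine AS stage-b
    ARG STACKABLE_USER_UID                 # redeclared, so stage-b sees it too
    RUN echo "stage-b sees UID ${STACKABLE_USER_UID}"
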
@@ -34,10 +35,10 @@
 
 WORKDIR /stackable/spark
 
-COPY --chown=stackable:stackable \
+COPY --chown=${STACKABLE_USER_UID}:0 \
     spark-k8s/stackable/patches/apply_patches.sh \
     patches/apply_patches.sh
-COPY --chown=stackable:stackable \
+COPY --chown=${STACKABLE_USER_UID}:0 \
     spark-k8s/stackable/patches/${PRODUCT} \
     patches/${PRODUCT}
 
@@ -52,6 +53,7 @@ ARG PRODUCT
 ARG HADOOP
 ARG HBASE
 ARG HBASE_CONNECTOR
+ARG STACKABLE_USER_UID
 
 RUN <<EOF
 microdnf update
@@ -70,7 +72,7 @@ WORKDIR /stackable
 # versions used by Spark. The pom.xml defines child modules which are
 # not required and not copied, therefore mvn must be called with the
 # parameter --non-recursive.
-COPY --chown=stackable:stackable --from=spark-source-builder \
+COPY --chown=${STACKABLE_USER_UID}:0 --from=spark-source-builder \
     /stackable/spark/pom.xml \
     spark/
 
@@ -83,10 +85,10 @@
 
 # Patch the hbase-connectors source code
 WORKDIR /stackable/hbase-connectors
-COPY --chown=stackable:stackable \
+COPY --chown=${STACKABLE_USER_UID}:0 \
     spark-k8s/stackable/hbase-connectors-patches/apply_patches.sh \
     patches/apply_patches.sh
-COPY --chown=stackable:stackable \
+COPY --chown=${STACKABLE_USER_UID}:0 \
     spark-k8s/stackable/hbase-connectors-patches/${HBASE_CONNECTOR} \
     patches/${HBASE_CONNECTOR}
 RUN patches/apply_patches.sh ${HBASE_CONNECTOR}
@@ -173,7 +175,7 @@ ARG TINI
 
 WORKDIR /stackable/spark-${PRODUCT}
 
-COPY --chown=stackable:stackable --from=spark-source-builder \
+COPY --chown=${STACKABLE_USER_UID}:0 --from=spark-source-builder \
     /stackable/spark/ \
     ./
 
@@ -200,35 +202,35 @@ RUN curl -o /usr/bin/tini "https://repo.stackable.tech/repository/packages/tini/
 WORKDIR /stackable/spark-${PRODUCT}/dist/jars
 
 # Copy modules required for s3a://
-COPY --from=hadoop-builder --chown=stackable:stackable \
+COPY --from=hadoop-builder --chown=${STACKABLE_USER_UID}:0 \
     /stackable/hadoop/share/hadoop/tools/lib/hadoop-aws-${HADOOP}.jar \
     /stackable/hadoop/share/hadoop/tools/lib/aws-java-sdk-bundle-${AWS_JAVA_SDK_BUNDLE}.jar \
     ./
 
 # Copy modules required for abfs://
-COPY --from=hadoop-builder --chown=stackable:stackable \
+COPY --from=hadoop-builder --chown=${STACKABLE_USER_UID}:0 \
     /stackable/hadoop/share/hadoop/tools/lib/hadoop-azure-${HADOOP}.jar \
     /stackable/hadoop/share/hadoop/tools/lib/azure-storage-${AZURE_STORAGE}.jar \
     /stackable/hadoop/share/hadoop/tools/lib/azure-keyvault-core-${AZURE_KEYVAULT_CORE}.jar \
     ./
 
 # Copy the HBase connector including required modules
-COPY --from=hbase-connectors-builder --chown=stackable:stackable \
+COPY --from=hbase-connectors-builder --chown=${STACKABLE_USER_UID}:0 \
     /stackable/spark/jars/* \
     ./
 
 # Copy modules required to access HBase
-COPY --from=hbase-builder --chown=stackable:stackable \
+COPY --from=hbase-builder --chown=${STACKABLE_USER_UID}:0 \
     /stackable/hbase/lib/shaded-clients/hbase-shaded-client-byo-hadoop-${HBASE}.jar \
     /stackable/hbase/lib/shaded-clients/hbase-shaded-mapreduce-${HBASE}.jar \
     ./
 # Copy modules required to access HBase if $HBASE == 2.4.x
-COPY --from=hbase-builder --chown=stackable:stackable \
+COPY --from=hbase-builder --chown=${STACKABLE_USER_UID}:0 \
     /stackable/hbase/lib/client-facing-thirdparty/htrace-core4-*-incubating.jar \
     /stackable/hbase/lib/client-facing-thirdparty/slf4j-reload4j-*.jar \
     ./
 # Copy modules required to access HBase if $HBASE == 2.6.x
-COPY --from=hbase-builder --chown=stackable:stackable \
+COPY --from=hbase-builder --chown=${STACKABLE_USER_UID}:0 \
     /stackable/hbase/lib/client-facing-thirdparty/opentelemetry-api-*.jar \
     /stackable/hbase/lib/client-facing-thirdparty/opentelemetry-context-*.jar \
     /stackable/hbase/lib/client-facing-thirdparty/opentelemetry-semconv-*-alpha.jar \
@@ -271,7 +273,7 @@ ARG PRODUCT
 ARG PYTHON
 ARG RELEASE
 ARG JMX_EXPORTER
-
+ARG STACKABLE_USER_UID
 
 LABEL name="Apache Spark" \
     maintainer="info@stackable.tech" \
@@ -306,21 +308,20 @@ ENV PATH=$SPARK_HOME:$PATH:/bin:$JAVA_HOME/bin:$JAVA_HOME/jre/bin:$HOME/.local/b
 ENV PYSPARK_PYTHON=/usr/bin/python
 ENV PYTHONPATH=$SPARK_HOME/python
 
-COPY --chown=stackable:stackable --from=spark-builder /stackable/spark-${PRODUCT}/dist /stackable/spark
-COPY --chown=stackable:stackable --from=spark-builder /stackable/spark-${PRODUCT}/assembly/target/bom.json /stackable/spark/spark-${PRODUCT}.cdx.json
-COPY --chown=stackable:stackable --from=spark-builder /stackable/jmx /stackable/jmx
+COPY --chown=${STACKABLE_USER_UID}:0 --from=spark-builder /stackable/spark-${PRODUCT}/dist /stackable/spark
+COPY --chown=${STACKABLE_USER_UID}:0 --from=spark-builder /stackable/spark-${PRODUCT}/assembly/target/bom.json /stackable/spark/spark-${PRODUCT}.cdx.json
+COPY --chown=${STACKABLE_USER_UID}:0 --from=spark-builder /stackable/jmx /stackable/jmx
 COPY --from=spark-builder /usr/bin/tini /usr/bin/tini
 
 RUN ln -s "/stackable/jmx/jmx_prometheus_javaagent-${JMX_EXPORTER}.jar" /stackable/jmx/jmx_prometheus_javaagent.jar \
     # Symlink example jar, so that we can easily use it in tests
     && ln -s /stackable/spark/examples/jars/spark-examples_*.jar /stackable/spark/examples/jars/spark-examples.jar
 
-USER stackable
+USER ${STACKABLE_USER_UID}
 WORKDIR /stackable
 
 COPY spark-k8s/stackable /stackable
 COPY spark-k8s/licenses /licenses
 
-
 WORKDIR /stackable/spark
 ENTRYPOINT [ "/stackable/run-spark.sh" ]
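
Background on the ownership pattern, since it recurs throughout: copying with --chown=${STACKABLE_USER_UID}:0 (group root) is the common convention for images that must run under an arbitrary unprivileged UID, as on OpenShift, where the runtime UID is assigned by the platform and only membership in group 0 is guaranteed, so group-0 ownership keeps the files accessible whatever UID the container gets. Likewise, USER with a numeric UID instead of the name stackable lets Kubernetes enforce runAsNonRoot, a check that fails for non-numeric users. A sketch of how the new build argument might be passed in a plain docker build, assuming a UID of 1000 and the Dockerfile living at spark-k8s/Dockerfile; the real images go through Stackable's image build tooling, which resolves the stackable/image/* base stages and supplies the remaining build arguments (PRODUCT, HADOOP, HBASE, ...):

    docker build \
      --build-arg STACKABLE_USER_UID=1000 \
      --file spark-k8s/Dockerfile \
      .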