@@ -11,7 +11,6 @@ ARG PRODUCT
 ARG ASYNC_PROFILER
 ARG JMX_EXPORTER
 ARG PROTOBUF
-ARG HDFS_UTILS
 ARG TARGETARCH
 ARG TARGETOS
 
@@ -59,19 +58,6 @@ RUN microdnf update && \
 
 WORKDIR /stackable
 
-# The Stackable HDFS utils contain an OPA authorizer, group mapper & topology provider.
-# The topology provider provides rack awareness functionality for HDFS by allowing users to specify Kubernetes
-# labels to build a rackID from.
-# Starting with hdfs-utils version 0.3.0 the topology provider is not a standalone jar anymore and included in hdfs-utils.
-
-RUN curl --fail -L "https://github.com/stackabletech/hdfs-utils/archive/refs/tags/v${HDFS_UTILS}.tar.gz" | tar -xzC . && \
-    cd hdfs-utils-${HDFS_UTILS} && \
-    mvn clean package -P hadoop-${PRODUCT} -DskipTests -Dmaven.javadoc.skip=true && \
-    mkdir -p /stackable/hadoop-${PRODUCT}/share/hadoop/common/lib && \
-    cp target/hdfs-utils-$HDFS_UTILS.jar /stackable/hadoop-${PRODUCT}/share/hadoop/common/lib/hdfs-utils-${HDFS_UTILS}.jar && \
-    rm -rf /stackable/hdfs-utils-main && \
-    cd -
-
 COPY hadoop/stackable/patches /stackable/patches
 
 # Hadoop Pipes requires libtirpc to build, whose headers are not packaged in RedHat UBI, so skip building this module
@@ -105,10 +91,30 @@ COPY shared/log4shell_scanner /bin/log4shell_scanner
 RUN /bin/log4shell_scanner s "/stackable/hadoop-${PRODUCT}"
 # ===
 
+FROM stackable/image/java-devel as hdfs-utils-builder
+
+ARG HDFS_UTILS
+ARG PRODUCT
+
+WORKDIR /stackable
+
+# The Stackable HDFS utils contain an OPA authorizer, group mapper & topology provider.
+# The topology provider provides rack awareness functionality for HDFS by allowing users to specify Kubernetes
+# labels to build a rackID from.
+# Starting with hdfs-utils version 0.3.0 the topology provider is not a standalone jar anymore and included in hdfs-utils.
+
+RUN curl --fail -L "https://github.com/stackabletech/hdfs-utils/archive/refs/tags/v${HDFS_UTILS}.tar.gz" | tar -xzC . && \
+    cd hdfs-utils-${HDFS_UTILS} && \
+    mvn clean package -P hadoop-${PRODUCT} -DskipTests -Dmaven.javadoc.skip=true && \
+    mkdir -p /stackable/hadoop-${PRODUCT}/share/hadoop/common/lib && \
+    cp target/hdfs-utils-$HDFS_UTILS.jar /stackable/hadoop-${PRODUCT}/share/hadoop/common/lib/hdfs-utils-${HDFS_UTILS}.jar && \
+    rm -rf /stackable/hdfs-utils-main
+
 FROM stackable/image/java-base as final
 
 ARG PRODUCT
 ARG RELEASE
+ARG HDFS_UTILS
 
 LABEL name="Apache Hadoop" \
       maintainer="info@stackable.tech" \
@@ -143,6 +149,7 @@ WORKDIR /stackable
 COPY --chown=stackable:stackable --from=builder /stackable/hadoop-${PRODUCT} /stackable/hadoop-${PRODUCT}/
 COPY --chown=stackable:stackable --from=builder /stackable/jmx /stackable/jmx/
 COPY --chown=stackable:stackable --from=builder /stackable/async-profiler /stackable/async-profiler/
+COPY --chown=stackable:stackable --from=hdfs-utils-builder /stackable/hadoop-${PRODUCT}/share/hadoop/common/lib/hdfs-utils-${HDFS_UTILS}.jar /stackable/hadoop-${PRODUCT}/share/hadoop/common/lib/hdfs-utils-${HDFS_UTILS}.jar
 RUN ln -s /stackable/hadoop-${PRODUCT} /stackable/hadoop
 
 COPY hadoop/stackable/fuse_dfs_wrapper /stackable/hadoop/bin
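For readers skimming the change, here is a condensed sketch of the resulting stage wiring, assembled only from the lines shown in the diff above; the rest of the Dockerfile (including the Hadoop builder stage itself) is elided, so treat it as an illustration rather than the full build file:

# Dedicated stage: builds only the hdfs-utils jar, so the Maven build and the
# source tree never land in layers that reach the final image.
FROM stackable/image/java-devel as hdfs-utils-builder
ARG HDFS_UTILS
ARG PRODUCT
WORKDIR /stackable
RUN curl --fail -L "https://github.com/stackabletech/hdfs-utils/archive/refs/tags/v${HDFS_UTILS}.tar.gz" | tar -xzC . && \
    cd hdfs-utils-${HDFS_UTILS} && \
    mvn clean package -P hadoop-${PRODUCT} -DskipTests -Dmaven.javadoc.skip=true && \
    mkdir -p /stackable/hadoop-${PRODUCT}/share/hadoop/common/lib && \
    cp target/hdfs-utils-${HDFS_UTILS}.jar /stackable/hadoop-${PRODUCT}/share/hadoop/common/lib/hdfs-utils-${HDFS_UTILS}.jar

# Final image: only the built jar crosses the stage boundary, placed next to
# the Hadoop common libraries copied from the Hadoop builder stage.
FROM stackable/image/java-base as final
ARG PRODUCT
ARG HDFS_UTILS
COPY --chown=stackable:stackable --from=hdfs-utils-builder \
     /stackable/hadoop-${PRODUCT}/share/hadoop/common/lib/hdfs-utils-${HDFS_UTILS}.jar \
     /stackable/hadoop-${PRODUCT}/share/hadoop/common/lib/hdfs-utils-${HDFS_UTILS}.jar

A side effect of splitting the stages is that Docker caches them independently: bumping HDFS_UTILS only rebuilds hdfs-utils-builder, while changes to the Hadoop builder stage no longer rerun the hdfs-utils Maven build, since the stages only share content through the explicit COPY --from.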