forked from kube-reporting/hadoop
-
Notifications
You must be signed in to change notification settings - Fork 0
/
Dockerfile.rhel
85 lines (69 loc) · 3.21 KB
/
Dockerfile.rhel
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
FROM registry.ci.openshift.org/openshift/release:rhel-7-release-openshift-4.8 AS build
# This FROM is RHEL7 based because the CI based hadoop build requires a precise
# version of protobuf (2.5.0) which is unavailable on RHEL8. Downstream
# production builds use RHEL8 for this builder image since protobuf 2.5.0 is not
# required and ARM builds require RHEL8.
# OPENSHIFT_CI must be declared with ARG to be visible in this stage; without
# this declaration the $OPENSHIFT_CI reference below always expanded to an
# empty string, regardless of any --build-arg passed to the build.
ARG OPENSHIFT_CI
# COPY creates /build if it does not exist, so no explicit mkdir is needed.
COPY . /build
WORKDIR /build
COPY opt_maven_install.sh /tmp/
RUN chmod u+x /tmp/opt_maven_install.sh && /tmp/opt_maven_install.sh $OPENSHIFT_CI
FROM registry.ci.openshift.org/ocp/builder:rhel-8-base-openshift-4.8
# Runtime dependencies for HDFS plus operational tooling (jq/faq for JSON
# config templating, tini as a minimal init). Packages are listed one per
# line, alphabetically, for diffability; skip_missing_names_on_install=False
# makes the build fail loudly if any package disappears from the repos.
# The yum cache and /tmp are cleared in the same layer so the cleanup
# actually shrinks the image.
RUN set -x; yum install --setopt=skip_missing_names_on_install=False -y \
      bind-utils \
      curl \
      faq \
      java-1.8.0-openjdk \
      java-1.8.0-openjdk-devel \
      jq \
      less \
      net-tools \
      openssl \
      procps \
      rsync \
      tini \
      which \
    && yum clean all \
    && rm -rf /tmp/* /var/tmp/*
# Runtime environment. JAVA_HOME points at the alternatives-managed JRE
# installed by the openjdk package above. HADOOP_HOME must be set before the
# variables that reference it, since each ENV line only sees values from
# earlier instructions.
ENV JAVA_HOME=/etc/alternatives/jre
# key=value form: the legacy space-separated `ENV key value` syntax is
# deprecated (build-check LegacyKeyValueFormat).
ENV HADOOP_VERSION=3.1.1
ENV HADOOP_HOME=/opt/hadoop
ENV HADOOP_LOG_DIR=$HADOOP_HOME/logs
# Adds the tools jars (cloud connectors, etc.) to Hadoop's classpath.
ENV HADOOP_CLASSPATH=$HADOOP_HOME/share/hadoop/tools/lib/*
ENV HADOOP_CONF_DIR=/etc/hadoop
ENV PROMETHEUS_JMX_EXPORTER=/opt/jmx_exporter/jmx_exporter.jar
ENV PATH=$HADOOP_HOME/bin:$PATH
# Pull only the built artifacts out of the heavy build stage: the Hadoop
# distribution tree, the Prometheus JMX exporter agent, and the GCS connector
# (renamed to a version-less filename so config can reference it stably).
COPY --from=build /build/hadoop-dist/target/hadoop-$HADOOP_VERSION $HADOOP_HOME
COPY --from=build /build/jmx_prometheus_javaagent.jar $PROMETHEUS_JMX_EXPORTER
COPY --from=build /build/gcs-connector-hadoop3-2.0.0-RC2-shaded.jar $HADOOP_HOME/share/hadoop/tools/lib/gcs-connector-hadoop3-shaded.jar
WORKDIR $HADOOP_HOME
# Remove docs, source jars, jdiff reports, examples and test jars that are
# not needed at runtime, all in the single layer that would otherwise carry
# them. The find pattern is quoted so the shell cannot glob-expand it against
# the current directory (WORKDIR is $HADOOP_HOME, which contains jars), and
# `-delete` replaces `xargs rm -rf`, which mis-handles whitespace in paths
# and invoked rm even when nothing matched.
RUN rm -rf ${HADOOP_HOME}/share/doc \
    && for dir in common hdfs mapreduce tools yarn; do \
         rm -rf ${HADOOP_HOME}/share/hadoop/${dir}/sources; \
       done \
    && rm -rf ${HADOOP_HOME}/share/hadoop/common/jdiff \
    && rm -rf ${HADOOP_HOME}/share/hadoop/mapreduce/lib-examples \
    && rm -rf ${HADOOP_HOME}/share/hadoop/yarn/test \
    && find ${HADOOP_HOME}/share/hadoop -name '*test*.jar' -delete
# Expose the stock Hadoop configuration at the conventional /etc/hadoop path
# (HADOOP_CONF_DIR) via a symlink, and pre-create the log directory so it
# exists before the build drops root privileges.
RUN ln -s $HADOOP_HOME/etc/hadoop $HADOOP_CONF_DIR \
    && mkdir -p $HADOOP_LOG_DIR
# Debug artifact
# NOTE(review): build-time diagnostic only — prints the locations of guava
# jars into the build log. It adds an (empty) image layer and should be
# removed once the guava-version question it was added for is settled.
RUN find /opt/ -name '*guava-*.jar'
# https://docs.oracle.com/javase/7/docs/technotes/guides/net/properties.html
# Java caches dns results forever, don't cache dns results forever: in
# Kubernetes, pod/service IPs change and a forever-cached lookup goes stale.
# Delete any existing ttl settings, then append ttl=0 for both positive and
# negative lookups. This is one atomic edit of java.security, so it is done
# in a single layer instead of the previous four.
RUN sed -i '/networkaddress.cache.ttl/d;/networkaddress.cache.negative.ttl/d' $JAVA_HOME/lib/security/java.security \
    && echo 'networkaddress.cache.ttl=0' >> $JAVA_HOME/lib/security/java.security \
    && echo 'networkaddress.cache.negative.ttl=0' >> $JAVA_HOME/lib/security/java.security
# imagebuilder expects the directory to be created before VOLUME
# (content written to a path after its VOLUME declaration would be discarded).
RUN mkdir -p /hadoop/dfs/data /hadoop/dfs/name
# to allow running as non-root
# UID 1002 with group 0 follows the OpenShift convention of group-0 ownership
# so arbitrary assigned UIDs (which always belong to the root group) can
# read/write these paths, including the CA trust store.
# NOTE(review): chmod -R 774 also marks regular files executable — presumably
# intentional for the Hadoop bin/ scripts; confirm it is wanted for config
# files and cacerts as well.
RUN chown -R 1002:0 $HADOOP_HOME /hadoop $HADOOP_CONF_DIR $JAVA_HOME/lib/security/cacerts && \
chmod -R 774 $HADOOP_HOME /hadoop $HADOOP_CONF_DIR $JAVA_HOME/lib/security/cacerts
VOLUME /hadoop/dfs/data /hadoop/dfs/name
USER 1002
# Image metadata. maintainer is set via LABEL (MAINTAINER is deprecated).
# The description previously read "to to install" — duplicated word removed.
LABEL io.k8s.display-name="OpenShift Hadoop" \
      io.k8s.description="This is an image used by operator-metering to install and run HDFS." \
      io.openshift.tags="openshift" \
      maintainer="<metering-team@redhat.com>"