|
# --- Build stage: compile the Hadoop distribution from source ---
FROM centos:7 AS build

RUN yum -y update && yum clean all

# Software Collections repo — provides a newer maven (rh-maven33)
RUN yum -y install --setopt=skip_missing_names_on_install=False centos-release-scl
# EPEL repo — provides cmake3
RUN yum -y install --setopt=skip_missing_names_on_install=False epel-release

# JDK, maven and the native-build toolchain Hadoop needs (-Pnative uses
# cmake/gcc/protobuf and the *-devel headers). Clean the yum cache in the
# same layer so it never persists into an image layer.
RUN yum -y install --setopt=skip_missing_names_on_install=False \
      java-1.8.0-openjdk-devel \
      java-1.8.0-openjdk \
      rh-maven33 \
      protobuf protobuf-compiler \
      patch \
      git \
      lzo-devel zlib-devel gcc gcc-c++ make autoconf automake libtool openssl-devel fuse-devel \
      cmake3 \
    && yum clean all \
    && rm -rf /var/cache/yum

# The Hadoop native build invokes plain `cmake`; EPEL ships it as cmake3.
RUN ln -s /usr/bin/cmake3 /usr/bin/cmake
| 23 | + |
# WORKDIR creates /build and makes the maven invocations below run there,
# replacing the previous `mkdir /build` + `cd /build &&` inside RUN.
WORKDIR /build

# Sources are copied one tree at a time so that a change in one subproject
# does not invalidate the cached layers of the others.
COPY .git /build/.git
COPY hadoop-yarn-project /build/hadoop-yarn-project
COPY hadoop-assemblies /build/hadoop-assemblies
COPY hadoop-project /build/hadoop-project
COPY hadoop-common-project /build/hadoop-common-project
COPY hadoop-cloud-storage-project /build/hadoop-cloud-storage-project
COPY hadoop-project-dist /build/hadoop-project-dist
COPY hadoop-maven-plugins /build/hadoop-maven-plugins
COPY hadoop-dist /build/hadoop-dist
COPY hadoop-minicluster /build/hadoop-minicluster
COPY hadoop-mapreduce-project /build/hadoop-mapreduce-project
COPY hadoop-tools /build/hadoop-tools
COPY hadoop-hdfs-project /build/hadoop-hdfs-project
COPY hadoop-client-modules /build/hadoop-client-modules
COPY hadoop-build-tools /build/hadoop-build-tools
COPY dev-support /build/dev-support
COPY pom.xml /build/pom.xml
COPY LICENSE.txt /build/LICENSE.txt
COPY BUILDING.txt /build/BUILDING.txt
COPY NOTICE.txt /build/NOTICE.txt
COPY README.txt /build/README.txt

ENV CMAKE_C_COMPILER=gcc CMAKE_CXX_COMPILER=g++

# Build the full Hadoop distribution tarball with native libraries.
# scl enable puts rh-maven33's mvn on PATH for the quoted command.
RUN scl enable rh-maven33 'mvn -B -e -Dtest=false -DskipTests -Dmaven.javadoc.skip=true clean package -Pdist,native -Dtar'
# Fetch the Prometheus JMX exporter java agent consumed by the runtime stage.
RUN scl enable rh-maven33 'mvn dependency:get -Dartifact=io.prometheus.jmx:jmx_prometheus_javaagent:0.3.1:jar -Ddest=/build/jmx_prometheus_javaagent.jar'
| 53 | + |
# --- Runtime stage: UBI7 base with JRE, ops utilities, and the Hadoop dist ---
FROM registry.access.redhat.com/ubi7/ubi

# our copy of faq and jq
COPY faq.repo /etc/yum.repos.d/ecnahc515-faq-epel-7.repo

# epel-release is installed by the first `yum install` so the second one can
# resolve jq/faq; cache is cleaned in the same layer to keep the image small.
RUN yum install --setopt=skip_missing_names_on_install=False -y \
      epel-release \
    && yum install --setopt=skip_missing_names_on_install=False -y \
      java-1.8.0-openjdk \
      java-1.8.0-openjdk-devel \
      curl \
      less \
      procps \
      net-tools \
      bind-utils \
      which \
      jq \
      rsync \
      openssl \
      faq \
    && yum clean all \
    && rm -rf /tmp/* /var/tmp/*

# /etc/alternatives/jre tracks the installed OpenJDK regardless of its
# exact versioned path.
ENV JAVA_HOME=/etc/alternatives/jre

# Must match the version the build stage produces under hadoop-dist/target.
ENV HADOOP_VERSION=3.1.1

ENV HADOOP_HOME=/opt/hadoop
ENV HADOOP_LOG_DIR=$HADOOP_HOME/logs
ENV HADOOP_CLASSPATH=$HADOOP_HOME/share/hadoop/tools/lib/*
ENV HADOOP_CONF_DIR=/etc/hadoop
ENV PROMETHEUS_JMX_EXPORTER=/opt/jmx_exporter/jmx_exporter.jar
ENV PATH=$HADOOP_HOME/bin:$PATH
| 87 | + |
COPY --from=build /build/hadoop-dist/target/hadoop-$HADOOP_VERSION $HADOOP_HOME
COPY --from=build /build/jmx_prometheus_javaagent.jar $PROMETHEUS_JMX_EXPORTER
WORKDIR $HADOOP_HOME

# Trim docs, sources, jdiff and test artifacts from the dist to shrink the
# image. The -name pattern is quoted so the shell cannot glob-expand it
# before find runs, and `xargs -r` skips rm when find matches nothing.
RUN rm -rf ${HADOOP_HOME}/share/doc \
    && for dir in common hdfs mapreduce tools yarn; do \
         rm -rf ${HADOOP_HOME}/share/hadoop/${dir}/sources; \
       done \
    && rm -rf ${HADOOP_HOME}/share/hadoop/common/jdiff \
    && rm -rf ${HADOOP_HOME}/share/hadoop/mapreduce/lib-examples \
    && rm -rf ${HADOOP_HOME}/share/hadoop/yarn/test \
    && find ${HADOOP_HOME}/share/hadoop -name '*test*.jar' | xargs -r rm -rf

# Expose the shipped default configuration at the conventional /etc/hadoop path.
RUN ln -s $HADOOP_HOME/etc/hadoop $HADOOP_CONF_DIR
RUN mkdir -p $HADOOP_LOG_DIR

# https://docs.oracle.com/javase/7/docs/technotes/guides/net/properties.html
# Java caches DNS results forever by default; strip any existing ttl settings
# (the first pattern also matches the negative.ttl line) and set both to 0 so
# lookups are re-resolved — done in one layer since it is one logical change.
RUN sed -i '/networkaddress.cache.ttl/d' $JAVA_HOME/lib/security/java.security \
    && sed -i '/networkaddress.cache.negative.ttl/d' $JAVA_HOME/lib/security/java.security \
    && echo 'networkaddress.cache.ttl=0' >> $JAVA_HOME/lib/security/java.security \
    && echo 'networkaddress.cache.negative.ttl=0' >> $JAVA_HOME/lib/security/java.security
| 111 | + |
# Dedicated hadoop user; UID 1002 matches the chown below and USER directive.
RUN useradd hadoop -m -u 1002 -d $HADOOP_HOME

# imagebuilder expects the directory to be created before VOLUME
RUN mkdir -p /hadoop/dfs/data /hadoop/dfs/name
# Group 0 ownership + group-writable permissions let the container run with
# an arbitrary non-root UID (OpenShift restricted SCC). /etc/passwd is
# presumably made writable so an entrypoint can add an entry for that
# runtime UID — NOTE(review): confirm something actually writes it.
RUN chown -R 1002:0 $HADOOP_HOME /hadoop $HADOOP_CONF_DIR && \
    chmod -R 774 $HADOOP_HOME /hadoop $HADOOP_CONF_DIR /etc/passwd

VOLUME /hadoop/dfs/data /hadoop/dfs/name

# Use the numeric UID so runtimes enforcing runAsNonRoot can verify it.
USER 1002

LABEL io.k8s.display-name="OpenShift Hadoop" \
      io.k8s.description="This is an image used by operator-metering to install and run HDFS." \
      io.openshift.tags="openshift" \
      maintainer="Chance Zibolski < [email protected]>"
| 128 | + |
0 commit comments