# Image for running HiBench Spark benchmarks on Kubernetes:
# builds HiBench's sparkbench suite, installs Hadoop 2.7.4 client tools
# (HDFS CLI used by HiBench data-prep), and Intel PCM for perf counters.
FROM pl4tinum/spark-kube:3.0
USER root

# Base tooling. /usr/share/man/man1 must exist first: the JDK package's
# postinst fails on slim images that ship without it.
# apt-get (not apt) is used throughout: apt's CLI is not script-stable.
RUN apt-get -y update \
 && mkdir -p /usr/share/man/man1 \
 && apt-get -y install git bc wget vim maven scala python2.7 \
        build-essential software-properties-common

# AdoptOpenJDK 8 (HotSpot), required to build HiBench with Maven.
# -y added on install: without it a non-interactive build hangs at the prompt.
RUN wget -qO - https://adoptopenjdk.jfrog.io/adoptopenjdk/api/gpg/key/public | apt-key add - \
 && add-apt-repository --yes https://adoptopenjdk.jfrog.io/adoptopenjdk/deb/ \
 && apt-get update \
 && apt-get -y install adoptopenjdk-8-hotspot

# Build HiBench's Spark benchmarks against Spark 3.0 / Hadoop 2.7 / Scala 2.12.
WORKDIR /
RUN git clone https://github.com/Intel-bigdata/HiBench.git
WORKDIR /HiBench
ENV JAVA_HOME=/usr/lib/jvm/adoptopenjdk-8-hotspot-amd64
RUN mvn -Psparkbench -Dspark=3.0 -Dhadoop=2.7 -Dscala=2.12 clean package
ENV HIBENCH_HOME=/HiBench

# Hadoop 2.7.4 client install; tarball removed after extraction to keep
# the layer small.
WORKDIR /usr/local
RUN wget https://archive.apache.org/dist/hadoop/common/hadoop-2.7.4/hadoop-2.7.4.tar.gz \
 && tar xzf hadoop-2.7.4.tar.gz \
 && mv hadoop-2.7.4 hadoop \
 && rm hadoop-2.7.4.tar.gz
ENV HADOOP_HOME=/usr/local/hadoop \
    HADOOP_INSTALL=/usr/local/hadoop \
    HADOOP_PREFIX=/usr/local/hadoop \
    HADOOP_COMMON_HOME=/usr/local/hadoop \
    HADOOP_HDFS_HOME=/usr/local/hadoop \
    HADOOP_MAPRED_HOME=/usr/local/hadoop \
    HADOOP_YARN_HOME=/usr/local/hadoop \
    HADOOP_CONF_DIR=/usr/local/hadoop/etc/hadoop \
    HADOOP_EXAMPLES_JAR=/usr/local/hadoop/share/hadoop/mapreduce/hadoop-mapreduce-examples-2.7.4.jar
# NOTE(review): JAVA_HOME is re-pointed here from the AdoptOpenJDK build JDK
# to the base image's JDK for runtime — confirm this is intentional.
ENV JAVA_HOME=/usr/local/openjdk-8/
ENV PATH=$PATH:$JAVA_HOME/bin:$HADOOP_HOME/bin

# Cluster configuration: kubeconfig, NFS-backed core-site, HiBench configs.
# (The COPY below overwrites core-site.xml; the rm guards against stale content.)
RUN rm ${HADOOP_CONF_DIR}/core-site.xml \
 && mkdir /root/.kube
COPY config /root/.kube/config
COPY conf/core-site-nfs.xml /usr/local/hadoop/etc/hadoop/core-site.xml
COPY conf/hadoop-nfs.conf /HiBench/conf/hadoop.conf
COPY conf/spark.conf /HiBench/conf/spark.conf

# Intel Performance Counter Monitor, built from source.
WORKDIR /
RUN git clone https://github.com/opcm/pcm.git
WORKDIR /pcm
RUN make

# Entrypoint and results directory.
# NOTE(review): the original file had a bare `COPY` with no arguments here,
# which fails `docker build`; it was removed — restore its intended
# source/destination if a file is actually missing from the image.
WORKDIR /
COPY entrypoint.sh /entrypoint.sh
RUN chmod ugo+x /entrypoint.sh \
 && mkdir /results
ENTRYPOINT ["/entrypoint.sh"]