Here is my Dockerfile:

FROM cdr-docker-repo...com/mv/centos-java8:1.1.0
LABEL ****
RUN yum update -y
RUN yum install -y https://centos7.iuscommunity.org/ius-release.rpm
RUN yum install -y gcc gcc-c++ make wget
RUN yum install -y python36u python36u-libs python36u-devel python36u-pip python-pip
RUN yum install -y java-1.8.0-openjdk java-1.8.0-openjdk-devel
RUN echo " JAVA_HOME $JAVA_HOME"
ENV JAVA_HOME /usr/java/jdk1.8.0_162/jre
ENV PATH="$JAVA_HOME/bin:${PATH}"
RUN echo "JAVA_HOME=$(readlink -f /usr/bin/java | sed "s:bin/java::")" | tee -a /etc/profile && source /etc/profile && echo $JAVA_HOME
RUN echo " JAVA_HOME $JAVA_HOME"
ENV SCALA_TAR_URL http://www.scala-lang.org/files/archive
ENV SCALA_VERSION 2.12.7
ENV SPARK_TAR_URL http://www-eu.apache.org/dist/spark/spark-2.3.1
ENV SPARK_VERSION 2.3.1
RUN wget $SPARK_TAR_URL/spark-$SPARK_VERSION-bin-hadoop2.7.tgz
RUN tar xvf spark-$SPARK_VERSION-bin-hadoop2.7.tgz
RUN mv spark-$SPARK_VERSION-bin-hadoop2.7 /opt/spark-$SPARK_VERSION-bin-hadoop2.7
RUN ln -s /opt/spark-$SPARK_VERSION-bin-hadoop2.7 /usr/lib/spark
ENV PATH $PATH:/usr/lib/spark/bin
ADD requirements.txt /tmp/
RUN pip install --upgrade pip
RUN pip install virtualenv
RUN virtualenv -p python3.6 tmf_env
RUN source tmf_env/bin/activate && pip install pyspark==2.3.1 && pip install numpy==1.15.1 && pip install -r /tmp/requirements.txt
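To sanity-check what this image actually provides before pytest runs, here is a minimal diagnostic sketch, assuming it is executed inside the built container; PySpark's launch_gateway starts a JVM from JAVA_HOME and the java binary on PATH, so a mismatch between the ENV above and the yum-installed JDK would surface exactly at the point shown in the traceback below. Everything in this snippet is illustrative and not part of the build:

import os
import shutil
import subprocess

# Illustrative check of the Java environment PySpark's launch_gateway will use.
java_home = os.environ.get("JAVA_HOME")
print("JAVA_HOME =", java_home)
if java_home and not os.path.isdir(java_home):
    print("JAVA_HOME is set but the directory does not exist")

java_bin = shutil.which("java")
print("java on PATH =", java_bin,
      "->", os.path.realpath(java_bin) if java_bin else None)

if java_bin:
    # java -version prints to stderr; confirm the JVM starts at all.
    result = subprocess.run([java_bin, "-version"], stderr=subprocess.PIPE)
    print(result.stderr.decode())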
I am getting the following error when the tests run:
____________________ ERROR at setup of test_**** ____________________
request = <SubRequest 'spark_context' for <Function 'test_****'>>
import pytest
from pyspark import SparkConf, SparkContext

@pytest.fixture(scope="session")
def spark_context(request):
    conf = (SparkConf().setMaster("local[2]")
                       .setAppName("pytest-pyspark-local-testing"))
    request.addfinalizer(lambda: sc.stop())
    # error in this line:
    sc = SparkContext(conf=conf).getOrCreate()
    return sc
/temp/workspace/FUSION-Pipeline_feature_amit_ss3/extracts/src/test/conftest.py:13:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
/tmf_env/lib/python3.6/site-packages/pyspark/context.py:115: in __init__
    SparkContext._ensure_initialized(self, gateway=gateway, conf=conf)
/tmf_env/lib/python3.6/site-packages/pyspark/context.py:292: in _ensure_initialized
    SparkContext._gateway = gateway or launch_gateway(conf)
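For reference, the same fixture sketched with SparkContext.getOrCreate called as the classmethod PySpark documents, and with the finalizer registered only after the context exists; the master, app name, and scope are taken from my conftest.py above, the rest is illustrative:

import pytest
from pyspark import SparkConf, SparkContext

@pytest.fixture(scope="session")
def spark_context(request):
    conf = (SparkConf().setMaster("local[2]")
                       .setAppName("pytest-pyspark-local-testing"))
    # getOrCreate is a classmethod: it returns the active context or builds one.
    sc = SparkContext.getOrCreate(conf=conf)
    # Register teardown once the context actually exists.
    request.addfinalizer(lambda: sc.stop())
    return sc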