Commit

Add spark 3.5
julien bignon committed Aug 21, 2024
1 parent f8a8311 commit 9a669cd
Showing 24 changed files with 1,112 additions and 0 deletions.
61 changes: 61 additions & 0 deletions technologies/job/spark/spark-3.5/Dockerfile
@@ -0,0 +1,61 @@
FROM openjdk:11-jre-slim-bullseye

ARG SPARK_VERSION=3.5.2
ARG HADOOP_VERSION=3
ARG TINI_VERSION="v0.18.0"

ENV DEBIAN_FRONTEND noninteractive

ENV SPARK_HOME /opt/spark
ENV PATH "$PATH:$SPARK_HOME/bin"
ENV LD_LIBRARY_PATH="${LD_LIBRARY_PATH}:/usr/lib/hadoop/lib/native"

ENV HADOOP_CONF_DIR=/etc/hadoop/conf

# LIGHT DEPENDENCIES START
RUN echo "deb http://deb.debian.org/debian/ bullseye-backports main contrib non-free" | tee /etc/apt/sources.list.d/bulleseye-backports.list && \
apt update -qq && apt install -yqq --no-install-recommends \
ftp wget curl unzip telnet openssh-client krb5-user zip procps && \
rm -rf /var/lib/apt/lists/*
# LIGHT DEPENDENCIES END

# TINI INSTALL START
RUN set -ex && \
    mkdir -p /opt/spark && \
    mkdir -p /opt/spark/work-dir && \
    touch /opt/spark/RELEASE && \
    rm /bin/sh && \
    ln -sv /bin/bash /bin/sh && \
    echo "auth required pam_wheel.so use_uid" >> /etc/pam.d/su && \
    chgrp root /etc/passwd && chmod ug+rw /etc/passwd && \
    export TINI_HOME="/usr/local/sbin" && \
    curl -fSL "https://github.com/krallin/tini/releases/download/$TINI_VERSION/tini" -o "${TINI_HOME}/tini" && \
    curl -fSL "https://github.com/krallin/tini/releases/download/$TINI_VERSION/tini.asc" -o "${TINI_HOME}/tini.asc" && \
    chmod +x "${TINI_HOME}/tini" && \
    ln -s ${TINI_HOME}/tini /sbin/tini && \
    "${TINI_HOME}/tini" -h
# TINI INSTALL END

# SPARK INSTALL START
RUN mkdir -p /tmp/spark && \
    cd /tmp/spark && \
    wget -nv https://archive.apache.org/dist/spark/spark-${SPARK_VERSION}/spark-${SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}.tgz && \
    tar xf spark-*.tgz && \
    rm spark-*.tgz && \
    cp -R /tmp/spark/*/jars /opt/spark && \
    cp -R /tmp/spark/*/bin /opt/spark && \
    cp -R /tmp/spark/*/sbin /opt/spark && \
    rm -Rf /tmp/spark
# SPARK INSTALL END

COPY assets/hive_1.1.0_jars_download.sh /tmp/

RUN chmod +x /tmp/hive_1.1.0_jars_download.sh && \
    /tmp/hive_1.1.0_jars_download.sh

COPY entrypoint.sh /opt/
RUN chmod 755 /opt/entrypoint.sh

WORKDIR /sandbox/

ENTRYPOINT [ "/opt/entrypoint.sh" ]
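
A quick local build of this image might look like the following sketch; the tag name and the smoke test are illustrative and are not part of the Gradle/dockerInfo.yaml release pipeline:

#!/bin/bash
# Build from the directory containing this Dockerfile; the build args mirror the ARG defaults above.
docker build \
    --build-arg SPARK_VERSION=3.5.2 \
    --build-arg HADOOP_VERSION=3 \
    -t spark-3.5-test:local .

# Smoke test: check that spark-submit is present and reports the expected version.
docker run --rm --entrypoint /opt/spark/bin/spark-submit spark-3.5-test:local --version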
119 changes: 119 additions & 0 deletions technologies/job/spark/spark-3.5/assets/hive_1.1.0_jars_download.sh
@@ -0,0 +1,119 @@
#!/bin/bash

wget -nv https://repo1.maven.org/maven2/org/apache/hive/hive-metastore/1.1.1/hive-metastore-1.1.1.jar
wget -nv https://repo1.maven.org/maven2/org/apache/hive/hive-exec/1.1.1/hive-exec-1.1.1.jar
wget -nv https://repo1.maven.org/maven2/org/apache/hive/hive-common/1.1.1/hive-common-1.1.1.jar
wget -nv https://repo1.maven.org/maven2/org/apache/hive/hive-serde/1.1.1/hive-serde-1.1.1.jar
wget -nv https://repo1.maven.org/maven2/com/google/guava/guava/14.0.1/guava-14.0.1.jar
wget -nv https://repo1.maven.org/maven2/org/apache/hadoop/hadoop-client/2.7.4/hadoop-client-2.7.4.jar
wget -nv https://repo1.maven.org/maven2/org/apache/hive/hive-shims/1.1.1/hive-shims-1.1.1.jar
wget -nv https://repo1.maven.org/maven2/com/jolbox/bonecp/0.8.0.RELEASE/bonecp-0.8.0.RELEASE.jar
wget -nv https://repo1.maven.org/maven2/commons-cli/commons-cli/1.2/commons-cli-1.2.jar
wget -nv https://repo1.maven.org/maven2/commons-lang/commons-lang/2.6/commons-lang-2.6.jar
wget -nv https://repo1.maven.org/maven2/commons-logging/commons-logging/1.1.3/commons-logging-1.1.3.jar
wget -nv https://repo1.maven.org/maven2/org/apache/derby/derby/10.11.1.1/derby-10.11.1.1.jar
wget -nv https://repo1.maven.org/maven2/org/datanucleus/datanucleus-api-jdo/3.2.6/datanucleus-api-jdo-3.2.6.jar
wget -nv https://repo1.maven.org/maven2/org/datanucleus/datanucleus-core/3.2.10/datanucleus-core-3.2.10.jar
wget -nv https://repo1.maven.org/maven2/org/datanucleus/datanucleus-rdbms/3.2.9/datanucleus-rdbms-3.2.9.jar
wget -nv https://repo1.maven.org/maven2/commons-pool/commons-pool/1.5.4/commons-pool-1.5.4.jar
wget -nv https://repo1.maven.org/maven2/commons-dbcp/commons-dbcp/1.4/commons-dbcp-1.4.jar
wget -nv https://repo1.maven.org/maven2/javax/jdo/jdo-api/3.0.1/jdo-api-3.0.1.jar
wget -nv https://repo1.maven.org/maven2/org/antlr/antlr-runtime/3.4/antlr-runtime-3.4.jar
wget -nv https://repo1.maven.org/maven2/org/apache/thrift/libfb303/0.9.2/libfb303-0.9.2.jar
wget -nv https://repo1.maven.org/maven2/org/apache/thrift/libthrift/0.9.2/libthrift-0.9.2.jar
wget -nv https://repo1.maven.org/maven2/com/google/code/findbugs/jsr305/3.0.0/jsr305-3.0.0.jar
wget -nv https://repo1.maven.org/maven2/commons-codec/commons-codec/1.4/commons-codec-1.4.jar
wget -nv https://repo1.maven.org/maven2/org/apache/avro/avro/1.7.5/avro-1.7.5.jar
wget -nv https://repo1.maven.org/maven2/net/sf/opencsv/opencsv/2.3/opencsv-2.3.jar
wget -nv https://repo1.maven.org/maven2/com/twitter/parquet-hadoop-bundle/1.6.0rc3/parquet-hadoop-bundle-1.6.0rc3.jar
wget -nv https://repo1.maven.org/maven2/log4j/apache-log4j-extras/1.2.17/apache-log4j-extras-1.2.17.jar
wget -nv https://repo1.maven.org/maven2/org/apache/commons/commons-compress/1.4.1/commons-compress-1.4.1.jar
wget -nv https://repo1.maven.org/maven2/org/apache/ant/ant/1.9.1/ant-1.9.1.jar
wget -nv https://repo1.maven.org/maven2/org/apache/hive/shims/hive-shims-common/1.1.1/hive-shims-common-1.1.1.jar
wget -nv https://repo1.maven.org/maven2/org/apache/zookeeper/zookeeper/3.4.6/zookeeper-3.4.6.jar
wget -nv https://repo1.maven.org/maven2/org/apache/httpcomponents/httpclient/4.2.5/httpclient-4.2.5.jar
wget -nv https://repo1.maven.org/maven2/org/apache/httpcomponents/httpcore/4.2.5/httpcore-4.2.5.jar
wget -nv https://repo1.maven.org/maven2/jline/jline/2.12/jline-2.12.jar
wget -nv https://repo1.maven.org/maven2/io/netty/netty/3.7.0.Final/netty-3.7.0.Final.jar
wget -nv https://repo1.maven.org/maven2/org/apache/hive/shims/hive-shims-0.20S/1.1.1/hive-shims-0.20S-1.1.1.jar
wget -nv https://repo1.maven.org/maven2/org/apache/hive/shims/hive-shims-0.23/1.1.1/hive-shims-0.23-1.1.1.jar
wget -nv https://repo1.maven.org/maven2/org/apache/hive/shims/hive-shims-scheduler/1.1.1/hive-shims-scheduler-1.1.1.jar
wget -nv https://repo1.maven.org/maven2/org/apache/hadoop/hadoop-yarn-server-resourcemanager/2.6.0/hadoop-yarn-server-resourcemanager-2.6.0.jar
wget -nv https://repo1.maven.org/maven2/com/google/inject/extensions/guice-servlet/3.0/guice-servlet-3.0.jar
wget -nv https://repo1.maven.org/maven2/com/google/protobuf/protobuf-java/2.5.0/protobuf-java-2.5.0.jar
wget -nv https://repo1.maven.org/maven2/commons-io/commons-io/2.4/commons-io-2.4.jar
wget -nv https://repo1.maven.org/maven2/com/google/inject/guice/3.0/guice-3.0.jar
wget -nv https://repo1.maven.org/maven2/com/sun/jersey/jersey-json/1.14/jersey-json-1.14.jar
wget -nv https://repo1.maven.org/maven2/com/sun/jersey/contribs/jersey-guice/1.9/jersey-guice-1.9.jar
wget -nv https://repo1.maven.org/maven2/javax/xml/bind/jaxb-api/2.2.2/jaxb-api-2.2.2.jar
wget -nv https://repo1.maven.org/maven2/org/codehaus/jettison/jettison/1.1/jettison-1.1.jar
wget -nv https://repo1.maven.org/maven2/com/sun/jersey/jersey-core/1.14/jersey-core-1.14.jar
wget -nv https://repo1.maven.org/maven2/com/sun/jersey/jersey-client/1.9/jersey-client-1.9.jar
wget -nv https://repo1.maven.org/maven2/org/mortbay/jetty/jetty-util/6.1.26/jetty-util-6.1.26.jar
wget -nv https://repo1.maven.org/maven2/org/apache/hadoop/hadoop-yarn-server-applicationhistoryservice/2.6.0/hadoop-yarn-server-applicationhistoryservice-2.6.0.jar
wget -nv https://repo1.maven.org/maven2/org/apache/hadoop/hadoop-yarn-server-web-proxy/2.6.0/hadoop-yarn-server-web-proxy-2.6.0.jar
wget -nv https://repo1.maven.org/maven2/javax/inject/javax.inject/1/javax.inject-1.jar
wget -nv https://repo1.maven.org/maven2/aopalliance/aopalliance/1.0/aopalliance-1.0.jar
wget -nv https://repo1.maven.org/maven2/org/sonatype/sisu/inject/cglib/2.2.1-v20090111/cglib-2.2.1-v20090111.jar
wget -nv https://repo1.maven.org/maven2/asm/asm/3.2/asm-3.2.jar
wget -nv https://repo1.maven.org/maven2/com/sun/xml/bind/jaxb-impl/2.2.3-1/jaxb-impl-2.2.3-1.jar
wget -nv https://repo1.maven.org/maven2/javax/xml/stream/stax-api/1.0-2/stax-api-1.0-2.jar
wget -nv https://repo1.maven.org/maven2/javax/activation/activation/1.1/activation-1.1.jar
wget -nv https://repo1.maven.org/maven2/com/sun/jersey/jersey-server/1.14/jersey-server-1.14.jar
wget -nv https://repo1.maven.org/maven2/org/tukaani/xz/1.0/xz-1.0.jar
wget -nv https://repo1.maven.org/maven2/org/fusesource/leveldbjni/leveldbjni-all/1.8/leveldbjni-all-1.8.jar
wget -nv https://repo1.maven.org/maven2/org/mortbay/jetty/jetty/6.1.26/jetty-6.1.26.jar
wget -nv https://repo1.maven.org/maven2/org/apache/ant/ant-launcher/1.9.1/ant-launcher-1.9.1.jar
wget -nv https://repo1.maven.org/maven2/com/thoughtworks/paranamer/paranamer/2.3/paranamer-2.3.jar
wget -nv https://repo1.maven.org/maven2/org/xerial/snappy/snappy-java/1.0.5/snappy-java-1.0.5.jar
wget -nv https://repo1.maven.org/maven2/javax/transaction/jta/1.1/jta-1.1.jar
wget -nv https://repo1.maven.org/maven2/org/antlr/stringtemplate/3.2.1/stringtemplate-3.2.1.jar
wget -nv https://repo1.maven.org/maven2/antlr/antlr/2.7.7/antlr-2.7.7.jar
wget -nv https://repo1.maven.org/maven2/org/apache/hive/hive-ant/1.1.1/hive-ant-1.1.1.jar
wget -nv https://repo1.maven.org/maven2/org/antlr/ST4/4.0.4/ST4-4.0.4.jar
wget -nv https://repo1.maven.org/maven2/org/codehaus/groovy/groovy-all/2.1.6/groovy-all-2.1.6.jar
wget -nv https://repo1.maven.org/maven2/stax/stax-api/1.0.1/stax-api-1.0.1.jar
wget -nv https://repo1.maven.org/maven2/org/apache/velocity/velocity/1.5/velocity-1.5.jar
wget -nv https://repo1.maven.org/maven2/oro/oro/2.0.8/oro-2.0.8.jar
wget -nv https://repo1.maven.org/maven2/org/apache/hadoop/hadoop-common/2.7.4/hadoop-common-2.7.4.jar
wget -nv https://repo1.maven.org/maven2/org/apache/hadoop/hadoop-hdfs/2.7.4/hadoop-hdfs-2.7.4.jar
wget -nv https://repo1.maven.org/maven2/org/apache/hadoop/hadoop-mapreduce-client-app/2.7.4/hadoop-mapreduce-client-app-2.7.4.jar
wget -nv https://repo1.maven.org/maven2/org/apache/hadoop/hadoop-yarn-api/2.7.4/hadoop-yarn-api-2.7.4.jar
wget -nv https://repo1.maven.org/maven2/org/apache/hadoop/hadoop-mapreduce-client-core/2.7.4/hadoop-mapreduce-client-core-2.7.4.jar
wget -nv https://repo1.maven.org/maven2/org/apache/hadoop/hadoop-mapreduce-client-jobclient/2.7.4/hadoop-mapreduce-client-jobclient-2.7.4.jar
wget -nv https://repo1.maven.org/maven2/org/apache/hadoop/hadoop-annotations/2.7.4/hadoop-annotations-2.7.4.jar
wget -nv https://repo1.maven.org/maven2/org/apache/commons/commons-math3/3.1.1/commons-math3-3.1.1.jar
wget -nv https://repo1.maven.org/maven2/xmlenc/xmlenc/0.52/xmlenc-0.52.jar
wget -nv https://repo1.maven.org/maven2/commons-httpclient/commons-httpclient/3.1/commons-httpclient-3.1.jar
wget -nv https://repo1.maven.org/maven2/commons-net/commons-net/3.1/commons-net-3.1.jar
wget -nv https://repo1.maven.org/maven2/commons-collections/commons-collections/3.2.2/commons-collections-3.2.2.jar
wget -nv https://repo1.maven.org/maven2/org/mortbay/jetty/jetty-sslengine/6.1.26/jetty-sslengine-6.1.26.jar
wget -nv https://repo1.maven.org/maven2/log4j/log4j/1.2.17/log4j-1.2.17.jar
wget -nv https://repo1.maven.org/maven2/commons-configuration/commons-configuration/1.6/commons-configuration-1.6.jar
wget -nv https://repo1.maven.org/maven2/org/slf4j/slf4j-api/1.7.10/slf4j-api-1.7.10.jar
wget -nv https://repo1.maven.org/maven2/org/codehaus/jackson/jackson-core-asl/1.9.13/jackson-core-asl-1.9.13.jar
wget -nv https://repo1.maven.org/maven2/org/codehaus/jackson/jackson-mapper-asl/1.9.13/jackson-mapper-asl-1.9.13.jar
wget -nv https://repo1.maven.org/maven2/com/google/code/gson/gson/2.2.4/gson-2.2.4.jar
wget -nv https://repo1.maven.org/maven2/org/apache/hadoop/hadoop-auth/2.7.4/hadoop-auth-2.7.4.jar
wget -nv https://repo1.maven.org/maven2/org/apache/htrace/htrace-core/3.1.0-incubating/htrace-core-3.1.0-incubating.jar
wget -nv https://repo1.maven.org/maven2/commons-digester/commons-digester/1.8/commons-digester-1.8.jar
wget -nv https://repo1.maven.org/maven2/commons-beanutils/commons-beanutils-core/1.8.0/commons-beanutils-core-1.8.0.jar
wget -nv https://repo1.maven.org/maven2/commons-beanutils/commons-beanutils/1.7.0/commons-beanutils-1.7.0.jar
wget -nv https://repo1.maven.org/maven2/org/apache/directory/server/apacheds-kerberos-codec/2.0.0-M15/apacheds-kerberos-codec-2.0.0-M15.jar
wget -nv https://repo1.maven.org/maven2/org/apache/directory/server/apacheds-i18n/2.0.0-M15/apacheds-i18n-2.0.0-M15.jar
wget -nv https://repo1.maven.org/maven2/org/apache/directory/api/api-asn1-api/1.0.0-M20/api-asn1-api-1.0.0-M20.jar
wget -nv https://repo1.maven.org/maven2/org/apache/directory/api/api-util/1.0.0-M20/api-util-1.0.0-M20.jar
wget -nv https://repo1.maven.org/maven2/javax/servlet/jsp/jsp-api/2.1/jsp-api-2.1.jar
wget -nv https://repo1.maven.org/maven2/org/slf4j/slf4j-log4j12/1.7.10/slf4j-log4j12-1.7.10.jar
wget -nv https://repo1.maven.org/maven2/io/netty/netty-all/4.0.23.Final/netty-all-4.0.23.Final.jar
wget -nv https://repo1.maven.org/maven2/xerces/xercesImpl/2.9.1/xercesImpl-2.9.1.jar
wget -nv https://repo1.maven.org/maven2/xml-apis/xml-apis/1.3.04/xml-apis-1.3.04.jar
wget -nv https://repo1.maven.org/maven2/org/apache/hadoop/hadoop-mapreduce-client-common/2.7.4/hadoop-mapreduce-client-common-2.7.4.jar
wget -nv https://repo1.maven.org/maven2/org/apache/hadoop/hadoop-mapreduce-client-shuffle/2.7.4/hadoop-mapreduce-client-shuffle-2.7.4.jar
wget -nv https://repo1.maven.org/maven2/org/apache/hadoop/hadoop-yarn-common/2.7.4/hadoop-yarn-common-2.7.4.jar
wget -nv https://repo1.maven.org/maven2/org/apache/hadoop/hadoop-yarn-client/2.7.4/hadoop-yarn-client-2.7.4.jar
wget -nv https://repo1.maven.org/maven2/org/apache/hadoop/hadoop-yarn-server-common/2.7.4/hadoop-yarn-server-common-2.7.4.jar
wget -nv https://repo1.maven.org/maven2/org/codehaus/jackson/jackson-jaxrs/1.9.13/jackson-jaxrs-1.9.13.jar
wget -nv https://repo1.maven.org/maven2/org/codehaus/jackson/jackson-xc/1.9.13/jackson-xc-1.9.13.jar
mkdir /opt/spark/hive_1.1.0_jars
mv *.jar /opt/spark/hive_1.1.0_jars/
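
These jars are staged under /opt/spark/hive_1.1.0_jars so the image can talk to an older Hive metastore. A hypothetical spark-defaults.conf fragment for wiring them in, written in the same append style as entrypoint.sh; whether this Spark build still accepts that metastore version is an assumption, not something verified here:

#!/bin/bash
# Hypothetical: point Spark's Hive client at the pre-downloaded jars instead of the builtin ones.
{
    echo "spark.sql.hive.metastore.version   1.1.0"
    echo "spark.sql.hive.metastore.jars      path"
    echo "spark.sql.hive.metastore.jars.path file:///opt/spark/hive_1.1.0_jars/*.jar"
} >> /opt/spark/conf/spark-defaults.conf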
22 changes: 22 additions & 0 deletions technologies/job/spark/spark-3.5/build.gradle.kts
@@ -0,0 +1,22 @@
/*
* SPDX-License-Identifier: Apache-2.0
*
* Copyright 2019-2021.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import com.bmuschko.gradle.docker.DockerRemoteApiPlugin
import com.saagie.technologies.SaagieTechnologiesGradlePlugin

apply<DockerRemoteApiPlugin>()
apply<SaagieTechnologiesGradlePlugin>()
5 changes: 5 additions & 0 deletions technologies/job/spark/spark-3.5/context.yaml
@@ -0,0 +1,5 @@
id: "3.5"
label: "3.5"
available: true
recommended: true
trustLevel: stable
4 changes: 4 additions & 0 deletions technologies/job/spark/spark-3.5/dockerInfo.yaml
@@ -0,0 +1,4 @@
image: saagie/spark
baseTag: 3.5
dynamicVersion: 1.139.0_SDKTECHNO-207
version: 3.5-1.139.0
54 changes: 54 additions & 0 deletions technologies/job/spark/spark-3.5/entrypoint.sh
@@ -0,0 +1,54 @@
#!/bin/bash
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

# exit immediately if any command fails
set -e

# Check whether there is a passwd entry for the container UID
myuid=$(id -u)
mygid=$(id -g)
# turn off -e for getent because it will return error code in anonymous uid case
set +e
uidentry=$(getent passwd $myuid)
set -e

# If there is no passwd entry for the container UID, attempt to create one
if [ -z "$uidentry" ] ; then
    if [ -w /etc/passwd ] ; then
        echo "$myuid:x:$myuid:$mygid:${SPARK_USER_NAME:-anonymous uid}:$SPARK_HOME:/bin/false" >> /etc/passwd
    else
        echo "Container ENTRYPOINT failed to add passwd entry for anonymous UID"
    fi
fi

# BEGIN SAAGIE SPECIFIC CODE
mkdir -p /opt/spark/conf/
cat conf/*.conf > /opt/spark/conf/spark-defaults.conf
echo "spark.kubernetes.driver.label.io.saagie/spark-submit-pod-uid $SPARK_SUBMIT_POD_UID" >> /opt/spark/conf/spark-defaults.conf

if test -f main_script;
then
    # parse content and if pyfiles extract minio url and inject it
    if grep -q "\--py-files" main_script;
    then
        echo "spark.kubernetes.driverEnv.PYSPARK_FILES `awk -F '.*--py-files=| ' '{print $2}' main_script`" >> /opt/spark/conf/spark-defaults.conf
    fi;
    sh ./main_script;
else exec "$@"
fi;
# END SAAGIE SPECIFIC CODE
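
The --py-files handling above leans on awk field splitting; here is a small illustration with a hypothetical main_script (the MinIO URL is made up):

#!/bin/bash
# Hypothetical submit line such as the platform would write into main_script.
printf '%s\n' 'spark-submit --deploy-mode cluster --py-files=http://minio:9000/bucket/deps.zip local:///sandbox/main.py' > /tmp/main_script

# Same extraction as in entrypoint.sh: the field separator consumes everything up to
# "--py-files=", so $2 is the URL that follows it (up to the next space).
awk -F '.*--py-files=| ' '{print $2}' /tmp/main_script
# prints: http://minio:9000/bucket/deps.zip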
62 changes: 62 additions & 0 deletions technologies/job/spark/spark-3.5/image_test.yaml
@@ -0,0 +1,62 @@
schemaVersion: "2.0.0"

metadataTest:
  env:
    - key: LANG
      value: "C.UTF-8"
    - key: JAVA_HOME
      value: "/usr/local/openjdk-11"
    - key: SPARK_HOME
      value: "/opt/spark"

fileExistenceTests:
  - name: "entrypoint.sh"
    path: "/opt/entrypoint.sh"
    shouldExist: true
    permissions: "-rwxr-xr-x"

  - name: "kinit"
    path: "/usr/bin/kinit"
    shouldExist: true
    permissions: "-rwxr-xr-x"

fileContentTests:
  - name: "entrypoint.sh"
    path: "/opt/entrypoint.sh"
    expectedContents:
      [
        'mkdir -p /opt/spark/conf/',
        'cat conf/\*\.conf > /opt/spark/conf/spark-defaults.conf',
        'echo "spark\.kubernetes\.driver\.label\.io\.saagie/spark-submit-pod-uid \$SPARK_SUBMIT_POD_UID" >> /opt/spark/conf/spark-defaults.conf',
        'sh \./main_script',
      ]

commandTests:
  - name: "java installation"
    command: "which"
    args: ["java"]
    expectedOutput: ["/usr/local/openjdk-11/bin/java"]

  - name: "java version"
    command: "java"
    args: ["-version"]
    expectedError: ['openjdk version "11*']

  - name: "Workdir"
    command: "pwd"
    expectedOutput: ["/sandbox"]

  - name: "Spark version"
    command: "/opt/spark/bin/spark-submit"
    args: ["--version"]
    expectedError: ["version 3.5.*"]

  - name: "spark-submit on path"
    command: "which"
    args: ["spark-submit"]
    expectedOutput: ["/opt/spark/bin/spark-submit"]

  - name: "unzip"
    command: "which"
    args: ["unzip"]
    expectedOutput: ["/usr/bin/unzip"]