2 files changed, +26 -29 lines changed

@@ -491,6 +491,32 @@ function prepare_conda_env() {
 }
 
 function prepare_common_env() {
+  SPARK_NLP_VERSION="3.2.1" # Must include subminor version here
+  SPARK_JARS_DIR=/usr/lib/spark/jars
+  SPARK_CONF_DIR='/etc/spark/conf'
+  SPARK_BIGQUERY_VERSION="$(get_metadata_attribute spark-bigquery-connector-version "${DEFAULT_SPARK_BIGQUERY_VERSION:-0.22.0}")"
+  SPARK_VERSION="$(spark-submit --version 2>&1 | sed -n 's/.*version[[:blank:]]\+\([0-9]\+\.[0-9]\).*/\1/p' | head -n1)"
+
+  readonly SPARK_VERSION SPARK_BIGQUERY_VERSION SPARK_CONF_DIR SPARK_JARS_DIR SPARK_NLP_VERSION
+
+  if version_lt "${SPARK_VERSION}" "3.1" || \
+     version_ge "${SPARK_VERSION}" "4.0" ; then
+    echo "Error: Your Spark version is not supported. Please upgrade Spark to one of the supported versions."
+    exit 1
+  fi
+
+  # Detect dataproc image version
+  if (! test -v DATAPROC_IMAGE_VERSION) ; then
+    if test -v DATAPROC_VERSION ; then
+      DATAPROC_IMAGE_VERSION="${DATAPROC_VERSION}"
+    else
+      if   version_lt "${SPARK_VERSION}" "3.2" ; then DATAPROC_IMAGE_VERSION="2.0"
+      elif version_lt "${SPARK_VERSION}" "3.4" ; then DATAPROC_IMAGE_VERSION="2.1"
+      elif version_lt "${SPARK_VERSION}" "3.6" ; then DATAPROC_IMAGE_VERSION="2.2"
+      else echo "Unknown dataproc image version" ; exit 1 ; fi
+    fi
+  fi
+
   # Verify OS compatibility and Secure Boot state
   check_os
   check_secure_boot
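
The version guard above calls `version_lt` and `version_ge`, which are defined elsewhere in the shared function library and are not part of this diff. As a rough illustration only (the names and behaviour are inferred from how they are called here), they can be implemented with GNU `sort -V`:

```bash
#!/usr/bin/env bash
# Hypothetical sketch: version_lt/version_ge are assumed to be provided by the
# shared library; one common way to write them is with a version sort.

function version_ge() {
  # True when $1 >= $2: after `sort -V`, $2 is the smaller (or equal) entry.
  [[ "$(printf '%s\n' "$1" "$2" | sort -V | head -n1)" == "$2" ]]
}

function version_lt() {
  # True when $1 < $2.
  ! version_ge "$1" "$2"
}

# The guard in prepare_common_env() then accepts 3.1 <= SPARK_VERSION < 4.0:
for v in 3.0 3.1 3.3 3.5 4.0 ; do
  if version_lt "$v" "3.1" || version_ge "$v" "4.0" ; then
    echo "$v: unsupported"
  else
    echo "$v: supported"
  fi
done
```

Since the `spark-submit --version | sed` pipeline captures only `major.minor`, the accepted values fall in [3.1, 4.0); within that range, the fallback mapping assigns image 2.0 to Spark 3.1, 2.1 to 3.2 and 3.3, and 2.2 to 3.4 and 3.5, erroring out otherwise.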

@@ -41,32 +41,3 @@ function install_spark_rapids() {
4141 "${pkg_bucket}/rapids-4-spark_${scala_ver}/${SPARK_RAPIDS_VERSION}/${jar_basename}" \
4242 "/usr/lib/spark/jars/${jar_basename}"
4343}
44-
45- function prepare_spark_env() {
46- SPARK_NLP_VERSION="3.2.1" # Must include subminor version here
47- SPARK_JARS_DIR=/usr/lib/spark/jars
48- SPARK_CONF_DIR='/etc/spark/conf'
49- SPARK_BIGQUERY_VERSION="$(get_metadata_attribute spark-bigquery-connector-version "${DEFAULT_SPARK_BIGQUERY_VERSION:-0.22.0}")"
50- SPARK_VERSION="$(spark-submit --version 2>&1 | sed -n 's/.*version[[:blank:]]\+\([0-9]\+\.[0-9]\).*/\1/p' | head -n1)"
51-
52- readonly SPARK_VERSION SPARK_BIGQUERY_VERSION SPARK_CONF_DIR SPARK_JARS_DIR SPARK_NLP_VERSION
53-
54- if version_lt "${SPARK_VERSION}" "3.1" || \
55- version_ge "${SPARK_VERSION}" "4.0" ; then
56- echo "Error: Your Spark version is not supported. Please upgrade Spark to one of the supported versions."
57- exit 1
58- fi
59-
60- # Detect dataproc image version
61- if (! test -v DATAPROC_IMAGE_VERSION) ; then
62- if test -v DATAPROC_VERSION ; then
63- DATAPROC_IMAGE_VERSION="${DATAPROC_VERSION}"
64- else
65- if version_lt "${SPARK_VERSION}" "3.2" ; then DATAPROC_IMAGE_VERSION="2.0"
66- elif version_lt "${SPARK_VERSION}" "3.4" ; then DATAPROC_IMAGE_VERSION="2.1"
67- elif version_lt "${SPARK_VERSION}" "3.6" ; then DATAPROC_IMAGE_VERSION="2.2"
68- else echo "Unknown dataproc image version" ; exit 1 ; fi
69- fi
70- fi
71-
72- }
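
Both the removed `prepare_spark_env()` and the new `prepare_common_env()` resolve the connector version through `get_metadata_attribute`, which this diff does not show. A minimal sketch, assuming it simply wraps the GCE metadata server with a caller-supplied default, might look like this:

```bash
#!/usr/bin/env bash
# Hypothetical sketch of get_metadata_attribute (defined elsewhere in the repo):
# read a cluster metadata attribute, falling back to a default when it is unset.

function get_metadata_attribute() {
  local -r attribute_name="$1"
  local -r default_value="${2:-}"
  # -f makes curl fail on HTTP errors (e.g. 404 for a missing attribute),
  # which triggers the fallback branch.
  curl -fs -H "Metadata-Flavor: Google" \
    "http://metadata.google.internal/computeMetadata/v1/instance/attributes/${attribute_name}" \
    || echo -n "${default_value}"
}

# Usage as in the functions above: resolves to the metadata value if present,
# otherwise to DEFAULT_SPARK_BIGQUERY_VERSION, otherwise to 0.22.0.
SPARK_BIGQUERY_VERSION="$(get_metadata_attribute spark-bigquery-connector-version \
  "${DEFAULT_SPARK_BIGQUERY_VERSION:-0.22.0}")"
echo "Using spark-bigquery connector ${SPARK_BIGQUERY_VERSION}"
```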