diff --git a/.github/workflows/mvn-verify-check.yml b/.github/workflows/mvn-verify-check.yml
index b9b9e11c9..12bcb9d79 100644
--- a/.github/workflows/mvn-verify-check.yml
+++ b/.github/workflows/mvn-verify-check.yml
@@ -22,7 +22,9 @@ on:
jobs:
build:
runs-on: ubuntu-latest
-
+ strategy:
+ matrix:
+ spark-version: ['311', '320', '330', '341']
steps:
- uses: actions/checkout@v3
@@ -32,5 +34,5 @@ jobs:
distribution: adopt
java-version: 8
- - name: Run mvn verify
- run: cd core && mvn verify
+ - name: Run mvn verify with Spark ${{ matrix.spark-version }}
+ run: cd core && mvn -Dbuildver=${{ matrix.spark-version }} verify
diff --git a/core/pom.xml b/core/pom.xml
index 9c0987825..0a93b6cea 100644
--- a/core/pom.xml
+++ b/core/pom.xml
@@ -327,7 +327,7 @@
3.3.2
3.3.3-SNAPSHOT
3.4.0
- 3.4.1-SNAPSHOT
+ 3.4.1
3.5.0-SNAPSHOT
2.12
4.3.0
@@ -361,7 +361,7 @@
2.1.1
2.2.0
2.3.0
- 2.4.0rc1
+ 2.4.0
${delta10x.version}
1.8
3.11.0
diff --git a/core/src/main/scala/com/nvidia/spark/rapids/tool/profiling/AutoTuner.scala b/core/src/main/scala/com/nvidia/spark/rapids/tool/profiling/AutoTuner.scala
index 8af8dacb2..6a974cf21 100644
--- a/core/src/main/scala/com/nvidia/spark/rapids/tool/profiling/AutoTuner.scala
+++ b/core/src/main/scala/com/nvidia/spark/rapids/tool/profiling/AutoTuner.scala
@@ -763,8 +763,6 @@ class AutoTuner(
&& appInfoProvider.getRedundantReadSize > DEF_READ_SIZE_THRESHOLD) {
appendRecommendation("spark.rapids.filecache.enabled", "true")
appendComment("Enable file cache only if Spark local disks bandwidth is > 1 GB/s")
- } else {
- null
}
}
diff --git a/core/src/test/scala/com/nvidia/spark/rapids/tool/qualification/QualificationSuite.scala b/core/src/test/scala/com/nvidia/spark/rapids/tool/qualification/QualificationSuite.scala
index e20affa40..59c55a58e 100644
--- a/core/src/test/scala/com/nvidia/spark/rapids/tool/qualification/QualificationSuite.scala
+++ b/core/src/test/scala/com/nvidia/spark/rapids/tool/qualification/QualificationSuite.scala
@@ -1154,7 +1154,16 @@ class QualificationSuite extends BaseTestSuite {
try {
val lines = inputSource.getLines.toSeq
// 1 for header, 1 for values
- assert(lines.size == 6)
+ // Expected line count depends on the Spark version under test (supported types differ)
+ val expLinesSize =
+ if (ToolUtils.isSpark340OrLater()) {
+ 8
+ } else if (!ToolUtils.isSpark320OrLater()) {
+ 6
+ } else {
+ 7
+ }
+ assert(lines.size == expLinesSize)
assert(lines.head.contains("App ID,Unsupported Type,"))
assert(lines(1).contains("\"Read\",\"JSON\",\"Types not supported - bigint:int\""))
} finally {