Skip to content

Commit

Permalink
Fixed issue with avro r/w. Removed build for Spark 2.3.0. Added build for Spark 3.5.0. (#44)
Browse files Browse the repository at this point in the history
  • Loading branch information
jverbus authored Feb 12, 2024
1 parent cc4289c commit 0ce97b6
Show file tree
Hide file tree
Showing 4 changed files with 11 additions and 11 deletions.
12 changes: 6 additions & 6 deletions .github/workflows/ci.yml
Original file line number Diff line number Diff line change
Expand Up @@ -21,8 +21,6 @@ jobs:
strategy:
matrix:
include:
- scala-version: 2.11.12
spark-version: 2.3.0
- scala-version: 2.11.12
spark-version: 2.4.3
- scala-version: 2.12.18
Expand All @@ -37,12 +35,14 @@ jobs:
spark-version: 3.2.4
- scala-version: 2.12.18
spark-version: 3.3.3
- scala-version: 2.13.11
- scala-version: 2.13.12
spark-version: 3.3.3
- scala-version: 2.12.18
spark-version: 3.4.1
- scala-version: 2.13.11
spark-version: 3.4.1
spark-version: 3.4.2
- scala-version: 2.13.12
spark-version: 3.4.2
- scala-version: 2.13.12
spark-version: 3.5.0
if: "! contains(toJSON(github.event.commits.*.message), '[skip ci]')"
steps:
- name: Check out code
Expand Down
2 changes: 1 addition & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -37,7 +37,7 @@ If you want to use the library with arbitrary Spark and Scala versions, you can
build command.

```bash
./gradlew build -PsparkVersion=3.4.1 -PscalaVersion=2.13.11
./gradlew build -PsparkVersion=3.4.1 -PscalaVersion=2.13.12
```

To force a rebuild of the library, you can use:
Expand Down
4 changes: 2 additions & 2 deletions isolation-forest/build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -4,12 +4,12 @@ plugins {
id 'scala'
}

def scalaVersion = findProperty("scalaVersion") ?: "2.11.8" // Scala 2.11.8 is the default Scala build version.
def scalaVersion = findProperty("scalaVersion") ?: "2.13.12"
println "Scala version: " + scalaVersion
// If scalaVersion == "2.11.8", then scalaVersionShort == "2.11".
def scalaVersionShort = VersionNumber.parse(scalaVersion).getMajor() + "." + VersionNumber.parse(scalaVersion).getMinor()

def sparkVersion = findProperty("sparkVersion") ?: "2.3.0" // Spark 2.3.0 is the default Spark build version.
def sparkVersion = findProperty("sparkVersion") ?: "3.4.1"
println "Spark version: " + sparkVersion

dependencies {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -87,7 +87,7 @@ private[isolationforest] case object IsolationForestModelReadWrite extends Loggi
val dataPath = new Path(path, "data").toString
logInfo(s"Loading IsolationForestModel tree data from path ${dataPath}")
val nodeData = spark.read
.format("com.databricks.spark.avro")
.format("avro")
.load(dataPath)
.as[EnsembleNodeData]
val rootNodesRDD = nodeData.rdd
Expand Down Expand Up @@ -260,7 +260,7 @@ private[isolationforest] case object IsolationForestModelReadWrite extends Loggi
spark.createDataFrame(nodeDataRDD)
.repartition(1)
.write
.format("com.databricks.spark.avro")
.format("avro")
.save(dataPath)
}

Expand Down

0 comments on commit 0ce97b6

Please sign in to comment.