Add support for Spark 3.5 (#16)
izhangzhihao authored Oct 1, 2023
1 parent 489d16a commit 65722b2
Showing 9 changed files with 51 additions and 17 deletions.
3 changes: 3 additions & 0 deletions .github/workflows/build.yml
@@ -43,6 +43,9 @@ jobs:
          - scalaVersion: 2.13
            sparkVersion: 3.4.0
            scalaCompt: 2.13.10
          - scalaVersion: 2.13
            sparkVersion: 3.5.0
            scalaCompt: 2.13.12
    steps:
      - uses: actions/checkout@v2

3 changes: 3 additions & 0 deletions .github/workflows/release.yml
@@ -41,6 +41,9 @@ jobs:
          - scalaVersion: 2.13
            sparkVersion: 3.4.0
            scalaCompt: 2.13.10
          - scalaVersion: 2.13
            sparkVersion: 3.5.0
            scalaCompt: 2.13.12
    steps:
      - uses: actions/checkout@v2

1 change: 1 addition & 0 deletions .gitignore
@@ -35,3 +35,4 @@ metastore_db
*.jar
~$*.xlsx
spark-warehouse
**/bin
1 change: 1 addition & 0 deletions README.md
@@ -92,6 +92,7 @@ The compatible versions of [Spark](http://spark.apache.org/) are as follows:
| 3.2.x | 2.12 / 2.13
| 3.3.x | 2.12 / 2.13
| 3.4.x | 2.12 / 2.13
| 3.5.x | 2.13



18 changes: 15 additions & 3 deletions core/build.gradle
@@ -45,9 +45,15 @@ dependencies {
    implementation 'org.apache.commons:commons-lang3:3.10'
    implementation 'info.picocli:picocli:4.6.3'

    implementation "io.circe:circe-yaml_$scalaVersion:0.11.0-M1"
    implementation "io.circe:circe-generic_$scalaVersion:0.12.0-M3"
    implementation "io.circe:circe-generic-extras_$scalaVersion:0.12.0-M3"
    if (sparkVersion.startsWith("3.5")) {
        implementation "io.circe:circe-yaml_$scalaVersion:0.15.0-RC1"
        implementation "io.circe:circe-generic_$scalaVersion:0.15.0-M1"
        implementation "io.circe:circe-generic-extras_$scalaVersion:0.14.3"
    } else {
        implementation "io.circe:circe-yaml_$scalaVersion:0.11.0-M1"
        implementation "io.circe:circe-generic_$scalaVersion:0.12.0-M3"
        implementation "io.circe:circe-generic-extras_$scalaVersion:0.12.0-M3"
    }
    compileOnly 'org.projectlombok:lombok:1.18.22'
    annotationProcessor 'org.projectlombok:lombok:1.18.22'
    // https://mvnrepository.com/artifact/com.jcraft/jsch
@@ -83,6 +89,12 @@ dependencies {
                strictly '2.14.2'
            }
        }
    } else if (sparkVersion.startsWith("3.5")) {
        implementation("com.fasterxml.jackson.module:jackson-module-scala_$scalaVersion") {
            version {
                strictly '2.15.2'
            }
        }
    } else {
        implementation("com.fasterxml.jackson.module:jackson-module-scala_$scalaVersion") {
            version {
32 changes: 23 additions & 9 deletions spark/build.gradle
@@ -78,7 +78,9 @@ dependencies {
    implementation "org.apache.spark:spark-streaming_$scalaVersion:$sparkVersion"
    implementation "org.apache.spark:spark-streaming-kafka-0-10_$scalaVersion:$sparkVersion"
    implementation "org.apache.kafka:kafka-clients:2.0.0"
    if (sparkVersion.startsWith("3.4")) {
    if (sparkVersion.startsWith("3.5")) {
        implementation "io.delta:delta-core_$scalaVersion:2.4.0"
    } else if (sparkVersion.startsWith("3.4")) {
        implementation "io.delta:delta-core_$scalaVersion:2.4.0"
    } else if (sparkVersion.startsWith("3.3")) {
        implementation "io.delta:delta-core_$scalaVersion:2.3.0"
@@ -147,6 +149,12 @@ dependencies {
                strictly '2.14.2'
            }
        }
    } else if (sparkVersion.startsWith("3.5")) {
        implementation("com.fasterxml.jackson.core:jackson-databind") {
            version {
                strictly '2.15.2'
            }
        }
    } else {
        implementation("com.fasterxml.jackson.core:jackson-databind") {
            version {
@@ -318,12 +326,16 @@ shadowJar {
    } else if (sparkVersion.startsWith("3")) {
        include dependency("org.apache.kudu:kudu-spark3_$scalaVersion:1.15.0")
    }
    include dependency("io.circe:circe-yaml_$scalaVersion:0.11.0-M1")
    include dependency("io.circe:circe-core_$scalaVersion:0.12.0-M3")
    include dependency("io.circe:circe-generic_$scalaVersion:0.12.0-M3")
    include dependency("io.circe:circe-generic-extras_$scalaVersion:0.12.0-M3")
    include dependency("org.typelevel:cats-core_$scalaVersion:2.0.0-M4")
    include dependency("org.typelevel:cats-kernel_$scalaVersion:2.0.0-M4")
    if (sparkVersion.startsWith("3.5")) {
        addDeps("io.circe:circe-yaml_$scalaVersion:0.15.0-RC1")
        addDeps("io.circe:circe-generic_$scalaVersion:0.15.0-M1")
        addDeps("io.circe:circe-generic-extras_$scalaVersion:0.14.3")
    } else {
        addDeps("io.circe:circe-yaml_$scalaVersion:0.11.0-M1")
        addDeps("io.circe:circe-generic_$scalaVersion:0.12.0-M3")
        addDeps("io.circe:circe-generic-extras_$scalaVersion:0.12.0-M3")
    }

    include dependency("org.postgresql:postgresql:42.2.14")
    include dependency("com.jcraft:jsch:0.1.55")
    include dependency("com.cloudera:ImpalaJDBC41:2.6.3")
@@ -364,7 +376,9 @@ shadowJar {
    include dependency('info.picocli:picocli:4.6.3')
    include dependency("net.liftweb:lift-json_$scalaVersion:3.4.3")

    if (sparkVersion.startsWith("3.4")) {
    if (sparkVersion.startsWith("3.5")) {
        addDeps("io.delta:delta-core_$scalaVersion:2.4.0")
    } else if (sparkVersion.startsWith("3.4")) {
        addDeps("io.delta:delta-core_$scalaVersion:2.4.0")
    } else if (sparkVersion.startsWith("3.3")) {
        addDeps("io.delta:delta-core_$scalaVersion:2.3.0")
@@ -603,7 +617,7 @@ if (scalaVersion.startsWith('2.11')) {
            }
        }
    }
} else if (sparkVersion.startsWith("3.2") || sparkVersion.startsWith("3.3") || sparkVersion.startsWith("3.4")) {
} else if (sparkVersion.startsWith("3.2") || sparkVersion.startsWith("3.3") || sparkVersion.startsWith("3.4") || sparkVersion.startsWith("3.5")) {
    sourceSets {
        main {
            scala {
@@ -57,7 +57,7 @@ object UDFExtension {
    }))

    //TODO: might not work
    val inputEncoders = Seq(Option(RowEncoder.apply(schema).resolveAndBind()))
    //val inputEncoders = Seq(Option(RowEncoder.apply(schema).resolveAndBind()))

    val returnType = ScalaReflection.schemaFor(methodSymbol.returnType).dataType
    val fun = generateFunction(
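The inputEncoders line above was commented out for the Spark 3.5 build; together with the TODO this suggests the RowEncoder.apply(StructType) factory it relied on no longer resolves on 3.5. A minimal sketch of an equivalent on Spark 3.5, assuming the RowEncoder.encoderFor and ExpressionEncoder(AgnosticEncoder) entry points of the 3.5 catalyst API (an assumption, not verified against this repository):

import org.apache.spark.sql.Row
import org.apache.spark.sql.catalyst.encoders.{ExpressionEncoder, RowEncoder}
import org.apache.spark.sql.types.StructType

// Sketch only: build a resolved ExpressionEncoder[Row] from a schema on Spark 3.5.
// Assumes RowEncoder.encoderFor(schema) returns an AgnosticEncoder[Row] and that
// ExpressionEncoder.apply accepts it, as in the Spark 3.5 catalyst API.
def rowExpressionEncoder(schema: StructType): ExpressionEncoder[Row] =
  ExpressionEncoder(RowEncoder.encoderFor(schema))

// val inputEncoders = Seq(Option(rowExpressionEncoder(schema).resolveAndBind()))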
@@ -38,8 +38,8 @@ class DeltaLakeSpec extends DeltaSuit {
  )

  it("delta should work") {
    if (spark.version.startsWith("2.3")) {
      ETLLogger.error("Delta Lake does NOT support Spark 2.3.x")
    if (spark.version.startsWith("2.3") || spark.version.startsWith("3.5")) {
      ETLLogger.error("Delta Lake does NOT support Spark 2.3.x or Spark 3.5.x")
    } else if (spark.version.startsWith("2.4") || spark.version.startsWith("3.0") || spark.version.startsWith("3.1")) {
      ETLLogger.error("Delta Lake does not work well on Spark 2.4.x, " +
        "CREATE TABLE USING delta is not supported by Spark before 3.0.0 and Delta Lake before 0.7.0.")
@@ -15,8 +15,8 @@ class FlyDeltaSpec extends AnyFunSpec
  with BeforeAndAfterEach {

  it("should just run with delta") {
    if (spark.version.startsWith("2.3")) {
      ETLLogger.error("Delta Lake does NOT support Spark 2.3.x")
    if (spark.version.startsWith("2.3") || spark.version.startsWith("3.5")) {
      ETLLogger.error("Delta Lake does NOT support Spark 2.3.x or Spark 3.5.x")
    } else if (spark.version.startsWith("2.4") || spark.version.startsWith("3.0") || spark.version.startsWith("3.1")) {
      ETLLogger.error("Delta Lake does not work well on Spark 2.4.x, " +
        "CREATE TABLE USING delta is not supported by Spark before 3.0.0 and Delta Lake before 0.7.0.")
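Both specs now repeat the same runtime version guard. A small hedged sketch of how that check could be shared between them (the helper name and placement are hypothetical, not part of this commit):

import org.apache.spark.sql.SparkSession

// Hypothetical helper: true only for Spark versions on which these Delta specs run;
// Spark 2.3.x and 3.5.x are skipped, mirroring the guards in DeltaLakeSpec and FlyDeltaSpec.
def deltaSupported(spark: SparkSession): Boolean = {
  val v = spark.version
  !(v.startsWith("2.3") || v.startsWith("3.5"))
}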
