@@ -27,7 +27,7 @@ val sparkShortVersion: Def.Initialize[String] = Def.setting {
 }
 
 val jacksonVersion: Def.Initialize[String] = Def.setting {
-  Option(System.getenv("JACKSON_VERSION")).getOrElse("2.12.7")
+  Option(System.getenv("JACKSON_VERSION")).getOrElse("2.16.2")
 }
 
 val sparkTestVersion: Def.Initialize[String] = Def.setting {
@@ -58,14 +58,14 @@ lazy val excludeJacksonModule = ExclusionRule(organization = "com.fasterxml.
 lazy val excludeAWS = ExclusionRule(organization = "com.amazonaws")
 
 libraryDependencies ++= Seq(
-  "org.apache.spark" %% "spark-core" % sparkVersion.value % "provided",
-  "org.apache.spark" %% "spark-sql" % sparkVersion.value % "provided",
-  "org.apache.spark" %% "spark-mllib" % sparkVersion.value % "provided",
-  "org.apache.spark" %% "spark-hive" % sparkVersion.value % "provided",
-  "org.apache.spark" %% "spark-sql-kafka-0-10" % sparkVersion.value % "provided",
-  "org.apache.spark" %% "spark-streaming" % sparkVersion.value % "provided",
-  "org.apache.spark" %% "spark-avro" % sparkVersion.value % "provided",
-  "org.apache.spark" %% "spark-hadoop-cloud" % sparkVersion.value % "provided" excludeAll (excludeAWS),
+  "org.apache.spark" %% "spark-core" % sparkVersion.value % "provided" excludeAll (excludeJacksonCore, excludeJacksonDataFormat, excludeJacksonDataType, excludeJacksonModule),
+  "org.apache.spark" %% "spark-sql" % sparkVersion.value % "provided" excludeAll (excludeJacksonCore, excludeJacksonDataFormat, excludeJacksonDataType, excludeJacksonModule),
+  "org.apache.spark" %% "spark-mllib" % sparkVersion.value % "provided" excludeAll (excludeJacksonCore, excludeJacksonDataFormat, excludeJacksonDataType, excludeJacksonModule),
+  "org.apache.spark" %% "spark-hive" % sparkVersion.value % "provided" excludeAll (excludeJacksonCore, excludeJacksonDataFormat, excludeJacksonDataType, excludeJacksonModule),
+  "org.apache.spark" %% "spark-sql-kafka-0-10" % sparkVersion.value % "provided" excludeAll (excludeJacksonCore, excludeJacksonDataFormat, excludeJacksonDataType, excludeJacksonModule),
+  "org.apache.spark" %% "spark-streaming" % sparkVersion.value % "provided" excludeAll (excludeJacksonCore, excludeJacksonDataFormat, excludeJacksonDataType, excludeJacksonModule),
+  "org.apache.spark" %% "spark-avro" % sparkVersion.value % "provided" excludeAll (excludeJacksonCore, excludeJacksonDataFormat, excludeJacksonDataType, excludeJacksonModule),
+  "org.apache.spark" %% "spark-hadoop-cloud" % sparkVersion.value % "provided" excludeAll (excludeAWS) excludeAll (excludeJacksonCore, excludeJacksonDataFormat, excludeJacksonDataType, excludeJacksonModule),
   "com.holdenkarau" %% "spark-testing-base" % sparkTestVersion.value % "test" excludeAll (excludeSpark),
   "com.github.scopt" %% "scopt" % "4.1.0",
   "org.scala-lang" % "scala-library" % scalaVersion.value,
@@ -86,19 +86,19 @@ libraryDependencies ++= Seq(
   "com.redislabs" %% "spark-redis" % "3.1.0",
   "org.apache.kafka" %% "kafka" % "3.9.0",
   "za.co.absa" %% "abris" % "3.2.2" % "provided" excludeAll (excludeAvro, excludeSpark),
-  "org.apache.hudi" %% "hudi-spark-bundle" % "0.10.0" % "provided",
-  "org.apache.parquet" % "parquet-avro" % "1.15.0" % "provided",
+  "org.apache.hudi" %% "hudi-spark-bundle" % "0.10.0" % "provided",
+  "org.apache.parquet" % "parquet-avro" % "1.15.0" % "provided" excludeAll (excludeJacksonCore, excludeJacksonDataFormat, excludeJacksonDataType, excludeJacksonModule),
   "com.amazon.deequ" % "deequ" % ("2.0.9-spark-" + sparkShortVersion.value) excludeAll (excludeSpark, excludeScalanlp),
-  "org.apache.avro" % "avro" % "1.12.0" % "provided",
-  "com.databricks" %% "spark-xml" % "0.18.0",
-  "com.outr" %% "hasher" % "1.2.2",
-  "org.mongodb.spark" %% "mongo-spark-connector" % "10.4.1",
-  "mysql" % "mysql-connector-java" % "8.0.33",
-  "org.apache.logging.log4j" % "log4j-api" % "2.24.3" % "provided",
-  "org.apache.logging.log4j" % "log4j-core" % "2.24.3" % "provided",
-  "org.apache.logging.log4j" % "log4j-slf4j-impl" % "2.24.3" % "provided",
-  "org.postgresql" % "postgresql" % "42.7.5",
-  "io.delta" %% "delta-core" % "2.4.0",
+  "org.apache.avro" % "avro" % "1.12.0" % "provided" excludeAll (excludeJacksonCore, excludeJacksonDataFormat, excludeJacksonDataType, excludeJacksonModule),
+  "com.databricks" %% "spark-xml" % "0.18.0",
+  "com.outr" %% "hasher" % "1.2.2",
+  "org.mongodb.spark" %% "mongo-spark-connector" % "10.4.1",
+  "mysql" % "mysql-connector-java" % "8.0.33",
+  "org.apache.logging.log4j" % "log4j-api" % "2.24.3" % "provided" excludeAll (excludeJacksonCore, excludeJacksonDataFormat, excludeJacksonDataType, excludeJacksonModule),
+  "org.apache.logging.log4j" % "log4j-core" % "2.24.3" % "provided" excludeAll (excludeJacksonCore, excludeJacksonDataFormat, excludeJacksonDataType, excludeJacksonModule),
+  "org.apache.logging.log4j" % "log4j-slf4j-impl" % "2.24.3" % "provided" excludeAll (excludeJacksonCore, excludeJacksonDataFormat, excludeJacksonDataType, excludeJacksonModule),
+  "org.postgresql" % "postgresql" % "42.7.5",
+  "io.delta" %% "delta-core" % "2.4.0",
   "io.vertx" % "vertx-json-schema" % "4.5.12" excludeAll (excludeJacksonCore, excludeJacksonDataFormat, excludeJacksonDataType, excludeJacksonModule),
   "com.google.guava" % "guava" % "25.1-jre",
   "org.apache.sedona" %% ("sedona-spark-" + sparkShortVersion.value) % "1.6.1" excludeAll (excludeSpark),
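Note on the change above: the Spark and other heavy dependencies now exclude their transitive Jackson artifacts, and the build relies on a single jacksonVersion setting (bumped from 2.12.7 to 2.16.2). The definitions of excludeJacksonCore, excludeJacksonDataFormat and excludeJacksonDataType are not visible in these hunks (only the truncated definition of excludeJacksonModule appears in a hunk header), so the sketch below assumes the standard Jackson group IDs; the dependencyOverrides block is likewise only an illustration of how the excluded modules could be pinned back to jacksonVersion, not necessarily what this build does.

// Assumed exclusion rules -- the actual organizations are not shown in this diff.
lazy val excludeJacksonCore       = ExclusionRule(organization = "com.fasterxml.jackson.core")
lazy val excludeJacksonDataFormat = ExclusionRule(organization = "com.fasterxml.jackson.dataformat")
lazy val excludeJacksonDataType   = ExclusionRule(organization = "com.fasterxml.jackson.datatype")
lazy val excludeJacksonModule     = ExclusionRule(organization = "com.fasterxml.jackson.module")

// Illustrative only: pin the excluded Jackson artifacts to the single jacksonVersion
// setting (2.16.2 after this change) so every module resolves to the same version.
dependencyOverrides ++= Seq(
  "com.fasterxml.jackson.core"   %  "jackson-core"         % jacksonVersion.value,
  "com.fasterxml.jackson.core"   %  "jackson-databind"     % jacksonVersion.value,
  "com.fasterxml.jackson.module" %% "jackson-module-scala" % jacksonVersion.value
)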