@@ -1,7 +1,9 @@
-import sbt.Keys.{libraryDependencies, *}
+import sbt.Keys.{dependencyOverrides, libraryDependencies, *}
 import sbt.{Test, *}
 import sbt.Tests.{Group, SubProcess}
 
+import scala.collection.Seq
+
 // Notes about a few dependencies - and how we land on versions
 // Our approach is to use the latest stable versions of deps as of today (July 24) and pin to them for a few years
 // this should simplify our build setup, speed up CI and deployment
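For context, spark_3_5 and flink_1_17 referenced in the hunks below are version pins defined elsewhere in this build.sbt. A minimal sketch of the pinning pattern, with assumed values (the real ones sit outside the hunks shown here):

    val spark_3_5 = "3.5.1"   // assumed: stable Spark 3.5.x at pin time
    val flink_1_17 = "1.17.2" // assumed: stable Flink 1.17.x at pin time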
@@ -64,10 +66,10 @@ val spark_all = Seq(
   "org.apache.spark" %% "spark-hive",
   "org.apache.spark" %% "spark-core",
   "org.apache.spark" %% "spark-streaming",
-  "org.apache.spark" %% "spark-sql-kafka-0-10",
+  "org.apache.spark" %% "spark-sql-kafka-0-10"
 ).map(_ % spark_3_5) :+ (
-  "javax.servlet" % "javax.servlet-api" % "3.1.0",
-)
+  "javax.servlet" % "javax.servlet-api" % "3.1.0",
+  )
 val spark_all_provided = spark_all.map(_ % "provided")
 
 val jackson = Seq(
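The % "provided" mapping above keeps the Spark artifacts on the compile and test classpaths but leaves them out of the packaged jar, so the Spark runtime on the cluster supplies them. The spark_sql_provided value consumed by the aggregator hunk below presumably follows the same pattern; a minimal sketch, assuming a definition that is not part of this diff:

    val spark_sql = Seq("org.apache.spark" %% "spark-sql" % spark_3_5)
    val spark_sql_provided = spark_sql.map(_ % "provided")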
@@ -106,10 +108,10 @@ lazy val aggregator = project
   .dependsOn(api.%("compile->compile;test->test"))
   .settings(
     libraryDependencies ++= Seq(
-      "org.apache.datasketches" % "datasketches-java" % "6.1.0",
-      "com.google.code.gson" % "gson" % "2.10.1"
-    ),
-    libraryDependencies ++= spark_sql_provided,
+      "org.apache.datasketches" % "datasketches-java" % "6.1.0",
+      "com.google.code.gson" % "gson" % "2.10.1"
+    ),
+    libraryDependencies ++= spark_sql_provided
   )
 
 // todo add a service module with spark as a hard dependency
@@ -122,7 +124,7 @@ lazy val online = project
       "com.datadoghq" % "java-dogstatsd-client" % "4.4.1",
       "org.rogach" %% "scallop" % "5.1.0",
       "net.jodah" % "typetools" % "0.6.3",
-      "com.github.ben-manes.caffeine" % "caffeine" % "3.1.8",
+      "com.github.ben-manes.caffeine" % "caffeine" % "3.1.8"
     ),
     libraryDependencies ++= jackson,
     libraryDependencies ++= spark_all.map(_ % "provided"),
@@ -158,15 +160,20 @@ lazy val spark = project
     libraryDependencies ++= spark_all_provided,
     libraryDependencies ++= spark_all.map(_ % "test"),
     libraryDependencies += "jakarta.servlet" % "jakarta.servlet-api" % "4.0.3",
-    libraryDependencies += "com.google.guava" % "guava" % "33.3.1-jre"
-  )
+    libraryDependencies += "com.google.guava" % "guava" % "33.3.1-jre",
+    // Ensure consistent versions of logging libraries
+    libraryDependencies += "ch.qos.logback" % "logback-classic" % "1.2.13",
+    dependencyOverrides ++= Seq(
+      "org.slf4j" % "slf4j-api" % "1.7.36"
+    )
+  )
 
 lazy val flink = project
   .dependsOn(aggregator.%("compile->compile;test->test"), online)
   .settings(
     libraryDependencies ++= spark_all,
     libraryDependencies ++= flink_all,
-    libraryDependencies += "org.apache.flink" % "flink-test-utils" % flink_1_17 % Test excludeAll(
+    libraryDependencies += "org.apache.flink" % "flink-test-utils" % flink_1_17 % Test excludeAll (
       ExclusionRule(organization = "org.apache.logging.log4j", name = "log4j-api"),
       ExclusionRule(organization = "org.apache.logging.log4j", name = "log4j-core"),
       ExclusionRule(organization = "org.apache.logging.log4j", name = "log4j-slf4j-impl")
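Two different mechanisms appear in this hunk: dependencyOverrides pins the version that sbt's conflict resolution selects without adding a direct dependency, while excludeAll with ExclusionRule removes transitive artifacts entirely. A minimal sketch contrasting the two inside a .settings block ("com.example" %% "some-lib" is a hypothetical dependency, used only for illustration):

    // Pin whichever slf4j-api wins eviction to 1.7.36; adds no new direct dep.
    dependencyOverrides += "org.slf4j" % "slf4j-api" % "1.7.36",
    // Drop transitive log4j jars from one dependency instead of re-versioning them.
    libraryDependencies += ("com.example" %% "some-lib" % "1.0.0")
      .excludeAll(ExclusionRule(organization = "org.apache.logging.log4j"))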
@@ -258,7 +265,7 @@ lazy val hub = (project in file("hub"))
258
265
// Ensure consistent versions of logging libraries
259
266
dependencyOverrides ++= Seq (
260
267
" org.slf4j" % " slf4j-api" % " 1.7.36" ,
261
- " ch.qos.logback" % " logback-classic" % " 1.2.11 "
268
+ " ch.qos.logback" % " logback-classic" % " 1.2.13 "
262
269
)
263
270
)
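Whether these overrides actually converge the logging stack can be checked from the sbt shell: the built-in evicted task (for example, sbt "hub/evicted") reports conflicting versions and which one won, and on sbt 1.4+ the bundled dependencyTree task prints the full resolved graph to inspect for stray slf4j or logback entries.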