@@ -17,6 +17,8 @@ import sbt.Tests.{Group, SubProcess}
// java incompatibility is probably not an issue, hopefully we can cross build flink 1.17 & 1.18 without code changes

lazy val scala_2_12 = "2.12.18"
+ lazy val scala_2_13 = "2.13.14"
+
// spark deps: https://mvnrepository.com/artifact/org.apache.spark/spark-core_2.12/3.5.0
// avro 1.11.2, jackson: 2.15.2
lazy val spark_3_5 = "3.5.1"
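// Sketch (not part of this diff): one way the flink 1.17 / 1.18 cross build mentioned in the
// comment above could be wired up. The version vals and the -Dflink.version switch are
// assumptions for illustration, not existing settings in this build.
lazy val flink_1_17 = "1.17.2"
lazy val flink_1_18 = "1.18.1"
// pick the Flink version at sbt load time, defaulting to 1.17
lazy val flink_version = sys.props.getOrElse("flink.version", flink_1_17)
// e.g. `sbt -Dflink.version=1.18.1 flink/test` would compile and test against 1.18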
@@ -48,7 +50,7 @@ inThisBuild(
lazy val supportedVersions = List(scala_2_12) // List(scala211, scala212, scala213)

lazy val root = (project in file("."))
-   .aggregate(api, aggregator, online, spark, flink, cloud_gcp)
+   .aggregate(api, aggregator, online, spark, flink, cloud_gcp, hub)
  .settings(name := "chronon")

val spark_sql = Seq(
@@ -91,7 +93,6 @@ lazy val api = project
    }.taskValue,
    crossScalaVersions := supportedVersions,
    libraryDependencies ++= spark_sql_provided,
-
    libraryDependencies ++= Seq(
      "org.apache.thrift" % "libthrift" % "0.13.0", // cannot upgrade this without breaking compatibility
      "org.scala-lang" % "scala-reflect" % scalaVersion.value,
@@ -100,16 +101,16 @@ lazy val api = project
      "org.scalatest" %% "scalatest" % "3.2.19" % "test",
      "org.scalatestplus" %% "mockito-3-4" % "3.2.10.0" % "test"
    )
-   )
+   )

lazy val aggregator = project
  .dependsOn(api.%("compile->compile;test->test"))
  .settings(
    libraryDependencies ++= Seq(
-       "com.yahoo.datasketches" % "sketches-core" % "0.13.4",
-       "com.google.code.gson" % "gson" % "2.10.1"
-     ),
-     libraryDependencies ++= spark_sql_provided,
+       "com.yahoo.datasketches" % "sketches-core" % "0.13.4",
+       "com.google.code.gson" % "gson" % "2.10.1"
+     ),
+     libraryDependencies ++= spark_sql_provided
  )

// todo add a service module with spark as a hard dependency
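// Sketch (not part of this diff) of the service module the todo above describes, assuming
// it should pull Spark in as a hard compile dependency instead of "provided"; the module
// name and its dependency set are hypothetical.
lazy val service = (project in file("service"))
  .dependsOn(online, spark)
  .settings(
    crossScalaVersions := supportedVersions,
    libraryDependencies ++= spark_all // hard dependency, unlike the "provided" scope elsewhere
  )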
@@ -129,7 +130,6 @@ lazy val online = project
    libraryDependencies ++= flink_all.map(_ % "provided")
  )

-
lazy val tmp_warehouse = "/tmp/chronon/"
def cleanSparkMeta(): Unit = {
  Folder.clean(file(".") / "spark" / "spark-warehouse",
@@ -158,7 +158,7 @@ lazy val spark = project
    crossScalaVersions := supportedVersions,
    libraryDependencies ++= spark_all_provided,
    libraryDependencies ++= spark_all.map(_ % "test"),
-     libraryDependencies += "jakarta.servlet" % "jakarta.servlet-api" % "4.0.3",
+     libraryDependencies += "jakarta.servlet" % "jakarta.servlet-api" % "4.0.3"
  )

lazy val flink = project
@@ -177,12 +177,24 @@ lazy val cloud_gcp = project
    libraryDependencies ++= spark_all
  )

+ lazy val hub = (project in file("hub"))
+   .enablePlugins(PlayScala)
+   .settings(
+     name := "hub",
+     // play dropped support for Scala 2.12 in release 2.9
+     scalaVersion := scala_2_13,
+     libraryDependencies ++= Seq(
+       guice,
+       "org.scalatestplus.play" %% "scalatestplus-play" % "5.1.0" % Test
+     )
+   )
+
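// Sketch (not part of this diff): with PlayScala and guice enabled above, a first endpoint in
// the hub module could look like the commented snippet below. HealthController and the /ping
// route are hypothetical names, shown only to illustrate what the new dependencies enable;
// `sbt hub/run` starts the Play dev server and `sbt hub/test` runs the scalatestplus-play suite.
//
//   // hub/app/controllers/HealthController.scala
//   package controllers
//   import javax.inject.{Inject, Singleton}
//   import play.api.mvc.{AbstractController, ControllerComponents}
//
//   @Singleton
//   class HealthController @Inject() (cc: ControllerComponents) extends AbstractController(cc) {
//     def ping = Action { Ok("pong") }
//   }
//
//   // hub/conf/routes
//   GET  /ping  controllers.HealthController.ping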
ThisBuild / assemblyMergeStrategy := {
  case PathList("META-INF", "MANIFEST.MF") => MergeStrategy.discard
-   case PathList("META-INF", _*) => MergeStrategy.filterDistinctLines
+   case PathList("META-INF", _*) => MergeStrategy.filterDistinctLines
  case "plugin.xml" => MergeStrategy.last
-   case PathList("com", "fasterxml", _*) => MergeStrategy.last
-   case PathList("com", "google", _*) => MergeStrategy.last
+   case PathList("com", "fasterxml", _*) => MergeStrategy.last
+   case PathList("com", "google", _*) => MergeStrategy.last
  case _ => MergeStrategy.first
}
exportJars := true
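// Note (not part of this diff): these merge rules are consumed by sbt-assembly when building
// fat jars (e.g. `sbt spark/assembly`, assuming the assembly plugin is enabled on that module),
// and `exportJars := true` makes inter-project dependencies resolve to packaged jars. A stricter
// variant of the catch-all would delegate unmatched paths to the plugin's default strategy
// instead of silently taking the first duplicate, e.g.:
//   case x =>
//     val defaultStrategy = (ThisBuild / assemblyMergeStrategy).value
//     defaultStrategy(x)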