Skip to content

Commit c68b1f3

Browse files
fmt
Co-authored-by: Thomas Chow <[email protected]>
1 parent 65d666a commit c68b1f3

File tree

6 files changed

+17
-14
lines changed

6 files changed

+17
-14
lines changed

build.sbt

Lines changed: 0 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -224,7 +224,6 @@ lazy val cloud_gcp = project
224224
libraryDependencies += "org.json4s" %% "json4s-native" % "3.7.0-M11",
225225
libraryDependencies += "org.json4s" %% "json4s-core" % "3.7.0-M11",
226226
libraryDependencies += "org.yaml" % "snakeyaml" % "2.3",
227-
// libraryDependencies += "com.google.cloud.spark.bigtable" %% "spark-bigtable" % "0.2.1",
228227
libraryDependencies += "com.google.cloud.bigtable" % "bigtable-hbase-2.x" % "2.14.2",
229228
libraryDependencies ++= avro,
230229
libraryDependencies ++= spark_all_provided,

cloud_gcp/src/main/scala/ai/chronon/integrations/cloud_gcp/DataprocSubmitter.scala

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -102,7 +102,7 @@ object DataprocSubmitter {
102102
.map((jVal) => render(jVal))
103103
.map(compact)
104104
.map(parse(_).extract[SubmitterConf])
105-
.getOrElse(throw new IllegalArgumentException(s"Yaml conf not found or invalid yaml"))
105+
.getOrElse(throw new IllegalArgumentException("Yaml conf not found or invalid yaml"))
106106

107107
}
108108
}

cloud_gcp/src/test/scala/ai/chronon/integrations/cloud_gcp/BigQueryCatalogTest.scala

Lines changed: 2 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -5,13 +5,13 @@ import ai.chronon.spark.TableUtils
55
import com.google.cloud.hadoop.fs.gcs.GoogleHadoopFS
66
import com.google.cloud.hadoop.fs.gcs.GoogleHadoopFileSystem
77
import com.google.cloud.hadoop.fs.gcs.GoogleHadoopFileSystemConfiguration
8+
import com.google.cloud.hadoop.fs.gcs.HadoopConfigurationProperty
9+
import com.google.cloud.hadoop.gcsio.GoogleCloudStorageFileSystem
810
import org.apache.spark.sql.SparkSession
911
import org.junit.Assert.assertEquals
1012
import org.junit.Assert.assertTrue
1113
import org.scalatest.funsuite.AnyFunSuite
1214
import org.scalatestplus.mockito.MockitoSugar
13-
import com.google.cloud.hadoop.gcsio.GoogleCloudStorageFileSystem
14-
import com.google.cloud.hadoop.fs.gcs.HadoopConfigurationProperty
1515

1616
class BigQueryCatalogTest extends AnyFunSuite with MockitoSugar {
1717

cloud_gcp/src/test/scala/ai/chronon/integrations/cloud_gcp/GCSFormatTest.scala

Lines changed: 6 additions & 3 deletions
Original file line number | Diff line number | Diff line change
@@ -1,10 +1,13 @@
11
package ai.chronon.integrations.cloud_gcp
22

33
import ai.chronon.spark.SparkSessionBuilder
4-
import org.apache.spark.sql.{Row, SaveMode, SparkSession}
4+
import org.apache.spark.sql.Row
5+
import org.apache.spark.sql.SaveMode
6+
import org.apache.spark.sql.SparkSession
57
import org.apache.spark.sql.functions._
6-
import org.apache.spark.sql.types.{StringType, StructField, StructType}
7-
8+
import org.apache.spark.sql.types.StringType
9+
import org.apache.spark.sql.types.StructField
10+
import org.apache.spark.sql.types.StructType
811
import org.junit.Assert.assertEquals
912
import org.scalatest.funsuite.AnyFunSuite
1013

spark/src/main/scala/ai/chronon/spark/Driver.scala

Lines changed: 3 additions & 3 deletions
Original file line number | Diff line number | Diff line change
@@ -49,11 +49,14 @@ import org.apache.spark.sql.streaming.StreamingQueryListener
4949
import org.apache.spark.sql.streaming.StreamingQueryListener.QueryProgressEvent
5050
import org.apache.spark.sql.streaming.StreamingQueryListener.QueryStartedEvent
5151
import org.apache.spark.sql.streaming.StreamingQueryListener.QueryTerminatedEvent
52+
import org.json4s._
53+
import org.json4s.jackson.JsonMethods._
5254
import org.rogach.scallop.ScallopConf
5355
import org.rogach.scallop.ScallopOption
5456
import org.rogach.scallop.Subcommand
5557
import org.slf4j.Logger
5658
import org.slf4j.LoggerFactory
59+
import org.yaml.snakeyaml.Yaml
5760

5861
import java.io.File
5962
import java.nio.file.Files
@@ -69,9 +72,6 @@ import scala.reflect.internal.util.ScalaClassLoader
6972
import scala.util.Failure
7073
import scala.util.Success
7174
import scala.util.Try
72-
import org.json4s._
73-
import org.json4s.jackson.JsonMethods._
74-
import org.yaml.snakeyaml.Yaml
7575

7676
// useful to override spark.sql.extensions args - there is no good way to unset that conf apparently
7777
// so we give it dummy extensions

spark/src/test/scala/ai/chronon/spark/test/OfflineSubcommandTest.scala

Lines changed: 5 additions & 4 deletions
Original file line number | Diff line number | Diff line change
@@ -18,17 +18,18 @@ package ai.chronon.spark.test
1818

1919
import ai.chronon.spark.Driver.OfflineSubcommand
2020
import org.apache.spark.sql.SparkSession
21+
import org.json4s._
22+
import org.json4s.jackson.JsonMethods._
2123
import org.junit.Assert.assertEquals
2224
import org.junit.Assert.assertTrue
2325
import org.junit.Test
2426
import org.rogach.scallop.ScallopConf
25-
import org.json4s._
26-
import org.json4s.jackson.JsonMethods._
2727
import org.yaml.snakeyaml.Yaml
28-
import collection.JavaConverters._
2928

3029
import scala.io.Source
3130

31+
import collection.JavaConverters._
32+
3233
class OfflineSubcommandTest {
3334

3435
class TestArgs(args: Array[String]) extends ScallopConf(args) with OfflineSubcommand {
@@ -83,7 +84,7 @@ class OfflineSubcommandTest {
8384
.map((jVal) => render(jVal))
8485
.map(compact)
8586
.map(parse(_).extract[Map[String, String]])
86-
.getOrElse(throw new IllegalArgumentException(s"Yaml conf not found or invalid yaml"))
87+
.getOrElse(throw new IllegalArgumentException("Yaml conf not found or invalid yaml"))
8788

8889
val confKey = "spark.chronon.table.format_provider.class"
8990
assertEquals(confs.get(confKey), sparkSession.conf.getOption(confKey))

0 commit comments

Comments (0)