@@ -3,13 +3,13 @@ package ai.chronon.integrations.cloud_gcp
import ai.chronon.spark.Format
import ai.chronon.spark.FormatProvider
import ai.chronon.spark.Hive
- import org.apache.spark.sql.SparkSession
- import com.google.cloud.bigquery.connector.common.BigQueryUtil
import com.google.cloud.bigquery.BigQueryOptions
- import com.google.cloud.spark.bigquery.repackaged.com.google.cloud.bigquery.TableId
- import com.google.cloud.bigquery.{TableId => BTableId}
import com.google.cloud.bigquery.ExternalTableDefinition
import com.google.cloud.bigquery.StandardTableDefinition
+ import com.google.cloud.bigquery.connector.common.BigQueryUtil
+ import com.google.cloud.bigquery.{TableId => BTableId}
+ import com.google.cloud.spark.bigquery.repackaged.com.google.cloud.bigquery.TableId
+ import org.apache.spark.sql.SparkSession

case class GcpFormatProvider(sparkSession: SparkSession) extends FormatProvider {
  lazy val bigQueryClient = BigQueryOptions.getDefaultInstance.getService
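A note on why both `TableId` imports survive the reordering: the Spark connector's `BigQueryUtil` helper deals in the connector's repackaged (shaded) `TableId`, while the plain `google-cloud-bigquery` client expects the unshaded `com.google.cloud.bigquery.TableId` (aliased above as `BTableId`), so the two have to be bridged before calling the client. A minimal sketch of that bridging, assuming a fully qualified "project.dataset.table" string and the shaded connector jar on the classpath; the object name and method are illustrative, not the Chronon code itself:

```scala
import com.google.cloud.bigquery.{BigQueryOptions, Table, TableId => BTableId}
import com.google.cloud.bigquery.connector.common.BigQueryUtil
import com.google.cloud.spark.bigquery.repackaged.com.google.cloud.bigquery.TableId

object TableLookupSketch {
  def lookup(tableName: String): Option[Table] = {
    // The connector helper parses "project.dataset.table" into its shaded TableId...
    val shadedTI: TableId = BigQueryUtil.parseTableId(tableName)
    // ...which is rebuilt as the client library's TableId before hitting the BigQuery API.
    val unshadedTI: BTableId =
      BTableId.of(shadedTI.getProject, shadedTI.getDataset, shadedTI.getTable)
    val bigQueryClient = BigQueryOptions.getDefaultInstance.getService
    Option(bigQueryClient.getTable(unshadedTI)) // getTable returns null when the table is absent
  }
}
```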
@@ -24,8 +24,8 @@ case class GcpFormatProvider(sparkSession: SparkSession) extends FormatProvider
    tableOpt match {
      case Some(table) => {
        table.getDefinition match {
-         case ExternalTableDefinition => BQuery(unshadedTI.getProject)
-         case StandardTableDefinition => GCS(unshadedTI.getProject)
+         case _: ExternalTableDefinition => BQuery(unshadedTI.getProject)
+         case _: StandardTableDefinition => GCS(unshadedTI.getProject)
        }
      }
      case None => Hive
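The second hunk is the substantive fix. In Scala, a bare uppercase name in a pattern (`case ExternalTableDefinition =>`) is a stable-identifier pattern: it either fails to compile when no value of that name exists (as with these Java classes) or compares the scrutinee against a singleton object, so it never inspects the runtime type of `table.getDefinition`. The type pattern `case _: ExternalTableDefinition =>` matches any instance of the class, which is what the dispatch to `BQuery` vs `GCS` needs. A small self-contained sketch of the difference, using hypothetical stand-in classes rather than the real BigQuery types:

```scala
object PatternDemo {
  class StandardDef          // stand-ins for the BigQuery TableDefinition subclasses
  class ExternalDef
  object ExternalDef         // a same-named singleton, to show what the old pattern compares against

  def classify(definition: Any): String = definition match {
    case ExternalDef    => "matched the singleton object itself, never an instance"
    case _: ExternalDef => "external table definition"
    case _: StandardDef => "standard table definition"
    case _              => "something else"
  }

  def main(args: Array[String]): Unit = {
    println(classify(new ExternalDef)) // "external table definition": the object case is skipped
    println(classify(new StandardDef)) // "standard table definition"
  }
}
```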