Skip to content

Commit 7fcb67e

Browse files
update test
Co-authored-by: Thomas Chow <[email protected]>
1 parent 4982d1d commit 7fcb67e

File tree

5 files changed

+6
-10
lines changed


cloud_gcp/BUILD.bazel

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -35,7 +35,7 @@ shared_deps = [
3535
maven_artifact("org.threeten:threetenbp"),
3636
maven_artifact("org.apache.kafka:kafka-clients"),
3737
maven_artifact("com.google.cloud.spark:spark-3.5-bigquery"),
38-
scala_artifact_with_suffix("com.google.cloud.spark:spark-bigquery"),
38+
# scala_artifact_with_suffix("com.google.cloud.spark:spark-bigquery"),
3939
# scala_artifact_with_suffix("com.google.cloud.spark:spark-bigquery-with-dependencies"),
4040
scala_artifact_with_suffix("org.apache.iceberg:iceberg-spark-runtime-3.5"),
4141
]
Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,2 +1 @@
11
com.google.cloud.spark.bigquery.v2.Spark35BigQueryTableProvider
2-
com.google.cloud.spark.bigquery.BigQueryRelationProvider

cloud_gcp/src/main/scala/ai/chronon/integrations/cloud_gcp/BigQueryFormat.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -20,7 +20,7 @@ case class BigQueryFormat(project: String, bqClient: BigQuery, override val opti
2020
extends Format {
2121
override def name: String = "bigquery"
2222

23-
private val bqFormat = "com.google.cloud.spark.bigquery.BigQueryRelationProvider"
23+
private val bqFormat = "com.google.cloud.spark.bigquery.v2.Spark35BigQueryTableProvider"
2424

2525
override def alterTableProperties(tableName: String,
2626
tableProperties: Map[String, String]): (String => Unit) => Unit = {

cloud_gcp/src/main/scala/ai/chronon/integrations/cloud_gcp/GcpFormatProvider.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -63,7 +63,7 @@ case class GcpFormatProvider(sparkSession: SparkSession) extends FormatProvider
6363
case _ => throw new IllegalStateException(s"Cannot support table of type: ${table.getFriendlyName}")
6464
}
6565

66-
private def format(tableName: String): scala.Option[Format] = {
66+
private[cloud_gcp] def format(tableName: String): scala.Option[Format] = {
6767

6868
val btTableIdentifier: TableId = BigQueryUtil.parseTableId(tableName)
6969
val table = scala.Option(bigQueryClient.getTable(btTableIdentifier.getDataset, btTableIdentifier.getTable))

cloud_gcp/src/test/scala/ai/chronon/integrations/cloud_gcp/BigQueryCatalogTest.scala

Lines changed: 3 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -98,16 +98,13 @@ class BigQueryCatalogTest extends AnyFlatSpec with MockitoSugar {
9898

9999
it should "integration testing bigquery partitions" ignore {
100100
// TODO(tchow): This test is ignored because it requires a running instance of the bigquery. Need to figure out stubbing locally.
101-
// to run this:
102-
// 1. Set up a tunnel to dataproc federation proxy:
103-
// gcloud compute ssh zipline-canary-cluster-m \
104-
// --zone us-central1-c \
105-
// -- -f -N -L 9083:localhost:9083
106-
// 2. enable this test and off you go.
107101
val externalPartitions = tableUtils.partitions("data.checkouts_parquet")
108102
println(externalPartitions)
109103
val nativePartitions = tableUtils.partitions("data.sample_native")
110104
println(nativePartitions)
105+
val tblFormat = GcpFormatProvider(spark).format("data.purchases").get
106+
val partitions = tblFormat.partitions("data.purchases")(spark)
107+
assertEquals(partitions.flatMap(_.keys), Seq("ds"))
111108
}
112109

113110
it should "kryo serialization for ResolvingFileIO" in {

0 commit comments

Comments (0)