Skip to content

Commit 2c12753

Browse files
committed
rebase
1 parent 78a6e99 commit 2c12753

File tree

1 file changed

+53
-1
lines changed

1 file changed

+53
-1
lines changed
Lines changed: 53 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,58 @@
11
package ai.chronon.integrations.cloud_gcp.test
22

3+
import ai.chronon.integrations.cloud_gcp.GCPFormatProvider
4+
import ai.chronon.integrations.cloud_gcp.BQuery
5+
import ai.chronon.spark.{SparkSessionBuilder, TableUtils}
36
import org.scalatest.funsuite.AnyFunSuite
47
import org.scalatestplus.mockito.MockitoSugar
8+
import org.apache.spark.sql.SparkSession
9+
import org.junit.Assert.assertEquals
10+
import com.google.cloud.hadoop.fs.gcs.GoogleHadoopFileSystem
11+
import com.google.cloud.hadoop.fs.gcs.GoogleHadoopFS
12+
import com.google.cloud.hadoop.fs.gcs.GoogleHadoopFileSystemConfiguration
13+
import com.google.cloud.hadoop.fs.gcs.HadoopCredentialsConfiguration
514

/**
 * Tests for the GCP/BigQuery catalog integration: verifies that a locally-built
 * SparkSession picks up the Chronon GCP format provider and Hive metastore
 * configuration, and that table-format resolution dynamically loads the
 * BigQuery format class.
 */
class BigQueryCatalogTest extends AnyFunSuite with MockitoSugar {

  // Local SparkSession wired for GCP: Chronon format provider, Hive metastore
  // over a local tunnel, and the GCS Hadoop connector. `lazy` so the session is
  // only built when a test actually runs.
  lazy val spark: SparkSession = SparkSessionBuilder.build(
    "BigQuerySparkTest",
    local = true,
    additionalConfig = Some(
      Map(
        "spark.chronon.table.format_provider.class" -> classOf[GCPFormatProvider].getName,
        "hive.metastore.uris" -> "thrift://localhost:9083",
        "spark.chronon.partition.column" -> "c",
        // GCS Hadoop connector wiring. NOTE: "spark.hadoop.fs.gs.impl" was
        // previously listed twice with the same value; in a Map literal the
        // later entry silently overwrites the earlier one, so it is
        // deduplicated here.
        "spark.hadoop.fs.gs.impl" -> classOf[GoogleHadoopFileSystem].getName,
        "spark.hadoop.fs.AbstractFileSystem.gs.impl" -> classOf[GoogleHadoopFS].getName,
        "spark.hadoop.google.cloud.auth.service.account.enable" -> true.toString
      ))
  )

  lazy val tableUtils: TableUtils = TableUtils(spark)

  test("hive uris are set") {
    // Confirms the additionalConfig map above actually reached the SQL conf.
    assertEquals("thrift://localhost:9083", spark.sqlContext.getConf("hive.metastore.uris"))
  }

  test("verify dynamic classloading of GCP providers") {
    import org.junit.Assert.assertTrue
    // tableReadFormat should resolve a native table to the BigQuery format,
    // proving the GCPFormatProvider class was loaded via the configured name.
    assertTrue(tableUtils.tableReadFormat("data.sample_native") match {
      case BQuery(_) => true
      case _         => false
    })
  }

  ignore("integration testing bigquery partitions") {
    // TODO(tchow): This test is ignored because it requires a running instance of the bigquery. Need to figure out stubbing locally.
    // to run this:
    // 1. Set up a tunnel to dataproc federation proxy:
    //    gcloud compute ssh zipline-canary-cluster-m \
    //      --zone us-central1-c \
    //      -- -f -N -L 9083:localhost:9083
    // 2. enable this test and off you go.
    val externalPartitions = tableUtils.partitions("data.checkouts_parquet")
    println(externalPartitions)
    val nativePartitions = tableUtils.partitions("data.sample_native")
    println(nativePartitions)
  }
}

0 commit comments

Comments
 (0)