
Commit 387d643

clean up
Co-authored-by: Thomas Chow <[email protected]>
1 parent 51c1e78 commit 387d643

2 files changed: +4 −6 lines changed


cloud_gcp/src/main/scala/ai/chronon/integrations/cloud_gcp/DelegatingBigQueryMetastoreCatalog.scala

Lines changed: 1 addition & 1 deletion

@@ -95,7 +95,6 @@ class DelegatingBigQueryMetastoreCatalog extends CatalogExtension {
     Try { icebergCatalog.loadTable(ident) }
       .recover {
         case _ => {
-          val connectorTable = connectorCatalog.loadTable(ident)
           val tId = ident.namespace().toList match {
             case database :: Nil          => TableId.of(database, ident.name())
             case project :: database :: Nil => TableId.of(project, database, ident.name())
@@ -122,6 +121,7 @@ class DelegatingBigQueryMetastoreCatalog extends CatalogExtension {
                 Map(TableCatalog.PROP_EXTERNAL -> "true", TableCatalog.PROP_LOCATION -> uri))
             }
             case _: StandardTableDefinition => {
+              val connectorTable = connectorCatalog.loadTable(ident)
              DelegatingTable(connectorTable, Map(TableCatalog.PROP_EXTERNAL -> "false"))
             }
             case _ => throw new IllegalStateException(s"Cannot support table of type: ${table.getFriendlyName}")
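For context, this hunk moves the connectorCatalog.loadTable(ident) call out of the shared recover block and into the only branch that uses its result, so tables handled by the external-table branch no longer pay for a connector load they never consume. A minimal sketch of that refactor pattern, with hypothetical names (Definition, External, Standard, loadConnector) standing in for the real BigQuery definition types:

// Sketch only: hypothetical stand-ins, not the actual Chronon/BigQuery APIs.
sealed trait Definition
case class External(uri: String) extends Definition
case object Standard extends Definition

// Before the change, the equivalent of `loadConnector()` ran above the match
// for every case; after it, the load is deferred into the branch that needs it.
def describe(defn: Definition, loadConnector: () => String): Map[String, String] =
  defn match {
    case External(uri) =>
      Map("external" -> "true", "location" -> uri) // no connector load needed here
    case Standard =>
      val connectorTable = loadConnector()         // deferred, branch-local load
      Map("external" -> "false", "name" -> connectorTable)
  }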

spark/src/main/scala/ai/chronon/spark/TableUtils.scala

Lines changed: 3 additions & 5 deletions

@@ -134,7 +134,7 @@ class TableUtils(@transient val sparkSession: SparkSession) extends Serializable
   }

   def loadTable(tableName: String): DataFrame = {
-    sparkSession.sql(s"select * from ${tableName}")
+    sparkSession.sql(s"SELECT * FROM ${tableName}")
     // sparkSession.read.load(DataPointer.from(tableName, sparkSession))
   }

@@ -741,16 +741,14 @@ class TableUtils(@transient val sparkSession: SparkSession) extends Serializable
                      rangeWheres: Seq[String],
                      fallbackSelects: Option[Map[String, String]] = None): DataFrame = {

-    val dp = DataPointer.from(table, sparkSession)
     var df = sparkSession.sql(s"SELECT * FROM ${table}")
+    // val dp = DataPointer.from(table, sparkSession)
     // var df = sparkSession.read.load(dp)

     val selects = QueryUtils.buildSelects(selectMap, fallbackSelects)

     logger.info(s""" Scanning data:
-                   | table: ${dp.tableOrPath.green}
-                   | options: ${dp.readOptions}
-                   | format: ${dp.readFormat}
+                   | table: ${table.green}
                    | selects:
                    | ${selects.mkString("\n ").green}
                    | wheres:
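For context, the scan path above now resolves tables through plain Spark SQL rather than the DataPointer read, and the log line reports the raw table name instead of DataPointer fields. A minimal sketch of the resulting read path, assuming only a live SparkSession and a table name the session's catalog can resolve; the commented lines mirror the DataPointer variant the commit disables:

import org.apache.spark.sql.{DataFrame, SparkSession}

// Sketch of the post-change read path; `scan` is an illustrative helper name,
// not a method from TableUtils.
def scan(sparkSession: SparkSession, table: String): DataFrame = {
  val df = sparkSession.sql(s"SELECT * FROM ${table}")
  // Disabled DataPointer variant from the diff:
  //   val dp = DataPointer.from(table, sparkSession)
  //   var df = sparkSession.read.load(dp)
  df
}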
