Commit 9269a22 (parent: 900a7a1)

wip

Co-authored-by: Thomas Chow <[email protected]>

2 files changed: +10 −4 lines

api/python/ai/chronon/resources/gcp/zipline-cli-install.sh
(file mode changed: 100644 → 100755; 2 additions, 2 deletions)

@@ -49,8 +49,8 @@ done
 
 gcloud storage cp "${ARTIFACT_PREFIX%/}/release/$VERSION/wheels/zipline_ai-$VERSION-py3-none-any.whl" .
 
+trap 'rm -f ./zipline_ai-$VERSION-py3-none-any.whl' EXIT
+
 pip3 uninstall zipline-ai
 
 pip3 install ./zipline_ai-$VERSION-py3-none-any.whl
-
-trap 'rm -f ./zipline_ai-$VERSION-py3-none-any.whl' EXIT
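
Why the trap was moved: a Bash trap registered on EXIT runs whenever the shell exits, including when an intermediate command fails and aborts the script, so registering the cleanup immediately after the wheel is downloaded guarantees the file is removed even if pip3 uninstall or pip3 install fails. In the old ordering, a failed install would exit before the trap was ever registered and leave the wheel behind. A minimal sketch of the pattern (the file name and shell options are illustrative, not taken from the script):

#!/usr/bin/env bash
set -euo pipefail

# Create a temporary artifact, then immediately register its cleanup.
touch ./example-artifact.whl
trap 'rm -f ./example-artifact.whl' EXIT

# Even if a later step fails and aborts the script, the EXIT trap still
# fires on the way out and the temporary file is removed.
false   # simulated failure: the script exits here, then the trap runs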

cloud_gcp/src/main/scala/ai/chronon/integrations/cloud_gcp/DelegatingBigQueryMetastoreCatalog.scala
(8 additions, 2 deletions)

@@ -24,6 +24,7 @@ import java.util
 import scala.jdk.CollectionConverters._
 import org.apache.spark.sql.catalyst.analysis.NoSuchTableException
 import scala.util.{Failure, Success, Try}
+import org.slf4j.{Logger, LoggerFactory}
 
 /** Galactus catalog that allows us to interact with BigQuery metastore as a spark catalog. This allows for
   * querying of a variety of table types directly in spark sql or the dataframe api.
@@ -47,6 +48,8 @@ class DelegatingBigQueryMetastoreCatalog extends TableCatalog with SupportsNamespaces
   @transient private lazy val icebergCatalog: SparkCatalog = new SparkCatalog()
   @transient private lazy val connectorCatalog: BigQueryCatalog = new BigQueryCatalog()
 
+  @transient protected lazy val logger: Logger = LoggerFactory.getLogger(getClass)
+
   private var catalogName: String =
     null // This corresponds to `spark_catalog` in `spark.sql.catalog.spark_catalog`. This is necessary for spark to correctly choose which implementation to use.
 
@@ -118,14 +121,17 @@ class DelegatingBigQueryMetastoreCatalog extends TableCatalog with SupportsNamespaces
         )
         fileBasedTable
       }
-      case _: StandardTableDefinition => {
+      case stTable: StandardTableDefinition => {
        //todo(tchow): Support partitioning
 
        // Hack because there's a bug in the BigQueryCatalog where they ignore the projectId.
        // See: https://github.com/GoogleCloudDataproc/spark-bigquery-connector/pull/1340
        // ideally it should be the below:
        // val connectorTable = connectorCatalog.loadTable(ident)
-      connectorCatalog.loadTable(Identifier.of(Array(tId.getDataset), tId.getTable))
+      val nativeTable = connectorCatalog.loadTable(Identifier.of(Array(tId.getDataset), tId.getTable))
+      logger.info("Table columns:")
+      logger.info(f"${nativeTable.columns()}")
+      nativeTable
       }
       case _ => throw new IllegalStateException(s"Cannot support table of type: ${table.getDefinition}")
     }
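
For context, the @transient lazy val logger added above is the usual way to attach an slf4j logger to a class that Spark may serialize: org.slf4j.Logger is not Serializable, so the field is excluded from serialization and lazily re-created in whichever JVM first touches it. A minimal, self-contained sketch of the pattern (the class and method names are illustrative, not part of this commit):

import org.slf4j.{Logger, LoggerFactory}

class ExampleCatalog extends Serializable {
  // @transient keeps the non-serializable Logger out of the serialized form;
  // lazy val re-creates it on first use after deserialization.
  @transient private lazy val logger: Logger = LoggerFactory.getLogger(getClass)

  def loadTable(name: String): Unit =
    logger.info(s"loading table: $name")
}

One caveat with the new logging lines: if Table.columns() returns an array (as it does in recent Spark versions), interpolating it directly with f"${nativeTable.columns()}" logs the JVM's default array toString rather than the column definitions; something like nativeTable.columns().mkString(", ") would produce more readable output.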
