
Commit 46b6ccb

bersprockets authored and MaxGekk committed
[SPARK-52692][SQL] Support caching the TIME type
### What changes were proposed in this pull request?

The proposed changes are largely based on commit 0674327, which added caching support for TIMESTAMP_NTZ. This PR makes the same changes, but for the TIME type.

### Why are the changes needed?

To support caching the TIME type, e.g.:
```
CACHE TABLE v1 AS SELECT TIME'22:00:00';
```

### Does this PR introduce _any_ user-facing change?

No. The TIME type is not released yet.

### How was this patch tested?

New unit test.

### Was this patch authored or co-authored using generative AI tooling?

No.

Closes #51385 from bersprockets/time_cache.

Authored-by: Bruce Robbins <[email protected]>
Signed-off-by: Max Gekk <[email protected]>
1 parent 4b86268 commit 46b6ccb
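
As a rough end-to-end sketch of what this commit enables (assuming an active `SparkSession` named `spark` on a build that includes the still-unreleased TIME type; the table name `time_demo` and column alias `t` are illustrative, not part of this PR):

```scala
import java.time.LocalTime

// Cache a query that produces a TIME column, then read it back from the cached table.
spark.sql("CACHE TABLE time_demo AS SELECT TIME'22:00:00' AS t")

// TIME values surface on the JVM side as java.time.LocalTime.
val t = spark.table("time_demo").head().getAs[LocalTime]("t")
assert(t == LocalTime.of(22, 0))

spark.sql("UNCACHE TABLE time_demo")
```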

File tree

4 files changed (+18, -4 lines)


sql/core/src/main/scala/org/apache/spark/sql/execution/columnar/ColumnBuilder.scala

Lines changed: 1 addition & 1 deletion
```diff
@@ -185,7 +185,7 @@ private[columnar] object ColumnBuilder {
       case ByteType => new ByteColumnBuilder
       case ShortType => new ShortColumnBuilder
       case IntegerType | DateType | _: YearMonthIntervalType => new IntColumnBuilder
-      case LongType | TimestampType | TimestampNTZType | _: DayTimeIntervalType =>
+      case LongType | TimestampType | TimestampNTZType | _: DayTimeIntervalType | _: TimeType =>
        new LongColumnBuilder
      case FloatType => new FloatColumnBuilder
      case DoubleType => new DoubleColumnBuilder
```
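
The long-based builder can be reused because a time of day fits in a single 64-bit value; a minimal illustration with plain `java.time` (this only shows the magnitude involved, and is not meant as Spark's exact internal encoding):

```scala
import java.time.LocalTime

// Nanoseconds since midnight: at most just under 86,400,000,000,000 (24h), well within a Long.
val nanos: Long = LocalTime.parse("22:00:00").toNanoOfDay
assert(nanos == 79200000000000L)
assert(nanos < 24L * 60 * 60 * 1000000000L)
```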

sql/core/src/main/scala/org/apache/spark/sql/execution/columnar/ColumnType.scala

Lines changed: 2 additions & 1 deletion
```diff
@@ -869,7 +869,8 @@ private[columnar] object ColumnType {
       case ByteType => BYTE
       case ShortType => SHORT
       case IntegerType | DateType | _: YearMonthIntervalType => INT
-      case LongType | TimestampType | TimestampNTZType | _: DayTimeIntervalType => LONG
+      case LongType | TimestampType | TimestampNTZType | _: DayTimeIntervalType | _: TimeType =>
+        LONG
      case FloatType => FLOAT
      case DoubleType => DOUBLE
      case s: StringType => STRING(s)
```

sql/core/src/main/scala/org/apache/spark/sql/execution/columnar/GenerateColumnAccessor.scala

Lines changed: 1 addition & 1 deletion
```diff
@@ -82,7 +82,7 @@ object GenerateColumnAccessor extends CodeGenerator[Seq[DataType], ColumnarItera
       case ByteType => classOf[ByteColumnAccessor].getName
       case ShortType => classOf[ShortColumnAccessor].getName
       case IntegerType | DateType | _: YearMonthIntervalType => classOf[IntColumnAccessor].getName
-      case LongType | TimestampType | TimestampNTZType | _: DayTimeIntervalType =>
+      case LongType | TimestampType | TimestampNTZType | _: DayTimeIntervalType | _: TimeType =>
        classOf[LongColumnAccessor].getName
      case FloatType => classOf[FloatColumnAccessor].getName
      case DoubleType => classOf[DoubleColumnAccessor].getName
```

sql/core/src/test/scala/org/apache/spark/sql/CachedTableSuite.scala

Lines changed: 14 additions & 1 deletion
```diff
@@ -19,7 +19,7 @@ package org.apache.spark.sql
 
 import java.io.{File, FilenameFilter}
 import java.nio.file.{Files, Paths}
-import java.time.{Duration, LocalDateTime, Period}
+import java.time.{Duration, LocalDateTime, LocalTime, Period}
 import java.util.concurrent.atomic.AtomicBoolean
 
 import scala.collection.mutable.HashSet
@@ -1792,6 +1792,19 @@ class CachedTableSuite extends QueryTest with SQLTestUtils
     }
   }
 
+  test("SPARK-52692: Support cache/uncache table with Time type") {
+    val tableName = "timeCache"
+    withTable(tableName) {
+      sql(s"CACHE TABLE $tableName AS SELECT TIME'22:00:00'")
+      checkAnswer(spark.table(tableName), Row(LocalTime.parse("22:00:00")))
+      spark.table(tableName).queryExecution.withCachedData.collect {
+        case cached: InMemoryRelation =>
+          assert(cached.stats.sizeInBytes === 8)
+      }
+      sql(s"UNCACHE TABLE $tableName")
+    }
+  }
+
   Seq(true, false).foreach { callerEnableAQE =>
     test(s"SPARK-49982: AQE negative caching with in memory table cache - callerEnableAQE=" +
       callerEnableAQE) {
```
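
A note on the `sizeInBytes === 8` assertion above: with the column mappings from this PR, the single cached TIME value is held as one 64-bit long, so the expected size is simply one long's width (a trivial sanity check of that arithmetic, not Spark internals):

```scala
// 1 cached row x 1 TIME column, stored as one 64-bit long => 8 bytes.
val expectedBytes = 1 * java.lang.Long.BYTES
assert(expectedBytes == 8)
```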
