[SPARK-52617][SQL] Cast TIME to/from TIMESTAMP_NTZ #51381

Open: wants to merge 40 commits into base: master
Changes from 19 commits
Commits (40)
8799de5
[SPARK-52617][SQL]Cast TIME to/from TIMESTAMP_NTZ
subham611 Jul 5, 2025
c37bbf0
Adds time to TimestampNTZType conversion
subham611 Jul 5, 2025
352aacb
Fix linting
subham611 Jul 5, 2025
7faed87
Remove unused import
subham611 Jul 5, 2025
11c3a26
Adds can cast
subham611 Jul 5, 2025
4be2758
Fix import
subham611 Jul 5, 2025
e020fae
Resolved ambiguous import
subham611 Jul 5, 2025
5b9a10e
Fix UT failure
subham611 Jul 5, 2025
d6f4424
Enable casting in ansi mode
subham611 Jul 5, 2025
f60c6cb
Calculate current day outside buildCast
subham611 Jul 5, 2025
828d214
Adds RewriteTimeCastToTimestampNTZ rule
subham611 Jul 6, 2025
90482d6
Fix additional change
subham611 Jul 6, 2025
a9d1bdd
Fix unused import
subham611 Jul 6, 2025
fdc9b92
Fix UT
subham611 Jul 6, 2025
b919e4e
Fix UT failure
subham611 Jul 6, 2025
e2ef8af
Fix UT
subham611 Jul 7, 2025
0bada28
Fix UT
subham611 Jul 7, 2025
2a5e9ad
Move to resolver
subham611 Jul 7, 2025
7d77c1f
Revert unwanted changes
subham611 Jul 7, 2025
cb6ad55
Resolve comment
subham611 Jul 7, 2025
cd503a5
Delete resolver
subham611 Jul 7, 2025
8bc58da
Fix import
subham611 Jul 7, 2025
8a30b2a
Add back RewriteTimeCastToTimestampNTZ rule
subham611 Jul 8, 2025
fd1aef3
Fix UT
subham611 Jul 8, 2025
5c6c97f
Resolve comments
subham611 Jul 8, 2025
6169ea4
Update sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/anal…
SubhamSinghal Jul 8, 2025
29ca860
Update sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util…
SubhamSinghal Jul 8, 2025
011b254
Merge master
subham611 Jul 8, 2025
ca02825
Resolve comments
subham611 Jul 8, 2025
72bb9b5
remove new line
subham611 Jul 8, 2025
68f0c98
Fix lint
subham611 Jul 8, 2025
6b5ed62
Lint fix
subham611 Jul 8, 2025
e53bfe3
Modify rule
subham611 Jul 8, 2025
b488aae
Rewrite rule
subham611 Jul 8, 2025
867967e
Fix import order
subham611 Jul 8, 2025
ebf16ed
Adds codegen
subham611 Jul 9, 2025
58c0bea
Fix UT
subham611 Jul 9, 2025
b82e21c
Resolve comment
subham611 Jul 10, 2025
4239bef
Lint fix
subham611 Jul 10, 2025
fd5e76e
Adds type coercion rule
subham611 Jul 10, 2025
@@ -0,0 +1,36 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.analysis.resolver

import org.apache.spark.sql.catalyst.expressions.{Cast, CurrentDate, Expression, MakeTimestampNTZ}
import org.apache.spark.sql.types.{TimestampNTZType, TimeType}

/**
* Rewrites Cast from TIME -> TIMESTAMP_NTZ to MakeTimestampNTZ(CurrentDate(), TIME)
*
* Example: CAST(TIME '15:30:00' AS TIMESTAMP_NTZ) => MakeTimestampNTZ(CurrentDate(),
* Literal(15:30:00))
*/
object RewriteTimeCastToTimestampNTZ {
def rewrite(expr: Expression): Expression = expr match {
case c @ Cast(child, TimestampNTZType, _, _)
if child.resolved && child.dataType.isInstanceOf[TimeType] =>
MakeTimestampNTZ(CurrentDate(), child)
case other =>
other
}
}
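
For context on what the rewrite produces, here is a minimal standalone sketch in plain Scala (using java.time only, not the PR's Catalyst expressions): the cast's TIME operand is combined with the current date, which is what MakeTimestampNTZ(CurrentDate(), time) evaluates to.

import java.time.{LocalDate, LocalDateTime, LocalTime}

// CAST(TIME '15:30:00' AS TIMESTAMP_NTZ) becomes, conceptually:
val time: LocalTime = LocalTime.of(15, 30, 0)           // the TIME operand
val ntz: LocalDateTime = LocalDate.now().atTime(time)   // CurrentDate() + time-of-day
// e.g. if the current date is 2025-07-10, ntz is 2025-07-10T15:30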
@@ -54,8 +54,12 @@ class TimezoneAwareExpressionResolver(expressionResolver: ExpressionResolver)
override def resolve(unresolvedTimezoneExpression: TimeZoneAwareExpression): Expression = {
val expressionWithResolvedChildren =
withResolvedChildren(unresolvedTimezoneExpression, expressionResolver.resolve _)

val expressionWithResolvedCast =
RewriteTimeCastToTimestampNTZ.rewrite(expressionWithResolvedChildren)

val expressionWithResolvedChildrenAndTimeZone = TimezoneAwareExpressionResolver.resolveTimezone(
expressionWithResolvedChildren,
expressionWithResolvedCast,
traversals.current.sessionLocalTimeZone
)
coerceExpressionTypes(
@@ -122,6 +122,7 @@ object Cast extends QueryErrorsBase {
case (_: StringType, _: TimeType) => true
case (TimestampType, DateType) => true
case (TimestampNTZType, DateType) => true
case (TimestampNTZType, _: TimeType) => true

case (_: NumericType, _: NumericType) => true
case (_: StringType, _: NumericType) => true
@@ -135,6 +136,7 @@
case (_, VariantType) => variant.VariantGet.checkDataType(from, allowStructsAndMaps = false)

case (_: TimeType, _: TimeType) => true
case (_: TimeType, TimestampNTZType) => true

// non-null variants can generate nulls even in ANSI mode
case (ArrayType(fromType, fn), ArrayType(toType, tn)) =>
@@ -229,6 +231,7 @@
case (_: StringType, _: TimeType) => true
case (TimestampType, DateType) => true
case (TimestampNTZType, DateType) => true
case (TimestampNTZType, _: TimeType) => true

case (_: StringType, CalendarIntervalType) => true
case (_: StringType, _: DayTimeIntervalType) => true
@@ -254,6 +257,7 @@
case (_, VariantType) => variant.VariantGet.checkDataType(from, allowStructsAndMaps = false)

case (_: TimeType, _: TimeType) => true
case (_: TimeType, TimestampNTZType) => true

case (ArrayType(fromType, fn), ArrayType(toType, tn)) =>
canCast(fromType, toType) &&
@@ -746,6 +750,13 @@ case class Cast(
}
case _: TimeType =>
buildCast[Long](_, nanos => DateTimeUtils.truncateTimeToPrecision(nanos, to.precision))
case _: TimestampNTZType =>
buildCast[Long](
_,
micros => {
val nanosInDay = DateTimeUtils.getNanosInADay(micros)
DateTimeUtils.truncateTimeToPrecision(nanosInDay, to.precision)
})
}

// IntervalConverter
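The opposite direction added above (TIMESTAMP_NTZ to TIME) keeps only the time-of-day part of the local timestamp and truncates it to the target precision. A small self-contained sketch of the same computation, assuming the input is microseconds since the epoch with no time zone; this is an illustration, not the DateTimeUtils helpers themselves:

object NtzToTimeSketch {
  private val MicrosPerDay = 24L * 60 * 60 * 1000 * 1000

  // Microseconds since midnight of the local day, converted to nanoseconds.
  def nanosOfDay(ntzMicros: Long): Long =
    Math.floorMod(ntzMicros, MicrosPerDay) * 1000L

  // Drop sub-precision digits: e.g. precision 0 keeps whole seconds, 6 keeps microseconds.
  def truncateToPrecision(nanos: Long, precision: Int): Long = {
    val divisor = math.pow(10, 9 - precision).toLong
    nanos / divisor * divisor
  }
}
// Example: 15:30:00.123456 truncated to precision 0 becomes 15:30:00.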
@@ -834,4 +834,19 @@ object DateTimeUtils extends SparkDateTimeUtils {
def makeTimestampNTZ(days: Int, nanos: Long): Long = {
localDateTimeToMicros(LocalDateTime.of(daysToLocalDate(days), nanosToLocalTime(nanos)))
}

/**
* Returns the number of nanoseconds past midnight for a given timestamp in microseconds.
*
* This method uses `toJulianDay`, which splits the timestamp into (Julian day, nanoseconds in
* day), and returns only the nanosecond component.
*
* @param micros
* The timestamp in microseconds since the epoch.
* @return
* The number of nanoseconds past midnight on that day.
*/
def getNanosInADay(micros: Long): Long = {
toJulianDay(micros)._2
}
}
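
As a quick sanity check on the helper's contract (assuming, as the scaladoc states, an input in microseconds since the epoch): the nanoseconds-of-day for a local time of 01:30:15 on the epoch day works out as follows.

// 1970-01-01 01:30:15 local time, expressed in microseconds since the epoch.
val micros = (1L * 3600 + 30 * 60 + 15) * 1_000_000L      // 5_415_000_000
// getNanosInADay(micros) returns the same time-of-day in nanoseconds:
// (1L * 3600 + 30 * 60 + 15) * 1_000_000_000L == 5_415_000_000_000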
@@ -0,0 +1,53 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.apache.spark.sql.catalyst.analysis.resolver

import org.apache.spark.SparkFunSuite
import org.apache.spark.sql.catalyst.expressions.{Cast, CurrentDate, Literal, MakeTimestampNTZ}
import org.apache.spark.sql.types.{TimestampNTZType, TimeType}

class RewriteTimeCastToTimestampNTZSuite extends SparkFunSuite {

test("SPARK-52617: rewrite TIME -> TIMESTAMP_NTZ cast to MakeTimestampNTZ") {
// TIME: 15:30:00 -> seconds = 15*3600 + 30*60 = 55800
val nanos = 55800L * 1_000_000_000L
val timeLiteral = Literal(nanos, TimeType(6))

val castExpr = Cast(timeLiteral, TimestampNTZType)
val rewrittenExpr = RewriteTimeCastToTimestampNTZ.rewrite(castExpr)

val expectedExpr = MakeTimestampNTZ(CurrentDate(), timeLiteral)

assert(
rewrittenExpr.semanticEquals(expectedExpr),
s"""
|Expected:
| $expectedExpr
|But got:
| $rewrittenExpr
|""".stripMargin)
}

test("should not rewrite non-time casts") {
val literal = Literal(42)
val castExpr = Cast(literal, TimestampNTZType)

val rewrittenExpr = RewriteTimeCastToTimestampNTZ.rewrite(castExpr)
assert(rewrittenExpr eq castExpr)
}
}
@@ -31,7 +31,7 @@ import org.apache.spark.sql.catalyst.expressions.codegen.CodegenContext
import org.apache.spark.sql.catalyst.util.DateTimeConstants._
import org.apache.spark.sql.catalyst.util.DateTimeTestUtils._
import org.apache.spark.sql.catalyst.util.DateTimeUtils._
import org.apache.spark.sql.catalyst.util.IntervalUtils
import org.apache.spark.sql.catalyst.util.{DateTimeUtils, IntervalUtils}
import org.apache.spark.sql.catalyst.util.IntervalUtils.microsToDuration
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.types._
@@ -1507,4 +1507,32 @@ abstract class CastSuiteBase extends SparkFunSuite with ExpressionEvalHelper {
}
}
}

test("SPARK-52617: cast TimestampNTZType to time") {
specialTs.foreach { s =>
val ldt = LocalDateTime.parse(s) // parsed as local timestamp
val micros = DateTimeUtils.localDateTimeToMicros(ldt)

val nanosOfDay = ldt.toLocalTime().toNanoOfDay
val expected = DateTimeUtils.truncateTimeToPrecision(nanosOfDay, TimeType.DEFAULT_PRECISION)

checkEvaluation(Cast(Literal(micros, TimestampNTZType), TimeType(0)), expected)
}
}

test("SPARK-52617: cast time to TimestampNTZType") {
val testCases = Seq(
("2023-01-01T15:30:00.123456789", 9),
("2023-01-01T15:30:00.123456", 6),
("2023-01-01T15:30:00", 0))

testCases.foreach { case (s, precision) =>
val ldt = LocalDateTime.parse(s)
val micros = DateTimeUtils.localDateTimeToMicros(ldt)
val nanosOfDay = ldt.toLocalTime().toNanoOfDay
val expected = DateTimeUtils.truncateTimeToPrecision(nanosOfDay, precision)

checkEvaluation(Cast(Literal(micros, TimestampNTZType), TimeType(precision)), expected)
}
}
}
@@ -1250,4 +1250,27 @@ class DateTimeUtilsSuite extends SparkFunSuite with Matchers with SQLHelper {
assert(truncateTimeToPrecision(localTime(23, 59, 59, 987654), 1) ==
localTime(23, 59, 59, 900000))
}

test("get nanos in a day") {
def toMicros(hours: Int, minutes: Int, seconds: Int): Long = {
val microsInSecond = 1_000_000L
(hours * 3600L + minutes * 60 + seconds) * microsInSecond
}

// Midnight: 00:00:00
val midnightMicros = toMicros(0, 0, 0)
assert(getNanosInADay(midnightMicros) === 0L)

// Noon: 12:00:00
val noonMicros = toMicros(12, 0, 0)
assert(getNanosInADay(noonMicros) === 12L * 3600 * 1_000_000_000L)

// 23:59:59
val endOfDayMicros = toMicros(23, 59, 59)
assert(getNanosInADay(endOfDayMicros) === ((23L * 3600 + 59 * 60 + 59) * 1_000_000_000L))

// 01:30:15
val earlyMorningMicros = toMicros(1, 30, 15)
assert(getNanosInADay(earlyMorningMicros) === ((1L * 3600 + 30 * 60 + 15) * 1_000_000_000L))
}
}