Skip to content

[SPARK-52617][SQL] Cast TIME to/from TIMESTAMP_NTZ #51381

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Open
wants to merge 40 commits into
base: master
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from 9 commits
Commits
Show all changes
40 commits
Select commit Hold shift + click to select a range
8799de5
[SPARK-52617][SQL]Cast TIME to/from TIMESTAMP_NTZ
subham611 Jul 5, 2025
c37bbf0
Adds time to TimestampNTZType conversion
subham611 Jul 5, 2025
352aacb
Fix linting
subham611 Jul 5, 2025
7faed87
Remove unused import
subham611 Jul 5, 2025
11c3a26
Adds can cast
subham611 Jul 5, 2025
4be2758
Fix import
subham611 Jul 5, 2025
e020fae
Resolved ambiguous import
subham611 Jul 5, 2025
5b9a10e
Fix UT failure
subham611 Jul 5, 2025
d6f4424
Enable casting in ansi mode
subham611 Jul 5, 2025
f60c6cb
Calculate current day outside buildCast
subham611 Jul 5, 2025
828d214
Adds RewriteTimeCastToTimestampNTZ rule
subham611 Jul 6, 2025
90482d6
Fix additional change
subham611 Jul 6, 2025
a9d1bdd
Fix unused import
subham611 Jul 6, 2025
fdc9b92
Fix UT
subham611 Jul 6, 2025
b919e4e
Fix UT failure
subham611 Jul 6, 2025
e2ef8af
Fix UT
subham611 Jul 7, 2025
0bada28
Fix UT
subham611 Jul 7, 2025
2a5e9ad
Move to resolver
subham611 Jul 7, 2025
7d77c1f
Revert unwanted changes
subham611 Jul 7, 2025
cb6ad55
Resolve comment
subham611 Jul 7, 2025
cd503a5
Delete resolver
subham611 Jul 7, 2025
8bc58da
Fix import
subham611 Jul 7, 2025
8a30b2a
Add back RewriteTimeCastToTimestampNTZ rule
subham611 Jul 8, 2025
fd1aef3
Fix UT
subham611 Jul 8, 2025
5c6c97f
Resolve comments
subham611 Jul 8, 2025
6169ea4
Update sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/anal…
SubhamSinghal Jul 8, 2025
29ca860
Update sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util…
SubhamSinghal Jul 8, 2025
011b254
Merge master
subham611 Jul 8, 2025
ca02825
Resolve comments
subham611 Jul 8, 2025
72bb9b5
remove new line
subham611 Jul 8, 2025
68f0c98
Fix lint
subham611 Jul 8, 2025
6b5ed62
Lint fix
subham611 Jul 8, 2025
e53bfe3
Modify rule
subham611 Jul 8, 2025
b488aae
Rewrite rule
subham611 Jul 8, 2025
867967e
Fix import order
subham611 Jul 8, 2025
ebf16ed
Adds codegen
subham611 Jul 9, 2025
58c0bea
Fix UT
subham611 Jul 9, 2025
b82e21c
Resolve comment
subham611 Jul 10, 2025
4239bef
Lint fix
subham611 Jul 10, 2025
fd5e76e
Adds type coercion rule
subham611 Jul 10, 2025
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -122,6 +122,7 @@ object Cast extends QueryErrorsBase {
case (_: StringType, _: TimeType) => true
case (TimestampType, DateType) => true
case (TimestampNTZType, DateType) => true
case (TimestampNTZType, _: TimeType) => true

case (_: NumericType, _: NumericType) => true
case (_: StringType, _: NumericType) => true
Expand All @@ -135,6 +136,7 @@ object Cast extends QueryErrorsBase {
case (_, VariantType) => variant.VariantGet.checkDataType(from, allowStructsAndMaps = false)

case (_: TimeType, _: TimeType) => true
case (_: TimeType, TimestampNTZType) => true

// non-null variants can generate nulls even in ANSI mode
case (ArrayType(fromType, fn), ArrayType(toType, tn)) =>
Expand Down Expand Up @@ -229,6 +231,7 @@ object Cast extends QueryErrorsBase {
case (_: StringType, _: TimeType) => true
case (TimestampType, DateType) => true
case (TimestampNTZType, DateType) => true
case (TimestampNTZType, _: TimeType) => true

case (_: StringType, CalendarIntervalType) => true
case (_: StringType, _: DayTimeIntervalType) => true
Expand All @@ -254,6 +257,7 @@ object Cast extends QueryErrorsBase {
case (_, VariantType) => variant.VariantGet.checkDataType(from, allowStructsAndMaps = false)

case (_: TimeType, _: TimeType) => true
case (_: TimeType, TimestampNTZType) => true

case (ArrayType(fromType, fn), ArrayType(toType, tn)) =>
canCast(fromType, toType) &&
Expand Down Expand Up @@ -701,6 +705,16 @@ case class Cast(
buildCast[Int](_, d => daysToMicros(d, ZoneOffset.UTC))
case TimestampType =>
buildCast[Long](_, ts => convertTz(ts, ZoneOffset.UTC, zoneId))
case _: TimeType =>
buildCast[Long](
_,
nanos => {
val currentDay = DateTimeUtils.currentDate(zoneId)
val NANOS_PER_DAY = 86_400_000_000_000L // 24 * 60 * 60 * 1_000_000_000
val nanosOfDay = ((nanos % NANOS_PER_DAY) + NANOS_PER_DAY) % NANOS_PER_DAY
DateTimeUtils.makeTimestampNTZ(currentDay, nanosOfDay)
}
)
}

private[this] def decimalToTimestamp(d: Decimal): Long = {
Expand Down Expand Up @@ -746,6 +760,13 @@ case class Cast(
}
case _: TimeType =>
buildCast[Long](_, nanos => DateTimeUtils.truncateTimeToPrecision(nanos, to.precision))
case _: TimestampNTZType =>
buildCast[Long](
_,
micros => {
val nanosInDay = DateTimeUtils.toJulianDay(micros)._2
DateTimeUtils.truncateTimeToPrecision(nanosInDay, to.precision)
})
}

// IntervalConverter
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,7 @@ import org.apache.spark.sql.catalyst.expressions.Cast._
import org.apache.spark.sql.catalyst.expressions.codegen.CodegenContext
import org.apache.spark.sql.catalyst.util.DateTimeConstants._
import org.apache.spark.sql.catalyst.util.DateTimeTestUtils._
import org.apache.spark.sql.catalyst.util.DateTimeUtils._
import org.apache.spark.sql.catalyst.util.DateTimeUtils
import org.apache.spark.sql.catalyst.util.IntervalUtils
import org.apache.spark.sql.catalyst.util.IntervalUtils.microsToDuration
import org.apache.spark.sql.internal.SQLConf
Expand Down Expand Up @@ -305,11 +305,11 @@ abstract class CastSuiteBase extends SparkFunSuite with ExpressionEvalHelper {
checkEvaluation(cast(ts, DoubleType), 15.003)

checkEvaluation(cast(cast(tss, ShortType), TimestampType),
fromJavaTimestamp(ts) * MILLIS_PER_SECOND)
DateTimeUtils.fromJavaTimestamp(ts) * MILLIS_PER_SECOND)
checkEvaluation(cast(cast(tss, IntegerType), TimestampType),
fromJavaTimestamp(ts) * MILLIS_PER_SECOND)
DateTimeUtils.fromJavaTimestamp(ts) * MILLIS_PER_SECOND)
checkEvaluation(cast(cast(tss, LongType), TimestampType),
fromJavaTimestamp(ts) * MILLIS_PER_SECOND)
DateTimeUtils.fromJavaTimestamp(ts) * MILLIS_PER_SECOND)
checkEvaluation(
cast(cast(millis.toFloat / MILLIS_PER_SECOND, TimestampType), FloatType),
millis.toFloat / MILLIS_PER_SECOND)
Expand All @@ -334,18 +334,18 @@ abstract class CastSuiteBase extends SparkFunSuite with ExpressionEvalHelper {

for (tz <- ALL_TIMEZONES) {
val timeZoneId = Option(tz.getId)
var c = Calendar.getInstance(TimeZoneUTC)
var c = Calendar.getInstance(DateTimeUtils.TimeZoneUTC)
c.set(2015, 2, 8, 2, 30, 0)
checkEvaluation(
cast(cast(new Timestamp(c.getTimeInMillis), StringType, timeZoneId),
TimestampType, timeZoneId),
millisToMicros(c.getTimeInMillis))
c = Calendar.getInstance(TimeZoneUTC)
DateTimeUtils.millisToMicros(c.getTimeInMillis))
c = Calendar.getInstance(DateTimeUtils.TimeZoneUTC)
c.set(2015, 10, 1, 2, 30, 0)
checkEvaluation(
cast(cast(new Timestamp(c.getTimeInMillis), StringType, timeZoneId),
TimestampType, timeZoneId),
millisToMicros(c.getTimeInMillis))
DateTimeUtils.millisToMicros(c.getTimeInMillis))
}

checkEvaluation(cast("abdef", StringType), "abdef")
Expand All @@ -356,7 +356,7 @@ abstract class CastSuiteBase extends SparkFunSuite with ExpressionEvalHelper {
checkEvaluation(cast(cast(nts, TimestampType, UTC_OPT), StringType, UTC_OPT), nts)
checkEvaluation(
cast(cast(ts, StringType, UTC_OPT), TimestampType, UTC_OPT),
fromJavaTimestamp(ts))
DateTimeUtils.fromJavaTimestamp(ts))

// all convert to string type to check
checkEvaluation(
Expand Down Expand Up @@ -1507,4 +1507,36 @@ abstract class CastSuiteBase extends SparkFunSuite with ExpressionEvalHelper {
}
}
}

test("SPARK-52617: cast TimestampNTZType to time") {
  specialTs.foreach { s =>
    // `specialTs` strings have no zone, so they parse as local (timezone-less) timestamps.
    val ldt = LocalDateTime.parse(s)
    val micros = DateTimeUtils.localDateTimeToMicros(ldt)

    val nanosOfDay = ldt.toLocalTime().toNanoOfDay
    // Truncate the expectation to the SAME precision as the cast's target type.
    // The previous version truncated to DEFAULT_PRECISION but cast to TimeType(0),
    // which only agreed because the special timestamps carry whole seconds.
    val expected =
      DateTimeUtils.truncateTimeToPrecision(nanosOfDay, TimeType.DEFAULT_PRECISION)

    checkEvaluation(
      Cast(Literal(micros, TimestampNTZType), TimeType(TimeType.DEFAULT_PRECISION)),
      expected)
  }
}

test("SPARK-52617: cast time to TimestampNTZType") {
  // Exercise the TIME -> TIMESTAMP_NTZ direction that the test name claims.
  // The previous body cast TimestampNTZ -> TIME, duplicating the test above,
  // and used TimeType(9), which is outside TIME's supported precision range (0..6).
  val testCases = Seq(
    ("15:30:00.123456", 6),
    ("15:30:00.123", 3),
    ("15:30:00", 0)
  )

  testCases.foreach { case (s, precision) =>
    val localTime = java.time.LocalTime.parse(s)
    val nanosOfDay = localTime.toNanoOfDay
    // The cast anchors the time-of-day on the current date (see the TimeType
    // branch of castToTimestampNTZ). Recomputing the date here leaves a tiny
    // race window around midnight; NOTE(review): assumes the evaluation time
    // zone resolves to UTC for checkEvaluation — confirm against the suite's
    // default session time zone.
    val currentDay = DateTimeUtils.currentDate(ZoneOffset.UTC)
    val expected = DateTimeUtils.makeTimestampNTZ(currentDay, nanosOfDay)

    checkEvaluation(
      Cast(Literal(nanosOfDay, TimeType(precision)), TimestampNTZType),
      expected)
  }
}
}