Commit f7d1226

Adds type coercion rule
1 parent: 4239bef

File tree: 7 files changed (+1759, −1654 lines)

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala

Lines changed: 1716 additions & 1535 deletions
Large diffs are not rendered by default.

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/AnsiTypeCoercion.scala

Lines changed: 1 addition & 0 deletions
@@ -93,6 +93,7 @@ object AnsiTypeCoercion extends TypeCoercionBase {
     StackCoercion ::
     Division ::
     IntegralDivision ::
+    RewriteTimeCastToTimestampNTZ ::
     ImplicitTypeCasts ::
     DateTimeOperations ::
     WindowFrameCoercion ::

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/RewriteTimeCastToTimestampNTZ.scala

Lines changed: 0 additions & 54 deletions
This file was deleted.

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/TypeCoercion.scala

Lines changed: 1 addition & 0 deletions
@@ -66,6 +66,7 @@ object TypeCoercion extends TypeCoercionBase {
     StackCoercion ::
     Division ::
     IntegralDivision ::
+    RewriteTimeCastToTimestampNTZ ::
     ImplicitTypeCasts ::
     DateTimeOperations ::
     WindowFrameCoercion ::

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/TypeCoercionBase.scala

Lines changed: 34 additions & 27 deletions
@@ -19,39 +19,15 @@ package org.apache.spark.sql.catalyst.analysis
 
 import scala.annotation.tailrec
 import scala.collection.mutable
-
-import org.apache.spark.sql.catalyst.expressions.{
-  Alias,
-  CaseWhen,
-  Cast,
-  Concat,
-  Elt,
-  Expression,
-  MapZipWith,
-  Stack,
-  WindowSpecDefinition
-}
-import org.apache.spark.sql.catalyst.plans.logical.{
-  AddColumns,
-  AlterColumns,
-  Call,
-  CreateTable,
-  Except,
-  Intersect,
-  LogicalPlan,
-  Project,
-  ReplaceTable,
-  Union,
-  UnionLoop,
-  Unpivot
-}
+import org.apache.spark.sql.catalyst.expressions.{Alias, CaseWhen, Cast, Concat, CurrentDate, Elt, Expression, MakeTimestampNTZ, MapZipWith, Stack, WindowSpecDefinition}
+import org.apache.spark.sql.catalyst.plans.logical.{AddColumns, AlterColumns, Call, CreateTable, Except, Intersect, LogicalPlan, Project, ReplaceTable, Union, UnionLoop, Unpivot}
 import org.apache.spark.sql.catalyst.rules.Rule
 import org.apache.spark.sql.catalyst.trees.CurrentOrigin.withOrigin
 import org.apache.spark.sql.catalyst.util.ResolveDefaultColumns
 import org.apache.spark.sql.connector.catalog.CatalogV2Implicits.MultipartIdentifierHelper
 import org.apache.spark.sql.connector.catalog.procedures.BoundProcedure
 import org.apache.spark.sql.errors.DataTypeErrors.cannotMergeIncompatibleDataTypesError
-import org.apache.spark.sql.types.DataType
+import org.apache.spark.sql.types.{DataType, TimeType, TimestampNTZType}
 
 abstract class TypeCoercionBase extends TypeCoercionHelper {
 
@@ -480,4 +456,35 @@ abstract class TypeCoercionBase extends TypeCoercionHelper {
       case withChildrenResolved => StringLiteralTypeCoercion(withChildrenResolved)
     }
   }
+
+  /**
+   * Rewrites a cast from [[TimeType]] to [[TimestampNTZType]] into a [[MakeTimestampNTZ]]
+   * expression.
+   *
+   * The conversion from TIME to TIMESTAMP_NTZ requires a date component, which TIME itself does
+   * not provide. This rule injects [[CurrentDate]] as the implicit date part, effectively
+   * treating the TIME value as a time of day on the current date. This rewrite ensures that all
+   * such casts within a query use a consistent date, as required by the [[ComputeCurrentTime]]
+   * rule which replaces [[CurrentDate]] with a fixed value during analysis.
+   *
+   * For example, the following SQL:
+   * {{{
+   *   SELECT CAST(make_time(12, 30, 0) AS TIMESTAMP_NTZ)
+   * }}}
+   * will be rewritten to:
+   * {{{
+   *   SELECT make_timestamp_ntz(current_date, make_time(12, 30, 0))
+   * }}}
+   */
+  object RewriteTimeCastToTimestampNTZ extends TypeCoercionRule {
+    override def transform: PartialFunction[Expression, Expression] = {
+      case c @ Cast(child, TimestampNTZType, _, _) if child.resolved =>
+        child.dataType match {
+          case _: TimeType =>
+            // Convert TIME -> TIMESTAMP_NTZ using MakeTimestampNTZ(CurrentDate(), time)
+            MakeTimestampNTZ(CurrentDate(), child)
+          case _ => c
+        }
+    }
+  }
 }
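
Not part of this commit: a minimal sketch of how the new coercion surfaces through the SQL API, assuming a Spark build that includes the TIME type and the make_time function used in the scaladoc above. The object name, app name, and local-mode session are illustrative only.

import org.apache.spark.sql.SparkSession

// Hypothetical driver (not from the commit) that exercises the TIME -> TIMESTAMP_NTZ cast.
object TimeCastExample {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .master("local[1]")
      .appName("time-cast-example")
      .getOrCreate()

    // Both casts below are rewritten by RewriteTimeCastToTimestampNTZ into
    // make_timestamp_ntz(current_date, <time>); ComputeCurrentTime then pins
    // current_date to a single value for the query, so both columns share the
    // same date part.
    spark.sql(
      """SELECT
        |  CAST(make_time(12, 30, 0)  AS TIMESTAMP_NTZ) AS t1,
        |  CAST(make_time(23, 59, 59) AS TIMESTAMP_NTZ) AS t2
        |""".stripMargin).show(truncate = false)

    spark.stop()
  }
}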

sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/RewriteTimeCastToTimestampNTZSuite.scala

Lines changed: 0 additions & 38 deletions
This file was deleted.

sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/TypeCoercionSuite.scala

Lines changed: 7 additions & 0 deletions
@@ -1772,6 +1772,13 @@ class TypeCoercionSuite extends TypeCoercionSuiteBase {
     assert(wp1.isInstanceOf[Project])
     assert(wp1.expressions.forall(!_.exists(_ == t1.output.head)))
   }
+
+  test("SPARK-52617: RewriteTimeCastToTimestampNTZ: TIME to TIMESTAMP_NTZ coercion") {
+    val expr = Cast(Literal.create(123456789L, TimeType(6)), TimestampNTZType)
+    val coerced = RewriteTimeCastToTimestampNTZ.transform.apply(expr)
+    val expected = MakeTimestampNTZ(CurrentDate(), Literal.create(123456789L, TimeType(6)))
+    assert(coerced.semanticEquals(expected))
+  }
 }