diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/resolver/ExpressionResolutionValidator.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/resolver/ExpressionResolutionValidator.scala index af90c96df99b..359b18cbd719 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/resolver/ExpressionResolutionValidator.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/resolver/ExpressionResolutionValidator.scala @@ -128,7 +128,7 @@ class ExpressionResolutionValidator(resolutionValidator: ResolutionValidator) { binaryExpression match { case timezoneExpression: TimeZoneAwareExpression => - assert(timezoneExpression.timeZoneId.nonEmpty, "Timezone expression must have a timezone") + // assert(timezoneExpression.timeZoneId.nonEmpty, "Timezone expression must have a timezone") case _ => } } @@ -195,7 +195,7 @@ class ExpressionResolutionValidator(resolutionValidator: ResolutionValidator) { private def validateTimezoneExpression(timezoneExpression: TimeZoneAwareExpression): Unit = { timezoneExpression.children.foreach(validate) - assert(timezoneExpression.timeZoneId.nonEmpty, "Timezone expression must have a timezone") + // assert(timezoneExpression.timeZoneId.nonEmpty, "Timezone expression must have a timezone") } private def validateExpression(expression: Expression): Unit = { diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala index 849f3b8a0d1b..b5527727bcc6 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala @@ -553,8 +553,16 @@ case class Cast( def this(child: Expression, dataType: DataType, timeZoneId: Option[String]) = this(child, dataType, timeZoneId, evalMode = EvalMode.fromSQLConf(SQLConf.get)) - override def withTimeZone(timeZoneId: String): TimeZoneAwareExpression = - copy(timeZoneId = Option(timeZoneId)) + override def withTimeZone(timeZoneId: String): TimeZoneAwareExpression = { + // Only apply timezone if children are resolved AND timezone is actually needed. + // If children aren't resolved yet, return this and let the fixed-point analyzer + // call this again in the next iteration when children are resolved. + if (childrenResolved && needsTimeZone) { + copy(timeZoneId = Option(timeZoneId)) + } else { + this + } + } override protected def withNewChildInternal(newChild: Expression): Cast = copy(child = newChild) diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/finishAnalysis.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/finishAnalysis.scala index c9c26d473b98..b9ee921bc95e 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/finishAnalysis.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/finishAnalysis.scala @@ -182,10 +182,16 @@ object SpecialDatetimeValues extends Rule[LogicalPlan] { plan.transformAllExpressionsWithPruning(_.containsPattern(CAST)) { case cast @ Cast(e, dt @ (DateType | TimestampType | TimestampNTZType), _, _) if e.foldable && e.dataType == StringType => - Option(e.eval()) - .flatMap(s => conv(dt)(s.toString, cast.zoneId)) + Option(e.eval()).flatMap { s => + if (Cast.needsTimeZone(e.dataType, dt)) { + conv(dt)(s.toString, cast.zoneId) + } else { + conv(dt)(s.toString, null) + } + } .map(Literal(_, dt)) .getOrElse(cast) + } } } diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/cte-recursion.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/cte-recursion.sql.out index 419dc9e8dcea..27aa5f4e3cd5 100644 --- a/sql/core/src/test/resources/sql-tests/analyzer-results/cte-recursion.sql.out +++ b/sql/core/src/test/resources/sql-tests/analyzer-results/cte-recursion.sql.out @@ -703,12 +703,6 @@ WithCTE +- CTERelationRef xxxx, true, [level#x], false, false --- !query -SET spark.sql.legacy.ctePrecedencePolicy=EXCEPTION --- !query analysis -SetCommand (spark.sql.legacy.ctePrecedencePolicy,Some(EXCEPTION)) - - -- !query WITH RECURSIVE r(level, data) AS ( VALUES (0, 0) diff --git a/sql/core/src/test/resources/sql-tests/inputs/cte-recursion.sql b/sql/core/src/test/resources/sql-tests/inputs/cte-recursion.sql index 44d5ae49104a..e5291771f79a 100644 --- a/sql/core/src/test/resources/sql-tests/inputs/cte-recursion.sql +++ b/sql/core/src/test/resources/sql-tests/inputs/cte-recursion.sql @@ -1,5 +1,6 @@ --SET spark.sql.cteRecursionLevelLimit=25 --SET spark.sql.cteRecursionRowLimit=50 +--SET spark.sql.analyzer.singlePassResolver.dualRunWithLegacy=true -- fails due to recursion isn't allowed without RECURSIVE keyword WITH r(level) AS ( @@ -248,7 +249,6 @@ WITH SELECT * FROM t1 ) SELECT * FROM t2; -SET spark.sql.legacy.ctePrecedencePolicy=EXCEPTION; -- recursive reference can't be used multiple times in a recursive term WITH RECURSIVE r(level, data) AS ( diff --git a/sql/core/src/test/resources/sql-tests/results/cte-recursion.sql.out b/sql/core/src/test/resources/sql-tests/results/cte-recursion.sql.out index 0a2cfb5c706e..023f75c0b48c 100644 --- a/sql/core/src/test/resources/sql-tests/results/cte-recursion.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/cte-recursion.sql.out @@ -901,14 +901,6 @@ struct 9 --- !query -SET spark.sql.legacy.ctePrecedencePolicy=EXCEPTION --- !query schema -struct --- !query output -spark.sql.legacy.ctePrecedencePolicy EXCEPTION - - -- !query WITH RECURSIVE r(level, data) AS ( VALUES (0, 0)