diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/literals.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/literals.scala
index 3f9fba185ca4f..960f0ee7a729a 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/literals.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/literals.scala
@@ -268,6 +268,9 @@ object Literal {
       s"but class ${Utils.getSimpleName(value.getClass)} found.")
   }
 
+  /**
+   * Inverse of [[Literal.sql]].
+   */
   def fromSQL(sql: String): Expression = {
     CatalystSqlParser.parseExpression(sql).transformUp {
       case u: UnresolvedFunction =>
@@ -278,10 +281,6 @@ object Literal {
         assert(u.orderingWithinGroup.isEmpty)
         assert(!u.isInternal)
         FunctionRegistry.builtin.lookupFunction(FunctionIdentifier(u.nameParts.head), u.arguments)
-    } match {
-      case c: Cast if c.needsTimeZone =>
-        c.withTimeZone(SQLConf.get.sessionLocalTimeZone)
-      case e: Expression => e
     }
   }
 }
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/ResolveDefaultColumnsUtil.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/ResolveDefaultColumnsUtil.scala
index 6499e5c40049d..323f715ede2ce 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/ResolveDefaultColumnsUtil.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/ResolveDefaultColumnsUtil.scala
@@ -320,14 +320,20 @@ object ResolveDefaultColumns extends QueryErrorsBase
   }
 
   /**
-   * Analyze EXISTS_DEFAULT value. This skips some steps of analyze as most of the
-   * analysis has been done before.
+   * Analyze the EXISTS_DEFAULT value. The EXISTS_DEFAULT value was created from
+   * CURRENT_DEFAULT via [[analyze]], so this can skip most of those steps.
    */
   private def analyzeExistenceDefaultValue(field: StructField): Expression = {
     val defaultSQL = field.metadata.getString(EXISTS_DEFAULT_COLUMN_METADATA_KEY)
 
     // Parse the expression.
-    val expr = Literal.fromSQL(defaultSQL)
+    val expr = Literal.fromSQL(defaultSQL) match {
+      // EXISTS_DEFAULT will have a cast from analyze() due to coerceDefaultValue,
+      // hence we need to add the session time zone to the cast if necessary.
+      case c: Cast if c.needsTimeZone =>
+        c.withTimeZone(SQLConf.get.sessionLocalTimeZone)
+      case e: Expression => e
+    }
 
     // Check invariants
     if (expr.containsPattern(PLAN_EXPRESSION)) {
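
For reference, here is a minimal standalone sketch (not part of the patch) of the pattern this change moves out of Literal.fromSQL and into analyzeExistenceDefaultValue: fromSQL now returns the parsed expression as-is, so a Cast that still needs a time zone (which EXISTS_DEFAULT values produced by analyze()/coerceDefaultValue can contain) must be given the session-local time zone before it can be evaluated. The helper name resolveExistsDefaultCast and the example SQL string are made up for illustration.

import org.apache.spark.sql.catalyst.expressions.{Cast, Expression, Literal}
import org.apache.spark.sql.internal.SQLConf

// Illustrative helper mirroring the new match in analyzeExistenceDefaultValue:
// attach the session-local time zone to a timezone-dependent Cast produced by
// Literal.fromSQL, and leave any other expression untouched.
def resolveExistsDefaultCast(defaultSQL: String): Expression = {
  Literal.fromSQL(defaultSQL) match {
    case c: Cast if c.needsTimeZone =>
      c.withTimeZone(SQLConf.get.sessionLocalTimeZone)
    case e: Expression => e
  }
}

// Example EXISTS_DEFAULT as it might be stored in the column metadata: a string
// literal cast to TIMESTAMP. Without withTimeZone the Cast would remain
// unresolved and could not be evaluated to the stored default value.
val existsDefault = resolveExistsDefaultCast("CAST('2020-01-01 00:00:00' AS TIMESTAMP)")
val defaultValue = existsDefault.eval()  // timestamp value as microseconds since the epoch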