@@ -19,14 +19,15 @@ package org.apache.spark.sql.catalyst.util
 
 import scala.collection.mutable.ArrayBuffer
 
-import org.apache.spark.{SparkThrowable, SparkUnsupportedOperationException}
+import org.apache.spark.{SparkException, SparkThrowable, SparkUnsupportedOperationException}
 import org.apache.spark.internal.{Logging, MDC}
 import org.apache.spark.internal.LogKeys._
 import org.apache.spark.sql.AnalysisException
 import org.apache.spark.sql.catalyst.{InternalRow, SQLConfHelper}
 import org.apache.spark.sql.catalyst.analysis._
 import org.apache.spark.sql.catalyst.catalog.{CatalogDatabase, InMemoryCatalog, SessionCatalog}
 import org.apache.spark.sql.catalyst.expressions._
+import org.apache.spark.sql.catalyst.expressions.{Literal => ExprLiteral}
 import org.apache.spark.sql.catalyst.optimizer.{ConstantFolding, Optimizer}
 import org.apache.spark.sql.catalyst.parser.{CatalystSqlParser, ParseException}
 import org.apache.spark.sql.catalyst.plans.logical._
@@ -340,12 +341,43 @@ object ResolveDefaultColumns extends QueryErrorsBase
       throw QueryCompilationErrors.defaultValuesMayNotContainSubQueryExpressions(
         "", field.name, defaultSQL)
     }
-    if (!expr.resolved) {
-      throw QueryCompilationErrors.defaultValuesUnresolvedExprError(
-        "", field.name, defaultSQL, null)
+
+    val resolvedExpr = expr match {
+      case _: ExprLiteral | _: Cast => expr
+      case _ =>
+        fallbackResolveExistenceDefaultValue(field, defaultSQL)
     }
 
-    coerceDefaultValue(expr, field.dataType, "", field.name, defaultSQL)
+    coerceDefaultValue(resolvedExpr, field.dataType, "", field.name, defaultSQL)
+  }
+
+  // In most cases a column's existsDefault is already persisted as resolved,
+  // constant-folded literal SQL, but because these values are fetched from an
+  // external catalog that assumption may not hold, so we fall back to full
+  // analysis when we encounter an unresolved existsDefault.
+  private def fallbackResolveExistenceDefaultValue(
+      field: StructField,
+      defaultSQL: String): Expression = {
+    logWarning(log"Encountered unresolved exists default value: " +
+      log"'${MDC(COLUMN_DEFAULT_VALUE, defaultSQL)}' " +
+      log"for column ${MDC(COLUMN_NAME, field.name)} " +
+      log"with ${MDC(COLUMN_DATA_TYPE_SOURCE, field.dataType)}, " +
+      log"falling back to full analysis.")
+
+    field.getExistenceDefaultValue().map { text: String =>
+      val expr = analyze(field, "", EXISTS_DEFAULT_COLUMN_METADATA_KEY)
+      val literal = expr match {
+        case _: ExprLiteral | _: Cast => expr
+        case _ => throw SparkException.internalError(s"Failed to parse existence default " +
+          s"as literal, field name: ${field.name}, value: $text")
+      }
+      // Sanity check: the re-analyzed expression must be fully resolved.
+      if (!literal.resolved) {
+        throw QueryCompilationErrors.defaultValuesUnresolvedExprError(
+          "", field.name, defaultSQL, null)
+      }
+      literal
+    }.orNull
   }
 
   /**
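
For readers skimming the diff, here is a minimal, self-contained sketch of the pattern the new code follows: accept the parsed expression only when it has a shape that prior constant folding could have produced (a literal, or a cast of one), and otherwise re-run full analysis and verify the result before trusting it. Every name in the sketch (`Expr`, `Lit`, `CastExpr`, `parse`, `fullAnalysis`) is a hypothetical stand-in, not Catalyst API.

```scala
// Hypothetical stand-ins for Catalyst's Expression, Literal.fromSQL, and the analyzer.
object ExistenceDefaultSketch {
  sealed trait Expr { def resolved: Boolean }
  case class Lit(value: Any) extends Expr { val resolved = true }
  case class CastExpr(child: Expr) extends Expr { def resolved: Boolean = child.resolved }
  case class Unresolved(sql: String) extends Expr { val resolved = false }

  // Stand-in for the cheap parse: only recognizes integer literals here.
  def parse(sql: String): Expr =
    if (sql.nonEmpty && sql.forall(_.isDigit)) Lit(sql.toInt) else Unresolved(sql)

  // Stand-in for the full analyzer: pretend it can fold anything to a literal.
  def fullAnalysis(sql: String): Expr = Lit(sql)

  def resolveExistenceDefault(sql: String): Expr = parse(sql) match {
    // Fast path: already a folded literal (or a cast of one); use it directly.
    case e @ (_: Lit | _: CastExpr) => e
    // Fallback: the persisted SQL was not pre-folded, so run full analysis,
    // then insist the result is a resolved constant before accepting it.
    case _ => fullAnalysis(sql) match {
      case e @ (_: Lit | _: CastExpr) if e.resolved => e
      case other => throw new IllegalStateException(s"not a constant default: $other")
    }
  }

  def main(args: Array[String]): Unit = {
    println(resolveExistenceDefault("42"))           // fast path, no analyzer call
    println(resolveExistenceDefault("CURRENT_DATE")) // fallback through fullAnalysis
  }
}
```

The design choice mirrors the diff: the common path stays cheap (a shape check instead of an `expr.resolved` walk plus hard failure), and the expensive analyzer only runs for the rare malformed existsDefault fetched from an external catalog.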