Commit f3c2d39

Revert "[SPARK-53348][SQL] Always persist ANSI value when creating a view or assume it when querying if not stored"
This reverts commit fbedfb1.
1 parent 17c3a58

8 files changed (+13, -215 lines)

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala

Lines changed: 1 addition & 23 deletions
@@ -258,10 +258,7 @@ object Analyzer {
     "spark.sql.expressionTreeChangeLog.level"
   )

-  def retainResolutionConfigsForAnalysis(
-      newConf: SQLConf,
-      existingConf: SQLConf,
-      createSparkVersion: String = ""): Unit = {
+  def retainResolutionConfigsForAnalysis(newConf: SQLConf, existingConf: SQLConf): Unit = {
     val retainedConfigs = existingConf.getAllConfs.filter { case (key, _) =>
       // Also apply catalog configs
       RETAINED_ANALYSIS_FLAGS.contains(key) || key.startsWith("spark.sql.catalog.")
@@ -270,25 +267,6 @@ object Analyzer {
     retainedConfigs.foreach { case (k, v) =>
       newConf.settings.put(k, v)
     }
-
-    trySetAnsiValue(newConf, createSparkVersion)
-  }
-
-  /**
-   * In case ANSI value wasn't persisted for a view or a UDF, we set it to `true` in case Spark
-   * version used to create the view is 4.0.0 or higher. We set it to `false` in case Spark version
-   * is lower than 4.0.0 or if the Spark version wasn't stored (in that case we assume that the
-   * value is `false`).
-   */
-  def trySetAnsiValue(sqlConf: SQLConf, createSparkVersion: String = ""): Unit = {
-    if (conf.getConf(SQLConf.ASSUME_ANSI_FALSE_IF_NOT_PERSISTED) &&
-        !sqlConf.settings.containsKey(SQLConf.ANSI_ENABLED.key)) {
-      if (createSparkVersion.startsWith("4.")) {
-        sqlConf.settings.put(SQLConf.ANSI_ENABLED.key, "true")
-      } else {
-        sqlConf.settings.put(SQLConf.ANSI_ENABLED.key, "false")
-      }
-    }
   }
 }
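For context on what the revert removes: the deleted trySetAnsiValue keyed the assumed ANSI value off the Spark version that created the view or UDF. Below is a minimal standalone sketch of that decision, with a simplified name rather than the actual SQLConf API; the real check also required the ASSUME_ANSI_FALSE_IF_NOT_PERSISTED flag and an unset spark.sql.ansi.enabled key.

// Sketch of the reverted fallback: objects created by Spark 4.x were assumed
// ANSI-on; older or unknown creator versions were assumed ANSI-off.
def assumedAnsiValue(createSparkVersion: String): Boolean =
  createSparkVersion.startsWith("4.")

assert(assumedAnsiValue("4.0.0"))   // created on Spark 4.x -> "true"
assert(!assumedAnsiValue("3.5.1"))  // created on Spark 3.x -> "false"
assert(!assumedAnsiValue(""))       // creator version not stored -> "false"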

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/ViewResolution.scala

Lines changed: 1 addition & 7 deletions
@@ -41,13 +41,7 @@ object ViewResolution {
         view
       )
     }
-    SQLConf.withExistingConf(
-      View.effectiveSQLConf(
-        configs = view.desc.viewSQLConfigs,
-        isTempView = view.isTempView,
-        createSparkVersion = view.desc.createVersion
-      )
-    ) {
+    SQLConf.withExistingConf(View.effectiveSQLConf(view.desc.viewSQLConfigs, view.isTempView)) {
       resolveChild(view.child)
     }
   }

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala

Lines changed: 2 additions & 13 deletions
@@ -982,13 +982,7 @@ class SessionCatalog(
       objectType = Some("VIEW"),
       objectName = Some(metadata.qualifiedName)
     )
-    val parsedPlan = SQLConf.withExistingConf(
-      View.effectiveSQLConf(
-        configs = viewConfigs,
-        isTempView = isTempView,
-        createSparkVersion = metadata.createVersion
-      )
-    ) {
+    val parsedPlan = SQLConf.withExistingConf(View.effectiveSQLConf(viewConfigs, isTempView)) {
       CurrentOrigin.withOrigin(origin) {
         parser.parseQuery(viewText)
       }
@@ -1016,11 +1010,7 @@ class SessionCatalog(
     // Note that, the column names may have duplication, e.g. `CREATE VIEW v(x, y) AS
     // SELECT 1 col, 2 col`. We need to make sure that the matching attributes have the same
     // number of duplications, and pick the corresponding attribute by ordinal.
-    val viewConf = View.effectiveSQLConf(
-      configs = metadata.viewSQLConfigs,
-      isTempView = isTempView,
-      createSparkVersion = metadata.createVersion
-    )
+    val viewConf = View.effectiveSQLConf(metadata.viewSQLConfigs, isTempView)
     val normalizeColName: String => String = if (viewConf.caseSensitiveAnalysis) {
       identity
     } else {
@@ -1627,7 +1617,6 @@ class SessionCatalog(
     // Use captured SQL configs when parsing a SQL function.
     val conf = new SQLConf()
     function.getSQLConfigs.foreach { case (k, v) => conf.settings.put(k, v) }
-    Analyzer.trySetAnsiValue(conf)
     SQLConf.withExistingConf(conf) {
       val inputParam = function.inputParam
       val returnType = function.getScalarFuncReturnType
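The net effect for stored SQL functions: captured configs are replayed verbatim, and with Analyzer.trySetAnsiValue gone, an ANSI key that was never captured simply falls back to the conf's default. A hedged sketch of that replay pattern, using a hypothetical captured map (these are Spark-internal APIs, so this only compiles inside Spark's own sql packages):

// Replay hypothetical captured configs into a fresh SQLConf, as the
// SessionCatalog code above does when parsing a stored SQL function.
val captured = Map("spark.sql.caseSensitive" -> "true")
val conf = new SQLConf()
captured.foreach { case (k, v) => conf.settings.put(k, v) }
SQLConf.withExistingConf(conf) {
  // spark.sql.ansi.enabled was not captured, so after this revert it resolves
  // to the conf default instead of being force-set to an assumed value.
  assert(SQLConf.get.getConf(SQLConf.CASE_SENSITIVE))
}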

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/basicLogicalOperators.scala

Lines changed: 2 additions & 10 deletions
@@ -835,10 +835,7 @@ case class View(
 }

 object View {
-  def effectiveSQLConf(
-      configs: Map[String, String],
-      isTempView: Boolean,
-      createSparkVersion: String = ""): SQLConf = {
+  def effectiveSQLConf(configs: Map[String, String], isTempView: Boolean): SQLConf = {
     val activeConf = SQLConf.get
     // For temporary view, we always use captured sql configs
     if (activeConf.useCurrentSQLConfigsForView && !isTempView) return activeConf
@@ -847,12 +844,7 @@ object View {
     for ((k, v) <- configs) {
       sqlConf.settings.put(k, v)
     }
-    Analyzer.retainResolutionConfigsForAnalysis(
-      newConf = sqlConf,
-      existingConf = activeConf,
-      createSparkVersion = createSparkVersion
-    )
-
+    Analyzer.retainResolutionConfigsForAnalysis(newConf = sqlConf, existingConf = activeConf)
     sqlConf
   }
 }
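With the createSparkVersion parameter gone, effectiveSQLConf reduces to one branch: temporary views always replay their captured configs, and permanent views do too unless the session opts into current configs. A toy model of that branch (string results are illustrative only; the real method builds a full SQLConf as shown above):

// Toy model of the branch in View.effectiveSQLConf.
def confSource(isTempView: Boolean, useCurrentConfigsForView: Boolean): String =
  if (useCurrentConfigsForView && !isTempView) "active session conf"
  else "captured view configs + retained analysis/catalog flags"

assert(confSource(isTempView = true, useCurrentConfigsForView = true) ==
  "captured view configs + retained analysis/catalog flags")
assert(confSource(isTempView = false, useCurrentConfigsForView = true) ==
  "active session conf")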

sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala

Lines changed: 0 additions & 9 deletions
@@ -5594,15 +5594,6 @@ object SQLConf {
     .booleanConf
     .createWithDefault(true)

-  val ASSUME_ANSI_FALSE_IF_NOT_PERSISTED =
-    buildConf("spark.sql.assumeAnsiFalseIfNotPersisted.enabled")
-      .internal()
-      .doc("If enabled, assume ANSI mode is false if not persisted during view or UDF " +
-        "creation. Otherwise use the default value.")
-      .version("4.0.1")
-      .booleanConf
-      .createWithDefault(true)
-
   /**
    * Holds information about keys that have been deprecated.
    *

sql/core/src/main/scala/org/apache/spark/sql/execution/command/CreateUserDefinedFunctionCommand.scala

Lines changed: 1 addition & 18 deletions
@@ -19,8 +19,6 @@ package org.apache.spark.sql.execution.command

 import java.util.Locale

-import scala.collection.mutable
-
 import org.apache.spark.sql.AnalysisException
 import org.apache.spark.sql.catalyst.FunctionIdentifier
 import org.apache.spark.sql.catalyst.catalog.{LanguageSQL, RoutineLanguage, UserDefinedFunctionErrors}
@@ -89,25 +87,10 @@ object CreateUserDefinedFunctionCommand {
    * [[org.apache.spark.sql.catalyst.expressions.ExpressionInfo]], all SQL configs and other
    * function properties (such as the function parameters and the function return type)
    * are saved together in a property map.
-   *
-   * Here we only capture the SQL configs that are modifiable and should be captured, i.e. not in
-   * the denyList and in the allowList. Besides those we also capture `ANSI_ENABLED`.
-   *
-   * We need to always capture them to make sure we apply the same configs when querying the
-   * function.
    */
   def sqlConfigsToProps(conf: SQLConf): Map[String, String] = {
     val modifiedConfs = ViewHelper.getModifiedConf(conf)
-
-    val alwaysCaptured = Seq(SQLConf.ANSI_ENABLED)
-      .filter(c => !modifiedConfs.contains(c.key))
-      .map(c => (c.key, conf.getConf(c).toString))
-
-    val props = new mutable.HashMap[String, String]
-    for ((key, value) <- modifiedConfs ++ alwaysCaptured) {
-      props.put(s"$SQL_CONFIG_PREFIX$key", value)
-    }
-    props.toMap
+    modifiedConfs.map { case (key, value) => s"$SQL_CONFIG_PREFIX$key" -> value }
   }
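The simplified sqlConfigsToProps now just prefixes each modified conf key. An illustrative sketch, where the prefix value is an assumption for demonstration (the real constant SQL_CONFIG_PREFIX is defined in CreateUserDefinedFunctionCommand):

// Hypothetical prefix standing in for the real SQL_CONFIG_PREFIX constant.
val SQL_CONFIG_PREFIX = "sqlConfig."
val modifiedConfs = Map("spark.sql.caseSensitive" -> "true")
val props = modifiedConfs.map { case (key, value) => s"$SQL_CONFIG_PREFIX$key" -> value }
assert(props == Map("sqlConfig.spark.sql.caseSensitive" -> "true"))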

sql/core/src/main/scala/org/apache/spark/sql/execution/command/views.scala

Lines changed: 6 additions & 9 deletions
@@ -481,19 +481,16 @@ object ViewHelper extends SQLConfHelper with Logging {
   }

   /**
-   * Convert the view SQL configs to `properties`. Here we only capture the SQL configs that are
-   * modifiable and should be captured, i.e. not in the denyList and in the allowList. We also
-   * capture `SESSION_LOCAL_TIMEZONE` whose default value relies on the JVM system timezone and
-   * the `ANSI_ENABLED` value.
-   *
-   * We need to always capture them to make sure we apply the same configs when querying the view.
+   * Convert the view SQL configs to `properties`.
    */
   private def sqlConfigsToProps(conf: SQLConf): Map[String, String] = {
     val modifiedConfs = getModifiedConf(conf)
-
-    val alwaysCaptured = Seq(SQLConf.SESSION_LOCAL_TIMEZONE, SQLConf.ANSI_ENABLED)
+    // Some configs have dynamic default values, such as SESSION_LOCAL_TIMEZONE whose
+    // default value relies on the JVM system timezone. We need to always capture them
+    // to make sure we apply the same configs when reading the view.
+    val alwaysCaptured = Seq(SQLConf.SESSION_LOCAL_TIMEZONE)
       .filter(c => !modifiedConfs.contains(c.key))
-      .map(c => (c.key, conf.getConf(c).toString))
+      .map(c => (c.key, conf.getConf(c)))

     val props = new mutable.HashMap[String, String]
     for ((key, value) <- modifiedConfs ++ alwaysCaptured) {
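Why SESSION_LOCAL_TIMEZONE remains in the always-captured list even after ANSI_ENABLED drops out: its default is derived from the JVM rather than from a fixed constant, so two sessions with identical explicit configs can still disagree. A small sketch of that dependency:

// SESSION_LOCAL_TIMEZONE defaults to the JVM zone, which varies by machine.
// Persisting the resolved value with the view pins timestamp semantics to the
// creator's session rather than the reader's JVM.
import java.util.TimeZone
val jvmDefaultZone: String = TimeZone.getDefault.getID // e.g. "America/Los_Angeles"
println(s"Unpinned session timezone would default to: $jvmDefaultZone")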

sql/core/src/test/scala/org/apache/spark/sql/DefaultANSIValueSuite.scala

Lines changed: 0 additions & 126 deletions
This file was deleted.
