diff --git a/common/utils/src/main/resources/error/error-conditions.json b/common/utils/src/main/resources/error/error-conditions.json index 79e8ab4567f01..d0f6ac922c58e 100644 --- a/common/utils/src/main/resources/error/error-conditions.json +++ b/common/utils/src/main/resources/error/error-conditions.json @@ -5837,6 +5837,11 @@ "ALTER TABLE SET SERDE is not supported for table created with the datasource API. Consider using an external Hive table or updating the table properties with compatible options for your table format." ] }, + "ALTER_TABLE_UNSET_SERDE_PROPERTIES_FOR_DATASOURCE_TABLE" : { + "message" : [ + "ALTER TABLE UNSET SERDEPROPERTIES is not supported for table created with the datasource API. Consider using an external Hive table or updating the table properties with compatible options for your table format." + ] + }, "ANALYZE_UNCACHED_TEMP_VIEW" : { "message" : [ "The ANALYZE TABLE FOR COLUMNS command can operate on temporary views that have been cached already. Consider to cache the view ." diff --git a/docs/sql-ref-syntax-ddl-alter-table.md b/docs/sql-ref-syntax-ddl-alter-table.md index 28ecc44a5bf7e..87bb6ab09d092 100644 --- a/docs/sql-ref-syntax-ddl-alter-table.md +++ b/docs/sql-ref-syntax-ddl-alter-table.md @@ -302,6 +302,16 @@ ALTER TABLE table_identifier [ partition_spec ] SET SERDE serde_class_name [ WITH SERDEPROPERTIES ( key1 = val1, key2 = val2, ... ) ] ``` +#### UNSET SERDE PROPERTIES + +`ALTER TABLE UNSET` command can also be used to drop SERDE properties from tables and Hive table partitions. + +##### Syntax + +```sql +ALTER TABLE table_identifier [ partition_spec ] UNSET SERDEPROPERTIES [ IF EXISTS ] ( key1, key2, ... ) +``` + #### SET LOCATION And SET FILE FORMAT `ALTER TABLE SET` command can also be used for changing the file location and file format for diff --git a/sql/api/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBaseParser.g4 b/sql/api/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBaseParser.g4 index d95a32de33854..32804785d1ff2 100644 --- a/sql/api/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBaseParser.g4 +++ b/sql/api/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBaseParser.g4 @@ -253,6 +253,8 @@ statement SET SERDE stringLit (WITH SERDEPROPERTIES propertyList)? #setTableSerDe | ALTER TABLE identifierReference (partitionSpec)? SET SERDEPROPERTIES propertyList #setTableSerDe + | ALTER TABLE identifierReference (partitionSpec)? + UNSET SERDEPROPERTIES (IF EXISTS)? propertyList #unsetTableSerDeProperties | ALTER (TABLE | VIEW) identifierReference ADD (IF errorCapturingNot EXISTS)?
partitionSpecLocation+ #addTablePartition | ALTER TABLE identifierReference diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala index df348f1f6051e..fe6ac2f46eca9 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala @@ -6006,6 +6006,26 @@ class AstBuilder extends DataTypeAstBuilder Option(ctx.partitionSpec).map(visitNonOptionalPartitionSpec)) } + /** + * Create an [[UnsetTableSerDeProperties]] + * + * For example: + * {{{ + * ALTER TABLE multi_part_name [PARTITION spec] UNSET SERDEPROPERTIES [IF EXISTS] ('key'); + * }}} + */ + override def visitUnsetTableSerDeProperties( + ctx: UnsetTableSerDePropertiesContext): LogicalPlan = withOrigin(ctx) { + val properties = visitPropertyKeys(ctx.propertyList) + val ifExists = ctx.EXISTS != null + UnsetTableSerDeProperties( + createUnresolvedTable(ctx.identifierReference, "ALTER TABLE ... UNSET SERDEPROPERTIES", true), + properties, + ifExists, + // TODO a partition spec is allowed to have optional values. This is currently violated. + Option(ctx.partitionSpec).map(visitNonOptionalPartitionSpec)) + } + /** * Alter the query of a view. This creates a [[AlterViewAs]] * diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/v2Commands.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/v2Commands.scala index 089b1a4afab1f..0b54e1dff03f3 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/v2Commands.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/v2Commands.scala @@ -1503,6 +1503,18 @@ case class SetTableSerDeProperties( copy(child = newChild) } +/** + * The logical plan of the ALTER TABLE ... UNSET SERDEPROPERTIES command. + */ +case class UnsetTableSerDeProperties( + child: LogicalPlan, + propertyKeys: Seq[String], + ifExists: Boolean, + partitionSpec: Option[TablePartitionSpec]) extends UnaryCommand { + override protected def withNewChildInternal(newChild: LogicalPlan): UnsetTableSerDeProperties = + copy(child = newChild) +} + /** * The logical plan of the CACHE TABLE command. */ diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala index 76a5a73659722..17d1834053b74 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala @@ -1638,6 +1638,10 @@ private[sql] object QueryCompilationErrors extends QueryErrorsBase with Compilat notSupportedForV2TablesError("ALTER TABLE ... SET [SERDE|SERDEPROPERTIES]") } + def alterTableUnsetSerDePropertiesNotSupportedForV2TablesError(): Throwable = { + notSupportedForV2TablesError("ALTER TABLE ... 
UNSET SERDEPROPERTIES") + } + def describeAsJsonNotSupportedForV2TablesError(): Throwable = { notSupportedForV2TablesError("DESCRIBE TABLE AS JSON") } @@ -2855,6 +2859,13 @@ private[sql] object QueryCompilationErrors extends QueryErrorsBase with Compilat messageParameters = Map("tableName" -> toSQLId(tableName))) } + def alterTableUnsetSerdePropertiesNotSupportedError(tableName: String): Throwable = { + new AnalysisException( + errorClass = "UNSUPPORTED_FEATURE.ALTER_TABLE_UNSET_SERDE_PROPERTIES_FOR_DATASOURCE_TABLE", + messageParameters = Map("tableName" -> toSQLId(tableName)) + ) + } + def cmdOnlyWorksOnPartitionedTablesError( operation: String, tableIdentWithDB: String): Throwable = { diff --git a/sql/core/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveSessionCatalog.scala b/sql/core/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveSessionCatalog.scala index d7d8fc07b872f..19e119f662422 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveSessionCatalog.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveSessionCatalog.scala @@ -409,6 +409,13 @@ class ResolveSessionCatalog(val catalogManager: CatalogManager) serdeProperties, partitionSpec) + case UnsetTableSerDeProperties( + ResolvedV1TableIdentifierInSessionCatalog(ident), + propertyKeys, + ifExists, + partitionSpec) => + AlterTableUnsetSerDePropertiesCommand(ident, propertyKeys, ifExists, partitionSpec) + case SetTableLocation(ResolvedV1TableIdentifier(ident), None, location) => AlterTableSetLocationCommand(ident, None, location) diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala index 6eb81e6ec670b..db38b8841168c 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala @@ -515,6 +515,44 @@ case class AlterTableSerDePropertiesCommand( } +/** + * A command that unsets the serde properties of a table/partition. + * + * The syntax of this command is: + * {{{ + * ALTER TABLE table [PARTITION spec] UNSET SERDEPROPERTIES [IF EXISTS] ('key1', 'key2', ...); + * }}} + */ +case class AlterTableUnsetSerDePropertiesCommand( + tableName: TableIdentifier, + propKeys: Seq[String], + ifExists: Boolean, + partSpec: Option[TablePartitionSpec]) + extends LeafRunnableCommand { + + override def run(sparkSession: SparkSession): Seq[Row] = { + val catalog = sparkSession.sessionState.catalog + val table = catalog.getTableRawMetadata(tableName) + // For datasource tables, disallow unsetting partition serde properties + if (partSpec.isDefined && DDLUtils.isDatasourceTable(table)) { + throw QueryCompilationErrors.alterTableUnsetSerdePropertiesNotSupportedError( + table.qualifiedName) + } + if (partSpec.isEmpty) { + val newProperties = table.storage.properties.filter { case (k, _) => !propKeys.contains(k) } + val newTable = table.withNewStorage(properties = newProperties) + catalog.alterTable(newTable) + } else { + val spec = partSpec.get + val part = catalog.getPartition(table.identifier, spec) + val newProperties = part.storage.properties.filter { case (k, _) => !propKeys.contains(k) } + val newPart = part.copy(storage = part.storage.copy(properties = newProperties)) + catalog.alterPartitions(table.identifier, Seq(newPart)) + } + Seq.empty[Row] + } +} + /** * Add Partition in ALTER TABLE: add the table partitions. 
* diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/DataSourceV2Strategy.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/DataSourceV2Strategy.scala index 9cbea3b69ab79..9435bde497773 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/DataSourceV2Strategy.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/DataSourceV2Strategy.scala @@ -476,6 +476,9 @@ class DataSourceV2Strategy(session: SparkSession) extends Strategy with Predicat case SetTableSerDeProperties(_: ResolvedTable, _, _, _) => throw QueryCompilationErrors.alterTableSerDePropertiesNotSupportedForV2TablesError() + case UnsetTableSerDeProperties(_: ResolvedTable, _, _, _) => + throw QueryCompilationErrors.alterTableUnsetSerDePropertiesNotSupportedForV2TablesError() + case LoadData(_: ResolvedTable, _, _, _, _) => throw QueryCompilationErrors.loadDataNotSupportedForV2TablesError() diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/AlterTableUnsetSerdePropertiesParserSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/AlterTableUnsetSerdePropertiesParserSuite.scala new file mode 100644 index 0000000000000..14754f68b50f6 --- /dev/null +++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/AlterTableUnsetSerdePropertiesParserSuite.scala @@ -0,0 +1,80 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.spark.sql.execution.command + +import org.apache.spark.SparkThrowable +import org.apache.spark.sql.catalyst.analysis.{AnalysisTest, UnresolvedTable} +import org.apache.spark.sql.catalyst.parser.CatalystSqlParser.parsePlan +import org.apache.spark.sql.catalyst.parser.ParseException +import org.apache.spark.sql.catalyst.plans.logical.UnsetTableSerDeProperties +import org.apache.spark.sql.test.SharedSparkSession + +class AlterTableUnsetSerdePropertiesParserSuite extends AnalysisTest with SharedSparkSession { + + private def parseException(sqlText: String): SparkThrowable = { + intercept[ParseException](sql(sqlText).collect()) + } + + // ALTER TABLE table_name [PARTITION spec] UNSET SERDEPROPERTIES [IF EXISTS] ('key1', 'key2'); + test("alter table unset serde properties") { + val sql1 = "ALTER TABLE table_name UNSET SERDEPROPERTIES ('key1', 'key2')" + val sql2 = "ALTER TABLE table_name PARTITION (a=1, b='str') UNSET SERDEPROPERTIES ('key')" + + comparePlans( + parsePlan(sql1), + UnsetTableSerDeProperties( + UnresolvedTable(Seq("table_name"), "ALTER TABLE ... 
UNSET SERDEPROPERTIES", + suggestAlternative = true), + Seq("key1", "key2"), + ifExists = false, + partitionSpec = None) + ) + comparePlans( + parsePlan(sql2), + UnsetTableSerDeProperties( + UnresolvedTable(Seq("table_name"), "ALTER TABLE ... UNSET SERDEPROPERTIES", + suggestAlternative = true), + Seq("key"), + ifExists = false, + partitionSpec = Some(Map("a" -> "1", "b" -> "str")))) + } + + test("alter table unset serde properties - property values must NOT be set") { + val sql = "ALTER TABLE my_tab UNSET SERDEPROPERTIES('key_without_value', 'key_with_value'='x')" + checkError( + exception = parseException(sql), + condition = "_LEGACY_ERROR_TEMP_0035", + parameters = Map("message" -> "Values should not be specified for key(s): [key_with_value]"), + context = ExpectedContext( + fragment = sql, + start = 0, + stop = 82)) + } + + test("alter table unset serde properties - partition values must be full") { + val sql = "ALTER TABLE table_name PARTITION (a=1, b) UNSET SERDEPROPERTIES ('key')" + checkError( + exception = parseException(sql), + condition = "INVALID_SQL_SYNTAX.EMPTY_PARTITION_VALUE", + parameters = Map("partKey" -> "`b`"), + context = ExpectedContext( + fragment = "PARTITION (a=1, b)", + start = 23, + stop = 40)) + } +} diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/AlterTableUnsetSerdePropertiesSuiteBase.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/AlterTableUnsetSerdePropertiesSuiteBase.scala new file mode 100644 index 0000000000000..dde8744171064 --- /dev/null +++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/AlterTableUnsetSerdePropertiesSuiteBase.scala @@ -0,0 +1,38 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.spark.sql.execution.command + +import org.apache.spark.sql.QueryTest + +/** + * This base suite contains unified tests for the `ALTER TABLE .. UNSET SERDEPROPERTIES` + * command that check V1 and V2 table catalogs. The tests that cannot run for all supported + * catalogs are located in more specific test suites: + * + * - V2 table catalog tests: + * `org.apache.spark.sql.execution.command.v2.AlterTableUnsetSerdePropertiesSuite` + * - V1 table catalog tests: + * `org.apache.spark.sql.execution.command.v1.AlterTableUnsetSerdePropertiesSuiteBase` + * - V1 In-Memory catalog: + * `org.apache.spark.sql.execution.command.v1.AlterTableUnsetSerdePropertiesSuite` + * - V1 Hive External catalog: + * `org.apache.spark.sql.hive.execution.command.AlterTableUnsetSerdePropertiesSuite` + */ +trait AlterTableUnsetSerdePropertiesSuiteBase extends QueryTest with DDLCommandTestUtils { + override val command = "ALTER TABLE ... 
UNSET SERDEPROPERTIES" +} diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/AlterTableUnsetSerdePropertiesSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/AlterTableUnsetSerdePropertiesSuite.scala new file mode 100644 index 0000000000000..2ad7e0fb9b1ed --- /dev/null +++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/AlterTableUnsetSerdePropertiesSuite.scala @@ -0,0 +1,127 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.spark.sql.execution.command.v1 + +import org.apache.spark.sql.AnalysisException +import org.apache.spark.sql.catalyst.TableIdentifier +import org.apache.spark.sql.execution.command +import org.apache.spark.sql.internal.StaticSQLConf.CATALOG_IMPLEMENTATION + +/** + * This base suite contains unified tests for the `ALTER TABLE ... UNSET SERDEPROPERTIES` + * command that check V1 table catalogs. The tests that cannot run for all V1 catalogs + * are located in more specific test suites: + * + * - V1 In-Memory catalog: + * `org.apache.spark.sql.execution.command.v1.AlterTableUnsetSerdePropertiesSuite` + * - V1 Hive External catalog: + * `org.apache.spark.sql.hive.execution.command.AlterTableUnsetSerdePropertiesSuite` + */ +trait AlterTableUnsetSerdePropertiesSuiteBase + extends command.AlterTableUnsetSerdePropertiesSuiteBase { + + private[sql] lazy val sessionCatalog = spark.sessionState.catalog + + private def isUsingHiveMetastore: Boolean = { + spark.sparkContext.conf.get(CATALOG_IMPLEMENTATION) == "hive" + } + + private def normalizeSerdeProp(props: Map[String, String]): Map[String, String] = { + props.filterNot(p => Seq("serialization.format", "path").contains(p._1)) + } + + private[sql] def checkSerdeProps(tableIdent: TableIdentifier, + partitionSpec: Option[Map[String, String]], + expectedSerdeProps: Map[String, String]): Unit = { + val serdeProp = if (partitionSpec.isEmpty) { + sessionCatalog.getTableMetadata(tableIdent).storage.properties + } else { + sessionCatalog.getPartition(tableIdent, partitionSpec.get).storage.properties + } + if (isUsingHiveMetastore) { + assert(normalizeSerdeProp(serdeProp) == expectedSerdeProps) + } else { + assert(serdeProp == expectedSerdeProps) + } + } +} + +/** + * The class contains tests for the `ALTER TABLE ... UNSET SERDEPROPERTIES` command to check + * V1 In-Memory table catalog. 
+ */ +class AlterTableUnsetSerdePropertiesSuite extends AlterTableUnsetSerdePropertiesSuiteBase + with CommandSuiteBase { + + test("In-Memory catalog - datasource table: alter table unset serde properties") { + withNamespaceAndTable("ns", "tbl") { t => + sql(s"CREATE TABLE $t (col1 int, col2 string, a int, b int) $defaultUsing " + + s"PARTITIONED by (a, b)") + val tableIdent = TableIdentifier("tbl", Some("ns")) + assert(sessionCatalog.getTableMetadata(tableIdent).storage.serde.isEmpty) + checkSerdeProps(tableIdent, None, Map.empty[String, String]) + + // set serde properties + sql(s"ALTER TABLE $t SET SERDEPROPERTIES ('k' = 'vvv', 'kay' = 'vee')") + checkSerdeProps(tableIdent, None, Map("k" -> "vvv", "kay" -> "vee")) + + // unset serde properties + sql(s"ALTER TABLE $t UNSET SERDEPROPERTIES ('k', 'key_non_exist')") + checkSerdeProps(tableIdent, None, Map("kay" -> "vee")) + + // table to alter does not exist + val e = intercept[AnalysisException] { + sql("ALTER TABLE does_not_exist UNSET SERDEPROPERTIES ('x')") + } + checkErrorTableNotFound(e, "`does_not_exist`", + ExpectedContext("does_not_exist", 12, 11 + "does_not_exist".length)) + } + } + + test("In-Memory catalog - datasource table: alter table unset partition serde properties") { + withNamespaceAndTable("ns", "tbl") { t => + sql(s"CREATE TABLE $t (col1 int, col2 string, a int, b int) $defaultUsing " + + s"PARTITIONED BY (a, b)") + sql(s"INSERT INTO $t PARTITION (a = 1, b = 2) SELECT 1, 'abc'") + sql(s"INSERT INTO $t PARTITION (a = 1, b = 3) SELECT 2, 'def'") + sql(s"INSERT INTO $t PARTITION (a = 2, b = 2) SELECT 3, 'ghi'") + sql(s"INSERT INTO $t PARTITION (a = 2, b = 3) SELECT 4, 'jkl'") + + val tableIdent = TableIdentifier("tbl", Some("ns")) + val spec = Map("a" -> "1", "b" -> "2") + assert(sessionCatalog.getPartition(tableIdent, spec).storage.serde.isEmpty) + checkSerdeProps(tableIdent, Some(spec), Map.empty[String, String]) + + // unset partition serde properties + checkError( + exception = intercept[AnalysisException] { + sql(s"ALTER TABLE $t PARTITION (a = 1, b = 2) " + + "UNSET SERDEPROPERTIES ('k', 'key_non_exist')") + }, + condition = "UNSUPPORTED_FEATURE.ALTER_TABLE_UNSET_SERDE_PROPERTIES_FOR_DATASOURCE_TABLE", + parameters = Map("tableName" -> "`spark_catalog`.`ns`.`tbl`")) + + // table to alter does not exist + val e = intercept[AnalysisException] { + sql("ALTER TABLE does_not_exist PARTITION (a = 1, b = 2) UNSET SERDEPROPERTIES ('x')") + } + checkErrorTableNotFound(e, "`does_not_exist`", + ExpectedContext("does_not_exist", 12, 11 + "does_not_exist".length)) + } + } +} diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/AlterTableUnsetSerdePropertiesSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/AlterTableUnsetSerdePropertiesSuite.scala new file mode 100644 index 0000000000000..73a0f9839f13c --- /dev/null +++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/AlterTableUnsetSerdePropertiesSuite.scala @@ -0,0 +1,50 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.spark.sql.execution.command.v2 + +import org.apache.spark.SparkConf +import org.apache.spark.sql.AnalysisException +import org.apache.spark.sql.connector.catalog.InMemoryCatalog +import org.apache.spark.sql.execution.command + +/** + * The class contains tests for the `ALTER TABLE ... UNSET SERDEPROPERTIES` command to + * check V2 table catalogs. + */ +class AlterTableUnsetSerdePropertiesSuite extends command.AlterTableUnsetSerdePropertiesSuiteBase + with CommandSuiteBase { + + override def sparkConf: SparkConf = super.sparkConf + .set("spark.sql.catalog.testcat", classOf[InMemoryCatalog].getName) + + test("v2 catalog doesn't support ALTER TABLE Unset SerDe properties") { + val t = "testcat.ns1.ns2.tbl" + withTable(t) { + spark.sql(s"CREATE TABLE $t (id bigint, data string) " + + s"USING foo PARTITIONED BY (id)") + checkError( + exception = intercept[AnalysisException] { + sql(s"ALTER TABLE $t UNSET SERDEPROPERTIES ('columns', 'field.delim')") + }, + condition = "NOT_SUPPORTED_COMMAND_FOR_V2_TABLE", + sqlState = "0A000", + parameters = Map("cmd" -> "ALTER TABLE ... UNSET SERDEPROPERTIES") + ) + } + } +} diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/command/AlterTableUnsetSerdePropertiesSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/command/AlterTableUnsetSerdePropertiesSuite.scala new file mode 100644 index 0000000000000..cba479bb3bd31 --- /dev/null +++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/command/AlterTableUnsetSerdePropertiesSuite.scala @@ -0,0 +1,92 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.spark.sql.hive.execution.command + +import org.apache.spark.sql.AnalysisException +import org.apache.spark.sql.catalyst.TableIdentifier +import org.apache.spark.sql.execution.command.v1 + +/** + * The class contains tests for the `ALTER TABLE ... UNSET SERDEPROPERTIES` command + * to check V1 Hive external table catalog. 
+ */ +class AlterTableUnsetSerdePropertiesSuite extends v1.AlterTableUnsetSerdePropertiesSuiteBase + with CommandSuiteBase { + + test("Hive external catalog - hiveformat table: alter table unset serde properties") { + withNamespaceAndTable("ns", "tbl") { t => + sql(s"CREATE TABLE $t (col1 int, col2 string, a int, b int) " + + s"PARTITIONED BY (a, b) " + + s"ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe' " + + s"STORED AS INPUTFORMAT 'org.apache.hadoop.mapred.SequenceFileInputFormat' " + + s"OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat'") + + val tableIdent = TableIdentifier("tbl", Some("ns")) + checkSerdeProps(tableIdent, None, Map.empty[String, String]) + + // set serde properties + sql(s"ALTER TABLE $t SET SERDEPROPERTIES ('k' = 'vvv', 'kay' = 'vee')") + checkSerdeProps(tableIdent, None, Map("k" -> "vvv", "kay" -> "vee")) + + // unset serde properties + sql(s"ALTER TABLE $t UNSET SERDEPROPERTIES ('k', 'key_non_exist')") + checkSerdeProps(tableIdent, None, Map("kay" -> "vee")) + + // table to alter does not exist + val e = intercept[AnalysisException] { + sql("ALTER TABLE does_not_exist UNSET SERDEPROPERTIES ('x')") + } + checkErrorTableNotFound(e, "`does_not_exist`", + ExpectedContext("does_not_exist", 12, 11 + "does_not_exist".length)) + } + } + + test("Hive external catalog - hiveformat table: alter table unset partition serde properties") { + withNamespaceAndTable("ns", "tbl") { t => + sql(s"CREATE TABLE $t (col1 int, col2 string, a int, b int) " + + s"PARTITIONED BY (a, b) " + + s"ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe' " + + s"STORED AS INPUTFORMAT 'org.apache.hadoop.mapred.SequenceFileInputFormat' " + + s"OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat'") + sql(s"INSERT INTO $t PARTITION (a = 1, b = 2) SELECT 1, 'abc'") + sql(s"INSERT INTO $t PARTITION (a = 1, b = 3) SELECT 2, 'def'") + sql(s"INSERT INTO $t PARTITION (a = 2, b = 2) SELECT 3, 'ghi'") + sql(s"INSERT INTO $t PARTITION (a = 2, b = 3) SELECT 4, 'jkl'") + + val tableIdent = TableIdentifier("tbl", Some("ns")) + val spec = Map("a" -> "1", "b" -> "2") + checkSerdeProps(tableIdent, Some(spec), Map.empty[String, String]) + + // set partition serde properties + sql(s"ALTER TABLE $t PARTITION (a = 1, b = 2) " + + "SET SERDEPROPERTIES ('k' = 'vvv', 'kay' = 'vee')") + checkSerdeProps(tableIdent, Some(spec), Map("k" -> "vvv", "kay" -> "vee")) + + // unset serde properties + sql(s"ALTER TABLE $t PARTITION (a = 1, b = 2) UNSET SERDEPROPERTIES ('k', 'key_non_exist')") + checkSerdeProps(tableIdent, Some(spec), Map("kay" -> "vee")) + + // table to alter does not exist + val e = intercept[AnalysisException] { + sql("ALTER TABLE does_not_exist UNSET SERDEPROPERTIES ('x')") + } + checkErrorTableNotFound(e, "`does_not_exist`", + ExpectedContext("does_not_exist", 12, 11 + "does_not_exist".length)) + } + } +}
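
For illustration, a minimal end-to-end usage sketch of the new command against a Hive-format table; the table, column, and property names mirror the Hive external catalog suite above and are placeholders only:

```sql
-- Hive-format table; the command rejects partition-level UNSET on datasource (USING ...) tables
CREATE TABLE ns.tbl (col1 INT, col2 STRING, a INT, b INT)
PARTITIONED BY (a, b)
ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
STORED AS INPUTFORMAT 'org.apache.hadoop.mapred.SequenceFileInputFormat'
OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat';

-- Create a partition to alter later
INSERT INTO ns.tbl PARTITION (a = 1, b = 2) SELECT 1, 'abc';

-- Set two SERDE properties, then drop one of them at the table level
ALTER TABLE ns.tbl SET SERDEPROPERTIES ('k' = 'vvv', 'kay' = 'vee');
ALTER TABLE ns.tbl UNSET SERDEPROPERTIES ('k');

-- The same command scoped to a single Hive table partition
ALTER TABLE ns.tbl PARTITION (a = 1, b = 2) UNSET SERDEPROPERTIES ('kay', 'key_non_exist');
```

As in the unified suites above, keys that are not present (such as `key_non_exist`) are dropped silently even without `IF EXISTS`.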