2 changes: 1 addition & 1 deletion common/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java
@@ -468,7 +468,7 @@ public enum ErrorMsg {
RESOURCE_PLAN_ALREADY_EXISTS(10417, "Resource plan {0} already exists", true),
RESOURCE_PLAN_NOT_EXISTS(10418, "Resource plan {0} does not exist", true),
INCOMPATIBLE_STRUCT(10419, "Incompatible structs.", true),
-  OBJECTNAME_CONTAINS_DOT(10420, "Table or database name may not contain dot(.) character", true),
+  OBJECTNAME_CONTAINS_DOT(10420, "Catalog, table or database name may not contain dot(.) character", true),
WITHIN_GROUP_NOT_ALLOWED(10421,
"Not an ordered-set aggregate function: {0}. WITHIN GROUP clause is not allowed.", true),
WITHIN_GROUP_PARAMETER_MISMATCH(10422,
22 changes: 11 additions & 11 deletions parser/src/java/org/apache/hadoop/hive/ql/parse/AlterClauseParser.g
@@ -50,7 +50,7 @@ alterStatement
: KW_ALTER KW_TABLE tableName alterTableStatementSuffix -> ^(TOK_ALTERTABLE tableName alterTableStatementSuffix)
| KW_ALTER KW_VIEW tableName KW_AS? alterViewStatementSuffix -> ^(TOK_ALTERVIEW tableName alterViewStatementSuffix)
| KW_ALTER KW_MATERIALIZED KW_VIEW tableNameTree=tableName alterMaterializedViewStatementSuffix[$tableNameTree.tree] -> alterMaterializedViewStatementSuffix
-    | KW_ALTER (KW_DATABASE|KW_SCHEMA) alterDatabaseStatementSuffix -> alterDatabaseStatementSuffix
+    | KW_ALTER (KW_DATABASE|KW_SCHEMA) databaseName alterDatabaseStatementSuffix -> ^(TOK_ALTERDATABASE databaseName alterDatabaseStatementSuffix)
| KW_ALTER KW_DATACONNECTOR alterDataConnectorStatementSuffix -> alterDataConnectorStatementSuffix
| KW_OPTIMIZE KW_TABLE tableName optimizeTableStatementSuffix -> ^(TOK_ALTERTABLE tableName optimizeTableStatementSuffix)
| KW_ALTER KW_CATALOG alterCatalogStatementSuffix -> alterCatalogStatementSuffix
@@ -181,31 +181,31 @@ alterDatabaseStatementSuffix
alterDatabaseSuffixProperties
@init { gParent.pushMsg("alter database properties statement", state); }
@after { gParent.popMsg(state); }
-    : name=identifier KW_SET KW_DBPROPERTIES dbProperties
-    -> ^(TOK_ALTERDATABASE_PROPERTIES $name dbProperties)
+    : KW_SET KW_DBPROPERTIES dbProperties
+    -> ^(TOK_ALTERDATABASE_PROPERTIES dbProperties)
;

alterDatabaseSuffixSetOwner
@init { gParent.pushMsg("alter database set owner", state); }
@after { gParent.popMsg(state); }
-    : dbName=identifier KW_SET KW_OWNER principalName
-    -> ^(TOK_ALTERDATABASE_OWNER $dbName principalName)
+    : KW_SET KW_OWNER principalName
+    -> ^(TOK_ALTERDATABASE_OWNER principalName)
;

alterDatabaseSuffixSetLocation
@init { gParent.pushMsg("alter database set location", state); }
@after { gParent.popMsg(state); }
-    : dbName=identifier KW_SET KW_LOCATION newLocation=StringLiteral
-    -> ^(TOK_ALTERDATABASE_LOCATION $dbName $newLocation)
-    | dbName=identifier KW_SET KW_MANAGEDLOCATION newLocation=StringLiteral
-    -> ^(TOK_ALTERDATABASE_MANAGEDLOCATION $dbName $newLocation)
+    : KW_SET KW_LOCATION newLocation=StringLiteral
+    -> ^(TOK_ALTERDATABASE_LOCATION $newLocation)
+    | KW_SET KW_MANAGEDLOCATION newLocation=StringLiteral
+    -> ^(TOK_ALTERDATABASE_MANAGEDLOCATION $newLocation)
;

alterDatabaseSuffixSetManagedLocation
@init { gParent.pushMsg("alter database set managed location", state); }
@after { gParent.popMsg(state); }
-    : dbName=identifier KW_SET KW_MANAGEDLOCATION newLocation=StringLiteral
-    -> ^(TOK_ALTERDATABASE_MANAGEDLOCATION $dbName $newLocation)
+    : KW_SET KW_MANAGEDLOCATION newLocation=StringLiteral
+    -> ^(TOK_ALTERDATABASE_MANAGEDLOCATION $newLocation)
;

alterStatementSuffixRename[boolean table]
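Taken together with the alterStatement change above, the database name now binds once at the statement level instead of in every suffix rule. As a quick sanity check, statements like these (catalog and database names are hypothetical) should parse into a TOK_ALTERDATABASE tree:

    ALTER DATABASE dev_cat.sales SET DBPROPERTIES ('dept'='analytics');
    ALTER DATABASE sales SET OWNER USER admin;
    ALTER DATABASE dev_cat.sales SET MANAGEDLOCATION '/warehouse/managed/sales';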
11 changes: 11 additions & 0 deletions parser/src/java/org/apache/hadoop/hive/ql/parse/FromClauseParser.g
@@ -231,6 +231,17 @@ uniqueJoinTableSource
-> ^(TOK_TABREF $tabname $ts? $alias?)
;

+ databaseName
+ @init { gParent.pushMsg("database name", state); }
+ @after { gParent.popMsg(state); }
+     :
+     catalog=identifier DOT db=identifier
+     -> ^(TOK_DBNAME $catalog $db)
+     |
+     db=identifier
+     -> ^(TOK_DBNAME $db)
+     ;

tableName
@init { gParent.pushMsg("table name", state); }
@after { gParent.popMsg(state); }
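The new databaseName rule accepts both an unqualified and a catalog-qualified reference; a minimal sketch with hypothetical names:

    -- one-part form, rewritten to ^(TOK_DBNAME sales)
    USE sales;
    -- two-part form, rewritten to ^(TOK_DBNAME dev_cat sales)
    USE dev_cat.sales;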
26 changes: 18 additions & 8 deletions parser/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
@@ -378,11 +378,13 @@ TOK_DESCCATALOG;
TOK_CATALOGLOCATION;
TOK_CATALOGCOMMENT;
TOK_ALTERCATALOG_LOCATION;
+ TOK_SWITCHCATALOG;
TOK_DESCDATABASE;
TOK_DATABASEPROPERTIES;
TOK_DATABASELOCATION;
TOK_DATABASE_MANAGEDLOCATION;
TOK_DBPROPLIST;
+ TOK_ALTERDATABASE;
TOK_ALTERDATABASE_PROPERTIES;
TOK_ALTERDATABASE_OWNER;
TOK_ALTERDATABASE_LOCATION;
@@ -1011,6 +1013,7 @@ ddlStatement
@after { popMsg(state); }
: createCatalogStatement
| dropCatalogStatement
+     | switchCatalogStatement
| createDatabaseStatement
| switchDatabaseStatement
| dropDatabaseStatement
@@ -1151,12 +1154,19 @@ dropCatalogStatement
-> ^(TOK_DROPCATALOG identifier ifExists?)
;

+ switchCatalogStatement
+ @init { pushMsg("switch catalog statement", state); }
+ @after { popMsg(state); }
+     : KW_SET KW_CATALOG identifier
+     -> ^(TOK_SWITCHCATALOG identifier)
+     ;
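A minimal example of the new command, assuming a catalog named dev_cat exists:

    -- switch the session's current catalog
    SET CATALOG dev_cat;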

createDatabaseStatement
@init { pushMsg("create database statement", state); }
@after { popMsg(state); }
: KW_CREATE (KW_DATABASE|KW_SCHEMA)
ifNotExists?
-     name=identifier
+     name=databaseName
databaseComment?
dbLocation?
dbManagedLocation?
@@ -1165,7 +1175,7 @@

| KW_CREATE KW_REMOTE (KW_DATABASE|KW_SCHEMA)
ifNotExists?
-     name=identifier
+     name=databaseName
databaseComment?
dbConnectorName
(KW_WITH KW_DBPROPERTIES dbprops=dbProperties)?
@@ -1210,15 +1220,15 @@ dbConnectorName
switchDatabaseStatement
@init { pushMsg("switch database statement", state); }
@after { popMsg(state); }
-     : KW_USE identifier
-     -> ^(TOK_SWITCHDATABASE identifier)
+     : KW_USE databaseName
+     -> ^(TOK_SWITCHDATABASE databaseName)
;

dropDatabaseStatement
@init { pushMsg("drop database statement", state); }
@after { popMsg(state); }
-     : KW_DROP (KW_DATABASE|KW_SCHEMA) ifExists? identifier restrictOrCascade?
-     -> ^(TOK_DROPDATABASE identifier ifExists? restrictOrCascade?)
+     : KW_DROP (KW_DATABASE|KW_SCHEMA) ifExists? databaseName restrictOrCascade?
+     -> ^(TOK_DROPDATABASE databaseName ifExists? restrictOrCascade?)
;
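With databaseName wired into these rules, USE and DROP DATABASE accept catalog-qualified names, e.g. (hypothetical names):

    USE dev_cat.sales;
    DROP DATABASE IF EXISTS dev_cat.sales CASCADE;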

databaseComment
@@ -1284,7 +1294,7 @@ descStatement
(
(KW_CATALOG) => (KW_CATALOG) KW_EXTENDED? (catName=identifier) -> ^(TOK_DESCCATALOG $catName KW_EXTENDED?)
|
-     (KW_DATABASE|KW_SCHEMA) => (KW_DATABASE|KW_SCHEMA) KW_EXTENDED? (dbName=identifier) -> ^(TOK_DESCDATABASE $dbName KW_EXTENDED?)
+     (KW_DATABASE|KW_SCHEMA) => (KW_DATABASE|KW_SCHEMA) KW_EXTENDED? (dbName=databaseName) -> ^(TOK_DESCDATABASE $dbName KW_EXTENDED?)
|
(KW_DATACONNECTOR) => (KW_DATACONNECTOR) KW_EXTENDED? (dcName=identifier) -> ^(TOK_DESCDATACONNECTOR $dcName KW_EXTENDED?)
|
@@ -1323,7 +1333,7 @@ showStatement
| KW_SHOW KW_FUNCTIONS (KW_LIKE showFunctionIdentifier)? -> ^(TOK_SHOWFUNCTIONS KW_LIKE? showFunctionIdentifier?)
| KW_SHOW KW_PARTITIONS tabName=tableName partitionSpec? whereClause? orderByClause? limitClause? -> ^(TOK_SHOWPARTITIONS $tabName partitionSpec? whereClause? orderByClause? limitClause?)
| KW_SHOW KW_CREATE (
-         (KW_DATABASE|KW_SCHEMA) => (KW_DATABASE|KW_SCHEMA) db_name=identifier -> ^(TOK_SHOW_CREATEDATABASE $db_name)
+         (KW_DATABASE|KW_SCHEMA) => (KW_DATABASE|KW_SCHEMA) db_name=databaseName -> ^(TOK_SHOW_CREATEDATABASE $db_name)
|
KW_TABLE tabName=tableName -> ^(TOK_SHOW_CREATETABLE $tabName)
)
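The DESCRIBE and SHOW CREATE paths take the same qualified form, e.g. (hypothetical names):

    DESCRIBE DATABASE EXTENDED dev_cat.sales;
    SHOW CREATE DATABASE dev_cat.sales;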
4 changes: 2 additions & 2 deletions parser/src/java/org/apache/hadoop/hive/ql/parse/LockParser.g
@@ -61,7 +61,7 @@ lockStatement
lockDatabase
@init { gParent.pushMsg("lock database statement", state); }
@after { gParent.popMsg(state); }
-     : KW_LOCK (KW_DATABASE|KW_SCHEMA) (dbName=identifier) lockMode -> ^(TOK_LOCKDB $dbName lockMode)
+     : KW_LOCK (KW_DATABASE|KW_SCHEMA) (dbName=databaseName) lockMode -> ^(TOK_LOCKDB $dbName lockMode)
;

lockMode
@@ -79,5 +79,5 @@ unlockStatement
unlockDatabase
@init { gParent.pushMsg("unlock database statement", state); }
@after { gParent.popMsg(state); }
-     : KW_UNLOCK (KW_DATABASE|KW_SCHEMA) (dbName=identifier) -> ^(TOK_UNLOCKDB $dbName)
+     : KW_UNLOCK (KW_DATABASE|KW_SCHEMA) (dbName=databaseName) -> ^(TOK_UNLOCKDB $dbName)
;
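For completeness, the lock path with a qualified name (hypothetical names; SHARED and EXCLUSIVE are the existing lock modes):

    LOCK DATABASE dev_cat.sales SHARED;
    UNLOCK DATABASE dev_cat.sales;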
SwitchCatalogAnalyzer.java (new file)
@@ -0,0 +1,53 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.apache.hadoop.hive.ql.ddl.catalog.use;

import org.apache.hadoop.hive.metastore.api.Catalog;
import org.apache.hadoop.hive.ql.QueryState;
import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory;
import org.apache.hadoop.hive.ql.ddl.DDLWork;
import org.apache.hadoop.hive.ql.exec.TaskFactory;
import org.apache.hadoop.hive.ql.hooks.ReadEntity;
import org.apache.hadoop.hive.ql.parse.ASTNode;
import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
import org.apache.hadoop.hive.ql.parse.HiveParser;
import org.apache.hadoop.hive.ql.parse.SemanticException;

/**
* Analyzer for catalog switching commands.
*/
@DDLSemanticAnalyzerFactory.DDLType(types = HiveParser.TOK_SWITCHCATALOG)
public class SwitchCatalogAnalyzer extends BaseSemanticAnalyzer {
public SwitchCatalogAnalyzer(QueryState queryState) throws SemanticException {
super(queryState);
}

@Override
public void analyzeInternal(ASTNode root) throws SemanticException {
    // TOK_SWITCHCATALOG carries a single child: the catalog identifier.
    String catalogName = unescapeIdentifier(root.getChild(0).getText());

    // Record the catalog as a no-lock read entity so it appears in the query inputs.
    Catalog catalog = getCatalog(catalogName);
    ReadEntity readEntity = new ReadEntity(catalog);
    readEntity.noLockNeeded();
    inputs.add(readEntity);

SwitchCatalogDesc desc = new SwitchCatalogDesc(catalogName);
rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
}
}
SwitchCatalogDesc.java (new file)
@@ -0,0 +1,43 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.apache.hadoop.hive.ql.ddl.catalog.use;

import org.apache.hadoop.hive.ql.ddl.DDLDesc;
import org.apache.hadoop.hive.ql.plan.Explain;

import java.io.Serializable;

/**
* DDL task description for SET CATALOG commands.
*/
@Explain(displayName = "Switch Catalog", explainLevels = { Explain.Level.USER, Explain.Level.DEFAULT, Explain.Level.EXTENDED })
public class SwitchCatalogDesc implements DDLDesc, Serializable {
private static final long serialVersionUID = 1L;

private final String catalogName;

  public SwitchCatalogDesc(String catalogName) {
    this.catalogName = catalogName;
  }

@Explain(displayName = "name", explainLevels = { Explain.Level.USER, Explain.Level.DEFAULT, Explain.Level.EXTENDED })
public String getCatalogName() {
return catalogName;
}
}
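Given the @Explain annotations, an EXPLAIN of the command should surface the descriptor roughly as sketched below (the exact rendering depends on the explain formatter and is an assumption here):

    EXPLAIN SET CATALOG dev_cat;
    -- expected plan fragment (sketch):
    --   Switch Catalog
    --     name: dev_cat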
SwitchCatalogOperation.java (new file)
@@ -0,0 +1,50 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.apache.hadoop.hive.ql.ddl.catalog.use;

import java.util.Map;

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.api.Catalog;
import org.apache.hadoop.hive.metastore.api.Database;
import org.apache.hadoop.hive.ql.ErrorMsg;
import org.apache.hadoop.hive.ql.ddl.DDLOperation;
import org.apache.hadoop.hive.ql.ddl.DDLOperationContext;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.session.SessionState;

/**
* Operation process of switching to another catalog.
*/
public class SwitchCatalogOperation extends DDLOperation<SwitchCatalogDesc> {
public SwitchCatalogOperation(DDLOperationContext context, SwitchCatalogDesc desc) {
super(context, desc);
}

@Override
public int execute() throws HiveException {
    String catalogName = desc.getCatalogName();
    // Fail fast if the target catalog is unknown to the metastore.
    if (context.getDb().getCatalog(catalogName) == null) {
      throw new HiveException(ErrorMsg.CATALOG_NOT_EXISTS, catalogName);
    }

SessionState.get().setCurrentCatalog(catalogName);
return 0;
}
}
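Since the existence check runs at execution time, switching to an unknown catalog should fail with CATALOG_NOT_EXISTS, e.g. (hypothetical name):

    SET CATALOG no_such_catalog;
    -- expected: error raised from ErrorMsg.CATALOG_NOT_EXISTS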
AbstractAlterDatabaseAnalyzer.java
@@ -35,7 +35,7 @@ public AbstractAlterDatabaseAnalyzer(QueryState queryState) throws SemanticException {
}

protected void addAlterDatabaseDesc(AbstractAlterDatabaseDesc alterDesc) throws SemanticException {
-     Database database = getDatabase(alterDesc.getDatabaseName());
+     Database database = getDatabase(alterDesc.getCatalogName(), alterDesc.getDatabaseName(), true);
outputs.add(new WriteEntity(database, WriteEntity.WriteType.DDL_NO_LOCK));
rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), alterDesc)));
}
AbstractAlterDatabaseDesc.java
@@ -31,14 +31,21 @@
public abstract class AbstractAlterDatabaseDesc implements DDLDesc, Serializable {
private static final long serialVersionUID = 1L;

+   private final String catalogName;
private final String databaseName;
private final ReplicationSpec replicationSpec;

-   public AbstractAlterDatabaseDesc(String databaseName, ReplicationSpec replicationSpec) {
+   public AbstractAlterDatabaseDesc(String catalogName, String databaseName, ReplicationSpec replicationSpec) {
+     this.catalogName = catalogName;
this.databaseName = databaseName;
this.replicationSpec = replicationSpec;
}

@Explain(displayName="catalogName", explainLevels = {Level.USER, Level.DEFAULT, Level.EXTENDED })
public String getCatalogName() {
return catalogName;
}

@Explain(displayName="name", explainLevels = {Level.USER, Level.DEFAULT, Level.EXTENDED })
public String getDatabaseName() {
return databaseName;
AbstractAlterDatabaseOperation.java
@@ -25,6 +25,7 @@
import org.apache.hadoop.hive.ql.ddl.DDLOperation;
import org.apache.hadoop.hive.ql.ddl.DDLOperationContext;
import org.apache.hadoop.hive.ql.metadata.HiveException;
+ import org.apache.hadoop.hive.ql.parse.SemanticException;

/**
* Operation process of altering a database.
@@ -36,8 +37,9 @@ public AbstractAlterDatabaseOperation(DDLOperationContext context, T desc) {

@Override
public int execute() throws HiveException {
+     String catName = desc.getCatalogName();
String dbName = desc.getDatabaseName();
-     Database database = context.getDb().getDatabase(dbName);
+     Database database = context.getDb().getDatabase(catName, dbName);
if (database == null) {
throw new HiveException(ErrorMsg.DATABASE_NOT_EXISTS, dbName);
}