FDG-3219: Changing Artifactory IP #19

Open · wants to merge 5 commits into fdp-master

This PR replaces the hard-coded Artifactory IP 10.85.59.116 with the DNS name artifactory.fkinternal.com in the Maven repository URLs of pom.xml and storage-api/pom.xml, and extends Driver.doAuthorization to register nested struct columns with ColumnAccessInfo before V2 authorization.
pom.xml — 8 changes: 4 additions & 4 deletions
@@ -210,12 +210,12 @@
     <snapshotRepository>
       <id>fk-art-snapshot</id>
       <name>libs-snapshot</name>
-      <url>http://10.85.59.116/artifactory/v1.0/artifacts/libs-snapshots-local</url>
+      <url>http://artifactory.fkinternal.com/artifactory/v1.0/artifacts/libs-snapshots-local</url>
     </snapshotRepository>
     <repository>
       <id>fk-art-release</id>
       <name>libs-rel</name>
-      <url>http://10.85.59.116/artifactory/v1.0/artifacts/libs-release-local</url>
+      <url>http://artifactory.fkinternal.com/artifactory/v1.0/artifacts/libs-release-local</url>
     </repository>
   </distributionManagement>

@@ -224,12 +224,12 @@
     <repository>
       <id>fk-art-snapshot</id>
       <name>Flipkart-Artifactory</name>
-      <url>http://10.85.59.116/artifactory/v1.0/artifacts/libs-snapshots-local</url>
+      <url>http://artifactory.fkinternal.com/artifactory/v1.0/artifacts/libs-snapshots-local</url>
     </repository>
     <repository>
       <id>fk-art-release</id>
       <name>Flipkart-Artifactory</name>
-      <url>http://10.85.59.116/artifactory/v1.0/artifacts/libs-release-local</url>
+      <url>http://artifactory.fkinternal.com/artifactory/v1.0/artifacts/libs-release-local</url>
     </repository>
     <repository>
       <id>datanucleus</id>
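Note that the same Artifactory base URL is now duplicated across four <url> entries in two POMs. A minimal sketch of one way to centralize it, assuming a Maven property is acceptable here; the property name artifactory.host is illustrative and not part of this PR:

    <!-- Sketch only: hoist the Artifactory host into a property so the
         next host migration is a one-line change. -->
    <properties>
      <artifactory.host>artifactory.fkinternal.com</artifactory.host>
    </properties>

    <distributionManagement>
      <snapshotRepository>
        <id>fk-art-snapshot</id>
        <name>libs-snapshot</name>
        <url>http://${artifactory.host}/artifactory/v1.0/artifacts/libs-snapshots-local</url>
      </snapshotRepository>
      <repository>
        <id>fk-art-release</id>
        <name>libs-rel</name>
        <url>http://${artifactory.host}/artifactory/v1.0/artifacts/libs-release-local</url>
      </repository>
    </distributionManagement>

Maven interpolates ${artifactory.host} in distributionManagement and repository URLs, so future host changes would touch a single line per POM.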
ql/src/java/org/apache/hadoop/hive/ql/Driver.java — 59 changes: 58 additions & 1 deletion
@@ -25,8 +25,10 @@
 import java.io.Serializable;
 import java.net.InetAddress;
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.HashMap;
 import java.util.HashSet;
+import java.util.Iterator;
 import java.util.LinkedHashMap;
 import java.util.LinkedHashSet;
 import java.util.LinkedList;
@@ -37,6 +39,8 @@
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.locks.ReentrantLock;
 
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
 import com.google.common.collect.Iterables;
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.fs.FSDataInputStream;
@@ -120,13 +124,21 @@
 import org.apache.hadoop.hive.ql.session.OperationLog.LoggingLevel;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;
+import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.ByteStream;
+import org.apache.hadoop.hive.serde2.SerDeUtils;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
 import org.apache.hadoop.hive.shims.Utils;
 import org.apache.hadoop.mapred.ClusterStatus;
 import org.apache.hadoop.mapred.JobClient;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapreduce.MRJobConfig;
 import org.apache.hive.common.util.ShutdownHookManager;
+import org.json.JSONObject;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

@@ -727,7 +739,8 @@ private String getExplainOutput(BaseSemanticAnalyzer sem, QueryPlan plan,
    * @throws AuthorizationException
    */
  public static void doAuthorization(HiveOperation op, BaseSemanticAnalyzer sem, String command)
-      throws HiveException, AuthorizationException {
+      throws HiveException, AuthorizationException, IOException
+  {
     SessionState ss = SessionState.get();
     Hive db = sem.getDb();
 
@@ -756,11 +769,17 @@ public static void doAuthorization(HiveOperation op, BaseSemanticAnalyzer sem, S
     if (ss.isAuthorizationModeV2()) {
       // get mapping of tables to columns used
       ColumnAccessInfo colAccessInfo = sem.getColumnAccessInfo();
+
+      // add Struct type columns
+      addStructColumns(sem, colAccessInfo);
+
       // colAccessInfo is set only in case of SemanticAnalyzer
       Map<String, List<String>> selectTab2Cols = colAccessInfo != null ? colAccessInfo
           .getTableToColumnAccessMap() : null;
+      LOG.info("selectTab2Cols field Names : " + selectTab2Cols);
       Map<String, List<String>> updateTab2Cols = sem.getUpdateColumnAccessInfo() != null ?
           sem.getUpdateColumnAccessInfo().getTableToColumnAccessMap() : null;
+      LOG.info("updateTab2Cols field Names : " + updateTab2Cols);
       doAuthorizationV2(ss, op, inputs, outputs, command, selectTab2Cols, updateTab2Cols);
       return;
     }
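For context on the new addStructColumns call and the two LOG lines: ColumnAccessInfo keys tables as db@table, and the struct expansion adds dotted parent.field paths alongside the top-level columns the analyzer already recorded. A minimal sketch of the resulting map shape; the table and column names here are illustrative, not from this PR:

    import java.util.List;
    import java.util.Map;

    import org.apache.hadoop.hive.ql.parse.ColumnAccessInfo;

    public class ColumnAccessSketch {
      public static void main(String[] args) {
        ColumnAccessInfo colAccessInfo = new ColumnAccessInfo();
        colAccessInfo.add("default@users", "address");       // top-level column from the analyzer
        colAccessInfo.add("default@users", "address.city");  // added by the struct expansion
        colAccessInfo.add("default@users", "address.zip");   // added by the struct expansion
        Map<String, List<String>> selectTab2Cols = colAccessInfo.getTableToColumnAccessMap();
        // Expected to print something like {default@users=[address, address.city, address.zip]}
        System.out.println(selectTab2Cols);
      }
    }

The V2 authorizer then sees the nested fields as ordinary column names in selectTab2Cols.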
@@ -902,6 +921,44 @@ public static void doAuthorization(HiveOperation op, BaseSemanticAnalyzer sem, S
     }
   }
 
+  private static void addStructColumns(BaseSemanticAnalyzer sem, ColumnAccessInfo colAccessInfo) throws IOException
+  {
+    TableDesc tableDesc = sem.getFetchTask().getTblDesc();
+    String columnTypeProperty = tableDesc.getProperties().getProperty(serdeConstants.LIST_COLUMN_TYPES);
+    String columnNameProperty = tableDesc.getProperties().getProperty(serdeConstants.LIST_COLUMNS);
+    final String columnNameDelimiter = tableDesc.getProperties().containsKey(serdeConstants.COLUMN_NAME_DELIMITER)
+        ? tableDesc.getProperties().getProperty(serdeConstants.COLUMN_NAME_DELIMITER) : String.valueOf(SerDeUtils.COMMA);
+    List<String> columnNames;
+    if (columnNameProperty.length() == 0) {
+      columnNames = new ArrayList<>();
+    } else {
+      columnNames = Arrays.asList(columnNameProperty.split(columnNameDelimiter));
+    }
+    List<TypeInfo> columnTypes;
+    if (columnTypeProperty.length() == 0) {
+      columnTypes = new ArrayList<>();
+    } else {
+      columnTypes = TypeInfoUtils.getTypeInfosFromTypeString(columnTypeProperty);
+    }
+    StructTypeInfo rowTypeInfo = (StructTypeInfo) TypeInfoFactory.getStructTypeInfo(columnNames, columnTypes);
+    LOG.info("Following are the struct field Names : " + rowTypeInfo.getAllStructFieldNames());
+
+    int cnt = 0;
+    for (TypeInfo value : rowTypeInfo.getAllStructFieldTypeInfos()) {
+      if (value.getCategory().equals(ObjectInspector.Category.STRUCT)) {
+        JSONObject jsnobject = new JSONObject(value);
+        ObjectMapper objectMapper = new ObjectMapper();
+        JsonNode node = objectMapper.readTree(jsnobject.get("allStructFieldNames").toString());
+        Iterator<JsonNode> fieldNames = node.elements();
+        while (fieldNames.hasNext()) {
+          JsonNode fieldName = fieldNames.next();
+          colAccessInfo.add(tableDesc.getTableName().replace('.', '@'), rowTypeInfo.getAllStructFieldNames().get(cnt) + "." + fieldName.textValue());
+        }
+      }
+      cnt++;
+    }
+  }
+
   private static void getTablePartitionUsedColumns(HiveOperation op, BaseSemanticAnalyzer sem,
       Map<Table, List<String>> tab2Cols, Map<Partition, List<String>> part2Cols,
       Map<String, Boolean> tableUsePartLevelAuth) throws HiveException {
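addStructColumns recovers nested field names by serializing each struct TypeInfo through JSONObject and re-parsing the allStructFieldNames array with Jackson. A hedged alternative sketch, not what this PR ships: StructTypeInfo exposes the same names directly, which avoids the JSON round-trip and can also recurse into structs nested more than one level deep. The helper name addNestedColumns is illustrative:

    import java.util.List;

    import org.apache.hadoop.hive.ql.parse.ColumnAccessInfo;
    import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
    import org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo;
    import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;

    // Sketch: register "parent.child" column paths for struct-typed columns
    // by walking StructTypeInfo directly instead of a JSON round-trip.
    final class StructColumnWalker {
      static void addNestedColumns(ColumnAccessInfo acc, String table,
          String prefix, TypeInfo type) {
        if (type.getCategory() != ObjectInspector.Category.STRUCT) {
          return;
        }
        StructTypeInfo struct = (StructTypeInfo) type;
        List<String> names = struct.getAllStructFieldNames();
        List<TypeInfo> types = struct.getAllStructFieldTypeInfos();
        for (int i = 0; i < names.size(); i++) {
          String path = prefix + "." + names.get(i);
          acc.add(table, path);
          // Recurse so struct<struct<...>> fields are registered too.
          addNestedColumns(acc, table, path, types.get(i));
        }
      }
    }

Separately, sem.getFetchTask() can be null for statements that produce no fetch task, so a null check before calling addStructColumns would be a prudent guard regardless of which extraction approach is used.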
storage-api/pom.xml — 4 changes: 2 additions & 2 deletions
@@ -41,12 +41,12 @@
     <snapshotRepository>
       <id>fk-art-snapshot</id>
       <name>libs-snapshot</name>
-      <url>http://10.85.59.116/artifactory/v1.0/artifacts/libs-snapshots-local</url>
+      <url>http://artifactory.fkinternal.com/artifactory/v1.0/artifacts/libs-snapshots-local</url>
     </snapshotRepository>
     <repository>
       <id>fk-art-release</id>
       <name>libs-rel</name>
-      <url>http://10.85.59.116/artifactory/v1.0/artifacts/libs-release-local</url>
+      <url>http://artifactory.fkinternal.com/artifactory/v1.0/artifacts/libs-release-local</url>
     </repository>
   </distributionManagement>