diff --git a/.gitignore b/.gitignore
index 91ad75bb4..08edd2660 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,4 +1,5 @@
*.class
+classes/
target/
.classpath
.project
@@ -16,3 +17,7 @@ sentry-core/sentry-core-common/src/gen
*.ear
test-output/
maven-repo/
+*.orig
+*.rej
+.DS_Store
+**/thirdparty/*
diff --git a/LICENSE.txt b/LICENSE.txt
index d64569567..c29b59dda 100644
--- a/LICENSE.txt
+++ b/LICENSE.txt
@@ -200,3 +200,70 @@
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
+
+================================================================================
+
+The Apache Sentry (incubating) distribution includes the following sources/binaries.
+The use of these sources/binaries is subject to the terms and conditions of
+their respective licenses.
+
+For sentry-tests/sentry-tests-solr/src/test/resources/solr/collection1/conf/velocity/jquery.autocomplete.js:
+
+The MIT License (MIT)
+
+Copyright (c) 2007 Dylan Verheul, Dan G. Switzer, Anjesh Tuladhar, Jörn Zaefferer
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+For sentry-tests/sentry-tests-solr/src/test/resources/solr/collection1/conf/lang:
+stopwords_ar.txt
+stopwords_bg.txt
+stopwords_fa.txt
+stopwords_hi.txt
+stopwords_ro.txt
+
+BSD License
+
+Copyright (c) 2005, Jacques Savoy.
+
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
+
+1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
+
+2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+For sentry-tests/sentry-tests-solr/src/test/resources/solr/collection1/conf/lang:
+stopwords_da.txt
+stopwords_de.txt
+stopwords_es.txt
+stopwords_fi.txt
+stopwords_fr.txt
+stopwords_hu.txt
+stopwords_it.txt
+stopwords_nl.txt
+stopwords_no.txt
+stopwords_pt.txt
+stopwords_ru.txt
+stopwords_sv.txt
+
+BSD License
+
+Copyright (c) 2001, Dr Martin Porter, and (for the Java developments) Copyright (c) 2002, Richard Boulton.
+
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
+
+1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
+
+2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/README.md b/README.md
index 3aabc60a9..24701f53d 100644
--- a/README.md
+++ b/README.md
@@ -10,15 +10,16 @@ Bug and Issues tracker
* https://issues.apache.org/jira/browse/SENTRY
+Wiki
+
+* https://cwiki.apache.org/confluence/display/SENTRY/Home
+
Building Sentry
Building Sentry requires the following tools:
-* Apache Maven 3.0+
-* Java JDK 1.6+
-
-Running hive end to end tests requires:
-* wget
+* Apache Maven 3.2.5+ (older Maven versions may hit issues resolving the pentaho library)
+* Java JDK7 (JDK8 builds fail with "can't access TBase" errors)
To compile Sentry, run:
@@ -26,7 +27,7 @@ mvn install -DskipTests
To run Sentry tests, run:
-mvn test -Pdownload-hadoop
+mvn test
To build a distribution, run:
diff --git a/bin/sentryShell b/bin/sentryShell
new file mode 100755
index 000000000..d21a65f7a
--- /dev/null
+++ b/bin/sentryShell
@@ -0,0 +1,71 @@
+#!/usr/bin/env bash
+
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+bin=`dirname "$0"`
+myhome=`cd "$bin/.."; pwd`
+
+if [[ -z $SENTRY_HOME ]] ; then
+ export SENTRY_HOME=$myhome
+fi
+
+# check for hadoop in the path
+HADOOP_IN_PATH=`which hadoop 2>/dev/null`
+if [ -f ${HADOOP_IN_PATH} ]; then
+ HADOOP_DIR=`dirname "$HADOOP_IN_PATH"`/..
+fi
+# HADOOP_HOME env variable overrides hadoop in the path
+HADOOP_HOME=${HADOOP_HOME:-${HADOOP_PREFIX:-$HADOOP_DIR}}
+if [ "$HADOOP_HOME" == "" ]; then
+ echo "Cannot find hadoop installation: \$HADOOP_HOME or \$HADOOP_PREFIX must be set or hadoop must be in the path";
+ exit 4;
+fi
+
+HADOOP=$HADOOP_HOME/bin/hadoop
+if [ ! -f ${HADOOP} ]; then
+ echo "Cannot find hadoop installation: \$HADOOP_HOME or \$HADOOP_PREFIX must be set or hadoop must be in the path";
+ exit 4;
+fi
+
+export _CMD_JAR=${SENTRY_SHELL_JAR:-sentry-provider-db-*.jar}
+for f in ${SENTRY_HOME}/lib/*.jar; do
+ HADOOP_CLASSPATH=${HADOOP_CLASSPATH}:${f}
+done
+export HADOOP_CLASSPATH
+
+for f in ${SENTRY_HOME}/lib/server/*.jar; do
+ HADOOP_CLASSPATH=${HADOOP_CLASSPATH}:${f}
+done
+for f in ${SENTRY_HOME}/lib/plugins/*.jar; do
+ HADOOP_CLASSPATH=${HADOOP_CLASSPATH}:${f}
+done
+
+args=()
+# Get the type argument for the command and check whether the shell is used for the
+# hive model or the generic model.
+# TODO: currently only hive is supported; generic model support still needs to be added.
+while [ $# -gt 0 ]; do # Until you run out of parameters . . .
+ if [[ "$1" = "-t" || "$1" = "--type" ]]; then
+ # currently, only support the hive model
+ if ! [[ $2 =~ ^[Hh][Ii][Vv][Ee]$ ]]; then
+ echo "The type $2 is not supported!"
+ exit 1
+ fi
+ fi
+ args+=("$1")
+ shift
+done
+
+exec $HADOOP jar ${SENTRY_HOME}/lib/${_CMD_JAR} org.apache.sentry.provider.db.tools.SentryShellHive "${args[@]}"
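For orientation (not part of the patch): the wrapper only inspects the -t/--type flag, with hive as the only accepted value, and forwards everything else untouched to org.apache.sentry.provider.db.tools.SentryShellHive. A typical invocation would therefore look like `SENTRY_HOME=/path/to/sentry-dist bin/sentryShell -t hive`, followed by whatever options SentryShellHive itself understands.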
diff --git a/build-tools/sentry-pmd-ruleset.xml b/build-tools/sentry-pmd-ruleset.xml
new file mode 100644
index 000000000..8a2644693
--- /dev/null
+++ b/build-tools/sentry-pmd-ruleset.xml
@@ -0,0 +1,46 @@
+<?xml version="1.0"?>
+<!--
+  Licensed to the Apache Software Foundation (ASF) under one or more
+  contributor license agreements.  See the NOTICE file distributed with
+  this work for additional information regarding copyright ownership.
+  The ASF licenses this file to You under the Apache License, Version 2.0
+  (the "License"); you may not use this file except in compliance with
+  the License.  You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+-->
+<ruleset name="sentry"
+         xmlns="http://pmd.sourceforge.net/ruleset/2.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://pmd.sourceforge.net/ruleset/2.0.0 http://pmd.sourceforge.net/ruleset_2_0_0.xsd">
+
+  <description>A PMD ruleset for Apache Sentry</description>
+
+</ruleset>
diff --git a/conf/sentry-site.xml.hive-client.example b/conf/sentry-site.xml.hive-client.example
index fd87c62ce..c9f1d0588 100644
--- a/conf/sentry-site.xml.hive-client.example
+++ b/conf/sentry-site.xml.hive-client.example
@@ -55,7 +55,7 @@
    <name>sentry.service.server.principal</name>
-    <value>hivemeta/centos64.cloudera.com@HS2.CLOUDERA.COM</value>
+    <value>sentry/centos64.example.com@EXAMPLE.COM</value>
  </property>
  <property>
    <name>sentry.metastore.service.users</name>
diff --git a/conf/sentry-site.xml.hive-client.template b/conf/sentry-site.xml.hive-client.template
index 0491de787..0e8a74ed9 100644
--- a/conf/sentry-site.xml.hive-client.template
+++ b/conf/sentry-site.xml.hive-client.template
@@ -86,7 +86,7 @@
    <name>sentry.hive.failure.hooks</name>
-    <description>Deprecated Name: hive.sentry.failure.hooks. Any failure hooks to be configured like navigator (i.e. com.cloudera.navigator.audit.hive.HiveSentryOnFailureHook)</description>
+    <description>Deprecated Name: hive.sentry.failure.hooks</description>
@@ -97,4 +97,4 @@
-</configuration>
\ No newline at end of file
+</configuration>
diff --git a/dev-support/smart-apply-patch.sh b/dev-support/smart-apply-patch.sh
old mode 100644
new mode 100755
diff --git a/dev-support/test-patch.py b/dev-support/test-patch.py
index 7e701c393..f9f79eadc 100644
--- a/dev-support/test-patch.py
+++ b/dev-support/test-patch.py
@@ -88,7 +88,7 @@ def jira_post_comment(result, defect, branch, username, password):
# hack (from hadoop) but REST api doesn't list attachments?
def jira_get_attachment(result, defect, username, password):
html = jira_get_defect_html(result, defect, username, password)
- pattern = "(/secure/attachment/[0-9]+/%s[0-9\.\-]*\.(patch|txt|patch\.txt))" % (re.escape(defect))
+ pattern = "(/secure/attachment/\d+/%s[\w\.\-]*\.(patch|txt|patch\.txt))" % (re.escape(defect))
matches = []
for match in re.findall(pattern, html, re.IGNORECASE):
matches += [ match[0] ]
@@ -282,6 +282,16 @@ def post_jira_comment_and_exit():
print "ERROR: No attachments found for %s" % (defect)
sys.exit(1)
result.attachment = attachment
+  # parse branch info from the attachment name
+  branchPattern = re.compile('/secure/attachment/\d+/%s(\.\d+)-(\S+)\.(patch|txt|patch\.txt)' % (re.escape(defect)))
+  branchInfo = re.search(branchPattern, attachment)
+  if branchInfo:
+    branch = branchInfo.group(2)
+    print "INFO: Branch info is detected from attachment name: " + branch
+  else:
+    branch = "master"
+    print "INFO: Branch info is not detected from attachment name, use branch: " + branch
patch_contents = jira_request(result, result.attachment, username, password, None, {}).read()
patch_file = "%s/%s.patch" % (output_dir, defect)
with open(patch_file, 'a') as fh:
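To illustrate the naming convention the new branch-detection pattern expects (attachment names here are hypothetical): an attachment named SENTRY-123.1-hive-authz2.patch would be detected as branch hive-authz2, while a plain SENTRY-123.1.patch does not match the pattern and the branch falls back to master.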
diff --git a/pom.xml b/pom.xml
index 2f9788062..d25c314e0 100644
--- a/pom.xml
+++ b/pom.xml
@@ -20,12 +20,12 @@ limitations under the License.
   <parent>
     <groupId>org.apache</groupId>
     <artifactId>apache</artifactId>
-    <version>13</version>
+    <version>17</version>
   </parent>
   <groupId>org.apache.sentry</groupId>
   <artifactId>sentry</artifactId>
-  <version>1.5.0-incubating-SNAPSHOT</version>
+  <version>1.7.0-incubating-SNAPSHOT</version>
   <description>Sentry component</description>
   <name>Sentry</name>
   <packaging>pom</packaging>
@@ -50,49 +50,54 @@ limitations under the License.
UTF-8
- 1.6
- 1.6
+ ${basedir}/build-tools
+ 1.7
+ 1.71.0b3
- 1.7
- 2.9
- 1.3.1
+ 0.7.1.RELEASE1.8
+ 2.2
+ 1.2
+ 2.22.61.2
- 0.7.1.RELEASE
- 3.3.0-release
+ 2.7.13.2.63.2.123.2.12
- 3.0.1
+ 4.0.110.10.2.0
- 1.2
- 1.1.0
- 2.5.0-cdh5.2.0-SNAPSHOT
+ 3.01.4.111.0.2
- 4.9
- 0.9.2
- 0.9.2
- 2.6.0
+ 2.6.0
+ 1.3
+ 1.3.0-SNAPSHOT
+ 1.1.0
+ 1.8.8
+ 3.0.1
+ 7.6.16.v20140903
+ 2.54.10
+ 0.9.2
+ 0.9.21.2.16
+ 1.7
+ 2.9
+ 1.3.1
+ 3.0.11.8.5
- 1.2.1
+ 1.2
+ 0.12.0
+ 1.2.31.6.14.10.2
- 3.4.5
- 0.12.0
- 1.8.8
- 3.1.0
- 7.6.16.v20140903
- 2.6.0
- 2.5
+ 1.99.6${maven.test.classpath}
- 3.0
- 1.2
- 2.2
+ 3.4.5
+ 0.9.0.0
+ 1.3.2
@@ -156,6 +161,16 @@ limitations under the License.
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-common</artifactId>
        <version>${hadoop.version}</version>
+        <exclusions>
+          <exclusion>
+            <artifactId>curator-client</artifactId>
+            <groupId>org.apache.curator</groupId>
+          </exclusion>
+          <exclusion>
+            <artifactId>curator-framework</artifactId>
+            <groupId>org.apache.curator</groupId>
+          </exclusion>
+        </exclusions>
      </dependency>
      <dependency>
        <groupId>org.apache.hadoop</groupId>
@@ -172,6 +187,12 @@ limitations under the License.
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-minicluster</artifactId>
        <version>${hadoop.version}</version>
+        <exclusions>
+          <exclusion>
+            <artifactId>curator-client</artifactId>
+            <groupId>org.apache.curator</groupId>
+          </exclusion>
+        </exclusions>
      </dependency>
      <dependency>
        <groupId>org.apache.hadoop</groupId>
@@ -232,6 +253,11 @@ limitations under the License.
+      <dependency>
+        <groupId>org.apache.sentry</groupId>
+        <artifactId>solr-sentry-core</artifactId>
+        <version>${project.version}</version>
+      </dependency>
      <dependency>
        <groupId>org.apache.sentry</groupId>
        <artifactId>solr-sentry-handlers</artifactId>
@@ -312,6 +338,11 @@ limitations under the License.
        <artifactId>sentry-core-model-sqoop</artifactId>
        <version>${project.version}</version>
      </dependency>
+      <dependency>
+        <groupId>org.apache.sentry</groupId>
+        <artifactId>sentry-core-model-kafka</artifactId>
+        <version>${project.version}</version>
+      </dependency>
      <dependency>
        <groupId>org.apache.hive</groupId>
        <artifactId>hive-jdbc</artifactId>
@@ -331,6 +362,12 @@ limitations under the License.
        <groupId>org.apache.hive</groupId>
        <artifactId>hive-exec</artifactId>
        <version>${hive.version}</version>
+        <exclusions>
+          <exclusion>
+            <artifactId>apache-curator</artifactId>
+            <groupId>org.apache.curator</groupId>
+          </exclusion>
+        </exclusions>
      </dependency>
      <dependency>
        <groupId>org.apache.hive</groupId>
@@ -357,6 +394,11 @@ limitations under the License.
        <artifactId>sentry-binding-hive</artifactId>
        <version>${project.version}</version>
      </dependency>
+      <dependency>
+        <groupId>org.apache.sentry</groupId>
+        <artifactId>sentry-binding-hive-v2</artifactId>
+        <version>${project.version}</version>
+      </dependency>
      <dependency>
        <groupId>org.apache.sentry</groupId>
        <artifactId>sentry-binding-solr</artifactId>
@@ -368,6 +410,16 @@ limitations under the License.
        <version>${project.version}</version>
        <type>test-jar</type>
      </dependency>
+      <dependency>
+        <groupId>org.apache.sentry</groupId>
+        <artifactId>sentry-binding-sqoop</artifactId>
+        <version>${project.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.apache.sentry</groupId>
+        <artifactId>sentry-binding-kafka</artifactId>
+        <version>${project.version}</version>
+      </dependency>
      <dependency>
        <groupId>org.apache.sentry</groupId>
        <artifactId>sentry-provider-common</artifactId>
@@ -408,6 +460,12 @@ limitations under the License.
        <artifactId>sentry-provider-db</artifactId>
        <version>${project.version}</version>
      </dependency>
+      <dependency>
+        <groupId>org.apache.sentry</groupId>
+        <artifactId>sentry-provider-db</artifactId>
+        <version>${project.version}</version>
+        <type>test-jar</type>
+      </dependency>
      <dependency>
        <groupId>org.apache.sentry</groupId>
        <artifactId>sentry-policy-common</artifactId>
@@ -428,6 +486,16 @@ limitations under the License.
        <artifactId>sentry-policy-search</artifactId>
        <version>${project.version}</version>
      </dependency>
+      <dependency>
+        <groupId>org.apache.sentry</groupId>
+        <artifactId>sentry-policy-sqoop</artifactId>
+        <version>${project.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.apache.sentry</groupId>
+        <artifactId>sentry-policy-kafka</artifactId>
+        <version>${project.version}</version>
+      </dependency>
      <dependency>
        <groupId>org.apache.sentry</groupId>
        <artifactId>sentry-dist</artifactId>
@@ -514,6 +582,41 @@ limitations under the License.
        <artifactId>cglib-nodep</artifactId>
        <version>${cglib.version}</version>
      </dependency>
+      <dependency>
+        <groupId>org.apache.commons</groupId>
+        <artifactId>commons-pool2</artifactId>
+        <version>${commons-pool2.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.apache.sqoop</groupId>
+        <artifactId>sqoop-common</artifactId>
+        <version>${sqoop.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.apache.sqoop</groupId>
+        <artifactId>sqoop-security</artifactId>
+        <version>${sqoop.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.apache.sqoop</groupId>
+        <artifactId>sqoop-server</artifactId>
+        <version>${sqoop.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.apache.sqoop</groupId>
+        <artifactId>test</artifactId>
+        <version>${sqoop.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.hamcrest</groupId>
+        <artifactId>hamcrest-all</artifactId>
+        <version>${hamcrest.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.apache.kafka</groupId>
+        <artifactId>kafka_2.11</artifactId>
+        <version>${kafka.version}</version>
+      </dependency>
@@ -540,6 +643,34 @@ limitations under the License.
        <groupId>org.apache.rat</groupId>
        <artifactId>apache-rat-plugin</artifactId>
      </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-pmd-plugin</artifactId>
+        <version>3.5</version>
+        <configuration>
+          <rulesets>
+            <ruleset>${buildtools.dir}/sentry-pmd-ruleset.xml</ruleset>
+          </rulesets>
+          <sourceEncoding>UTF-8</sourceEncoding>
+          <includeTests>true</includeTests>
+          <skipEmptyReport>false</skipEmptyReport>
+          <failOnViolation>true</failOnViolation>
+          <verbose>true</verbose>
+          <targetJdk>${targetJdk}</targetJdk>
+          <excludeRoots>
+            <excludeRoot>${basedir}/src/main/generated</excludeRoot>
+          </excludeRoots>
+        </configuration>
+        <executions>
+          <execution>
+            <id>validate</id>
+            <phase>validate</phase>
+            <goals>
+              <goal>check</goal>
+            </goals>
+          </execution>
+        </executions>
+      </plugin>
      <plugin>
        <groupId>org.apache.maven.plugins</groupId>
        <artifactId>maven-eclipse-plugin</artifactId>
@@ -605,7 +736,7 @@ limitations under the License.
        <groupId>org.apache.felix</groupId>
        <artifactId>maven-bundle-plugin</artifactId>
-        <version>2.4.0</version>
+        <version>2.5.4</version>
      </plugin>
      <plugin>
        <groupId>org.apache.maven.plugins</groupId>
@@ -628,7 +759,7 @@ limitations under the License.
        <groupId>org.apache.maven.plugins</groupId>
        <artifactId>maven-compiler-plugin</artifactId>
-        <version>2.5.1</version>
+        <version>3.1</version>
        <configuration>
          <source>${maven.compile.source}</source>
          <target>${maven.compile.target}</target>
@@ -647,15 +778,15 @@ limitations under the License.
        <groupId>org.apache.maven.plugins</groupId>
        <artifactId>maven-surefire-plugin</artifactId>
-        <version>2.16</version>
+        <version>2.18</version>
        <configuration>
-          <forkMode>always</forkMode>
+          <forkCount>3</forkCount>
          <argLine>-Xmx1500m -Dhive.log.dir=./target/</argLine>
          <forkedProcessTimeoutInSeconds>900</forkedProcessTimeoutInSeconds>
          <redirectTestOutputToFile>true</redirectTestOutputToFile>
-          <argLine>-Xms512m -Xmx2g</argLine>
+          <argLine>-Xms512m -Xmx2g -XX:MaxPermSize=256m</argLine>
          <redirectTestOutputToFile>true</redirectTestOutputToFile>
@@ -710,6 +841,11 @@ limitations under the License.
              <exclude>**/upgrade.*</exclude>
              <exclude>**/datanucleus.log</exclude>
              <exclude>**/metastore_db/</exclude>
+              <exclude>**/*.rej</exclude>
+              <exclude>**/thirdparty/</exclude>
+              <exclude>**/*.crt</exclude>
+              <exclude>**/*.jks</exclude>
@@ -736,26 +872,39 @@ limitations under the License.
+    <profile>
+      <id>nochecks</id>
+      <properties>
+        <pmd.skip>true</pmd.skip>
+      </properties>
+    </profile>
+    <profile>
+      <id>activate-buildtools-in-module</id>
+      <activation>
+        <file>
+          <exists>${basedir}/../build-tools/sentry-pmd-ruleset.xml</exists>
+        </file>
+      </activation>
+      <properties>
+        <buildtools.dir>${basedir}/../build-tools</buildtools.dir>
+      </properties>
+    </profile>
+    <profile>
+      <id>activate-buildtools-in-submodule</id>
+      <activation>
+        <file>
+          <exists>${basedir}/../../build-tools/sentry-pmd-ruleset.xml</exists>
+        </file>
+      </activation>
+      <properties>
+        <buildtools.dir>${basedir}/../../build-tools</buildtools.dir>
+      </properties>
+    </profile>
+  </profiles>
   <repositories>
-    <repository>
-      <id>cdh.repo</id>
-      <url>https://repository.cloudera.com/artifactory/cloudera-repos</url>
-      <name>Cloudera Repositories</name>
-      <snapshots>
-        <enabled>false</enabled>
-      </snapshots>
-    </repository>
-    <repository>
-      <id>cdh.snapshots.repo</id>
-      <url>https://repository.cloudera.com/artifactory/libs-snapshot-local</url>
-      <name>Cloudera Snapshots Repository</name>
-      <snapshots>
-        <enabled>true</enabled>
-      </snapshots>
-      <releases>
-        <enabled>false</enabled>
-      </releases>
-    </repository>
     <repository>
       <id>apache</id>
       <url>https://repository.apache.org/content/repositories/</url>
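A note on the new profiles (usage assumed from standard Maven semantics, not stated in the patch): the nochecks profile should be activated as `mvn install -Pnochecks` to skip the PMD check, and the two activate-buildtools profiles exist so that ${buildtools.dir} resolves to the shared top-level build-tools directory whether a module sits one or two levels below the project root.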
diff --git a/sentry-binding/pom.xml b/sentry-binding/pom.xml
index 7428aa5e6..9e4999bef 100644
--- a/sentry-binding/pom.xml
+++ b/sentry-binding/pom.xml
@@ -22,7 +22,7 @@ limitations under the License.
    <groupId>org.apache.sentry</groupId>
    <artifactId>sentry</artifactId>
-    <version>1.5.0-incubating-SNAPSHOT</version>
+    <version>1.7.0-incubating-SNAPSHOT</version>
  </parent>
  <artifactId>sentry-binding</artifactId>
@@ -31,7 +31,20 @@ limitations under the License.
     <module>sentry-binding-hive</module>
+    <module>sentry-binding-kafka</module>
     <module>sentry-binding-solr</module>
+    <module>sentry-binding-sqoop</module>
   </modules>
+  <profiles>
+    <profile>
+      <id>hive-authz2</id>
+      <activation>
+        <activeByDefault>false</activeByDefault>
+      </activation>
+      <modules>
+        <module>sentry-binding-hive-v2</module>
+      </modules>
+    </profile>
+  </profiles>
diff --git a/sentry-binding/sentry-binding-hive-v2/pom.xml b/sentry-binding/sentry-binding-hive-v2/pom.xml
new file mode 100644
index 000000000..ef6048cef
--- /dev/null
+++ b/sentry-binding/sentry-binding-hive-v2/pom.xml
@@ -0,0 +1,158 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+Licensed to the Apache Software Foundation (ASF) under one or more
+contributor license agreements.  See the NOTICE file distributed with
+this work for additional information regarding copyright ownership.
+The ASF licenses this file to You under the Apache License, Version 2.0
+(the "License"); you may not use this file except in compliance with
+the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+
+  <parent>
+    <groupId>org.apache.sentry</groupId>
+    <artifactId>sentry-binding</artifactId>
+    <version>1.7.0-incubating-SNAPSHOT</version>
+  </parent>
+
+  <artifactId>sentry-binding-hive-v2</artifactId>
+  <name>Sentry Binding v2 for Hive</name>
+
+  <dependencies>
+    <dependency>
+      <groupId>org.apache.sentry</groupId>
+      <artifactId>sentry-binding-hive</artifactId>
+      <exclusions>
+        <exclusion>
+          <groupId>org.apache.httpcomponents</groupId>
+          <artifactId>httpclient</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.apache.httpcomponents</groupId>
+          <artifactId>httpcore</artifactId>
+        </exclusion>
+      </exclusions>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.thrift</groupId>
+      <artifactId>libthrift</artifactId>
+      <exclusions>
+        <exclusion>
+          <groupId>org.apache.httpcomponents</groupId>
+          <artifactId>httpclient</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.apache.httpcomponents</groupId>
+          <artifactId>httpcore</artifactId>
+        </exclusion>
+      </exclusions>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.derby</groupId>
+      <artifactId>derby</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>junit</groupId>
+      <artifactId>junit</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-exec</artifactId>
+      <version>${hive-v2.version}</version>
+      <scope>provided</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-service</artifactId>
+      <version>${hive-v2.version}</version>
+      <scope>provided</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-metastore</artifactId>
+      <version>${hive-v2.version}</version>
+      <scope>provided</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-shims</artifactId>
+      <version>${hive-v2.version}</version>
+      <scope>provided</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-serde</artifactId>
+      <version>${hive-v2.version}</version>
+      <scope>provided</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-common</artifactId>
+      <version>${hive-v2.version}</version>
+      <scope>provided</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.sentry</groupId>
+      <artifactId>sentry-core-common</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.sentry</groupId>
+      <artifactId>sentry-core-model-db</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.sentry</groupId>
+      <artifactId>sentry-provider-common</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.sentry</groupId>
+      <artifactId>sentry-provider-db</artifactId>
+      <exclusions>
+        <exclusion>
+          <groupId>org.apache.hive</groupId>
+          <artifactId>hive-beeline</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.apache.hive</groupId>
+          <artifactId>hive-metastore</artifactId>
+        </exclusion>
+      </exclusions>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.sentry</groupId>
+      <artifactId>sentry-provider-file</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.sentry</groupId>
+      <artifactId>sentry-policy-db</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-common</artifactId>
+      <scope>provided</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-client</artifactId>
+      <version>${hadoop.version}</version>
+      <scope>provided</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.mockito</groupId>
+      <artifactId>mockito-all</artifactId>
+      <scope>test</scope>
+    </dependency>
+  </dependencies>
+</project>
diff --git a/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/HiveAuthzBindingHookV2.java b/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/HiveAuthzBindingHookV2.java
new file mode 100644
index 000000000..67cf2663a
--- /dev/null
+++ b/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/HiveAuthzBindingHookV2.java
@@ -0,0 +1,94 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.sentry.binding.hive.v2;
+
+import java.io.Serializable;
+import java.util.List;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.exec.DDLTask;
+import org.apache.hadoop.hive.ql.exec.SentryFilterDDLTask;
+import org.apache.hadoop.hive.ql.exec.Task;
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.apache.hadoop.hive.ql.parse.AbstractSemanticAnalyzerHook;
+import org.apache.hadoop.hive.ql.parse.HiveSemanticAnalyzerHookContext;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hadoop.hive.ql.plan.DDLWork;
+import org.apache.hadoop.hive.ql.plan.HiveOperation;
+import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.sentry.binding.hive.HiveAuthzBindingHook;
+import org.apache.sentry.binding.hive.authz.HiveAuthzBinding;
+import org.apache.sentry.binding.hive.conf.HiveAuthzConf;
+import org.apache.sentry.core.common.Subject;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class HiveAuthzBindingHookV2 extends AbstractSemanticAnalyzerHook {
+ private static final Logger LOG = LoggerFactory
+ .getLogger(HiveAuthzBindingHookV2.class);
+ private final HiveAuthzBinding hiveAuthzBinding;
+ private final HiveAuthzConf authzConf;
+
+ public HiveAuthzBindingHookV2() throws Exception {
+ SessionState session = SessionState.get();
+ if(session == null) {
+ throw new IllegalStateException("Session has not been started");
+ }
+
+ HiveConf hiveConf = session.getConf();
+ if(hiveConf == null) {
+ throw new IllegalStateException("Session HiveConf is null");
+ }
+ authzConf = HiveAuthzBindingHook.loadAuthzConf(hiveConf);
+ hiveAuthzBinding = new HiveAuthzBinding(hiveConf, authzConf);
+ }
+
+ @Override
+ public ASTNode preAnalyze(HiveSemanticAnalyzerHookContext context, ASTNode ast)
+ throws SemanticException {
+ return ast;
+ }
+
+ /**
+ * Post analyze hook that invokes hive auth bindings
+ */
+ @Override
+ public void postAnalyze(HiveSemanticAnalyzerHookContext context,
+      List<Task<? extends Serializable>> rootTasks) throws SemanticException {
+ HiveOperation stmtOperation = getCurrentHiveStmtOp();
+ Subject subject = new Subject(context.getUserName());
+ for (int i = 0; i < rootTasks.size(); i++) {
+      Task<? extends Serializable> task = rootTasks.get(i);
+ if (task instanceof DDLTask) {
+ SentryFilterDDLTask filterTask =
+ new SentryFilterDDLTask(hiveAuthzBinding, subject, stmtOperation);
+ filterTask.setWork((DDLWork)task.getWork());
+ rootTasks.set(i, filterTask);
+ }
+ }
+ }
+
+ private HiveOperation getCurrentHiveStmtOp() {
+ SessionState sessState = SessionState.get();
+ if (sessState == null) {
+ LOG.warn("SessionState is null");
+ return null;
+ }
+ return sessState.getHiveOperation();
+ }
+
+}
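As a hedged sketch of how this hook class gets picked up (the wiring below is illustrative, not part of the patch; HiveAuthzBindingSessionHookV2 below performs the equivalent step on each HiveServer2 session):

    import org.apache.hadoop.hive.conf.HiveConf;

    public class HookWiringSketch {
      public static void main(String[] args) {
        HiveConf conf = new HiveConf();
        // read any semantic-analyzer hooks that are already configured
        String existing = conf.getVar(HiveConf.ConfVars.SEMANTIC_ANALYZER_HOOK);
        String sentryHook = "org.apache.sentry.binding.hive.v2.HiveAuthzBindingHookV2";
        // prepend the Sentry hook, keeping whatever was there before
        conf.setVar(HiveConf.ConfVars.SEMANTIC_ANALYZER_HOOK,
            existing == null || existing.isEmpty() ? sentryHook : sentryHook + "," + existing);
        System.out.println(conf.getVar(HiveConf.ConfVars.SEMANTIC_ANALYZER_HOOK));
      }
    }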
diff --git a/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/HiveAuthzBindingSessionHookV2.java b/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/HiveAuthzBindingSessionHookV2.java
new file mode 100644
index 000000000..3fbb62662
--- /dev/null
+++ b/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/HiveAuthzBindingSessionHookV2.java
@@ -0,0 +1,107 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.sentry.binding.hive.v2;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
+import org.apache.hadoop.mapreduce.JobContext;
+import org.apache.hive.service.cli.HiveSQLException;
+import org.apache.hive.service.cli.session.HiveSessionHookContext;
+import org.apache.sentry.binding.hive.HiveAuthzBindingHook;
+import org.apache.sentry.binding.hive.conf.HiveAuthzConf;
+
+import com.google.common.base.Joiner;
+
+/**
+ * HiveServer2 session hook for Sentry authorization v2: it updates the session
+ * configuration so that the Hive Authz v2 bindings take effect.
+ */
+public class HiveAuthzBindingSessionHookV2 implements
+ org.apache.hive.service.cli.session.HiveSessionHook {
+ public static final String SCRATCH_DIR_PERMISSIONS = "700";
+ public static final String SEMANTIC_HOOK = HiveAuthzBindingHookV2.class.getName();
+ public static final String ACCESS_RESTRICT_LIST = Joiner.on(",").join(
+ ConfVars.SEMANTIC_ANALYZER_HOOK.varname, ConfVars.PREEXECHOOKS.varname,
+ ConfVars.SCRATCHDIR.varname, ConfVars.LOCALSCRATCHDIR.varname,
+ ConfVars.METASTOREURIS.varname, ConfVars.METASTORECONNECTURLKEY.varname,
+ ConfVars.HADOOPBIN.varname, ConfVars.HIVESESSIONID.varname, ConfVars.HIVEAUXJARS.varname,
+ ConfVars.HIVESTATSDBCONNECTIONSTRING.varname, ConfVars.SCRATCHDIRPERMISSION.varname,
+ ConfVars.HIVE_SECURITY_COMMAND_WHITELIST.varname,
+ ConfVars.HIVE_AUTHORIZATION_TASK_FACTORY.varname,
+ ConfVars.HIVE_CAPTURE_TRANSFORM_ENTITY.varname, HiveAuthzConf.HIVE_ACCESS_CONF_URL,
+ HiveAuthzConf.HIVE_SENTRY_CONF_URL, HiveAuthzConf.HIVE_ACCESS_SUBJECT_NAME,
+ HiveAuthzConf.HIVE_SENTRY_SUBJECT_NAME, HiveAuthzConf.SENTRY_ACTIVE_ROLE_SET);
+
+ /**
+   * The session hook for sentry authorization that sets the required session level configuration:
+   * 1. Set up the sentry hooks: semantic, exec and filter hooks.
+   * 2. Set additional config properties required for auth:
+   *    HIVE_CAPTURE_TRANSFORM_ENTITY = true and SCRATCHDIRPERMISSION = 700.
+   * 3. Add sensitive config parameters to the config restrict list so that they cannot be
+   *    overridden by users.
+ */
+ @Override
+ public void run(HiveSessionHookContext sessionHookContext) throws HiveSQLException {
+ // Add sentry hooks to the session configuration
+ HiveConf sessionConf = sessionHookContext.getSessionConf();
+
+ appendConfVar(sessionConf, ConfVars.SEMANTIC_ANALYZER_HOOK.varname, SEMANTIC_HOOK);
+ // enable sentry authorization V2
+ sessionConf.setBoolean(HiveConf.ConfVars.HIVE_AUTHORIZATION_ENABLED.varname, true);
+ sessionConf.setBoolean(HiveConf.ConfVars.HIVE_SERVER2_ENABLE_DOAS.varname, false);
+ sessionConf.set(HiveConf.ConfVars.HIVE_AUTHENTICATOR_MANAGER.varname,
+ "org.apache.hadoop.hive.ql.security.SessionStateUserAuthenticator");
+
+    // clear Hive's default table-owner grants; with Sentry enabled, owner privileges come from policy
+ sessionConf.setVar(ConfVars.HIVE_AUTHORIZATION_TABLE_OWNER_GRANTS, "");
+
+ // Enable compiler to capture transform URI referred in the query
+ sessionConf.setBoolVar(ConfVars.HIVE_CAPTURE_TRANSFORM_ENTITY, true);
+
+ // set security command list
+ HiveAuthzConf authzConf = HiveAuthzBindingHook.loadAuthzConf(sessionConf);
+ String commandWhitelist =
+ authzConf.get(HiveAuthzConf.HIVE_SENTRY_SECURITY_COMMAND_WHITELIST,
+ HiveAuthzConf.HIVE_SENTRY_SECURITY_COMMAND_WHITELIST_DEFAULT);
+ sessionConf.setVar(ConfVars.HIVE_SECURITY_COMMAND_WHITELIST, commandWhitelist);
+
+ // set additional configuration properties required for auth
+ sessionConf.setVar(ConfVars.SCRATCHDIRPERMISSION, SCRATCH_DIR_PERMISSIONS);
+
+ // setup restrict list
+ sessionConf.addToRestrictList(ACCESS_RESTRICT_LIST);
+
+ // set user name
+ sessionConf.set(HiveAuthzConf.HIVE_ACCESS_SUBJECT_NAME, sessionHookContext.getSessionUser());
+ sessionConf.set(HiveAuthzConf.HIVE_SENTRY_SUBJECT_NAME, sessionHookContext.getSessionUser());
+
+ // Set MR ACLs to session user
+ appendConfVar(sessionConf, JobContext.JOB_ACL_VIEW_JOB, sessionHookContext.getSessionUser());
+ appendConfVar(sessionConf, JobContext.JOB_ACL_MODIFY_JOB, sessionHookContext.getSessionUser());
+ }
+
+  // Prepend the Sentry value to the given conf var, preserving any existing value
+ private void appendConfVar(HiveConf sessionConf, String confVar, String sentryConfVal) {
+ String currentValue = sessionConf.get(confVar, "").trim();
+ if (currentValue.isEmpty()) {
+ currentValue = sentryConfVal;
+ } else {
+ currentValue = sentryConfVal + "," + currentValue;
+ }
+ sessionConf.set(confVar, currentValue);
+ }
+
+}
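To make appendConfVar concrete, a small illustration (the pre-existing hook name is hypothetical): starting from a session whose semantic hook list already has one entry, the method prepends the Sentry hook with a comma separator rather than replacing the list.

    HiveConf sessionConf = new HiveConf();
    // hypothetical hook configured before the session hook runs
    sessionConf.set("hive.semantic.analyzer.hook", "com.example.ExistingHook");
    // after run(sessionHookContext), the variable would read:
    // "org.apache.sentry.binding.hive.v2.HiveAuthzBindingHookV2,com.example.ExistingHook"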
diff --git a/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/SentryAuthorizerFactory.java b/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/SentryAuthorizerFactory.java
new file mode 100644
index 000000000..4a5cbcf85
--- /dev/null
+++ b/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/SentryAuthorizerFactory.java
@@ -0,0 +1,164 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.sentry.binding.hive.v2;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
+import org.apache.hadoop.hive.ql.security.HiveAuthenticationProvider;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizer;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizerFactory;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzPluginException;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzSessionContext;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzSessionContext.CLIENT_TYPE;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveMetastoreClientFactory;
+import org.apache.sentry.binding.hive.HiveAuthzBindingHook;
+import org.apache.sentry.binding.hive.conf.HiveAuthzConf;
+import org.apache.sentry.binding.hive.v2.authorizer.DefaultSentryAccessController;
+import org.apache.sentry.binding.hive.v2.authorizer.DefaultSentryValidator;
+import org.apache.sentry.binding.hive.v2.authorizer.SentryHiveAccessController;
+import org.apache.sentry.binding.hive.v2.authorizer.SentryHiveAuthorizationValidator;
+import org.apache.sentry.binding.hive.v2.authorizer.SentryHiveAuthorizer;
+
+import com.google.common.annotations.VisibleForTesting;
+
+public class SentryAuthorizerFactory implements HiveAuthorizerFactory {
+ public static final String HIVE_SENTRY_ACCESS_CONTROLLER =
+ "hive.security.sentry.access.controller";
+ public static final String HIVE_SENTRY_AUTHORIZATION_CONTROLLER =
+ "hive.security.sentry.authorization.controller";
+ private HiveAuthzConf authzConf;
+
+ @Override
+ public HiveAuthorizer createHiveAuthorizer(HiveMetastoreClientFactory metastoreClientFactory,
+ HiveConf conf, HiveAuthenticationProvider authenticator, HiveAuthzSessionContext ctx)
+ throws HiveAuthzPluginException {
+ HiveAuthzSessionContext sessionContext;
+ try {
+ this.authzConf = HiveAuthzBindingHook.loadAuthzConf(conf);
+ sessionContext = applyTestSettings(ctx, conf);
+ assertHiveCliAuthDisabled(conf, sessionContext);
+ } catch (Exception e) {
+ throw new HiveAuthzPluginException(e);
+ }
+ SentryHiveAccessController accessController =
+ getAccessController(conf, authzConf, authenticator, sessionContext);
+ SentryHiveAuthorizationValidator authzValidator =
+ getAuthzValidator(conf, authzConf, authenticator);
+
+ return new SentryHiveAuthorizer(accessController, authzValidator);
+ }
+
+ private HiveAuthzSessionContext applyTestSettings(HiveAuthzSessionContext ctx, HiveConf conf) {
+ if (conf.getBoolVar(ConfVars.HIVE_TEST_AUTHORIZATION_SQLSTD_HS2_MODE)
+ && ctx.getClientType() == CLIENT_TYPE.HIVECLI) {
+ // create new session ctx object with HS2 as client type
+ HiveAuthzSessionContext.Builder ctxBuilder = new HiveAuthzSessionContext.Builder(ctx);
+ ctxBuilder.setClientType(CLIENT_TYPE.HIVESERVER2);
+ return ctxBuilder.build();
+ }
+ return ctx;
+ }
+
+ private void assertHiveCliAuthDisabled(HiveConf conf, HiveAuthzSessionContext ctx)
+ throws HiveAuthzPluginException {
+ if (ctx.getClientType() == CLIENT_TYPE.HIVECLI
+ && conf.getBoolVar(ConfVars.HIVE_AUTHORIZATION_ENABLED)) {
+ throw new HiveAuthzPluginException(
+          "SQL standards based authorization should not be enabled from hive cli. "
+          + "Instead the use of storage based authorization in hive metastore is recommended. Set "
+ + ConfVars.HIVE_AUTHORIZATION_ENABLED.varname + "=false to disable authz within cli");
+ }
+ }
+
+ /**
+ * just for testing
+ */
+ @VisibleForTesting
+ protected HiveAuthorizer createHiveAuthorizer(HiveMetastoreClientFactory metastoreClientFactory,
+ HiveConf conf, HiveAuthzConf authzConf, HiveAuthenticationProvider authenticator,
+ HiveAuthzSessionContext ctx) throws HiveAuthzPluginException {
+ SentryHiveAccessController accessController =
+ getAccessController(conf, authzConf, authenticator, ctx);
+ SentryHiveAuthorizationValidator authzValidator =
+ getAuthzValidator(conf, authzConf, authenticator);
+
+ return new SentryHiveAuthorizer(accessController, authzValidator);
+ }
+
+ /**
+   * Get an instance of SentryHiveAccessController from the configuration.
+   * Defaults to DefaultSentryAccessController.
+   *
+   * @param conf
+   * @param authzConf
+   * @param authenticator
+   * @param ctx
+ * @throws HiveAuthzPluginException
+ */
+ public static SentryHiveAccessController getAccessController(HiveConf conf,
+ HiveAuthzConf authzConf, HiveAuthenticationProvider authenticator,
+ HiveAuthzSessionContext ctx) throws HiveAuthzPluginException {
+    Class<? extends SentryHiveAccessController> clazz =
+ conf.getClass(HIVE_SENTRY_ACCESS_CONTROLLER, DefaultSentryAccessController.class,
+ SentryHiveAccessController.class);
+
+ if (clazz == null) {
+ // should not happen as default value is set
+ throw new HiveAuthzPluginException("Configuration value " + HIVE_SENTRY_ACCESS_CONTROLLER
+ + " is not set to valid SentryAccessController subclass");
+ }
+
+ try {
+ return new DefaultSentryAccessController(conf, authzConf, authenticator, ctx);
+ } catch (Exception e) {
+ throw new HiveAuthzPluginException(e);
+ }
+
+ }
+
+ /**
+   * Get an instance of SentryHiveAuthorizationValidator from the configuration.
+   * Defaults to DefaultSentryValidator.
+ *
+ * @param conf
+ * @param authzConf
+ * @param authenticator
+ * @throws HiveAuthzPluginException
+ */
+ public static SentryHiveAuthorizationValidator getAuthzValidator(HiveConf conf,
+ HiveAuthzConf authzConf, HiveAuthenticationProvider authenticator)
+ throws HiveAuthzPluginException {
+    Class<? extends SentryHiveAuthorizationValidator> clazz =
+ conf.getClass(HIVE_SENTRY_AUTHORIZATION_CONTROLLER, DefaultSentryValidator.class,
+ SentryHiveAuthorizationValidator.class);
+
+ if (clazz == null) {
+ // should not happen as default value is set
+ throw new HiveAuthzPluginException("Configuration value "
+ + HIVE_SENTRY_AUTHORIZATION_CONTROLLER
+ + " is not set to valid SentryAuthorizationValidator subclass");
+ }
+
+ try {
+ return new DefaultSentryValidator(conf, authzConf, authenticator);
+ } catch (Exception e) {
+ throw new HiveAuthzPluginException(e);
+ }
+
+ }
+}
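A minimal configuration sketch, assuming HiveServer2 selects the factory through Hive's standard authorization-manager property (the custom-controller override on the last lines is optional and purely illustrative):

    HiveConf conf = new HiveConf();
    conf.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER,
        "org.apache.sentry.binding.hive.v2.SentryAuthorizerFactory");
    conf.setBoolVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_ENABLED, true);
    // optional: swap in a custom access controller implementation
    conf.set(SentryAuthorizerFactory.HIVE_SENTRY_ACCESS_CONTROLLER,
        "org.apache.sentry.binding.hive.v2.authorizer.DefaultSentryAccessController");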
diff --git a/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/SentryHiveAuthorizationTaskFactoryImplV2.java b/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/SentryHiveAuthorizationTaskFactoryImplV2.java
new file mode 100644
index 000000000..2d4bf6436
--- /dev/null
+++ b/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/SentryHiveAuthorizationTaskFactoryImplV2.java
@@ -0,0 +1,64 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
+ * agreements. See the NOTICE file distributed with this work for additional information regarding
+ * copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License. You may obtain a
+ * copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License
+ * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
+ * or implied. See the License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.sentry.binding.hive.v2;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.exec.SentryHivePrivilegeObjectDesc;
+import org.apache.hadoop.hive.ql.metadata.Hive;
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
+import org.apache.hadoop.hive.ql.parse.DDLSemanticAnalyzer;
+import org.apache.hadoop.hive.ql.parse.HiveParser;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hadoop.hive.ql.parse.authorization.HiveAuthorizationTaskFactoryImpl;
+import org.apache.hadoop.hive.ql.plan.PrivilegeObjectDesc;
+
+public class SentryHiveAuthorizationTaskFactoryImplV2 extends HiveAuthorizationTaskFactoryImpl {
+
+ public SentryHiveAuthorizationTaskFactoryImplV2(HiveConf conf, Hive db) {
+ super(conf, db);
+ }
+
+ @Override
+ protected PrivilegeObjectDesc parsePrivObject(ASTNode ast) throws SemanticException {
+ SentryHivePrivilegeObjectDesc subject = new SentryHivePrivilegeObjectDesc();
+ ASTNode child = (ASTNode) ast.getChild(0);
+ ASTNode gchild = (ASTNode) child.getChild(0);
+ if (child.getType() == HiveParser.TOK_TABLE_TYPE) {
+ subject.setTable(true);
+ String[] qualified = BaseSemanticAnalyzer.getQualifiedTableName(gchild);
+ subject.setObject(BaseSemanticAnalyzer.getDotName(qualified));
+ } else if (child.getType() == HiveParser.TOK_URI_TYPE) {
+ subject.setUri(true);
+ subject.setObject(gchild.getText());
+ } else if (child.getType() == HiveParser.TOK_SERVER_TYPE) {
+ subject.setServer(true);
+ subject.setObject(gchild.getText());
+ } else {
+ subject.setTable(false);
+ subject.setObject(BaseSemanticAnalyzer.unescapeIdentifier(gchild.getText()));
+ }
+ // if partition spec node is present, set partition spec
+ for (int i = 1; i < child.getChildCount(); i++) {
+ gchild = (ASTNode) child.getChild(i);
+ if (gchild.getType() == HiveParser.TOK_PARTSPEC) {
+ subject.setPartSpec(DDLSemanticAnalyzer.getPartSpec(gchild));
+ } else if (gchild.getType() == HiveParser.TOK_TABCOLNAME) {
+ subject.setColumns(BaseSemanticAnalyzer.getColumnNames(gchild));
+ }
+ }
+ return subject;
+ }
+}
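For orientation, these branches correspond to Sentry's extended GRANT grammar (statement shapes inferred from the token names; role and object names below are placeholders): a statement such as GRANT ALL ON URI 'hdfs:///data/ext' TO ROLE analyst would parse into TOK_URI_TYPE and take the setUri(true) path, while GRANT ALL ON SERVER server1 TO ROLE admin would take the TOK_SERVER_TYPE path.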
diff --git a/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/SentryHivePrivilegeObject.java b/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/SentryHivePrivilegeObject.java
new file mode 100644
index 000000000..62773855c
--- /dev/null
+++ b/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/SentryHivePrivilegeObject.java
@@ -0,0 +1,32 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
+ * agreements. See the NOTICE file distributed with this work for additional information regarding
+ * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License. You may obtain a
+ * copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License
+ * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
+ * or implied. See the License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package org.apache.sentry.binding.hive.v2;
+
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject;
+
+public class SentryHivePrivilegeObject extends HivePrivilegeObject {
+
+ boolean isServer = false;
+
+ boolean isUri = false;
+
+ String objectName = "";
+
+ public SentryHivePrivilegeObject(HivePrivilegeObjectType type, String objectName) {
+ super(type, null, objectName);
+ }
+
+}
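A hypothetical construction, just to show how the class plugs into Hive's plugin API types (the server name is a placeholder):

    import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject.HivePrivilegeObjectType;

    // wrap a server-scoped object for Hive's authorization plugin API
    SentryHivePrivilegeObject serverObj =
        new SentryHivePrivilegeObject(HivePrivilegeObjectType.GLOBAL, "server1");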
diff --git a/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/authorizer/DefaultSentryAccessController.java b/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/authorizer/DefaultSentryAccessController.java
new file mode 100644
index 000000000..57de2ac5a
--- /dev/null
+++ b/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/authorizer/DefaultSentryAccessController.java
@@ -0,0 +1,553 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
+ * agreements. See the NOTICE file distributed with this work for additional information regarding
+ * copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License. You may obtain a
+ * copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License
+ * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
+ * or implied. See the License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.sentry.binding.hive.v2.authorizer;
+
+import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+
+import org.apache.hadoop.hive.SentryHiveConstants;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
+import org.apache.hadoop.hive.ql.metadata.AuthorizationException;
+import org.apache.hadoop.hive.ql.plan.HiveOperation;
+import org.apache.hadoop.hive.ql.security.HiveAuthenticationProvider;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAccessControlException;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzPluginException;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzSessionContext;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzSessionContext.CLIENT_TYPE;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrincipal;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrincipal.HivePrincipalType;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilege;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeInfo;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveRoleGrant;
+import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.sentry.SentryUserException;
+import org.apache.sentry.binding.hive.SentryOnFailureHookContext;
+import org.apache.sentry.binding.hive.SentryOnFailureHookContextImpl;
+import org.apache.sentry.binding.hive.authz.HiveAuthzBinding;
+import org.apache.sentry.binding.hive.authz.HiveAuthzBinding.HiveHook;
+import org.apache.sentry.binding.hive.conf.HiveAuthzConf;
+import org.apache.sentry.binding.hive.conf.HiveAuthzConf.AuthzConfVars;
+import org.apache.sentry.binding.hive.v2.util.SentryAuthorizerUtil;
+import org.apache.sentry.core.common.ActiveRoleSet;
+import org.apache.sentry.core.common.Authorizable;
+import org.apache.sentry.core.model.db.AccessConstants;
+import org.apache.sentry.core.model.db.DBModelAuthorizable;
+import org.apache.sentry.core.model.db.Server;
+import org.apache.sentry.provider.db.SentryAccessDeniedException;
+import org.apache.sentry.provider.db.service.thrift.SentryPolicyServiceClient;
+import org.apache.sentry.provider.db.service.thrift.TSentryPrivilege;
+import org.apache.sentry.provider.db.service.thrift.TSentryRole;
+import org.apache.sentry.service.thrift.SentryServiceClientFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.google.common.base.Preconditions;
+import com.google.common.collect.Sets;
+
+public class DefaultSentryAccessController extends SentryHiveAccessController {
+
+ public static final Logger LOG = LoggerFactory.getLogger(DefaultSentryAccessController.class);
+
+ public static final String REQUIRED_AUTHZ_SERVER_NAME = "Config "
+ + AuthzConfVars.AUTHZ_SERVER_NAME.getVar() + " is required";
+
+ private HiveAuthenticationProvider authenticator;
+ private String serverName;
+ private HiveConf conf;
+ private HiveAuthzConf authzConf;
+ private HiveAuthzSessionContext ctx;
+
+ private HiveHook hiveHook;
+ private HiveAuthzBinding hiveAuthzBinding;
+ protected SentryPolicyServiceClient sentryClient;
+
+
+ public DefaultSentryAccessController(HiveConf conf, HiveAuthzConf authzConf,
+ HiveAuthenticationProvider authenticator, HiveAuthzSessionContext ctx) throws Exception {
+    initialize(conf, authzConf, authenticator, ctx);
+ this.hiveHook = HiveHook.HiveServer2;
+ }
+
+ public DefaultSentryAccessController(HiveHook hiveHook, HiveConf conf, HiveAuthzConf authzConf,
+ HiveAuthenticationProvider authenticator, HiveAuthzSessionContext ctx) throws Exception {
+    initialize(conf, authzConf, authenticator, ctx);
+ this.hiveHook = hiveHook;
+ }
+
+ /**
+ * initialize authenticator and hiveAuthzBinding.
+ */
+  protected void initialize(HiveConf conf, HiveAuthzConf authzConf,
+ HiveAuthenticationProvider authenticator, HiveAuthzSessionContext ctx) throws Exception {
+ Preconditions.checkNotNull(conf, "HiveConf cannot be null");
+ Preconditions.checkNotNull(authzConf, "HiveAuthzConf cannot be null");
+ Preconditions.checkNotNull(authenticator, "Hive authenticator provider cannot be null");
+ Preconditions.checkNotNull(ctx, "HiveAuthzSessionContext cannot be null");
+
+ this.conf = conf;
+ this.authzConf = authzConf;
+ this.authenticator = authenticator;
+ this.ctx = ctx;
+ this.serverName =
+ Preconditions.checkNotNull(authzConf.get(AuthzConfVars.AUTHZ_SERVER_NAME.getVar()),
+ REQUIRED_AUTHZ_SERVER_NAME);
+ }
+
+ @Override
+ public void createRole(String roleName, HivePrincipal adminGrantor)
+ throws HiveAuthzPluginException, HiveAccessControlException {
+ if (AccessConstants.RESERVED_ROLE_NAMES.contains(roleName.toUpperCase())) {
+ String msg =
+ "Roles cannot be one of the reserved roles: " + AccessConstants.RESERVED_ROLE_NAMES;
+ throw new HiveAccessControlException(msg);
+ }
+ try {
+ sentryClient = getSentryClient();
+ sentryClient.createRole(authenticator.getUserName(), roleName);
+ } catch (SentryAccessDeniedException e) {
+ HiveOperation hiveOp = HiveOperation.CREATEROLE;
+ executeOnFailureHooks(hiveOp, e);
+ } catch (SentryUserException e) {
+      String msg = "Error occurred when Sentry client was creating role: " + e.getMessage();
+ executeOnErrorHooks(msg, e);
+ } finally {
+ if (sentryClient != null) {
+ sentryClient.close();
+ }
+ }
+ }
+
+ @Override
+ public void dropRole(String roleName) throws HiveAuthzPluginException, HiveAccessControlException {
+ if (AccessConstants.RESERVED_ROLE_NAMES.contains(roleName.toUpperCase())) {
+ String msg =
+ "Roles cannot be one of the reserved roles: " + AccessConstants.RESERVED_ROLE_NAMES;
+ throw new HiveAccessControlException(msg);
+ }
+ try {
+ sentryClient = getSentryClient();
+ sentryClient.dropRole(authenticator.getUserName(), roleName);
+ } catch (SentryAccessDeniedException e) {
+ HiveOperation hiveOp = HiveOperation.DROPROLE;
+ executeOnFailureHooks(hiveOp, e);
+ } catch (SentryUserException e) {
+      String msg = "Error occurred when Sentry client was dropping role: " + e.getMessage();
+ executeOnErrorHooks(msg, e);
+ } finally {
+ if (sentryClient != null) {
+ sentryClient.close();
+ }
+ }
+ }
+
+ @Override
+  public List<String> getAllRoles() throws HiveAccessControlException, HiveAuthzPluginException {
+    List<String> roles = new ArrayList<String>();
+ try {
+ sentryClient = getSentryClient();
+ roles = convert2RoleList(sentryClient.listRoles(authenticator.getUserName()));
+ } catch (SentryAccessDeniedException e) {
+ HiveOperation hiveOp = HiveOperation.SHOW_ROLES;
+ executeOnFailureHooks(hiveOp, e);
+ } catch (SentryUserException e) {
+ String msg = "Error when sentryClient listRoles: " + e.getMessage();
+ executeOnErrorHooks(msg, e);
+ } finally {
+ if (sentryClient != null) {
+ sentryClient.close();
+ }
+ }
+ return roles;
+ }
+
+ @Override
+  public void grantPrivileges(List<HivePrincipal> hivePrincipals,
+      List<HivePrivilege> hivePrivileges, HivePrivilegeObject hivePrivObject,
+ HivePrincipal grantorPrincipal, boolean grantOption) throws HiveAuthzPluginException,
+ HiveAccessControlException {
+    grantOrRevokePrivilegeOnRole(hivePrincipals, hivePrivileges, hivePrivObject, grantOption, true);
+ }
+
+ @Override
+  public void revokePrivileges(List<HivePrincipal> hivePrincipals,
+      List<HivePrivilege> hivePrivileges, HivePrivilegeObject hivePrivObject,
+ HivePrincipal grantorPrincipal, boolean grantOption) throws HiveAuthzPluginException,
+ HiveAccessControlException {
+    grantOrRevokePrivilegeOnRole(hivePrincipals, hivePrivileges, hivePrivObject, grantOption, false);
+ }
+
+ @Override
+  public void grantRole(List<HivePrincipal> hivePrincipals, List<String> roles,
+ boolean grantOption, HivePrincipal grantorPrinc) throws HiveAuthzPluginException,
+ HiveAccessControlException {
+ grantOrRevokeRoleOnGroup(hivePrincipals, roles, grantorPrinc, true);
+ }
+
+ @Override
+  public void revokeRole(List<HivePrincipal> hivePrincipals, List<String> roles,
+ boolean grantOption, HivePrincipal grantorPrinc) throws HiveAuthzPluginException,
+ HiveAccessControlException {
+ grantOrRevokeRoleOnGroup(hivePrincipals, roles, grantorPrinc, false);
+ }
+
+
+ @Override
+  public List<HivePrivilegeInfo> showPrivileges(HivePrincipal principal, HivePrivilegeObject privObj)
+ throws HiveAuthzPluginException, HiveAccessControlException {
+ if (principal.getType() != HivePrincipalType.ROLE) {
+ String msg =
+ SentryHiveConstants.GRANT_REVOKE_NOT_SUPPORTED_FOR_PRINCIPAL + principal.getType();
+ throw new HiveAuthzPluginException(msg);
+ }
+    List<HivePrivilegeInfo> infoList = new ArrayList<HivePrivilegeInfo>();
+ try {
+ sentryClient = getSentryClient();
+      List<List<DBModelAuthorizable>> authorizables =
+          SentryAuthorizerUtil.getAuthzHierarchy(new Server(serverName), privObj);
+      Set<TSentryPrivilege> tPrivileges = new HashSet<TSentryPrivilege>();
+      if (authorizables != null && !authorizables.isEmpty()) {
+        for (List<? extends Authorizable> authorizable : authorizables) {
+          tPrivileges.addAll(sentryClient.listPrivilegesByRoleName(authenticator.getUserName(),
+              principal.getName(), authorizable));
+        }
+      } else {
+        tPrivileges.addAll(sentryClient.listPrivilegesByRoleName(authenticator.getUserName(),
+            principal.getName(), null));
+      }
+
+      if (!tPrivileges.isEmpty()) {
+        for (TSentryPrivilege privilege : tPrivileges) {
+ infoList.add(SentryAuthorizerUtil.convert2HivePrivilegeInfo(privilege, principal));
+ }
+ }
+ } catch (SentryAccessDeniedException e) {
+ HiveOperation hiveOp = HiveOperation.SHOW_GRANT;
+ executeOnFailureHooks(hiveOp, e);
+ } catch (SentryUserException e) {
+ String msg = "Error when sentryClient listPrivilegesByRoleName: " + e.getMessage();
+ executeOnErrorHooks(msg, e);
+ } finally {
+ if (sentryClient != null) {
+ sentryClient.close();
+ }
+ }
+ return infoList;
+ }
+
+ @Override
+ public void setCurrentRole(String roleName) throws HiveAccessControlException,
+ HiveAuthzPluginException {
+ try {
+ sentryClient = getSentryClient();
+ hiveAuthzBinding = new HiveAuthzBinding(hiveHook, conf, authzConf);
+ hiveAuthzBinding.setActiveRoleSet(roleName,
+ sentryClient.listUserRoles(authenticator.getUserName()));
+ } catch (SentryAccessDeniedException e) {
+ HiveOperation hiveOp = HiveOperation.GRANT_ROLE;
+ executeOnFailureHooks(hiveOp, e);
+ } catch (Exception e) {
+ String msg = "Error when sentryClient setCurrentRole: " + e.getMessage();
+ executeOnErrorHooks(msg, e);
+ } finally {
+ if (sentryClient != null) {
+ sentryClient.close();
+ }
+ if (hiveAuthzBinding != null) {
+ hiveAuthzBinding.close();
+ }
+ }
+ }
+
+ @Override
+  public List<String> getCurrentRoleNames() throws HiveAuthzPluginException {
+    List<String> roles = new ArrayList<String>();
+ try {
+ sentryClient = getSentryClient();
+ hiveAuthzBinding = new HiveAuthzBinding(hiveHook, conf, authzConf);
+ ActiveRoleSet roleSet = hiveAuthzBinding.getActiveRoleSet();
+ if (roleSet.isAll()) {
+ roles = convert2RoleList(sentryClient.listUserRoles(authenticator.getUserName()));
+ } else {
+ roles.addAll(roleSet.getRoles());
+ }
+ } catch (Exception e) {
+ String msg = "Error when sentryClient listUserRoles: " + e.getMessage();
+ executeOnErrorHooks(msg, e);
+ } finally {
+ if (sentryClient != null) {
+ sentryClient.close();
+ }
+ if (hiveAuthzBinding != null) {
+ hiveAuthzBinding.close();
+ }
+ }
+ return roles;
+ }
+
+ @Override
+  public List<HiveRoleGrant> getPrincipalGrantInfoForRole(String roleName)
+ throws HiveAuthzPluginException {
+    // TODO: support this in the future
+    throw new HiveAuthzPluginException("SHOW_ROLE_PRINCIPALS is not supported in Sentry");
+ }
+
+ @Override
+  public List<HiveRoleGrant> getRoleGrantInfoForPrincipal(HivePrincipal principal)
+      throws HiveAccessControlException, HiveAuthzPluginException {
+    List<HiveRoleGrant> hiveRoleGrants = new ArrayList<HiveRoleGrant>();
+ try {
+ sentryClient = getSentryClient();
+
+ if (principal.getType() != HivePrincipalType.GROUP) {
+ String msg =
+ SentryHiveConstants.GRANT_REVOKE_NOT_SUPPORTED_FOR_PRINCIPAL + principal.getType();
+ throw new HiveAuthzPluginException(msg);
+ }
+      Set<TSentryRole> roles =
+ sentryClient.listRolesByGroupName(authenticator.getUserName(), principal.getName());
+ if (roles != null && !roles.isEmpty()) {
+ for (TSentryRole role : roles) {
+ hiveRoleGrants.add(SentryAuthorizerUtil.convert2HiveRoleGrant(role));
+ }
+ }
+ } catch (SentryAccessDeniedException e) {
+ HiveOperation hiveOp = HiveOperation.SHOW_ROLE_GRANT;
+ executeOnFailureHooks(hiveOp, e);
+ } catch (SentryUserException e) {
+ String msg = "Error when sentryClient listRolesByGroupName: " + e.getMessage();
+ executeOnErrorHooks(msg, e);
+ } finally {
+ if (sentryClient != null) {
+ sentryClient.close();
+ }
+ }
+ return hiveRoleGrants;
+ }
+
+ @Override
+ public void applyAuthorizationConfigPolicy(HiveConf hiveConf) throws HiveAuthzPluginException {
+ // Apply rest of the configuration only to HiveServer2
+ if (ctx.getClientType() != CLIENT_TYPE.HIVESERVER2
+ || !hiveConf.getBoolVar(ConfVars.HIVE_AUTHORIZATION_ENABLED)) {
+      throw new HiveAuthzPluginException("Sentry only supports HiveServer2");
+ }
+ }
+
+ /**
+   * Grant (isGrant is true) or revoke (isGrant is false) db privileges to/from a role via
+   * sentryClient, which is an instance of SentryPolicyServiceClientV2.
+ *
+ * @param hivePrincipals
+ * @param hivePrivileges
+ * @param hivePrivObject
+ * @param grantOption
+ * @param isGrant
+ */
+  private void grantOrRevokePrivilegeOnRole(List<HivePrincipal> hivePrincipals,
+      List<HivePrivilege> hivePrivileges, HivePrivilegeObject hivePrivObject, boolean grantOption,
+      boolean isGrant) throws HiveAuthzPluginException, HiveAccessControlException {
+ try {
+ sentryClient = getSentryClient();
+
+ for (HivePrincipal principal : hivePrincipals) {
+        // Sentry only supports granting privileges to a ROLE principal
+ if (principal.getType() != HivePrincipalType.ROLE) {
+ String msg =
+ SentryHiveConstants.GRANT_REVOKE_NOT_SUPPORTED_FOR_PRINCIPAL + principal.getType();
+ throw new HiveAuthzPluginException(msg);
+ }
+ for (HivePrivilege privilege : hivePrivileges) {
+ String grantorName = authenticator.getUserName();
+ String roleName = principal.getName();
+ String action = SentryAuthorizerUtil.convert2SentryAction(privilege);
+          List<String> columnNames = privilege.getColumns();
+ Boolean grantOp = null;
+ if (isGrant) {
+ grantOp = grantOption;
+ }
+
+ switch (hivePrivObject.getType()) {
+ case GLOBAL:
+ if (isGrant) {
+ sentryClient.grantServerPrivilege(grantorName, roleName,
+ hivePrivObject.getObjectName(), action, grantOp);
+ } else {
+ sentryClient.revokeServerPrivilege(grantorName, roleName,
+ hivePrivObject.getObjectName(), action, grantOp);
+ }
+ break;
+ case DATABASE:
+ if (isGrant) {
+ sentryClient.grantDatabasePrivilege(grantorName, roleName, serverName,
+ hivePrivObject.getDbname(), action, grantOp);
+ } else {
+ sentryClient.revokeDatabasePrivilege(grantorName, roleName, serverName,
+ hivePrivObject.getDbname(), action, grantOp);
+ }
+ break;
+ case TABLE_OR_VIEW:
+ // For column level security
+ if (columnNames != null && !columnNames.isEmpty()) {
+ if (action.equalsIgnoreCase(AccessConstants.INSERT)
+ || action.equalsIgnoreCase(AccessConstants.ALL)) {
+ String msg =
+ SentryHiveConstants.PRIVILEGE_NOT_SUPPORTED + privilege.getName()
+ + " on Column";
+ throw new HiveAuthzPluginException(msg);
+ }
+ if (isGrant) {
+ sentryClient.grantColumnsPrivileges(grantorName, roleName, serverName,
+ hivePrivObject.getDbname(), hivePrivObject.getObjectName(), columnNames,
+ action, grantOp);
+ } else {
+ sentryClient.revokeColumnsPrivilege(grantorName, roleName, serverName,
+ hivePrivObject.getDbname(), hivePrivObject.getObjectName(), columnNames,
+ action, grantOp);
+ }
+ } else {
+ if (isGrant) {
+ sentryClient.grantTablePrivilege(grantorName, roleName, serverName,
+ hivePrivObject.getDbname(), hivePrivObject.getObjectName(), action, grantOp);
+ } else {
+ sentryClient.revokeTablePrivilege(grantorName, roleName, serverName,
+ hivePrivObject.getDbname(), hivePrivObject.getObjectName(), action, grantOp);
+ }
+ }
+ break;
+ case LOCAL_URI:
+ case DFS_URI:
+ String uRIString = hivePrivObject.getObjectName().replace("'", "").replace("\"", "");
+ if (isGrant) {
+ sentryClient.grantURIPrivilege(grantorName, roleName, serverName,
+ uRIString, grantOp);
+ } else {
+ sentryClient.revokeURIPrivilege(grantorName, roleName, serverName,
+ uRIString, grantOp);
+ }
+ break;
+ case FUNCTION:
+ case PARTITION:
+ case COLUMN:
+ case COMMAND_PARAMS:
+ // these object types are not supported
+ throw new HiveAuthzPluginException(hivePrivObject.getType().name()
+ + " is not supported in Sentry");
+ default:
+ break;
+ }
+ }
+ }
+ } catch (SentryAccessDeniedException e) {
+ HiveOperation hiveOp =
+ isGrant ? HiveOperation.GRANT_PRIVILEGE : HiveOperation.REVOKE_PRIVILEGE;
+ executeOnFailureHooks(hiveOp, e);
+ } catch (SentryUserException e) {
+ String msg = "Error when sentryClient grant/revoke privilege:" + e.getMessage();
+ executeOnErrorHooks(msg, e);
+ } finally {
+ if (sentryClient != null) {
+ sentryClient.close();
+ }
+ }
+ }
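+
+ // Illustrative sketch (an assumed typical call, not part of the original patch): a statement
+ // such as
+ //   GRANT SELECT ON TABLE db1.t1 TO ROLE analyst;
+ // arrives with hivePrivObject.getType() == TABLE_OR_VIEW, isGrant == true and no columns, and
+ // maps to roughly
+ //   sentryClient.grantTablePrivilege(grantor, "analyst", serverName, "db1", "t1", action, grantOp);
+ // "db1", "t1" and "analyst" are hypothetical names used only for illustration.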
+
+ /**
+ * Grant (isGrant is true) or revoke (isGrant is false) a role to/from a group via sentryClient,
+ * which is an instance of SentryPolicyServiceClientV2.
+ *
+ * @param hivePrincipals
+ * @param roles
+ * @param grantorPrinc
+ * @param isGrant
+ */
+ private void grantOrRevokeRoleOnGroup(List<HivePrincipal> hivePrincipals, List<String> roles,
+ HivePrincipal grantorPrinc, boolean isGrant) throws HiveAuthzPluginException,
+ HiveAccessControlException {
+ try {
+ sentryClient = getSentryClient();
+ // get principals
+ Set<String> groups = Sets.newHashSet();
+ for (HivePrincipal principal : hivePrincipals) {
+ if (principal.getType() != HivePrincipalType.GROUP) {
+ String msg =
+ SentryHiveConstants.GRANT_REVOKE_NOT_SUPPORTED_FOR_PRINCIPAL + principal.getType();
+ throw new HiveAuthzPluginException(msg);
+ }
+ groups.add(principal.getName());
+ }
+
+ // grant/revoke role to/from principals
+ for (String roleName : roles) {
+ if (isGrant) {
+ sentryClient.grantRoleToGroups(grantorPrinc.getName(), roleName, groups);
+ } else {
+ sentryClient.revokeRoleFromGroups(grantorPrinc.getName(), roleName, groups);
+ }
+ }
+
+ } catch (SentryAccessDeniedException e) {
+ HiveOperation hiveOp = isGrant ? HiveOperation.GRANT_ROLE : HiveOperation.REVOKE_ROLE;
+ executeOnFailureHooks(hiveOp, e);
+ } catch (SentryUserException e) {
+ String msg = "Error when sentryClient grant/revoke role:" + e.getMessage();
+ executeOnErrorHooks(msg, e);
+ } finally {
+ if (sentryClient != null) {
+ sentryClient.close();
+ }
+ }
+ }
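+
+ // Illustrative sketch (assumption, not part of the original patch): a statement such as
+ //   GRANT ROLE analyst TO GROUP finance;
+ // collects "finance" into the groups set and maps to roughly
+ //   sentryClient.grantRoleToGroups(grantorPrinc.getName(), "analyst", groups);
+ // "analyst" and "finance" are hypothetical names used only for illustration.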
+
+ private void executeOnFailureHooks(HiveOperation hiveOp, SentryAccessDeniedException e)
+ throws HiveAccessControlException {
+ SentryOnFailureHookContext hookCtx =
+ new SentryOnFailureHookContextImpl(SessionState.get().getCmd(), null, null, hiveOp, null,
+ null, null, null, authenticator.getUserName(), null, new AuthorizationException(e),
+ authzConf);
+ SentryAuthorizerUtil.executeOnFailureHooks(hookCtx, authzConf);
+ throw new HiveAccessControlException(e.getMessage(), e);
+ }
+
+ private void executeOnErrorHooks(String msg, Exception e) throws HiveAuthzPluginException {
+ LOG.error(msg, e);
+ throw new HiveAuthzPluginException(msg, e);
+ }
+
+ private List<String> convert2RoleList(Set<TSentryRole> roleSet) {
+ List<String> roles = new ArrayList<String>();
+ if (roleSet != null && !roleSet.isEmpty()) {
+ for (TSentryRole tRole : roleSet) {
+ roles.add(tRole.getRoleName());
+ }
+ }
+ return roles;
+ }
+
+ private SentryPolicyServiceClient getSentryClient() throws HiveAuthzPluginException {
+ try {
+ Preconditions.checkNotNull(authzConf, "HiveAuthConf cannot be null");
+ return SentryServiceClientFactory.create(authzConf);
+ } catch (Exception e) {
+ String msg = "Error occurred when creating Sentry client: " + e.getMessage();
+ throw new HiveAuthzPluginException(msg, e);
+ }
+ }
+
+
+}
diff --git a/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/authorizer/DefaultSentryValidator.java b/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/authorizer/DefaultSentryValidator.java
new file mode 100644
index 000000000..70e0720c9
--- /dev/null
+++ b/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/authorizer/DefaultSentryValidator.java
@@ -0,0 +1,479 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
+ * agreements. See the NOTICE file distributed with this work for additional information regarding
+ * copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License. You may obtain a
+ * copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License
+ * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
+ * or implied. See the License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.sentry.binding.hive.v2.authorizer;
+
+import static org.apache.hadoop.hive.metastore.MetaStoreUtils.DEFAULT_DATABASE_NAME;
+
+import java.security.CodeSource;
+import java.util.ArrayList;
+import java.util.EnumSet;
+import java.util.List;
+import java.util.Set;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.metadata.AuthorizationException;
+import org.apache.hadoop.hive.ql.plan.HiveOperation;
+import org.apache.hadoop.hive.ql.security.HiveAuthenticationProvider;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAccessControlException;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzContext;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzPluginException;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject.HivePrivilegeObjectType;
+import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.sentry.binding.hive.SentryOnFailureHookContext;
+import org.apache.sentry.binding.hive.SentryOnFailureHookContextImpl;
+import org.apache.sentry.binding.hive.authz.HiveAuthzBinding;
+import org.apache.sentry.binding.hive.authz.HiveAuthzBinding.HiveHook;
+import org.apache.sentry.binding.hive.authz.HiveAuthzPrivileges;
+import org.apache.sentry.binding.hive.authz.HiveAuthzPrivileges.HiveOperationScope;
+import org.apache.sentry.binding.hive.authz.HiveAuthzPrivilegesMap;
+import org.apache.sentry.binding.hive.conf.HiveAuthzConf;
+import org.apache.sentry.binding.hive.v2.util.SentryAuthorizerUtil;
+import org.apache.sentry.binding.hive.v2.util.SimpleSemanticAnalyzer;
+import org.apache.sentry.core.common.Subject;
+import org.apache.sentry.core.model.db.AccessURI;
+import org.apache.sentry.core.model.db.Column;
+import org.apache.sentry.core.model.db.DBModelAction;
+import org.apache.sentry.core.model.db.DBModelAuthorizable;
+import org.apache.sentry.core.model.db.DBModelAuthorizable.AuthorizableType;
+import org.apache.sentry.core.model.db.Database;
+import org.apache.sentry.core.model.db.Table;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.google.common.annotations.VisibleForTesting;
+import com.google.common.base.Preconditions;
+import com.google.common.collect.Sets;
+
+/**
+ * This class is used to perform authorization: check whether the current user has privileges to perform the operation.
+ */
+public class DefaultSentryValidator extends SentryHiveAuthorizationValidator {
+
+ public static final Logger LOG = LoggerFactory.getLogger(DefaultSentryValidator.class);
+
+ protected HiveConf conf;
+ protected HiveAuthzConf authzConf;
+ protected HiveAuthenticationProvider authenticator;
+
+ public DefaultSentryValidator(HiveConf conf, HiveAuthzConf authzConf,
+ HiveAuthenticationProvider authenticator) throws Exception {
+ initialize(conf, authzConf, authenticator);
+ this.hiveHook = HiveHook.HiveServer2;
+ }
+
+ public DefaultSentryValidator(HiveHook hiveHook, HiveConf conf, HiveAuthzConf authzConf,
+ HiveAuthenticationProvider authenticator) throws Exception {
+ initialize(conf, authzConf, authenticator);
+ this.hiveHook = hiveHook;
+ }
+
+ /**
+ * Initialize the configuration, authorization configuration, and authenticator.
+ */
+ protected void initialize(HiveConf conf, HiveAuthzConf authzConf,
+ HiveAuthenticationProvider authenticator) throws Exception {
+ Preconditions.checkNotNull(conf, "HiveConf cannot be null");
+ Preconditions.checkNotNull(authzConf, "HiveAuthzConf cannot be null");
+ Preconditions.checkNotNull(authenticator, "Hive authenticator provider cannot be null");
+ this.conf = conf;
+ this.authzConf = authzConf;
+ this.authenticator = authenticator;
+ }
+
+ private HiveHook hiveHook;
+
+ // all operations that need to be extended at the DB scope
+ private static final Set<HiveOperation> EX_DB_ALL = Sets.newHashSet(HiveOperation.DROPDATABASE,
+ HiveOperation.CREATETABLE, HiveOperation.IMPORT, HiveOperation.DESCDATABASE,
+ HiveOperation.ALTERTABLE_RENAME, HiveOperation.LOCKDB, HiveOperation.UNLOCKDB);
+ // input operations that need to be extended at the DB scope
+ private static final Set<HiveOperation> EX_DB_INPUT = Sets.newHashSet(HiveOperation.DROPDATABASE,
+ HiveOperation.DESCDATABASE, HiveOperation.ALTERTABLE_RENAME, HiveOperation.LOCKDB,
+ HiveOperation.UNLOCKDB);
+
+ // all operations that need to be extended at the Table scope
+ private static final Set<HiveOperation> EX_TB_ALL = Sets.newHashSet(HiveOperation.DROPTABLE,
+ HiveOperation.DROPVIEW, HiveOperation.DESCTABLE, HiveOperation.SHOW_TBLPROPERTIES,
+ HiveOperation.SHOWINDEXES, HiveOperation.ALTERTABLE_PROPERTIES,
+ HiveOperation.ALTERTABLE_SERDEPROPERTIES, HiveOperation.ALTERTABLE_CLUSTER_SORT,
+ HiveOperation.ALTERTABLE_FILEFORMAT, HiveOperation.ALTERTABLE_TOUCH,
+ HiveOperation.ALTERTABLE_PROTECTMODE, HiveOperation.ALTERTABLE_RENAMECOL,
+ HiveOperation.ALTERTABLE_ADDCOLS, HiveOperation.ALTERTABLE_REPLACECOLS,
+ HiveOperation.ALTERTABLE_RENAMEPART, HiveOperation.ALTERTABLE_ARCHIVE,
+ HiveOperation.ALTERTABLE_UNARCHIVE, HiveOperation.ALTERTABLE_SERIALIZER,
+ HiveOperation.ALTERTABLE_MERGEFILES, HiveOperation.ALTERTABLE_SKEWED,
+ HiveOperation.ALTERTABLE_DROPPARTS, HiveOperation.ALTERTABLE_ADDPARTS,
+ HiveOperation.ALTERTABLE_RENAME, HiveOperation.ALTERTABLE_LOCATION,
+ HiveOperation.ALTERVIEW_PROPERTIES, HiveOperation.ALTERPARTITION_FILEFORMAT,
+ HiveOperation.ALTERPARTITION_PROTECTMODE, HiveOperation.ALTERPARTITION_SERDEPROPERTIES,
+ HiveOperation.ALTERPARTITION_SERIALIZER, HiveOperation.ALTERPARTITION_MERGEFILES,
+ HiveOperation.ALTERPARTITION_LOCATION, HiveOperation.ALTERTBLPART_SKEWED_LOCATION,
+ HiveOperation.MSCK, HiveOperation.ALTERINDEX_REBUILD, HiveOperation.LOCKTABLE,
+ HiveOperation.UNLOCKTABLE, HiveOperation.SHOWCOLUMNS, HiveOperation.SHOW_TABLESTATUS, HiveOperation.LOAD);
+ // input operations that need to be extended at the Table scope
+ private static final Set<HiveOperation> EX_TB_INPUT = Sets.newHashSet(HiveOperation.DROPTABLE,
+ HiveOperation.DROPVIEW, HiveOperation.SHOW_TBLPROPERTIES, HiveOperation.SHOWINDEXES,
+ HiveOperation.ALTERINDEX_REBUILD, HiveOperation.LOCKTABLE, HiveOperation.UNLOCKTABLE,
+ HiveOperation.SHOW_TABLESTATUS);
+ private static final Set<HiveOperation> META_TB_INPUT = Sets.newHashSet(HiveOperation.DESCTABLE,
+ HiveOperation.SHOWCOLUMNS);
+
+ /**
+ * Check if the current user has privileges to perform the given operation type hiveOpType on
+ * the given input and output objects.
+ *
+ * @param hiveOpType
+ * @param inputHObjs
+ * @param outputHObjs
+ * @param context
+ * @throws HiveAuthzPluginException, HiveAccessControlException
+ */
+ @Override
+ public void checkPrivileges(HiveOperationType hiveOpType, List<HivePrivilegeObject> inputHObjs,
+ List<HivePrivilegeObject> outputHObjs, HiveAuthzContext context)
+ throws HiveAuthzPluginException, HiveAccessControlException {
+ if (LOG.isDebugEnabled()) {
+ String msg =
+ "Checking privileges for operation " + hiveOpType + " by user "
+ + authenticator.getUserName() + " on " + " input objects " + inputHObjs
+ + " and output objects " + outputHObjs + ". Context Info: " + context;
+ LOG.debug(msg);
+ }
+
+ HiveOperation hiveOp = SentryAuthorizerUtil.convert2HiveOperation(hiveOpType.name());
+ HiveAuthzPrivileges stmtAuthPrivileges = null;
+ if (HiveOperation.DESCTABLE.equals(hiveOp) &&
+ !(context.getCommandString().contains("EXTENDED") || context.getCommandString().contains("FORMATTED")) ) {
+ stmtAuthPrivileges = HiveAuthzPrivilegesMap.getHiveAuthzPrivileges(HiveOperation.SHOWCOLUMNS);
+ } else {
+ stmtAuthPrivileges = HiveAuthzPrivilegesMap.getHiveAuthzPrivileges(hiveOp);
+ }
+
+ HiveAuthzBinding hiveAuthzBinding = null;
+ try {
+ hiveAuthzBinding = getAuthzBinding();
+ if (stmtAuthPrivileges == null) {
+ // We don't handle authorizing this statement
+ return;
+ }
+
+ List<List<DBModelAuthorizable>> inputHierarchyList =
+ SentryAuthorizerUtil.convert2SentryPrivilegeList(hiveAuthzBinding.getAuthServer(),
+ inputHObjs);
+ List<List<DBModelAuthorizable>> outputHierarchyList =
+ SentryAuthorizerUtil.convert2SentryPrivilegeList(hiveAuthzBinding.getAuthServer(),
+ outputHObjs);
+
+ // Workaround for metadata queries
+ addExtendHierarchy(hiveOp, stmtAuthPrivileges, inputHierarchyList, outputHierarchyList,
+ context.getCommandString(), hiveAuthzBinding);
+
+ hiveAuthzBinding.authorize(hiveOp, stmtAuthPrivileges,
+ new Subject(authenticator.getUserName()), inputHierarchyList, outputHierarchyList);
+ } catch (AuthorizationException e) {
+ Database db = null;
+ Table tab = null;
+ AccessURI udfURI = null;
+ AccessURI partitionURI = null;
+ if (outputHObjs != null) {
+ for (HivePrivilegeObject obj : outputHObjs) {
+ switch (obj.getType()) {
+ case DATABASE:
+ db = new Database(obj.getObjectName());
+ break;
+ case TABLE_OR_VIEW:
+ db = new Database(obj.getDbname());
+ tab = new Table(obj.getObjectName());
+ break;
+ case PARTITION:
+ db = new Database(obj.getDbname());
+ tab = new Table(obj.getObjectName());
+ case LOCAL_URI:
+ case DFS_URI:
+ }
+ }
+ }
+ String permsRequired = "";
+ SentryOnFailureHookContext hookCtx =
+ new SentryOnFailureHookContextImpl(context.getCommandString(), null, null, hiveOp, db,
+ tab, udfURI, partitionURI, authenticator.getUserName(), context.getIpAddress(), e,
+ authzConf);
+ SentryAuthorizerUtil.executeOnFailureHooks(hookCtx, authzConf);
+ for (String perm : hiveAuthzBinding.getLastQueryPrivilegeErrors()) {
+ permsRequired += perm + ";";
+ }
+ SessionState.get().getConf().set(HiveAuthzConf.HIVE_SENTRY_AUTH_ERRORS, permsRequired);
+ String msg =
+ HiveAuthzConf.HIVE_SENTRY_PRIVILEGE_ERROR_MESSAGE
+ + "\n Required privileges for this query: " + permsRequired;
+ throw new HiveAccessControlException(msg, e);
+ } catch (Exception e) {
+ throw new HiveAuthzPluginException(e.getClass() + ": " + e.getMessage(), e);
+ } finally {
+ if (hiveAuthzBinding != null) {
+ hiveAuthzBinding.close();
+ }
+ }
+
+ if ("true".equalsIgnoreCase(SessionState.get().getConf()
+ .get(HiveAuthzConf.HIVE_SENTRY_MOCK_COMPILATION))) {
+ throw new HiveAccessControlException(HiveAuthzConf.HIVE_SENTRY_MOCK_ERROR
+ + " Mock query compilation aborted. Set " + HiveAuthzConf.HIVE_SENTRY_MOCK_COMPILATION
+ + " to 'false' for normal query processing");
+ }
+ }
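+
+ // Illustrative note (not part of the original patch): on a denial, the catch block above fires
+ // the configured on-failure hooks, records the missing privileges under
+ // HiveAuthzConf.HIVE_SENTRY_AUTH_ERRORS, and surfaces them to the client as a
+ // HiveAccessControlException listing the privileges required for the query.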
+
+ @VisibleForTesting
+ public HiveAuthzBinding getAuthzBinding() throws Exception {
+ return new HiveAuthzBinding(hiveHook, conf, authzConf);
+ }
+
+ private void addExtendHierarchy(HiveOperation hiveOp, HiveAuthzPrivileges stmtAuthPrivileges,
+ List<List<DBModelAuthorizable>> inputHierarchyList,
+ List<List<DBModelAuthorizable>> outputHierarchyList, String command,
+ HiveAuthzBinding hiveAuthzBinding) throws HiveAuthzPluginException,
+ HiveAccessControlException {
+ String currDatabase = null;
+ switch (stmtAuthPrivileges.getOperationScope()) {
+ case SERVER:
+ // validate server-level privileges if applicable, e.g. create UDF, register jar, etc.
+ List<DBModelAuthorizable> serverHierarchy = new ArrayList<DBModelAuthorizable>();
+ serverHierarchy.add(hiveAuthzBinding.getAuthServer());
+ inputHierarchyList.add(serverHierarchy);
+ break;
+ case DATABASE:
+ // workaround for metadata queries.
+ if (EX_DB_ALL.contains(hiveOp)) {
+ SimpleSemanticAnalyzer analyzer = new SimpleSemanticAnalyzer(hiveOp, command);
+ currDatabase = analyzer.getCurrentDb();
+
+ List<DBModelAuthorizable> externalAuthorizableHierarchy =
+ new ArrayList<DBModelAuthorizable>();
+ externalAuthorizableHierarchy.add(hiveAuthzBinding.getAuthServer());
+ externalAuthorizableHierarchy.add(new Database(currDatabase));
+
+ if (EX_DB_INPUT.contains(hiveOp)) {
+ inputHierarchyList.add(externalAuthorizableHierarchy);
+ } else {
+ outputHierarchyList.add(externalAuthorizableHierarchy);
+ }
+ }
+ break;
+ case TABLE:
+ case COLUMN:
+ // workaround for drop table/view.
+ if (EX_TB_ALL.contains(hiveOp)) {
+ SimpleSemanticAnalyzer analyzer = new SimpleSemanticAnalyzer(hiveOp, command);
+ currDatabase = analyzer.getCurrentDb();
+ String currTable = analyzer.getCurrentTb();
+
+ List<DBModelAuthorizable> externalAuthorizableHierarchy =
+ new ArrayList<DBModelAuthorizable>();
+ externalAuthorizableHierarchy.add(hiveAuthzBinding.getAuthServer());
+ externalAuthorizableHierarchy.add(new Database(currDatabase));
+ externalAuthorizableHierarchy.add(new Table(currTable));
+
+ if (EX_TB_INPUT.contains(hiveOp)) {
+ inputHierarchyList.add(externalAuthorizableHierarchy);
+ } else if (META_TB_INPUT.contains(hiveOp)) {
+ externalAuthorizableHierarchy.add(Column.SOME);
+ inputHierarchyList.add(externalAuthorizableHierarchy);
+ } else {
+ outputHierarchyList.add(externalAuthorizableHierarchy);
+ }
+ }
+ break;
+ case FUNCTION:
+ if (hiveOp.equals(HiveOperation.CREATEFUNCTION)) {
+ SimpleSemanticAnalyzer analyzer = new SimpleSemanticAnalyzer(hiveOp, command);
+ currDatabase = analyzer.getCurrentDb();
+ String udfClassName = analyzer.getCurrentTb();
+ try {
+ CodeSource udfSrc = Class.forName(udfClassName).getProtectionDomain().getCodeSource();
+ if (udfSrc == null) {
+ throw new HiveAuthzPluginException("Could not resolve the jar for UDF class "
+ + udfClassName);
+ }
+ String udfJar = udfSrc.getLocation().getPath();
+ if (udfJar == null || udfJar.isEmpty()) {
+ throw new HiveAuthzPluginException("Could not find the jar for UDF class "
+ + udfClassName + "to validate privileges");
+ }
+ AccessURI udfURI = SentryAuthorizerUtil.parseURI(udfSrc.getLocation().toString(), true);
+ List<DBModelAuthorizable> udfUriHierarchy = new ArrayList<DBModelAuthorizable>();
+ udfUriHierarchy.add(hiveAuthzBinding.getAuthServer());
+ udfUriHierarchy.add(udfURI);
+ inputHierarchyList.add(udfUriHierarchy);
+ } catch (Exception e) {
+ throw new HiveAuthzPluginException("Error retrieving udf class", e);
+ }
+ }
+ break;
+ case CONNECT:
+ /*
+ * 'CONNECT' is an implicit privilege scope currently used for USE <db>. It's allowed when
+ * the user has any privilege on the current database. For application backward
+ * compatibility, we allow (optional) implicit connect permission on the 'default' db.
+ */
+ List<DBModelAuthorizable> connectHierarchy = new ArrayList<DBModelAuthorizable>();
+ connectHierarchy.add(hiveAuthzBinding.getAuthServer());
+ if (hiveOp.equals(HiveOperation.SWITCHDATABASE)) {
+ currDatabase = command.split(" ")[1];
+ }
+ // by default allow connect access to default db
+ Table currTbl = Table.ALL;
+ Database currDB = new Database(currDatabase);
+ Column currCol = Column.ALL;
+ if (DEFAULT_DATABASE_NAME.equalsIgnoreCase(currDatabase) && "false"
+ .equalsIgnoreCase(authzConf.get(
+ HiveAuthzConf.AuthzConfVars.AUTHZ_RESTRICT_DEFAULT_DB.getVar(), "false"))) {
+ currDB = Database.ALL;
+ currTbl = Table.SOME;
+ }
+
+ connectHierarchy.add(currDB);
+ connectHierarchy.add(currTbl);
+ connectHierarchy.add(currCol);
+
+ inputHierarchyList.add(connectHierarchy);
+ break;
+ }
+ }
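+
+ // Illustrative sketch (assumption, not part of the original patch): for "USE db1" the CONNECT
+ // branch above builds the input hierarchy
+ //   [authServer, Database(db1), Table.ALL, Column.ALL]
+ // so the subsequent authorize() call succeeds if the user holds any privilege inside db1;
+ // "db1" is a hypothetical database name.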
+
+ @Override
+ public List<HivePrivilegeObject> filterListCmdObjects(List<HivePrivilegeObject> listObjs,
+ HiveAuthzContext context) {
+ if (listObjs != null && listObjs.size() >= 1) {
+ HivePrivilegeObjectType pType = listObjs.get(0).getType();
+ HiveAuthzBinding hiveAuthzBinding = null;
+ try {
+ switch (pType) {
+ case DATABASE:
+ hiveAuthzBinding = getAuthzBinding();
+ listObjs = filterShowDatabases(listObjs, authenticator.getUserName(), hiveAuthzBinding);
+ break;
+ case TABLE_OR_VIEW:
+ hiveAuthzBinding = getAuthzBinding();
+ listObjs = filterShowTables(listObjs, authenticator.getUserName(), hiveAuthzBinding);
+ break;
+ }
+ } catch (Exception e) {
+ LOG.debug(e.getMessage(), e);
+ } finally {
+ if (hiveAuthzBinding != null) {
+ hiveAuthzBinding.close();
+ }
+ }
+ }
+ return listObjs;
+ }
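+
+ // Note (not part of the original patch): for SHOW TABLES the objects arrive as TABLE_OR_VIEW
+ // and pass through filterShowTables below, which authorizes each table against a
+ // [authServer, database, table, Column.ALL] hierarchy and keeps only those that pass;
+ // SHOW DATABASES follows the analogous filterShowDatabases path.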
+
+ private List<HivePrivilegeObject> filterShowTables(List<HivePrivilegeObject> listObjs,
+ String userName, HiveAuthzBinding hiveAuthzBinding) {
+ List<HivePrivilegeObject> filteredResult = new ArrayList<HivePrivilegeObject>();
+ Subject subject = new Subject(userName);
+ HiveAuthzPrivileges tableMetaDataPrivilege =
+ new HiveAuthzPrivileges.AuthzPrivilegeBuilder()
+ .addInputObjectPriviledge(AuthorizableType.Column,
+ EnumSet.of(DBModelAction.SELECT, DBModelAction.INSERT))
+ .setOperationScope(HiveOperationScope.TABLE)
+ .setOperationType(
+ org.apache.sentry.binding.hive.authz.HiveAuthzPrivileges.HiveOperationType.INFO)
+ .build();
+
+ for (HivePrivilegeObject obj : listObjs) {
+ // if the user has privileges on the table, add it to the filtered list; otherwise discard
+ Table table = new Table(obj.getObjectName());
+ Database database = new Database(obj.getDbname());
+
+ List<List<DBModelAuthorizable>> inputHierarchy = new ArrayList<List<DBModelAuthorizable>>();
+ List<List<DBModelAuthorizable>> outputHierarchy = new ArrayList<List<DBModelAuthorizable>>();
+ List<DBModelAuthorizable> externalAuthorizableHierarchy =
+ new ArrayList<DBModelAuthorizable>();
+ externalAuthorizableHierarchy.add(hiveAuthzBinding.getAuthServer());
+ externalAuthorizableHierarchy.add(database);
+ externalAuthorizableHierarchy.add(table);
+ externalAuthorizableHierarchy.add(Column.ALL);
+ inputHierarchy.add(externalAuthorizableHierarchy);
+
+ try {
+ hiveAuthzBinding.authorize(HiveOperation.SHOWTABLES, tableMetaDataPrivilege, subject,
+ inputHierarchy, outputHierarchy);
+ filteredResult.add(obj);
+ } catch (AuthorizationException e) {
+ // squash the exception: the user doesn't have privileges, so the table is not added to
+ // the filtered list.
+ }
+ }
+ return filteredResult;
+ }
+
+ private List<HivePrivilegeObject> filterShowDatabases(List<HivePrivilegeObject> listObjs,
+ String userName, HiveAuthzBinding hiveAuthzBinding) {
+ List<HivePrivilegeObject> filteredResult = new ArrayList<HivePrivilegeObject>();
+ Subject subject = new Subject(userName);
+ HiveAuthzPrivileges anyPrivilege =
+ new HiveAuthzPrivileges.AuthzPrivilegeBuilder()
+ .addInputObjectPriviledge(
+ AuthorizableType.Column,
+ EnumSet.of(DBModelAction.SELECT, DBModelAction.INSERT, DBModelAction.ALTER,
+ DBModelAction.CREATE, DBModelAction.DROP, DBModelAction.INDEX,
+ DBModelAction.LOCK))
+ .setOperationScope(HiveOperationScope.CONNECT)
+ .setOperationType(
+ org.apache.sentry.binding.hive.authz.HiveAuthzPrivileges.HiveOperationType.QUERY)
+ .build();
+
+ for (HivePrivilegeObject obj : listObjs) {
+ // if the user has privileges on the database, add it to the filtered list; otherwise discard
+ Database database = null;
+
+ // if access to the default db is not restricted, always include it
+ if (DEFAULT_DATABASE_NAME.equalsIgnoreCase(obj.getObjectName())
+ && "false".equalsIgnoreCase(hiveAuthzBinding.getAuthzConf().get(
+ HiveAuthzConf.AuthzConfVars.AUTHZ_RESTRICT_DEFAULT_DB.getVar(), "false"))) {
+ filteredResult.add(obj);
+ continue;
+ }
+
+ database = new Database(obj.getObjectName());
+
+ List<List<DBModelAuthorizable>> inputHierarchy = new ArrayList<List<DBModelAuthorizable>>();
+ List<List<DBModelAuthorizable>> outputHierarchy = new ArrayList<List<DBModelAuthorizable>>();
+ List<DBModelAuthorizable> externalAuthorizableHierarchy =
+ new ArrayList<DBModelAuthorizable>();
+ externalAuthorizableHierarchy.add(hiveAuthzBinding.getAuthServer());
+ externalAuthorizableHierarchy.add(database);
+ externalAuthorizableHierarchy.add(Table.ALL);
+ externalAuthorizableHierarchy.add(Column.ALL);
+ inputHierarchy.add(externalAuthorizableHierarchy);
+
+ try {
+ hiveAuthzBinding.authorize(HiveOperation.SHOWDATABASES, anyPrivilege, subject,
+ inputHierarchy, outputHierarchy);
+ filteredResult.add(obj);
+ } catch (AuthorizationException e) {
+ // squash the exception: the user doesn't have privileges, so the database is not added to
+ // the filtered list.
+ }
+ }
+ return filteredResult;
+ }
+}
diff --git a/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/authorizer/SentryHiveAccessController.java b/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/authorizer/SentryHiveAccessController.java
new file mode 100644
index 000000000..26fdac803
--- /dev/null
+++ b/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/authorizer/SentryHiveAccessController.java
@@ -0,0 +1,200 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
+ * agreements. See the NOTICE file distributed with this work for additional information regarding
+ * copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License. You may obtain a
+ * copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License
+ * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
+ * or implied. See the License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.sentry.binding.hive.v2.authorizer;
+
+import java.util.List;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAccessControlException;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAccessController;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzPluginException;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrincipal;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilege;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeInfo;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveRoleGrant;
+
+/**
+ * Abstract class for access control commands, e.g. grant/revoke privileges, grant/revoke role,
+ * create/drop role.
+ */
+public abstract class SentryHiveAccessController implements HiveAccessController {
+
+ /**
+ * Hive statement: Grant privilege GRANT priv_type [, priv_type ] ... ON table_or_view_name TO
+ * principal_specification [, principal_specification] ... [WITH GRANT OPTION];
+ * principal_specification : USER user | ROLE role
+ *
+ * priv_type : INSERT | SELECT | UPDATE | DELETE | ALL
+ *
+ * @param hivePrincipals
+ * @param hivePrivileges
+ * @param hivePrivObject
+ * @param grantorPrincipal
+ * @param grantOption
+ * @throws HiveAuthzPluginException, HiveAccessControlException
+ */
+ @Override
+ public abstract void grantPrivileges(List<HivePrincipal> hivePrincipals,
+ List<HivePrivilege> hivePrivileges, HivePrivilegeObject hivePrivObject,
+ HivePrincipal grantorPrincipal, boolean grantOption) throws HiveAuthzPluginException,
+ HiveAccessControlException;
+
+ /**
+ * Hive statement: Revoke privilege REVOKE priv_type [, priv_type ] ... ON table_or_view_name FROM
+ * principal_specification [, principal_specification] ... ;
+ *
+ * principal_specification : USER user | ROLE role
+ *
+ * priv_type : INSERT | SELECT | UPDATE | DELETE | ALL
+ *
+ * @param hivePrincipals
+ * @param hivePrivileges
+ * @param hivePrivObject
+ * @param grantorPrincipal
+ * @param grantOption
+ * @throws HiveAuthzPluginException, HiveAccessControlException
+ */
+ @Override
+ public abstract void revokePrivileges(List<HivePrincipal> hivePrincipals,
+ List<HivePrivilege> hivePrivileges, HivePrivilegeObject hivePrivObject,
+ HivePrincipal grantorPrincipal, boolean grantOption) throws HiveAuthzPluginException,
+ HiveAccessControlException;
+
+ /**
+ * Hive statement: Create role CREATE ROLE role_name;
+ *
+ * @param roleName
+ * @param adminGrantor
+ * @throws HiveAuthzPluginException, HiveAccessControlException
+ */
+ @Override
+ public abstract void createRole(String roleName, HivePrincipal adminGrantor)
+ throws HiveAuthzPluginException, HiveAccessControlException;
+
+ /**
+ * Hive statement: Drop role DROP ROLE role_name;
+ *
+ * @param roleName
+ * @throws HiveAuthzPluginException, HiveAccessControlException
+ */
+ @Override
+ public abstract void dropRole(String roleName) throws HiveAuthzPluginException,
+ HiveAccessControlException;
+
+ /**
+ * Hive statement: Grant role GRANT role_name [, role_name] ... TO principal_specification [,
+ * principal_specification] ... [ WITH ADMIN OPTION ];
+ *
+ * principal_specification : USER user | ROLE role
+ *
+ * @param hivePrincipals
+ * @param roles
+ * @param grantOption
+ * @param grantorPrinc
+ * @throws HiveAuthzPluginException, HiveAccessControlException
+ */
+ @Override
+ public abstract void grantRole(List<HivePrincipal> hivePrincipals, List<String> roles,
+ boolean grantOption, HivePrincipal grantorPrinc) throws HiveAuthzPluginException,
+ HiveAccessControlException;
+
+
+ /**
+ * Hive statement: Revoke role REVOKE [ADMIN OPTION FOR] role_name [, role_name] ... FROM
+ * principal_specification [, principal_specification] ... ;
+ *
+ * principal_specification : USER user | ROLE role
+ *
+ * @param hivePrincipals
+ * @param roles
+ * @param grantOption
+ * @param grantorPrinc
+ * @throws HiveAuthzPluginException, HiveAccessControlException
+ */
+ @Override
+ public abstract void revokeRole(List<HivePrincipal> hivePrincipals, List<String> roles,
+ boolean grantOption, HivePrincipal grantorPrinc) throws HiveAuthzPluginException,
+ HiveAccessControlException;
+
+ /**
+ * Hive statement: Show roles SHOW ROLES;
+ *
+ * @throws HiveAuthzPluginException, HiveAccessControlException
+ */
+ @Override
+ public abstract List<String> getAllRoles() throws HiveAuthzPluginException,
+ HiveAccessControlException;
+
+ /**
+ * Hive statement: Show grant SHOW GRANT [principal_name] ON (ALL | [TABLE] table_or_view_name);
+ *
+ * @param principal
+ * @param privObj
+ * @throws HiveAuthzPluginException, HiveAccessControlException
+ */
+ @Override
+ public abstract List<HivePrivilegeInfo> showPrivileges(HivePrincipal principal,
+ HivePrivilegeObject privObj) throws HiveAuthzPluginException, HiveAccessControlException;
+
+ /**
+ * Hive statement: Set role SET ROLE (role_name|ALL);
+ *
+ * @param roleName
+ * @throws HiveAuthzPluginException, HiveAccessControlException
+ */
+ @Override
+ public abstract void setCurrentRole(String roleName) throws HiveAuthzPluginException,
+ HiveAccessControlException;
+
+ /**
+ * Hive statement: Show current roles SHOW CURRENT ROLES;
+ *
+ * @throws HiveAuthzPluginException
+ */
+ @Override
+ public abstract List<String> getCurrentRoleNames() throws HiveAuthzPluginException;
+
+ /**
+ * Hive statement: Show role principals SHOW PRINCIPALS role_name;
+ *
+ * @param roleName
+ * @throws HiveAuthzPluginException, HiveAccessControlException
+ */
+ @Override
+ public abstract List<HiveRoleGrant> getPrincipalGrantInfoForRole(String roleName)
+ throws HiveAuthzPluginException, HiveAccessControlException;
+
+ /**
+ * Hive statement: Show role grant SHOW ROLE GRANT (USER|ROLE) principal_name;
+ *
+ * @param principal
+ * @throws HiveAuthzPluginException, HiveAccessControlException
+ */
+ @Override
+ public abstract List<HiveRoleGrant> getRoleGrantInfoForPrincipal(HivePrincipal principal)
+ throws HiveAuthzPluginException, HiveAccessControlException;
+
+ /**
+ * Apply configuration files for authorization V2
+ *
+ * @param hiveConf
+ * @throws HiveAuthzPluginException
+ */
+ @Override
+ public abstract void applyAuthorizationConfigPolicy(HiveConf hiveConf)
+ throws HiveAuthzPluginException;
+
+}
diff --git a/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/authorizer/SentryHiveAuthorizationValidator.java b/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/authorizer/SentryHiveAuthorizationValidator.java
new file mode 100644
index 000000000..7bf7b8722
--- /dev/null
+++ b/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/authorizer/SentryHiveAuthorizationValidator.java
@@ -0,0 +1,58 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
+ * agreements. See the NOTICE file distributed with this work for additional information regarding
+ * copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License. You may obtain a
+ * copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License
+ * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
+ * or implied. See the License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.sentry.binding.hive.v2.authorizer;
+
+import java.util.List;
+
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAccessControlException;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizationValidator;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzContext;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzPluginException;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject;
+
+/**
+ * This class is used to perform authorization validation: check whether the current user has
+ * privileges for the operation, and filter the returned results.
+ */
+public abstract class SentryHiveAuthorizationValidator implements HiveAuthorizationValidator {
+
+ /**
+ * Check if current user has privileges to perform given operation type hiveOpType on the given
+ * input and output objects.
+ *
+ * @param hiveOpType
+ * @param inputHObjs
+ * @param outputHObjs
+ * @param context
+ * @throws HiveAuthzPluginException, HiveAccessControlException
+ */
+ @Override
+ public abstract void checkPrivileges(HiveOperationType hiveOpType,
+ List<HivePrivilegeObject> inputHObjs, List<HivePrivilegeObject> outputHObjs,
+ HiveAuthzContext context) throws HiveAuthzPluginException, HiveAccessControlException;
+
+
+ /**
+ * Filter the returned results according to the current user's permissions: remove each object
+ * on which the current user does not have any privilege.
+ *
+ * @param listObjs
+ * @param context
+ */
+ @Override
+ public abstract List<HivePrivilegeObject> filterListCmdObjects(
+ List<HivePrivilegeObject> listObjs, HiveAuthzContext context);
+}
diff --git a/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/authorizer/SentryHiveAuthorizer.java b/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/authorizer/SentryHiveAuthorizer.java
new file mode 100644
index 000000000..14b952f55
--- /dev/null
+++ b/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/authorizer/SentryHiveAuthorizer.java
@@ -0,0 +1,192 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
+ * agreements. See the NOTICE file distributed with this work for additional information regarding
+ * copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License. You may obtain a
+ * copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License
+ * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
+ * or implied. See the License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.sentry.binding.hive.v2.authorizer;
+
+import java.util.List;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.exec.SentryHivePrivilegeObjectDesc;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.plan.PrivilegeObjectDesc;
+import org.apache.hadoop.hive.ql.security.authorization.DefaultHiveAuthorizationTranslator;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAccessControlException;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizationTranslator;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizer;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzContext;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzPluginException;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrincipal;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilege;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeInfo;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject.HivePrivilegeObjectType;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveRoleGrant;
+import org.apache.sentry.binding.hive.v2.SentryHivePrivilegeObject;
+
+/**
+ * Convenience implementation of HiveAuthorizer. You can customize the behavior by passing different
+ * implementations of {@link SentryHiveAccessController} and
+ * {@link SentryHiveAuthorizationValidator} to the constructor.
+ */
+public class SentryHiveAuthorizer implements HiveAuthorizer {
+
+ private SentryHiveAccessController accessController;
+ private SentryHiveAuthorizationValidator authValidator;
+ private static HiveAuthorizationTranslator hiveTranslator =
+ new SentryHiveAuthorizationTranslator();
+
+ public SentryHiveAuthorizer(SentryHiveAccessController accessController,
+ SentryHiveAuthorizationValidator authValidator) {
+ this.accessController = accessController;
+ this.authValidator = authValidator;
+ }
+
+ @Override
+ public void grantPrivileges(List<HivePrincipal> hivePrincipals,
+ List<HivePrivilege> hivePrivileges, HivePrivilegeObject hivePrivObject,
+ HivePrincipal grantorPrincipal, boolean grantOption) throws HiveAuthzPluginException,
+ HiveAccessControlException {
+ accessController.grantPrivileges(hivePrincipals, hivePrivileges, hivePrivObject,
+ grantorPrincipal, grantOption);
+ }
+
+ @Override
+ public void revokePrivileges(List<HivePrincipal> hivePrincipals,
+ List<HivePrivilege> hivePrivileges, HivePrivilegeObject hivePrivObject,
+ HivePrincipal grantorPrincipal, boolean grantOption) throws HiveAuthzPluginException,
+ HiveAccessControlException {
+ accessController.revokePrivileges(hivePrincipals, hivePrivileges, hivePrivObject,
+ grantorPrincipal, grantOption);
+ }
+
+ @Override
+ public void createRole(String roleName, HivePrincipal adminGrantor)
+ throws HiveAuthzPluginException, HiveAccessControlException {
+ accessController.createRole(roleName, adminGrantor);
+ }
+
+ @Override
+ public void dropRole(String roleName) throws HiveAuthzPluginException, HiveAccessControlException {
+ accessController.dropRole(roleName);
+ }
+
+ @Override
+ public void grantRole(List<HivePrincipal> hivePrincipals, List<String> roles,
+ boolean grantOption, HivePrincipal grantorPrinc) throws HiveAuthzPluginException,
+ HiveAccessControlException {
+ accessController.grantRole(hivePrincipals, roles, grantOption, grantorPrinc);
+ }
+
+ @Override
+ public void revokeRole(List<HivePrincipal> hivePrincipals, List<String> roles,
+ boolean grantOption, HivePrincipal grantorPrinc) throws HiveAuthzPluginException,
+ HiveAccessControlException {
+ accessController.revokeRole(hivePrincipals, roles, grantOption, grantorPrinc);
+ }
+
+ @Override
+ public void checkPrivileges(HiveOperationType hiveOpType, List<HivePrivilegeObject> inputHObjs,
+ List<HivePrivilegeObject> outputHObjs, HiveAuthzContext context)
+ throws HiveAuthzPluginException, HiveAccessControlException {
+ authValidator.checkPrivileges(hiveOpType, inputHObjs, outputHObjs, context);
+ }
+
+ @Override
+ public List<String> getAllRoles() throws HiveAuthzPluginException, HiveAccessControlException {
+ return accessController.getAllRoles();
+ }
+
+ @Override
+ public List<HivePrivilegeInfo> showPrivileges(HivePrincipal principal, HivePrivilegeObject privObj)
+ throws HiveAuthzPluginException, HiveAccessControlException {
+ return accessController.showPrivileges(principal, privObj);
+ }
+
+ @Override
+ public VERSION getVersion() {
+ return VERSION.V1;
+ }
+
+ @Override
+ public void setCurrentRole(String roleName) throws HiveAccessControlException,
+ HiveAuthzPluginException {
+ accessController.setCurrentRole(roleName);
+ }
+
+ @Override
+ public List<String> getCurrentRoleNames() throws HiveAuthzPluginException {
+ return accessController.getCurrentRoleNames();
+ }
+
+ @Override
+ public List<HiveRoleGrant> getPrincipalGrantInfoForRole(String roleName)
+ throws HiveAuthzPluginException, HiveAccessControlException {
+ return accessController.getPrincipalGrantInfoForRole(roleName);
+ }
+
+ @Override
+ public List<HiveRoleGrant> getRoleGrantInfoForPrincipal(HivePrincipal principal)
+ throws HiveAuthzPluginException, HiveAccessControlException {
+ return accessController.getRoleGrantInfoForPrincipal(principal);
+ }
+
+ @Override
+ public void applyAuthorizationConfigPolicy(HiveConf hiveConf) throws HiveAuthzPluginException {
+ accessController.applyAuthorizationConfigPolicy(hiveConf);
+ }
+
+ @Override
+ public List<HivePrivilegeObject> filterListCmdObjects(List<HivePrivilegeObject> listObjs,
+ HiveAuthzContext context) throws HiveAuthzPluginException, HiveAccessControlException {
+ return authValidator.filterListCmdObjects(listObjs, context);
+ }
+
+ protected static HivePrivilegeObjectType getPrivObjectType(
+ SentryHivePrivilegeObjectDesc privSubjectDesc) {
+ if (privSubjectDesc.getObject() == null) {
+ return null;
+ }
+ if (privSubjectDesc.getServer()) {
+ return HivePrivilegeObjectType.GLOBAL;
+ } else if (privSubjectDesc.getUri()) {
+ return HivePrivilegeObjectType.LOCAL_URI;
+ } else {
+ return privSubjectDesc.getTable() ? HivePrivilegeObjectType.TABLE_OR_VIEW
+ : HivePrivilegeObjectType.DATABASE;
+ }
+ }
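+
+ // Illustrative sketch (assumption, not part of the original patch): for
+ //   GRANT ALL ON URI 'hdfs:///tmp/data' TO ROLE r1;
+ // the desc has getUri() == true, so this resolves to LOCAL_URI here; both URI flavors are then
+ // handled identically by the access controller's URI branch. The URI and role name are
+ // hypothetical.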
+
+ @Override
+ public Object getHiveAuthorizationTranslator() throws HiveAuthzPluginException {
+ return hiveTranslator;
+ }
+
+ private static class SentryHiveAuthorizationTranslator extends DefaultHiveAuthorizationTranslator {
+
+ @Override
+ public HivePrivilegeObject getHivePrivilegeObject(PrivilegeObjectDesc privSubjectDesc)
+ throws HiveException {
+ if (privSubjectDesc != null && privSubjectDesc instanceof SentryHivePrivilegeObjectDesc) {
+ SentryHivePrivilegeObjectDesc sPrivSubjectDesc =
+ (SentryHivePrivilegeObjectDesc) privSubjectDesc;
+ if (sPrivSubjectDesc.isSentryPrivObjectDesc()) {
+ HivePrivilegeObjectType objectType = getPrivObjectType(sPrivSubjectDesc);
+ return new SentryHivePrivilegeObject(objectType, privSubjectDesc.getObject());
+ }
+ }
+ return super.getHivePrivilegeObject(privSubjectDesc);
+ }
+ }
+}
diff --git a/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/metastore/AuthorizingObjectStoreV2.java b/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/metastore/AuthorizingObjectStoreV2.java
new file mode 100644
index 000000000..726f5ad81
--- /dev/null
+++ b/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/metastore/AuthorizingObjectStoreV2.java
@@ -0,0 +1,412 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.sentry.binding.hive.v2.metastore;
+
+import java.io.IOException;
+import java.net.MalformedURLException;
+import java.net.URL;
+import java.util.List;
+import java.util.Set;
+
+import javax.security.auth.login.LoginException;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.ObjectStore;
+import org.apache.hadoop.hive.metastore.api.ColumnStatistics;
+import org.apache.hadoop.hive.metastore.api.Database;
+import org.apache.hadoop.hive.metastore.api.Index;
+import org.apache.hadoop.hive.metastore.api.InvalidObjectException;
+import org.apache.hadoop.hive.metastore.api.MetaException;
+import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
+import org.apache.hadoop.hive.metastore.api.Partition;
+import org.apache.hadoop.hive.metastore.api.Table;
+import org.apache.hadoop.hive.metastore.api.UnknownDBException;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hadoop.hive.ql.plan.HiveOperation;
+import org.apache.hadoop.hive.shims.Utils;
+import org.apache.sentry.binding.hive.HiveAuthzBindingHook;
+import org.apache.sentry.binding.hive.authz.HiveAuthzBinding;
+import org.apache.sentry.binding.hive.conf.HiveAuthzConf;
+import org.apache.sentry.binding.hive.conf.HiveAuthzConf.AuthzConfVars;
+
+import com.google.common.collect.ImmutableSet;
+import com.google.common.collect.Lists;
+import com.google.common.collect.Sets;
+
+/***
+ * This class is a wrapper around ObjectStore, which is the interface between the
+ * application logic and the database store. It performs authorization and filters
+ * results when processing metastore requests.
+ * e.g.:
+ * Callers only receive back the objects they have privileges to access.
+ * If there is a request for an object list (like getAllTables()), the result
+ * is filtered to exclude the objects the requestor doesn't have privileges to
+ * access.
+ */
+public class AuthorizingObjectStoreV2 extends ObjectStore {
+ private static ImmutableSet<String> serviceUsers;
+ private static HiveConf hiveConf;
+ private static HiveAuthzConf authzConf;
+ private static HiveAuthzBinding hiveAuthzBinding;
+ private static final String NO_ACCESS_MESSAGE_TABLE = "Table does not exist or insufficient privileges to access: ";
+ private static final String NO_ACCESS_MESSAGE_DATABASE = "Database does not exist or insufficient privileges to access: ";
+
+ @Override
+ public List<String> getDatabases(String pattern) throws MetaException {
+ return filterDatabases(super.getDatabases(pattern));
+ }
+
+ @Override
+ public List<String> getAllDatabases() throws MetaException {
+ return filterDatabases(super.getAllDatabases());
+ }
+
+ @Override
+ public Database getDatabase(String name) throws NoSuchObjectException {
+ Database db = super.getDatabase(name);
+ try {
+ if (filterDatabases(Lists.newArrayList(name)).isEmpty()) {
+ throw new NoSuchObjectException(getNoAccessMessageForDB(name));
+ }
+ } catch (MetaException e) {
+ throw new NoSuchObjectException("Failed to authorized access to " + name
+ + " : " + e.getMessage());
+ }
+ return db;
+ }
+
+ @Override
+ public Table getTable(String dbName, String tableName) throws MetaException {
+ Table table = super.getTable(dbName, tableName);
+ if (table == null
+ || filterTables(dbName, Lists.newArrayList(tableName)).isEmpty()) {
+ return null;
+ }
+ return table;
+ }
+
+ @Override
+ public Partition getPartition(String dbName, String tableName,
+ List<String> part_vals) throws MetaException, NoSuchObjectException {
+ if (filterTables(dbName, Lists.newArrayList(tableName)).isEmpty()) {
+ throw new NoSuchObjectException(getNoAccessMessageForTable(dbName, tableName));
+ }
+ return super.getPartition(dbName, tableName, part_vals);
+ }
+
+ @Override
+ public List<Partition> getPartitions(String dbName, String tableName,
+ int maxParts) throws MetaException, NoSuchObjectException {
+ if (filterTables(dbName, Lists.newArrayList(tableName)).isEmpty()) {
+ throw new MetaException(getNoAccessMessageForTable(dbName, tableName));
+ }
+ return super.getPartitions(dbName, tableName, maxParts);
+ }
+
+ @Override
+ public List<String> getTables(String dbName, String pattern)
+ throws MetaException {
+ return filterTables(dbName, super.getTables(dbName, pattern));
+ }
+
+ @Override
+ public List<Table> getTableObjectsByName(String dbname, List<String> tableNames)
+ throws MetaException, UnknownDBException {
+ return super.getTableObjectsByName(dbname, filterTables(dbname, tableNames));
+ }
+
+ @Override
+ public List<String> getAllTables(String dbName) throws MetaException {
+ return filterTables(dbName, super.getAllTables(dbName));
+ }
+
+ @Override
+ public List<String> listTableNamesByFilter(String dbName, String filter,
+ short maxTables) throws MetaException {
+ return filterTables(dbName,
+ super.listTableNamesByFilter(dbName, filter, maxTables));
+ }
+
+ @Override
+ public List<String> listPartitionNames(String dbName, String tableName,
+ short max_parts) throws MetaException {
+ if (filterTables(dbName, Lists.newArrayList(tableName)).isEmpty()) {
+ throw new MetaException(getNoAccessMessageForTable(dbName, tableName));
+ }
+ return super.listPartitionNames(dbName, tableName, max_parts);
+ }
+
+ @Override
+ public List<String> listPartitionNamesByFilter(String dbName,
+ String tableName, String filter, short max_parts) throws MetaException {
+ if (filterTables(dbName, Lists.newArrayList(tableName)).isEmpty()) {
+ throw new MetaException(getNoAccessMessageForTable(dbName, tableName));
+ }
+ return super.listPartitionNamesByFilter(dbName, tableName, filter,
+ max_parts);
+ }
+
+ @Override
+ public Index getIndex(String dbName, String origTableName, String indexName)
+ throws MetaException {
+ if (filterTables(dbName, Lists.newArrayList(origTableName)).isEmpty()) {
+ throw new MetaException(getNoAccessMessageForTable(dbName, origTableName));
+ }
+ return super.getIndex(dbName, origTableName, indexName);
+ }
+
+ @Override
+ public List<Index> getIndexes(String dbName, String origTableName, int max)
+ throws MetaException {
+ if (filterTables(dbName, Lists.newArrayList(origTableName)).isEmpty()) {
+ throw new MetaException(getNoAccessMessageForTable(dbName, origTableName));
+ }
+ return super.getIndexes(dbName, origTableName, max);
+ }
+
+ @Override
+ public List<String> listIndexNames(String dbName, String origTableName,
+ short max) throws MetaException {
+ if (filterTables(dbName, Lists.newArrayList(origTableName)).isEmpty()) {
+ throw new MetaException(getNoAccessMessageForTable(dbName, origTableName));
+ }
+ return super.listIndexNames(dbName, origTableName, max);
+ }
+
+ @Override
+ public List<Partition> getPartitionsByFilter(String dbName,
+ String tblName, String filter, short maxParts) throws MetaException,
+ NoSuchObjectException {
+ if (filterTables(dbName, Lists.newArrayList(tblName)).isEmpty()) {
+ throw new MetaException(getNoAccessMessageForTable(dbName, tblName));
+ }
+ return super.getPartitionsByFilter(dbName, tblName, filter, maxParts);
+ }
+
+ @Override
+ public List<Partition> getPartitionsByNames(String dbName, String tblName,
+ List<String> partNames) throws MetaException, NoSuchObjectException {
+ if (filterTables(dbName, Lists.newArrayList(tblName)).isEmpty()) {
+ throw new MetaException(getNoAccessMessageForTable(dbName, tblName));
+ }
+ return super.getPartitionsByNames(dbName, tblName, partNames);
+ }
+
+ @Override
+ public Partition getPartitionWithAuth(String dbName, String tblName,
+ List<String> partVals, String user_name, List<String> group_names)
+ throws MetaException, NoSuchObjectException, InvalidObjectException {
+ if (filterTables(dbName, Lists.newArrayList(tblName)).isEmpty()) {
+ throw new MetaException(getNoAccessMessageForTable(dbName, tblName));
+ }
+ return super.getPartitionWithAuth(dbName, tblName, partVals, user_name,
+ group_names);
+ }
+
+ @Override
+ public List<Partition> getPartitionsWithAuth(String dbName, String tblName,
+ short maxParts, String userName, List<String> groupNames)
+ throws MetaException, InvalidObjectException {
+ if (filterTables(dbName, Lists.newArrayList(tblName)).isEmpty()) {
+ throw new MetaException(getNoAccessMessageForTable(dbName, tblName));
+ }
+ return super.getPartitionsWithAuth(dbName, tblName, maxParts, userName,
+ groupNames);
+ }
+
+ @Override
+ public List<String> listPartitionNamesPs(String dbName, String tblName,
+ List<String> part_vals, short max_parts) throws MetaException,
+ NoSuchObjectException {
+ if (filterTables(dbName, Lists.newArrayList(tblName)).isEmpty()) {
+ throw new MetaException(getNoAccessMessageForTable(dbName, tblName));
+ }
+ return super.listPartitionNamesPs(dbName, tblName, part_vals, max_parts);
+ }
+
+ @Override
+ public List<Partition> listPartitionsPsWithAuth(String dbName,
+ String tblName, List<String> part_vals, short max_parts, String userName,
+ List<String> groupNames) throws MetaException, InvalidObjectException,
+ NoSuchObjectException {
+ if (filterTables(dbName, Lists.newArrayList(tblName)).isEmpty()) {
+ throw new MetaException(getNoAccessMessageForTable(dbName, tblName));
+ }
+ return super.listPartitionsPsWithAuth(dbName, tblName, part_vals,
+ max_parts, userName, groupNames);
+ }
+
+ @Override
+ public ColumnStatistics getTableColumnStatistics(String dbName,
+ String tableName, List<String> colNames) throws MetaException,
+ NoSuchObjectException {
+ if (filterTables(dbName, Lists.newArrayList(tableName)).isEmpty()) {
+ throw new MetaException(getNoAccessMessageForTable(dbName, tableName));
+ }
+ return super.getTableColumnStatistics(dbName, tableName, colNames);
+ }
+
+ @Override
+ public List<ColumnStatistics> getPartitionColumnStatistics(
+ String dbName, String tblName, List<String> partNames,
+ List<String> colNames) throws MetaException, NoSuchObjectException {
+ if (filterTables(dbName, Lists.newArrayList(tblName)).isEmpty()) {
+ throw new MetaException(getNoAccessMessageForTable(dbName, tblName));
+ }
+ return super.getPartitionColumnStatistics(dbName, tblName, partNames,
+ colNames);
+ }
+
+ /**
+ * Invoke Hive database filtering to remove the entries the user has no
+ * privileges to access.
+ * @param dbList
+ * @return the filtered database list
+ * @throws MetaException
+ */
+ private List<String> filterDatabases(List<String> dbList)
+ throws MetaException {
+ if (needsAuthorization(getUserName())) {
+ try {
+ return HiveAuthzBindingHook.filterShowDatabases(getHiveAuthzBinding(),
+ dbList, HiveOperation.SHOWDATABASES, getUserName());
+ } catch (SemanticException e) {
+ throw new MetaException("Error getting DB list " + e.getMessage());
+ }
+ } else {
+ return dbList;
+ }
+ }
+
+ /**
+ * Invoke Hive table filtering to remove the entries the user has no
+ * privileges to access.
+ * @param dbName
+ * @param tabList
+ * @return the filtered table list
+ * @throws MetaException
+ */
+ protected List<String> filterTables(String dbName, List<String> tabList)
+ throws MetaException {
+ if (needsAuthorization(getUserName())) {
+ try {
+ return HiveAuthzBindingHook.filterShowTables(getHiveAuthzBinding(),
+ tabList, HiveOperation.SHOWTABLES, getUserName(), dbName);
+ } catch (SemanticException e) {
+ throw new MetaException("Error getting Table list " + e.getMessage());
+ }
+ } else {
+ return tabList;
+ }
+ }
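+
+ // Note (not part of the original patch): a metastore call such as getAllTables("db1") first
+ // fetches the full table list from the underlying ObjectStore and then runs it through
+ // HiveAuthzBindingHook.filterShowTables(...) above, so a caller without privileges on a table
+ // never sees its name; "db1" is a hypothetical database name.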
+
+ /**
+ * Load the Hive authorization binding, creating and caching it on first use.
+ *
+ * @return the HiveAuthzBinding instance
+ * @throws MetaException
+ */
+ private HiveAuthzBinding getHiveAuthzBinding() throws MetaException {
+ if (hiveAuthzBinding == null) {
+ try {
+ hiveAuthzBinding = new HiveAuthzBinding(HiveAuthzBinding.HiveHook.HiveMetaStore,
+ getHiveConf(), getAuthzConf());
+ } catch (Exception e) {
+ throw new MetaException("Failed to load Hive binding " + e.getMessage());
+ }
+ }
+ return hiveAuthzBinding;
+ }
+
+ private ImmutableSet<String> getServiceUsers() throws MetaException {
+ if (serviceUsers == null) {
+ serviceUsers = ImmutableSet.copyOf(toTrimmed(Sets.newHashSet(getAuthzConf().getStrings(
+ AuthzConfVars.AUTHZ_METASTORE_SERVICE_USERS.getVar(), new String[] { "" }))));
+ }
+ return serviceUsers;
+ }
+
+ private HiveConf getHiveConf() {
+ if (hiveConf == null) {
+ hiveConf = new HiveConf(getConf(), this.getClass());
+ }
+ return hiveConf;
+ }
+
+ private HiveAuthzConf getAuthzConf() throws MetaException {
+ if (authzConf == null) {
+ String hiveAuthzConf = getConf().get(HiveAuthzConf.HIVE_SENTRY_CONF_URL);
+ if (hiveAuthzConf == null
+ || (hiveAuthzConf = hiveAuthzConf.trim()).isEmpty()) {
+ throw new MetaException("Configuration key "
+ + HiveAuthzConf.HIVE_SENTRY_CONF_URL + " value '" + hiveAuthzConf
+ + "' is invalid.");
+ }
+ try {
+ authzConf = new HiveAuthzConf(new URL(hiveAuthzConf));
+ } catch (MalformedURLException e) {
+ throw new MetaException("Configuration key "
+ + HiveAuthzConf.HIVE_SENTRY_CONF_URL
+ + " specifies a malformed URL '" + hiveAuthzConf + "' "
+ + e.getMessage());
+ }
+ }
+ return authzConf;
+ }
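+
+  // Illustrative hive-site.xml entry consumed above (the path is an example):
+  //   <property>
+  //     <name>hive.sentry.conf.url</name>
+  //     <value>file:///etc/sentry/conf/sentry-site.xml</value>
+  //   </property>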
+
+  /**
+   * Extract the user name from the underlying auth subsystem.
+   * @return the short user name
+   * @throws MetaException
+   */
+ private String getUserName() throws MetaException {
+ try {
+ return Utils.getUGI().getShortUserName();
+ } catch (LoginException e) {
+ throw new MetaException("Failed to get username " + e.getMessage());
+ } catch (IOException e) {
+ throw new MetaException("Failed to get username " + e.getMessage());
+ }
+ }
+
+  /**
+   * Check if the given user needs to be authorized.
+   * @param userName
+   * @return true if the user is not a configured service user
+   */
+ private boolean needsAuthorization(String userName) throws MetaException {
+ return !getServiceUsers().contains(userName.trim());
+ }
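+
+  // For example, if AuthzConfVars.AUTHZ_METASTORE_SERVICE_USERS is configured as
+  // "hive,impala" (illustrative values), requests from those users bypass
+  // filtering entirely.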
+
+  private static Set<String> toTrimmed(Set<String> s) {
+    Set<String> result = Sets.newHashSet();
+ for (String v : s) {
+ result.add(v.trim());
+ }
+ return result;
+ }
+
+ protected String getNoAccessMessageForTable(String dbName, String tableName) {
+ return NO_ACCESS_MESSAGE_TABLE + "<" + dbName + ">.<" + tableName + ">";
+ }
+
+ private String getNoAccessMessageForDB(String dbName) {
+ return NO_ACCESS_MESSAGE_DATABASE + "<" + dbName + ">";
+ }
+}
diff --git a/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/metastore/MetastoreAuthzBindingV2.java b/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/metastore/MetastoreAuthzBindingV2.java
new file mode 100644
index 000000000..d9374910e
--- /dev/null
+++ b/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/metastore/MetastoreAuthzBindingV2.java
@@ -0,0 +1,54 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.sentry.binding.hive.v2.metastore;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.metastore.api.InvalidOperationException;
+import org.apache.hadoop.hive.metastore.api.MetaException;
+import org.apache.hadoop.hive.metastore.events.PreDropPartitionEvent;
+import org.apache.hadoop.hive.ql.plan.HiveOperation;
+import org.apache.sentry.binding.metastore.MetastoreAuthzBinding;
+
+/**
+ * Sentry binding for the Hive Metastore. The binding is integrated into the
+ * Metastore via a pre-event listener, which is fired prior to executing the
+ * metadata action. At this point we only authorize metadata writes, since the
+ * listeners are not fired for read events. Each action builds an input and
+ * output hierarchy for the objects used in the given operation. This is then
+ * passed down to the hive binding, which handles the authorization. This
+ * ensures that we follow the same privilege model and policies.
+ */
+public class MetastoreAuthzBindingV2 extends MetastoreAuthzBinding {
+
+ public MetastoreAuthzBindingV2(Configuration config) throws Exception {
+ super(config);
+ }
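+
+  // Wired into the metastore through Hive's standard pre-event listener setting
+  // (illustrative snippet):
+  //   <property>
+  //     <name>hive.metastore.pre.event.listeners</name>
+  //     <value>org.apache.sentry.binding.hive.v2.metastore.MetastoreAuthzBindingV2</value>
+  //   </property>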
+
+ protected void authorizeDropPartition(PreDropPartitionEvent context)
+ throws InvalidOperationException, MetaException {
+ authorizeMetastoreAccess(
+ HiveOperation.ALTERTABLE_DROPPARTS,
+ new HierarcyBuilder().addTableToOutput(getAuthServer(),
+ context.getTable().getDbName(),
+ context.getTable().getTableName()).build(),
+ new HierarcyBuilder().addTableToOutput(getAuthServer(),
+ context.getTable().getDbName(),
+ context.getTable().getTableName()).build());
+ }
+
+}
diff --git a/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/metastore/SentryMetastorePostEventListenerV2.java b/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/metastore/SentryMetastorePostEventListenerV2.java
new file mode 100644
index 000000000..013d01628
--- /dev/null
+++ b/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/metastore/SentryMetastorePostEventListenerV2.java
@@ -0,0 +1,73 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.sentry.binding.hive.v2.metastore;
+
+import java.util.Iterator;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.metastore.api.MetaException;
+import org.apache.hadoop.hive.metastore.api.Partition;
+import org.apache.hadoop.hive.metastore.events.AddPartitionEvent;
+import org.apache.hadoop.hive.metastore.events.DropPartitionEvent;
+import org.apache.sentry.binding.metastore.SentryMetastorePostEventListener;
+import org.apache.sentry.provider.db.SentryMetastoreListenerPlugin;
+
+public class SentryMetastorePostEventListenerV2 extends SentryMetastorePostEventListener {
+
+ public SentryMetastorePostEventListenerV2(Configuration config) {
+ super(config);
+ }
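+
+  // Registered as a post-event listener (illustrative snippet):
+  //   <property>
+  //     <name>hive.metastore.event.listeners</name>
+  //     <value>org.apache.sentry.binding.hive.v2.metastore.SentryMetastorePostEventListenerV2</value>
+  //   </property>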
+
+ @Override
+ public void onAddPartition(AddPartitionEvent partitionEvent)
+ throws MetaException {
+ if (partitionEvent != null && partitionEvent.getPartitionIterator() != null) {
+      Iterator<Partition> it = partitionEvent.getPartitionIterator();
+ while (it.hasNext()) {
+ Partition part = it.next();
+ if (part.getSd() != null && part.getSd().getLocation() != null) {
+ String authzObj = part.getDbName() + "." + part.getTableName();
+ String path = part.getSd().getLocation();
+ for (SentryMetastoreListenerPlugin plugin : sentryPlugins) {
+ plugin.addPath(authzObj, path);
+ }
+ }
+ }
+ }
+ }
+
+ @Override
+ public void onDropPartition(DropPartitionEvent partitionEvent)
+ throws MetaException {
+ if (partitionEvent != null && partitionEvent.getPartitionIterator() != null) {
+ String authzObj = partitionEvent.getTable().getDbName() + "."
+ + partitionEvent.getTable().getTableName();
+      Iterator<Partition> it = partitionEvent.getPartitionIterator();
+ while (it.hasNext()) {
+ Partition part = it.next();
+ if (part.getSd() != null && part.getSd().getLocation() != null) {
+ String path = part.getSd().getLocation();
+ for (SentryMetastoreListenerPlugin plugin : sentryPlugins) {
+ plugin.removePath(authzObj, path);
+ }
+ }
+ }
+ }
+ }
+
+}
diff --git a/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/util/SentryAuthorizerUtil.java b/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/util/SentryAuthorizerUtil.java
new file mode 100644
index 000000000..35bd68ce7
--- /dev/null
+++ b/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/util/SentryAuthorizerUtil.java
@@ -0,0 +1,362 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
+ * agreements. See the NOTICE file distributed with this work for additional information regarding
+ * copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License. You may obtain a
+ * copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License
+ * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
+ * or implied. See the License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.sentry.binding.hive.v2.util;
+
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Set;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.common.JavaUtils;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
+import org.apache.hadoop.hive.metastore.api.PrincipalType;
+import org.apache.hadoop.hive.ql.hooks.Hook;
+import org.apache.hadoop.hive.ql.metadata.AuthorizationException;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hadoop.hive.ql.plan.HiveOperation;
+import org.apache.hadoop.hive.ql.security.authorization.PrivilegeType;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrincipal;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrincipal.HivePrincipalType;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilege;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeInfo;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject.HivePrivilegeObjectType;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveRoleGrant;
+import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.sentry.binding.hive.SentryOnFailureHook;
+import org.apache.sentry.binding.hive.SentryOnFailureHookContext;
+import org.apache.sentry.binding.hive.conf.HiveAuthzConf;
+import org.apache.sentry.core.common.utils.PathUtils;
+import org.apache.sentry.core.model.db.AccessConstants;
+import org.apache.sentry.core.model.db.AccessURI;
+import org.apache.sentry.core.model.db.Column;
+import org.apache.sentry.core.model.db.DBModelAuthorizable;
+import org.apache.sentry.core.model.db.Database;
+import org.apache.sentry.core.model.db.Server;
+import org.apache.sentry.core.model.db.Table;
+import org.apache.sentry.provider.db.service.thrift.TSentryGrantOption;
+import org.apache.sentry.provider.db.service.thrift.TSentryPrivilege;
+import org.apache.sentry.provider.db.service.thrift.TSentryRole;
+import org.apache.sentry.service.thrift.ServiceConstants.PrivilegeScope;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.google.common.base.Splitter;
+
+public class SentryAuthorizerUtil {
+ public static final Logger LOG = LoggerFactory.getLogger(SentryAuthorizerUtil.class);
+  public static final String UNKNOWN_GRANTOR = "--";
+
+  /**
+   * Convert a path string to an AccessURI, resolving it against the warehouse directory.
+   *
+   * @param uri
+   * @param isLocal
+   * @throws URISyntaxException
+   */
+ public static AccessURI parseURI(String uri, boolean isLocal) throws URISyntaxException {
+ HiveConf conf = SessionState.get().getConf();
+ String warehouseDir = conf.getVar(ConfVars.METASTOREWAREHOUSE);
+ return new AccessURI(PathUtils.parseURI(warehouseDir, uri, isLocal));
+ }
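+
+  // e.g. parseURI("/tmp/data", false) qualifies the path against the warehouse
+  // location, yielding something like "hdfs://nn:8020/tmp/data" (illustrative;
+  // the exact result depends on hive.metastore.warehouse.dir).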
+
+  /**
+   * Convert a HivePrivilegeObject to a list of DBModelAuthorizable hierarchies.
+   * Note: Hive 0.13 does not support column-level privilege objects here.
+   *
+   * @param server
+   * @param privilege
+   */
+  public static List<List<DBModelAuthorizable>> getAuthzHierarchy(Server server,
+      HivePrivilegeObject privilege) {
+    List<DBModelAuthorizable> baseHierarchy = new ArrayList<DBModelAuthorizable>();
+    List<List<DBModelAuthorizable>> objectHierarchy = new ArrayList<List<DBModelAuthorizable>>();
+ boolean isLocal = false;
+ if (privilege.getType() != null) {
+ switch (privilege.getType()) {
+ case GLOBAL:
+ baseHierarchy.add(new Server(privilege.getObjectName()));
+ objectHierarchy.add(baseHierarchy);
+ break;
+ case DATABASE:
+ baseHierarchy.add(server);
+ baseHierarchy.add(new Database(privilege.getDbname()));
+ objectHierarchy.add(baseHierarchy);
+ break;
+ case TABLE_OR_VIEW:
+ baseHierarchy.add(server);
+ baseHierarchy.add(new Database(privilege.getDbname()));
+ baseHierarchy.add(new Table(privilege.getObjectName()));
+ if (privilege.getColumns() != null) {
+ for (String columnName : privilege.getColumns()) {
+            List<DBModelAuthorizable> columnHierarchy =
+                new ArrayList<DBModelAuthorizable>(baseHierarchy);
+ columnHierarchy.add(new Column(columnName));
+ objectHierarchy.add(columnHierarchy);
+ }
+ } else {
+ objectHierarchy.add(baseHierarchy);
+ }
+ break;
+ case LOCAL_URI:
+ isLocal = true;
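+        // intentional fall-through: LOCAL_URI is handled as a URI with isLocal == true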
+ case DFS_URI:
+ if (privilege.getObjectName() == null) {
+ break;
+ }
+ try {
+ baseHierarchy.add(server);
+ baseHierarchy.add(parseURI(privilege.getObjectName(), isLocal));
+ objectHierarchy.add(baseHierarchy);
+ } catch (Exception e) {
+ throw new AuthorizationException("Failed to get File URI", e);
+ }
+ break;
+ case FUNCTION:
+ case PARTITION:
+ case COLUMN:
+ case COMMAND_PARAMS:
+        // these types are not supported yet
+ break;
+ default:
+ break;
+ }
+ }
+ return objectHierarchy;
+ }
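+
+  // Illustrative result: a TABLE_OR_VIEW object for db1.tb1 with columns [c1, c2]
+  // yields [server, Database(db1), Table(tb1), Column(c1)] and
+  // [server, Database(db1), Table(tb1), Column(c2)].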
+
+  /**
+   * Convert a HivePrivilegeObject list to a List<List<DBModelAuthorizable>>.
+   *
+   * @param server
+   * @param privileges
+   */
+  public static List<List<DBModelAuthorizable>> convert2SentryPrivilegeList(Server server,
+      List<HivePrivilegeObject> privileges) {
+    List<List<DBModelAuthorizable>> hierarchyList = new ArrayList<List<DBModelAuthorizable>>();
+    if (privileges != null && !privileges.isEmpty()) {
+      for (HivePrivilegeObject p : privileges) {
+ hierarchyList.addAll(getAuthzHierarchy(server, p));
+ }
+ }
+ return hierarchyList;
+ }
+
+  /**
+   * Convert a HiveOperationType name to a HiveOperation.
+   *
+   * @param typeName
+   */
+ public static HiveOperation convert2HiveOperation(String typeName) {
+ try {
+ return HiveOperation.valueOf(typeName);
+ } catch (Exception e) {
+ return null;
+ }
+ }
+
+ /**
+ * Convert HivePrivilege to Sentry Action
+ *
+ * @param hivePrivilege
+ */
+ public static String convert2SentryAction(HivePrivilege hivePrivilege) {
+ if (PrivilegeType.ALL.name().equals(hivePrivilege.getName())) {
+ return AccessConstants.ALL;
+ } else {
+ return hivePrivilege.getName();
+ }
+ }
+
+  /**
+   * Convert a Sentry action to a HivePrivilege.
+   *
+   * @param action
+   */
+ public static HivePrivilege convert2HivePrivilege(String action) {
+ return new HivePrivilege(action, null);
+ }
+
+ /**
+ * Convert TSentryRole Set to String List
+ *
+ * @param roleSet
+ */
+  public static List<String> convert2RoleList(Set<TSentryRole> roleSet) {
+    List<String> roles = new ArrayList<String>();
+ if (roleSet != null && !roleSet.isEmpty()) {
+ for (TSentryRole tRole : roleSet) {
+ roles.add(tRole.getRoleName());
+ }
+ }
+ return roles;
+ }
+
+ /**
+ * Convert TSentryPrivilege to HivePrivilegeInfo
+ *
+ * @param tPrivilege
+ * @param principal
+ */
+ public static HivePrivilegeInfo convert2HivePrivilegeInfo(TSentryPrivilege tPrivilege,
+ HivePrincipal principal) {
+ HivePrivilege hivePrivilege = convert2HivePrivilege(tPrivilege.getAction());
+ HivePrivilegeObject hivePrivilegeObject = convert2HivePrivilegeObject(tPrivilege);
+    // Sentry does not currently expose the grantor of a privilege
+    HivePrincipal grantor = new HivePrincipal(UNKNOWN_GRANTOR, HivePrincipalType.ROLE);
+    boolean grantOption = tPrivilege.getGrantOption().equals(TSentryGrantOption.TRUE);
+ return new HivePrivilegeInfo(principal, hivePrivilege, hivePrivilegeObject, grantor,
+ grantOption, (int) tPrivilege.getCreateTime());
+ }
+
+ /**
+ * Convert TSentryPrivilege to HivePrivilegeObject
+ *
+ * @param tSentryPrivilege
+ */
+ public static HivePrivilegeObject convert2HivePrivilegeObject(TSentryPrivilege tSentryPrivilege) {
+ HivePrivilegeObject privilege = null;
+ switch (PrivilegeScope.valueOf(tSentryPrivilege.getPrivilegeScope())) {
+ case SERVER:
+ privilege = new HivePrivilegeObject(HivePrivilegeObjectType.GLOBAL, "*", null);
+ break;
+ case DATABASE:
+ privilege =
+ new HivePrivilegeObject(HivePrivilegeObjectType.DATABASE, tSentryPrivilege.getDbName(),
+ null);
+ break;
+ case TABLE:
+ privilege =
+ new HivePrivilegeObject(HivePrivilegeObjectType.TABLE_OR_VIEW,
+ tSentryPrivilege.getDbName(), tSentryPrivilege.getTableName());
+ break;
+ case COLUMN:
+ privilege =
+ new HivePrivilegeObject(HivePrivilegeObjectType.COLUMN, tSentryPrivilege.getDbName(),
+ tSentryPrivilege.getTableName(), null, tSentryPrivilege.getColumnName());
+ break;
+ case URI:
+ String uriString = tSentryPrivilege.getURI();
+ try {
+ uriString = uriString.replace("'", "").replace("\"", "");
+ HivePrivilegeObjectType type =
+ isLocalUri(uriString) ? HivePrivilegeObjectType.LOCAL_URI
+ : HivePrivilegeObjectType.DFS_URI;
+ privilege = new HivePrivilegeObject(type, uriString, null);
+ } catch (URISyntaxException e1) {
+          throw new RuntimeException(uriString + " is not a URI");
+        }
+        break;
+      default:
+ LOG.warn("Unknown PrivilegeScope: "
+ + PrivilegeScope.valueOf(tSentryPrivilege.getPrivilegeScope()));
+ break;
+ }
+ return privilege;
+ }
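+
+  // e.g. a TABLE-scope TSentryPrivilege on db1.tb1 converts to
+  // new HivePrivilegeObject(HivePrivilegeObjectType.TABLE_OR_VIEW, "db1", "tb1").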
+
+  public static boolean isLocalUri(String uriString) throws URISyntaxException {
+    URI uri = new URI(uriString);
+    // compare with the constant on the left so a scheme-less URI (null scheme) is safe
+    return "file".equalsIgnoreCase(uri.getScheme());
+  }
+
+ /**
+ * Convert TSentryRole to HiveRoleGrant
+ *
+ * @param role
+ */
+ public static HiveRoleGrant convert2HiveRoleGrant(TSentryRole role) {
+ HiveRoleGrant hiveRoleGrant = new HiveRoleGrant();
+ hiveRoleGrant.setRoleName(role.getRoleName());
+ hiveRoleGrant.setPrincipalName(role.getRoleName());
+ hiveRoleGrant.setPrincipalType(PrincipalType.ROLE.name());
+ hiveRoleGrant.setGrantOption(false);
+ hiveRoleGrant.setGrantor(role.getGrantorPrincipal());
+ hiveRoleGrant.setGrantorType(PrincipalType.USER.name());
+ return hiveRoleGrant;
+ }
+
+  /**
+   * Execute the configured on-failure hooks (used by the e2e tests).
+   *
+   * @param hookCtx
+   * @param conf
+   */
+ public static void executeOnFailureHooks(SentryOnFailureHookContext hookCtx, Configuration conf) {
+ String csHooks =
+ conf.get(HiveAuthzConf.AuthzConfVars.AUTHZ_ONFAILURE_HOOKS.getVar(), "").trim();
+
+ try {
+ for (Hook aofh : SentryAuthorizerUtil.getHooks(csHooks)) {
+ ((SentryOnFailureHook) aofh).run(hookCtx);
+ }
+ } catch (Exception ex) {
+ LOG.error("Error executing hook:", ex);
+ }
+ }
+
+  /**
+   * Returns the hooks specified in a comma-separated configuration value.
+   *
+   * See getHooks(String csHooks, Class<T> clazz)
+   *
+   * @param csHooks
+   * @return a list of the configured hook instances
+   * @throws Exception
+   */
+  public static List<Hook> getHooks(String csHooks) throws Exception {
+ return getHooks(csHooks, Hook.class);
+ }
+
+  /**
+   * Returns the hooks specified in a configuration value. The hooks are returned in a list in
+   * the order they were specified.
+   *
+   * @param csHooks A comma separated list of the hook class names.
+   * @param clazz The super type of the hooks.
+   * @return A list of the hooks cast as the type specified in clazz, in the order they are
+   *         listed in csHooks
+   * @throws Exception
+   */
+  public static <T extends Hook> List<T> getHooks(String csHooks, Class<T> clazz) throws Exception {
+
+    List<T> hooks = new ArrayList<T>();
+ if (csHooks.isEmpty()) {
+ return hooks;
+ }
+ for (String hookClass : Splitter.on(",").omitEmptyStrings().trimResults().split(csHooks)) {
+ try {
+ @SuppressWarnings("unchecked")
+ T hook = (T) Class.forName(hookClass, true, JavaUtils.getClassLoader()).newInstance();
+ hooks.add(hook);
+ } catch (ClassNotFoundException e) {
+ LOG.error(hookClass + " Class not found:" + e.getMessage());
+ throw e;
+ }
+ }
+
+ return hooks;
+ }
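+
+  // Usage sketch (hypothetical hook class name):
+  //   List<SentryOnFailureHook> hooks =
+  //       getHooks("com.example.MyFailureHook", SentryOnFailureHook.class);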
+}
diff --git a/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/util/SimpleSemanticAnalyzer.java b/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/util/SimpleSemanticAnalyzer.java
new file mode 100644
index 000000000..b50bbf482
--- /dev/null
+++ b/sentry-binding/sentry-binding-hive-v2/src/main/java/org/apache/sentry/binding/hive/v2/util/SimpleSemanticAnalyzer.java
@@ -0,0 +1,369 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
+ * agreements. See the NOTICE file distributed with this work for additional information regarding
+ * copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License. You may obtain a
+ * copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License
+ * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
+ * or implied. See the License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.sentry.binding.hive.v2.util;
+
+import java.util.HashMap;
+import java.util.Map;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import org.apache.hadoop.hive.ql.plan.HiveOperation;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzPluginException;
+import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.sentry.core.model.db.Table;
+
+/**
+ * Currently the Hive compiler doesn't create read/write entities for some operations, e.g.
+ * CREATE TABLE and DROP TABLE. This class is a simple regex-based semantic analyzer, used as a
+ * workaround to extract db_name and tb_name from those operations.
+ */
+public class SimpleSemanticAnalyzer {
+ private String currentDb;
+ private String currentTb;
+
+ /**
+ * CREATE [TEMPORARY] [EXTERNAL] TABLE [IF NOT EXISTS] [db_name.]table_name ...
+ */
+ private static final String CREATE_TABLE_REGEX = "^(CREATE)\\s+" + "(TEMPORARY\\s+)?"
+ + "(EXTERNAL\\s+)?" + "TABLE\\s+" + "(IF\\s+NOT\\s+EXISTS\\s+)?" + "([A-Za-z0-9._]+)";
+
+ /**
+ * DROP (DATABASE|SCHEMA) [IF EXISTS] database_name [RESTRICT|CASCADE];
+ */
+ private static final String DROP_DB_REGEX = "^DROP\\s+" + "(DATABASE|SCHEMA)\\s+"
+ + "(IF\\s+EXISTS\\s+)?" + "([A-Za-z0-9_]+)";
+
+ /**
+ * DROP TABLE [IF EXISTS] table_name;
+ */
+ private static final String DROP_TABLE_REGEX = "^DROP\\s+" + "TABLE\\s+" + "(IF\\s+EXISTS\\s+)?"
+ + "([A-Za-z0-9._]+)";
+
+ /**
+ * DROP VIEW [IF EXISTS] view_name;
+ */
+ private static final String DROP_VIEW_REGEX = "^DROP\\s+" + "VIEW\\s+" + "(IF\\s+EXISTS\\s+)?"
+ + "([A-Za-z0-9_].+)";
+
+ /**
+ * DESCRIBE DATABASE|SCHEMA [EXTENDED] db_name;
+ */
+ private static final String DESCRIBE_DB_REGEX = "^DESCRIBE\\s+" + "(DATABASE|SCHEMA)\\s+"
+ + "(EXTENDED\\s+)?" + "([A-Za-z0-9_]+)";
+
+ /**
+ * DESCRIBE [EXTENDED|FORMATTED] [db_name.]table_name[.col_name ( [.field_name] | [.'$elem$'] |
+ * [.'$key$'] | [.'$value$'] )* ];
+ */
+ private static final String DESCRIBE_TABLE_REGEX = "^DESCRIBE\\s+"
+ + "((EXTENDED|FORMATTED)\\s+)?" + "([A-Za-z0-9._]+)";
+
+ /**
+ * SHOW [FORMATTED] (INDEX|INDEXES) ON table_with_index [(FROM|IN) db_name];
+ */
+ private static final String SHOW_INDEX_REGEX = "^SHOW\\s+" + "(FORMATTED\\s+)?"
+ + "(INDEX|INDEXES)\\s+" + "ON\\s+" + "([A-Za-z0-9._]+)\\s*"
+ + "((FROM|IN)\\s+([A-Za-z0-9_]+))?";
+
+ /**
+ * SHOW TBLPROPERTIES tblname;
+ */
+ private static final String SHOW_TBLPROPERTIES_REGEX = "^SHOW\\s+" + "TBLPROPERTIES\\s+"
+ + "([A-Za-z0-9._]+)";
+
+ /**
+ * ALTER TABLE table_name ...
+ */
+ private static final String ALTER_TABLE_REGEX = "^ALTER\\s+" + "TABLE\\s+" + "([A-Za-z0-9._]+)";
+
+ /**
+ * ALTER VIEW view_name ...
+ */
+ private static final String ALTER_VIEW_REGEX = "^ALTER\\s+" + "VIEW\\s+" + "([A-Za-z0-9._]+)";
+
+ /**
+ * MSCK REPAIR TABLE table_name;
+ */
+ private static final String MSCK_REGEX = "^MSCK\\s+" + "REPAIR\\s" + "TABLE\\s"
+ + "([A-Za-z0-9._]+)";
+
+ /**
+ * ALTER INDEX index_name ON table_name [PARTITION partition_spec] REBUILD;
+ */
+ private static final String ALTER_INDEX_REGEX = "^ALTER\\s+" + "INDEX\\s+"
+ + "([A-Za-z0-9_]+)\\s+" + "ON\\s" + "([A-Za-z0-9._]+)";
+
+ /**
+ * CREATE FUNCTION [db_name.]function_name AS class_name [USING JAR|FILE|ARCHIVE 'file_uri' [,
+ * JAR|FILE|ARCHIVE 'file_uri'] ];
+ */
+ private static final String CREATE_FUNCTION_REGEX = "^CREATE\\s+" + "(TEMPORARY\\s+)?"
+ + "FUNCTION\\s+" + "([A-Za-z0-9._]+)\\s+" + "AS\\s" + "([A-Za-z0-9._']+)";
+
+ /**
+ * SHOW COLUMNS FROM table_name
+ */
+ private static final String SHOWCOLUMNS = "^SHOW\\s+" + "COLUMNS\\s+" + "(FROM|IN)\\s+"
+ + "([A-Za-z0-9._]+)";
+
+ private static final String SHOW_TABLESTATUS = "^SHOW\\s+" + "TABLE\\s+" + "EXTENDED\\s+" + "IN\\s+"
+ + "([A-Za-z0-9._]+)";
+
+ private static final String LOAD = "^LOAD\\s+" + "DATA\\s+" + "(LOCAL\\s+)?" + "INPATH\\s+"
+ + "([A-Za-z0-9._':///-]+)" +"\\s" + "INTO\\s" + "TABLE\\s" + "([A-Za-z0-9._]+)";
+
+ /**
+ * LOCK DATABASE dbname;
+ */
+ private static final String LOCKDB = "^LOCK\\s+" + "DATABASE\\s+" + "([A-Za-z0-9._]+)";
+
+ /**
+ * UNLOCK DATABASE dbname;
+ */
+ private static final String UNLOCKDB = "^UNLOCK\\s+" + "DATABASE\\s+" + "([A-Za-z0-9._]+)";
+
+ /**
+ * LOCK TABLE tblname;
+ */
+ private static final String LOCKTABLE = "^LOCK\\s+" + "TABLE\\s+" + "([A-Za-z0-9._]+)";
+
+ /**
+ * UNLOCK TABLE tblname;
+ */
+ private static final String UNLOCKTABLE = "^UNLOCK\\s+" + "TABLE\\s+" + "([A-Za-z0-9._]+)";
+
+  private static Map<HiveOperation, String> OP_REGEX_MAP = new HashMap<HiveOperation, String>();
+ static {
+ // database metadata
+ OP_REGEX_MAP.put(HiveOperation.DROPDATABASE, DROP_DB_REGEX);
+ OP_REGEX_MAP.put(HiveOperation.DESCDATABASE, DESCRIBE_DB_REGEX);
+
+ // table metadata
+ OP_REGEX_MAP.put(HiveOperation.CREATETABLE, CREATE_TABLE_REGEX);
+ OP_REGEX_MAP.put(HiveOperation.DROPTABLE, DROP_TABLE_REGEX);
+ OP_REGEX_MAP.put(HiveOperation.DROPVIEW, DROP_VIEW_REGEX);
+ OP_REGEX_MAP.put(HiveOperation.DESCTABLE, DESCRIBE_TABLE_REGEX);
+ OP_REGEX_MAP.put(HiveOperation.SHOW_TBLPROPERTIES, SHOW_TBLPROPERTIES_REGEX);
+ OP_REGEX_MAP.put(HiveOperation.ALTERTABLE_PROPERTIES, ALTER_TABLE_REGEX);
+ OP_REGEX_MAP.put(HiveOperation.ALTERTABLE_SERDEPROPERTIES, ALTER_TABLE_REGEX);
+ OP_REGEX_MAP.put(HiveOperation.ALTERTABLE_CLUSTER_SORT, ALTER_TABLE_REGEX);
+ OP_REGEX_MAP.put(HiveOperation.ALTERTABLE_FILEFORMAT, ALTER_TABLE_REGEX);
+ OP_REGEX_MAP.put(HiveOperation.ALTERTABLE_TOUCH, ALTER_TABLE_REGEX);
+ OP_REGEX_MAP.put(HiveOperation.ALTERTABLE_PROTECTMODE, ALTER_TABLE_REGEX);
+ OP_REGEX_MAP.put(HiveOperation.ALTERTABLE_RENAMECOL, ALTER_TABLE_REGEX);
+ OP_REGEX_MAP.put(HiveOperation.ALTERTABLE_ADDCOLS, ALTER_TABLE_REGEX);
+ OP_REGEX_MAP.put(HiveOperation.ALTERTABLE_REPLACECOLS, ALTER_TABLE_REGEX);
+ OP_REGEX_MAP.put(HiveOperation.ALTERTABLE_RENAMEPART, ALTER_TABLE_REGEX);
+ OP_REGEX_MAP.put(HiveOperation.ALTERTABLE_ARCHIVE, ALTER_TABLE_REGEX);
+ OP_REGEX_MAP.put(HiveOperation.ALTERTABLE_UNARCHIVE, ALTER_TABLE_REGEX);
+ OP_REGEX_MAP.put(HiveOperation.ALTERTABLE_SERIALIZER, ALTER_TABLE_REGEX);
+ OP_REGEX_MAP.put(HiveOperation.ALTERTABLE_MERGEFILES, ALTER_TABLE_REGEX);
+ OP_REGEX_MAP.put(HiveOperation.ALTERTABLE_SKEWED, ALTER_TABLE_REGEX);
+ OP_REGEX_MAP.put(HiveOperation.ALTERTABLE_DROPPARTS, ALTER_TABLE_REGEX);
+ OP_REGEX_MAP.put(HiveOperation.ALTERTABLE_ADDPARTS, ALTER_TABLE_REGEX);
+ OP_REGEX_MAP.put(HiveOperation.ALTERTABLE_RENAME, ALTER_TABLE_REGEX);
+ OP_REGEX_MAP.put(HiveOperation.ALTERTABLE_LOCATION, ALTER_TABLE_REGEX);
+ OP_REGEX_MAP.put(HiveOperation.ALTERPARTITION_FILEFORMAT, ALTER_TABLE_REGEX);
+ OP_REGEX_MAP.put(HiveOperation.ALTERPARTITION_PROTECTMODE, ALTER_TABLE_REGEX);
+ OP_REGEX_MAP.put(HiveOperation.ALTERPARTITION_SERDEPROPERTIES, ALTER_TABLE_REGEX);
+ OP_REGEX_MAP.put(HiveOperation.ALTERPARTITION_SERIALIZER, ALTER_TABLE_REGEX);
+ OP_REGEX_MAP.put(HiveOperation.ALTERPARTITION_MERGEFILES, ALTER_TABLE_REGEX);
+ OP_REGEX_MAP.put(HiveOperation.ALTERPARTITION_LOCATION, ALTER_TABLE_REGEX);
+ OP_REGEX_MAP.put(HiveOperation.ALTERTBLPART_SKEWED_LOCATION, ALTER_TABLE_REGEX);
+ OP_REGEX_MAP.put(HiveOperation.ALTERVIEW_PROPERTIES, ALTER_VIEW_REGEX);
+ OP_REGEX_MAP.put(HiveOperation.MSCK, MSCK_REGEX);
+ OP_REGEX_MAP.put(HiveOperation.ALTERINDEX_REBUILD, ALTER_INDEX_REGEX);
+ OP_REGEX_MAP.put(HiveOperation.ALTERINDEX_PROPS, ALTER_INDEX_REGEX);
+ OP_REGEX_MAP.put(HiveOperation.LOCKDB, LOCKDB);
+ OP_REGEX_MAP.put(HiveOperation.UNLOCKDB, UNLOCKDB);
+ OP_REGEX_MAP.put(HiveOperation.LOCKTABLE, LOCKTABLE);
+ OP_REGEX_MAP.put(HiveOperation.UNLOCKTABLE, UNLOCKTABLE);
+ OP_REGEX_MAP.put(HiveOperation.SHOWCOLUMNS, SHOWCOLUMNS);
+ OP_REGEX_MAP.put(HiveOperation.SHOW_TABLESTATUS, SHOW_TABLESTATUS);
+ }
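+
+  // Usage sketch: one analyzer instance per statement, e.g.
+  //   SimpleSemanticAnalyzer a =
+  //       new SimpleSemanticAnalyzer(HiveOperation.DROPTABLE, "DROP TABLE db1.tb1");
+  //   a.getCurrentDb(); // "db1"
+  //   a.getCurrentTb(); // "tb1"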
+
+ public SimpleSemanticAnalyzer(HiveOperation hiveOp, String cmd) throws HiveAuthzPluginException {
+ currentDb = SessionState.get().getCurrentDatabase();
+ parse(hiveOp, cmd);
+ }
+
+ private void parse(HiveOperation hiveOp, String cmd) throws HiveAuthzPluginException {
+ switch (hiveOp) {
+ case DROPDATABASE:
+ case DESCDATABASE:
+ case LOCKDB:
+ case UNLOCKDB:
+ parseDbMeta(cmd, OP_REGEX_MAP.get(hiveOp));
+ break;
+ case DESCTABLE:
+ case CREATETABLE:
+ case DROPTABLE:
+ case DROPVIEW:
+ case SHOW_TBLPROPERTIES:
+ // alter table
+ case ALTERTABLE_PROPERTIES:
+ case ALTERTABLE_SERDEPROPERTIES:
+ case ALTERTABLE_CLUSTER_SORT:
+ case ALTERTABLE_FILEFORMAT:
+ case ALTERTABLE_TOUCH:
+ case ALTERTABLE_PROTECTMODE:
+ case ALTERTABLE_RENAMECOL:
+ case ALTERTABLE_ADDCOLS:
+ case ALTERTABLE_REPLACECOLS:
+ case ALTERTABLE_RENAMEPART:
+ case ALTERTABLE_ARCHIVE:
+ case ALTERTABLE_UNARCHIVE:
+ case ALTERTABLE_SERIALIZER:
+ case ALTERTABLE_MERGEFILES:
+ case ALTERTABLE_SKEWED:
+ case ALTERTABLE_DROPPARTS:
+ case ALTERTABLE_ADDPARTS:
+ case ALTERTABLE_RENAME:
+ case ALTERTABLE_LOCATION:
+ // alter view
+ case ALTERVIEW_PROPERTIES:
+ // alter partition
+ case ALTERPARTITION_FILEFORMAT:
+ case ALTERPARTITION_PROTECTMODE:
+ case ALTERPARTITION_SERDEPROPERTIES:
+ case ALTERPARTITION_SERIALIZER:
+ case ALTERPARTITION_MERGEFILES:
+ case ALTERPARTITION_LOCATION:
+ case ALTERTBLPART_SKEWED_LOCATION:
+ // MSCK
+ case MSCK:
+ // alter index
+ case ALTERINDEX_REBUILD:
+ case ALTERINDEX_PROPS:
+ case LOCKTABLE:
+ case UNLOCKTABLE:
+ case SHOWCOLUMNS:
+ parseTableMeta(cmd, OP_REGEX_MAP.get(hiveOp));
+ break;
+ case SHOWINDEXES:
+ parseShowIndex(cmd, SHOW_INDEX_REGEX);
+ break;
+ case CREATEFUNCTION:
+ parseFunction(cmd, CREATE_FUNCTION_REGEX);
+ break;
+ case SHOW_TABLESTATUS:
+ parseTableExtend(cmd, SHOW_TABLESTATUS);
+ break;
+ case LOAD:
+ parseLoadTable(cmd, LOAD);
+ break;
+ default:
+ break;
+ }
+ }
+
+ private void parseLoadTable(String cmd, String load) throws HiveAuthzPluginException {
+ Pattern pattern = Pattern.compile(load, Pattern.CASE_INSENSITIVE);
+ Matcher matcher = pattern.matcher(cmd);
+ if (matcher.find()) {
+ String tbName = matcher.group(matcher.groupCount());
+ extractDbAndTb(tbName.trim());
+ } else {
+ throw new HiveAuthzPluginException("this command " + cmd + " is not match table meta grammar");
+ }
+ }
+
+ private void parseTableExtend(String cmd, String showTablestatus) throws HiveAuthzPluginException {
+ Pattern pattern = Pattern.compile(showTablestatus, Pattern.CASE_INSENSITIVE);
+ Matcher matcher = pattern.matcher(cmd);
+ if (matcher.find()) {
+ String dbName = matcher.group(matcher.groupCount());
+ currentDb = dbName;
+ currentTb = Table.SOME.getName();
+ } else {
+ throw new HiveAuthzPluginException("this command " + cmd + " is not match table meta grammar");
+ }
+ }
+
+ private void extractDbAndTb(String tableName) {
+ if (tableName.contains(".")) {
+ String[] tb = tableName.split("\\.");
+ currentDb = tb[0];
+ currentTb = tb[1];
+ } else {
+ currentDb = SessionState.get().getCurrentDatabase();
+ currentTb = tableName;
+ }
+ }
+
+ private void parseDbMeta(String cmd, String regex) throws HiveAuthzPluginException {
+ Pattern pattern = Pattern.compile(regex, Pattern.CASE_INSENSITIVE);
+ Matcher matcher = pattern.matcher(cmd);
+ if (matcher.find()) {
+ currentDb = matcher.group(matcher.groupCount());
+ } else {
+ throw new HiveAuthzPluginException("this command " + cmd
+ + " is not match database meta grammar");
+ }
+ }
+
+ private void parseTableMeta(String cmd, String regex) throws HiveAuthzPluginException {
+ Pattern pattern = Pattern.compile(regex, Pattern.CASE_INSENSITIVE);
+ Matcher matcher = pattern.matcher(cmd);
+ if (matcher.find()) {
+ String tbName = matcher.group(matcher.groupCount());
+ extractDbAndTb(tbName.trim());
+ } else {
+ throw new HiveAuthzPluginException("this command " + cmd + " is not match table meta grammar");
+ }
+ }
+
+ private void parseShowIndex(String cmd, String regex) throws HiveAuthzPluginException {
+ Pattern pattern = Pattern.compile(regex, Pattern.CASE_INSENSITIVE);
+ Matcher matcher = pattern.matcher(cmd);
+ if (matcher.find()) {
+ String dbName = matcher.group(matcher.groupCount());
+ String tbName = matcher.group(3);
+ if (dbName != null) {
+ currentDb = dbName;
+ currentTb = tbName;
+ } else {
+ extractDbAndTb(tbName);
+ }
+ } else {
+ throw new HiveAuthzPluginException("this command " + cmd + " is not match show index grammar");
+ }
+ }
+
+ private void parseFunction(String cmd, String regex) throws HiveAuthzPluginException {
+ Pattern pattern = Pattern.compile(regex, Pattern.CASE_INSENSITIVE);
+ Matcher matcher = pattern.matcher(cmd);
+ if (matcher.find()) {
+ String udfClass = matcher.group(matcher.groupCount());
+ if (udfClass.contains("'")) {
+ currentTb = udfClass.split("'")[1];
+ } else {
+ currentTb = udfClass;
+ }
+ } else {
+ throw new HiveAuthzPluginException("this command " + cmd
+ + " is not match create function grammar");
+ }
+ }
+
+ public String getCurrentDb() {
+ return currentDb;
+ }
+
+ public String getCurrentTb() {
+ return currentTb;
+ }
+
+}
diff --git a/sentry-binding/sentry-binding-hive-v2/src/test/java/org/apache/sentry/binding/hive/v2/DummyHiveAuthenticationProvider.java b/sentry-binding/sentry-binding-hive-v2/src/test/java/org/apache/sentry/binding/hive/v2/DummyHiveAuthenticationProvider.java
new file mode 100644
index 000000000..9335c37bd
--- /dev/null
+++ b/sentry-binding/sentry-binding-hive-v2/src/test/java/org/apache/sentry/binding/hive/v2/DummyHiveAuthenticationProvider.java
@@ -0,0 +1,63 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
+ * agreements. See the NOTICE file distributed with this work for additional information regarding
+ * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License. You may obtain a
+ * copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License
+ * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
+ * or implied. See the License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.sentry.binding.hive.v2;
+
+import java.util.List;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.security.HiveAuthenticationProvider;
+import org.apache.hadoop.hive.ql.session.SessionState;
+
+public class DummyHiveAuthenticationProvider implements HiveAuthenticationProvider {
+
+ private String userName;
+ private Configuration conf;
+
+ @Override
+ public void setConf(Configuration conf) {
+ this.conf = conf;
+ }
+
+ @Override
+ public Configuration getConf() {
+ return conf;
+ }
+
+ @Override
+ public String getUserName() {
+ return userName;
+ }
+
+ @Override
+  public List<String> getGroupNames() {
+ return null;
+ }
+
+ @Override
+ public void destroy() throws HiveException {
+
+ }
+
+ @Override
+ public void setSessionState(SessionState ss) {
+
+ }
+
+ public void setUserName(String user) {
+ this.userName = user;
+ }
+
+}
diff --git a/sentry-binding/sentry-binding-hive/pom.xml b/sentry-binding/sentry-binding-hive/pom.xml
index 6188b3400..fb5f21494 100644
--- a/sentry-binding/sentry-binding-hive/pom.xml
+++ b/sentry-binding/sentry-binding-hive/pom.xml
@@ -22,7 +22,7 @@ limitations under the License.
   <parent>
     <groupId>org.apache.sentry</groupId>
     <artifactId>sentry-binding</artifactId>
-    <version>1.5.0-incubating-SNAPSHOT</version>
+    <version>1.7.0-incubating-SNAPSHOT</version>
   </parent>
   <artifactId>sentry-binding-hive</artifactId>
@@ -73,6 +73,10 @@ limitations under the License.
     <dependency>
       <groupId>org.apache.sentry</groupId>
       <artifactId>sentry-provider-file</artifactId>
     </dependency>
+    <dependency>
+      <groupId>org.apache.sentry</groupId>
+      <artifactId>sentry-provider-cache</artifactId>
+    </dependency>
     <dependency>
       <groupId>org.apache.sentry</groupId>
       <artifactId>sentry-policy-db</artifactId>
     </dependency>
diff --git a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/hadoop/hive/ql/exec/SentryFilterDDLTask.java b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/hadoop/hive/ql/exec/SentryFilterDDLTask.java
new file mode 100644
index 000000000..883836809
--- /dev/null
+++ b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/hadoop/hive/ql/exec/SentryFilterDDLTask.java
@@ -0,0 +1,137 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.exec;
+
+import static org.apache.hadoop.util.StringUtils.stringifyException;
+
+import java.io.DataOutputStream;
+import java.io.IOException;
+import java.util.List;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.ql.DriverContext;
+import org.apache.hadoop.hive.ql.ErrorMsg;
+import org.apache.hadoop.hive.ql.metadata.Hive;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.metadata.Table;
+import org.apache.hadoop.hive.ql.metadata.formatting.MetaDataFormatUtils;
+import org.apache.hadoop.hive.ql.plan.HiveOperation;
+import org.apache.hadoop.hive.ql.plan.ShowColumnsDesc;
+import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hadoop.io.IOUtils;
+import org.apache.sentry.binding.hive.HiveAuthzBindingHook;
+import org.apache.sentry.binding.hive.authz.HiveAuthzBinding;
+import org.apache.sentry.core.common.Subject;
+
+import com.google.common.base.Preconditions;
+
+public class SentryFilterDDLTask extends DDLTask {
+ private static final long serialVersionUID = 1L;
+ private static final Log LOG = LogFactory.getLog(SentryFilterDDLTask.class);
+
+ private HiveAuthzBinding hiveAuthzBinding;
+ private Subject subject;
+ private HiveOperation stmtOperation;
+
+ public SentryFilterDDLTask(HiveAuthzBinding hiveAuthzBinding, Subject subject,
+ HiveOperation stmtOperation) {
+ Preconditions.checkNotNull(hiveAuthzBinding);
+ Preconditions.checkNotNull(subject);
+ Preconditions.checkNotNull(stmtOperation);
+
+ this.hiveAuthzBinding = hiveAuthzBinding;
+ this.subject = subject;
+ this.stmtOperation = stmtOperation;
+ }
+
+ public HiveAuthzBinding getHiveAuthzBinding() {
+ return hiveAuthzBinding;
+ }
+
+ public Subject getSubject() {
+ return subject;
+ }
+
+ public HiveOperation getStmtOperation() {
+ return stmtOperation;
+ }
+
+ @Override
+ public int execute(DriverContext driverContext) {
+    // Currently SentryFilterDDLTask only supports filtering the "SHOW COLUMNS IN <table>" command.
+ ShowColumnsDesc showCols = work.getShowColumnsDesc();
+ try {
+ if (showCols != null) {
+ return showFilterColumns(showCols);
+ }
+ } catch (Throwable e) {
+ failed(e);
+ return 1;
+ }
+
+ return super.execute(driverContext);
+ }
+
+ private void failed(Throwable e) {
+ while (e.getCause() != null && e.getClass() == RuntimeException.class) {
+ e = e.getCause();
+ }
+ setException(e);
+ LOG.error(stringifyException(e));
+ }
+
+  /**
+   * Filter the result of the "SHOW COLUMNS" command, returning only the columns
+   * the subject may access.
+   */
+ private int showFilterColumns(ShowColumnsDesc showCols) throws HiveException {
+ Table table = Hive.get(conf).getTable(showCols.getTableName());
+
+ // write the results in the file
+ DataOutputStream outStream = null;
+ try {
+ Path resFile = new Path(showCols.getResFile());
+ FileSystem fs = resFile.getFileSystem(conf);
+ outStream = fs.create(resFile);
+
+      List<FieldSchema> cols = table.getCols();
+ cols.addAll(table.getPartCols());
+ // In case the query is served by HiveServer2, don't pad it with spaces,
+ // as HiveServer2 output is consumed by JDBC/ODBC clients.
+ boolean isOutputPadded = !SessionState.get().isHiveServerQuery();
+ outStream.writeBytes(MetaDataFormatUtils.getAllColumnsInformation(
+          filterColumns(cols, table), false, isOutputPadded, null));
+ outStream.close();
+ outStream = null;
+ } catch (IOException e) {
+ throw new HiveException(e, ErrorMsg.GENERIC_ERROR);
+ } finally {
+ IOUtils.closeStream(outStream);
+ }
+ return 0;
+ }
+
+  private List<FieldSchema> filterColumns(List<FieldSchema> cols, Table table) throws HiveException {
+    // keep only the columns the subject has privileges on
+ return HiveAuthzBindingHook.filterShowColumns(getHiveAuthzBinding(),
+ cols, getStmtOperation(), getSubject().getName(), table.getTableName(), table.getDbName());
+ }
+}
diff --git a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/hadoop/hive/ql/exec/SentryGrantRevokeTask.java b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/hadoop/hive/ql/exec/SentryGrantRevokeTask.java
index 2a60a232c..31eb5e8ad 100644
--- a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/hadoop/hive/ql/exec/SentryGrantRevokeTask.java
+++ b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/hadoop/hive/ql/exec/SentryGrantRevokeTask.java
@@ -130,23 +130,23 @@ public int execute(DriverContext driverContext) {
"Config " + AuthzConfVars.AUTHZ_SERVER_NAME.getVar() + " is required");
try {
if (work.getRoleDDLDesc() != null) {
- return processRoleDDL(conf, console, sentryClient, subject.getName(),
+ return processRoleDDL(console, sentryClient, subject.getName(),
hiveAuthzBinding, work.getRoleDDLDesc());
}
if (work.getGrantDesc() != null) {
- return processGrantDDL(conf, console, sentryClient,
+ return processGrantDDL(console, sentryClient,
subject.getName(), server, work.getGrantDesc());
}
if (work.getRevokeDesc() != null) {
- return processRevokeDDL(conf, console, sentryClient,
+ return processRevokeDDL(console, sentryClient,
subject.getName(), server, work.getRevokeDesc());
}
if (work.getShowGrantDesc() != null) {
- return processShowGrantDDL(conf, console, sentryClient, subject.getName(), server,
+ return processShowGrantDDL(console, sentryClient, subject.getName(),
work.getShowGrantDesc());
}
if (work.getGrantRevokeRoleDDL() != null) {
- return processGrantRevokeRoleDDL(conf, console, sentryClient,
+ return processGrantRevokeRoleDDL(console, sentryClient,
subject.getName(), work.getGrantRevokeRoleDDL());
}
throw new AssertionError(
@@ -165,7 +165,10 @@ public int execute(DriverContext driverContext) {
}
} catch(SentryUserException e) {
setException(new Exception(e.getClass().getSimpleName() + ": " + e.getReason(), e));
- String msg = "Error processing Sentry command: " + e.getMessage();
+ String msg = "Error processing Sentry command: " + e.getReason() + ".";
+ if (e instanceof SentryAccessDeniedException) {
+ msg += "Please grant admin privilege to " + subject.getName() + ".";
+ }
LOG.error(msg, e);
console.printError(msg);
return RETURN_CODE_FAILURE;
@@ -214,7 +217,7 @@ public void setOperation(HiveOperation stmtOperation) {
this.stmtOperation = stmtOperation;
}
- private int processRoleDDL(HiveConf conf, LogHelper console,
+ private int processRoleDDL(LogHelper console,
SentryPolicyServiceClient sentryClient, String subject,
HiveAuthzBinding hiveAuthzBinding, RoleDDLDesc desc)
throws SentryUserException {
@@ -277,7 +280,7 @@ private int processRoleDDL(HiveConf conf, LogHelper console,
}
}
- private int processGrantDDL(HiveConf conf, LogHelper console,
+ private int processGrantDDL(LogHelper console,
SentryPolicyServiceClient sentryClient, String subject,
String server, GrantDesc desc) throws SentryUserException {
return processGrantRevokeDDL(console, sentryClient, subject,
@@ -286,7 +289,7 @@ private int processGrantDDL(HiveConf conf, LogHelper console,
}
// For grant option, we use null to stand for revoke the privilege ignore the grant option
- private int processRevokeDDL(HiveConf conf, LogHelper console,
+ private int processRevokeDDL(LogHelper console,
SentryPolicyServiceClient sentryClient, String subject,
String server, RevokeDesc desc) throws SentryUserException {
return processGrantRevokeDDL(console, sentryClient, subject,
@@ -294,8 +297,8 @@ private int processRevokeDDL(HiveConf conf, LogHelper console,
desc.getPrivilegeSubjectDesc(), null);
}
- private int processShowGrantDDL(HiveConf conf, LogHelper console, SentryPolicyServiceClient sentryClient,
- String subject, String server, ShowGrantDesc desc) throws SentryUserException{
+ private int processShowGrantDDL(LogHelper console, SentryPolicyServiceClient sentryClient,
+ String subject, ShowGrantDesc desc) throws SentryUserException{
PrincipalDesc principalDesc = desc.getPrincipalDesc();
PrivilegeObjectDesc hiveObjectDesc = desc.getHiveObj();
String principalName = principalDesc.getName();
@@ -384,17 +387,18 @@ private void writeToFile(String data, String file) throws IOException {
FSDataOutputStream out = fs.create(resFile);
try {
if (data != null && !data.isEmpty()) {
- OutputStreamWriter writer = new OutputStreamWriter(out, "UTF-8");
- writer.write(data);
- writer.write((char) terminator);
- writer.flush();
+ try (OutputStreamWriter writer = new OutputStreamWriter(out, "UTF-8")) {
+ writer.write(data);
+ writer.write((char) terminator);
+ writer.flush();
+ }
}
} finally {
closeQuiet(out);
}
}
- private int processGrantRevokeRoleDDL(HiveConf conf, LogHelper console,
+ private int processGrantRevokeRoleDDL(LogHelper console,
SentryPolicyServiceClient sentryClient, String subject,
GrantRevokeRoleDDL desc) throws SentryUserException {
try {
@@ -590,7 +594,8 @@ private static int processGrantRevokeDDL(LogHelper console,
}
} else {
if (serverName != null) {
- sentryClient.revokeServerPrivilege(subject, princ.getName(), serverName, grantOption);
+ sentryClient.revokeServerPrivilege(subject, princ.getName(), serverName,
+ toSentryAction(privDesc.getPrivilege().getPriv()), grantOption);
} else if (uriPath != null) {
sentryClient.revokeURIPrivilege(subject, princ.getName(), server, uriPath, grantOption);
} else if (tableName == null) {
diff --git a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/hadoop/hive/ql/exec/SentryHivePrivilegeObjectDesc.java b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/hadoop/hive/ql/exec/SentryHivePrivilegeObjectDesc.java
index 18cdde228..4fa4221b4 100644
--- a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/hadoop/hive/ql/exec/SentryHivePrivilegeObjectDesc.java
+++ b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/hadoop/hive/ql/exec/SentryHivePrivilegeObjectDesc.java
@@ -17,9 +17,6 @@
package org.apache.hadoop.hive.ql.exec;
-import java.util.ArrayList;
-import java.util.List;
-
import org.apache.hadoop.hive.ql.plan.PrivilegeObjectDesc;
public class SentryHivePrivilegeObjectDesc extends PrivilegeObjectDesc {
@@ -47,4 +44,8 @@ public void setServer(boolean isServer) {
this.isServer = isServer;
}
+ public boolean isSentryPrivObjectDesc() {
+ return isServer || isUri;
+ }
+
}
diff --git a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/HiveAuthzBindingHook.java b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/HiveAuthzBindingHook.java
index 48afa0875..c425e0687 100644
--- a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/HiveAuthzBindingHook.java
+++ b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/HiveAuthzBindingHook.java
@@ -27,17 +27,27 @@
import java.util.EnumSet;
import java.util.List;
import java.util.Set;
+import java.util.Arrays;
+import com.google.common.base.Preconditions;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.common.JavaUtils;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
+import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.ql.exec.DDLTask;
+import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
+import org.apache.hadoop.hive.ql.exec.SentryFilterDDLTask;
import org.apache.hadoop.hive.ql.exec.SentryGrantRevokeTask;
import org.apache.hadoop.hive.ql.exec.Task;
+import org.apache.hadoop.hive.ql.exec.Utilities;
import org.apache.hadoop.hive.ql.hooks.Entity;
import org.apache.hadoop.hive.ql.hooks.Entity.Type;
import org.apache.hadoop.hive.ql.hooks.Hook;
import org.apache.hadoop.hive.ql.hooks.ReadEntity;
import org.apache.hadoop.hive.ql.hooks.WriteEntity;
+import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.metadata.AuthorizationException;
import org.apache.hadoop.hive.ql.parse.ASTNode;
import org.apache.hadoop.hive.ql.parse.AbstractSemanticAnalyzerHook;
@@ -45,6 +55,7 @@
import org.apache.hadoop.hive.ql.parse.HiveParser;
import org.apache.hadoop.hive.ql.parse.HiveSemanticAnalyzerHookContext;
import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hadoop.hive.ql.plan.DDLWork;
import org.apache.hadoop.hive.ql.plan.HiveOperation;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.sentry.binding.hive.authz.HiveAuthzBinding;
@@ -62,6 +73,9 @@
import org.apache.sentry.core.model.db.DBModelAuthorizable.AuthorizableType;
import org.apache.sentry.core.model.db.Database;
import org.apache.sentry.core.model.db.Table;
+import org.apache.sentry.provider.cache.PrivilegeCache;
+import org.apache.sentry.provider.cache.SimplePrivilegeCache;
+import org.apache.sentry.provider.common.AuthorizationProvider;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -77,9 +91,19 @@ public class HiveAuthzBindingHook extends AbstractSemanticAnalyzerHook {
private Database currDB = Database.ALL;
private Table currTab;
private AccessURI udfURI;
+ private AccessURI serdeURI;
private AccessURI partitionURI;
private Table currOutTab = null;
private Database currOutDB = null;
+  private final List<String> serdeWhiteList;
+ private boolean serdeURIPrivilegesEnabled;
+
+  // True if this is a basic DESCRIBE <table> operation. False for other DESCRIBE variants
+  // like DESCRIBE [FORMATTED|EXTENDED]. Required because Hive treats these stmts as the same
+  // HiveOperationType, but we want to enforce different privileges on each statement.
+  // Basic DESCRIBE <table> is allowed with only column-level privs, while the variants
+  // require table-level privileges.
+  public boolean isDescTableBasic = false;
public HiveAuthzBindingHook() throws Exception {
SessionState session = SessionState.get();
@@ -95,6 +119,14 @@ public HiveAuthzBindingHook() throws Exception {
}
authzConf = loadAuthzConf(hiveConf);
hiveAuthzBinding = new HiveAuthzBinding(hiveConf, authzConf);
+
+ String serdeWhiteLists = authzConf.get(HiveAuthzConf.HIVE_SENTRY_SERDE_WHITELIST,
+ HiveAuthzConf.HIVE_SENTRY_SERDE_WHITELIST_DEFAULT);
+ serdeWhiteList = Arrays.asList(serdeWhiteLists.split(","));
+ serdeURIPrivilegesEnabled = authzConf.getBoolean(HiveAuthzConf.HIVE_SENTRY_SERDE_URI_PRIVILIEGES_ENABLED,
+ HiveAuthzConf.HIVE_SENTRY_SERDE_URI_PRIVILIEGES_ENABLED_DEFAULT);
+
+ FunctionRegistry.setupPermissionsForBuiltinUDFs("", HiveAuthzConf.HIVE_UDF_BLACK_LIST);
}
public static HiveAuthzConf loadAuthzConf(HiveConf hiveConf) {
@@ -145,6 +177,16 @@ public ASTNode preAnalyze(HiveSemanticAnalyzerHookContext context, ASTNode ast)
currDB = new Database(BaseSemanticAnalyzer.unescapeIdentifier(ast.getChild(0).getText()));
break;
case HiveParser.TOK_CREATETABLE:
+
+ for (Node childNode : ast.getChildren()) {
+ ASTNode childASTNode = (ASTNode) childNode;
+ if ("TOK_TABLESERIALIZER".equals(childASTNode.getText())) {
+ ASTNode serdeNode = (ASTNode)childASTNode.getChild(0);
+ String serdeClassName = BaseSemanticAnalyzer.unescapeSQLString(serdeNode.getChild(0).getText());
+ setSerdeURI(serdeClassName);
+ }
+ }
+
case HiveParser.TOK_CREATEVIEW:
/*
* Compiler doesn't create read/write entities for create table.
@@ -218,7 +260,9 @@ public ASTNode preAnalyze(HiveSemanticAnalyzerHookContext context, ASTNode ast)
case HiveParser.TOK_CREATEFUNCTION:
String udfClassName = BaseSemanticAnalyzer.unescapeSQLString(ast.getChild(1).getText());
try {
- CodeSource udfSrc = Class.forName(udfClassName).getProtectionDomain().getCodeSource();
+ CodeSource udfSrc =
+ Class.forName(udfClassName, true, Utilities.getSessionSpecifiedClassLoader())
+ .getProtectionDomain().getCodeSource();
if (udfSrc == null) {
throw new SemanticException("Could not resolve the jar for UDF class " + udfClassName);
}
@@ -229,7 +273,7 @@ public ASTNode preAnalyze(HiveSemanticAnalyzerHookContext context, ASTNode ast)
}
udfURI = parseURI(udfSrc.getLocation().toString(), true);
} catch (ClassNotFoundException e) {
- throw new SemanticException("Error retrieving udf class", e);
+ throw new SemanticException("Error retrieving udf class:" + e.getMessage(), e);
}
// create/drop function is allowed with any database
currDB = Database.ALL;
@@ -243,7 +287,37 @@ public ASTNode preAnalyze(HiveSemanticAnalyzerHookContext context, ASTNode ast)
String dbName = BaseSemanticAnalyzer.unescapeIdentifier(ast.getChild(1).getChild(0).getChild(0).getText());
currDB = new Database(dbName);
break;
- default:
+ case HiveParser.TOK_DESCTABLE:
+ currDB = getCanonicalDb();
+ // For DESCRIBE FORMATTED/EXTENDED ast will have an additional child node with value
+ // "FORMATTED/EXTENDED".
+ isDescTableBasic = (ast.getChildCount() == 1);
+ break;
+ case HiveParser.TOK_TRUNCATETABLE:
+ // SENTRY-826:
+ // Truncate empty partitioned table should throw SemanticException only if the
+ // user does not have permission.
+ // In postAnalyze, currOutDB and currOutTbl will be added into outputHierarchy
+ // which will be validated in the hiveAuthzBinding.authorize method.
+ Preconditions.checkArgument(ast.getChildCount() == 1);
+ // childcount is 1 for table without partition, 2 for table with partitions
+ Preconditions.checkArgument(ast.getChild(0).getChildCount() >= 1);
+ Preconditions.checkArgument(ast.getChild(0).getChild(0).getChildCount() == 1);
+ currOutDB = extractDatabase((ASTNode) ast.getChild(0));
+ currOutTab = extractTable((ASTNode) ast.getChild(0).getChild(0).getChild(0));
+ break;
+ case HiveParser.TOK_ALTERTABLE:
+
+ for (Node childNode : ast.getChildren()) {
+ ASTNode childASTNode = (ASTNode) childNode;
+ if ("TOK_ALTERTABLE_SERIALIZER".equals(childASTNode.getText())) {
+ ASTNode serdeNode = (ASTNode)childASTNode.getChild(0);
+ String serdeClassName = BaseSemanticAnalyzer.unescapeSQLString(serdeNode.getText());
+ setSerdeURI(serdeClassName);
+ }
+ }
+
+ default:
currDB = getCanonicalDb();
break;
}
@@ -258,7 +332,7 @@ private Database getCanonicalDb() {
private Database extractDatabase(ASTNode ast) throws SemanticException {
String tableName = BaseSemanticAnalyzer.getUnescapedName(ast);
if (tableName.contains(".")) {
- return new Database((tableName.split("\\."))[0]);
+ return new Database(tableName.split("\\.")[0]);
} else {
return getCanonicalDb();
}
@@ -266,7 +340,7 @@ private Database extractDatabase(ASTNode ast) throws SemanticException {
private Table extractTable(ASTNode ast) throws SemanticException {
String tableName = BaseSemanticAnalyzer.getUnescapedName(ast);
if (tableName.contains(".")) {
- return new Table((tableName.split("\\."))[1]);
+ return new Table(tableName.split("\\.")[1]);
} else {
return new Table(tableName);
}
@@ -296,6 +370,11 @@ protected static AccessURI parseURI(String uri, boolean isLocal)
try {
HiveConf conf = SessionState.get().getConf();
String warehouseDir = conf.getVar(ConfVars.METASTOREWAREHOUSE);
+ Path warehousePath = new Path(warehouseDir);
+ if (warehousePath.isAbsoluteAndSchemeAuthorityNull()) {
+ FileSystem fs = FileSystem.get(conf);
+ warehouseDir = fs.makeQualified(warehousePath).toUri().toString();
+ }
return new AccessURI(PathUtils.parseURI(warehouseDir, uri, isLocal));
} catch (Exception e) {
throw new SemanticException("Error parsing URI " + uri + ": " +
@@ -335,17 +414,40 @@ public void postAnalyze(HiveSemanticAnalyzerHookContext context,
// We don't handle authorizing this statement
return;
}
+
+ /**
+ * Replace each DDLTask with a SentryFilterDDLTask so that commands such as "show columns"
+ * only return the columns the user has access to (SENTRY-847).
+ */
+ for (int i = 0; i < rootTasks.size(); i++) {
+ Task<? extends Serializable> task = rootTasks.get(i);
+ if (task instanceof DDLTask) {
+ SentryFilterDDLTask filterTask =
+ new SentryFilterDDLTask(hiveAuthzBinding, subject, stmtOperation);
+ filterTask.setWork((DDLWork) task.getWork());
+ rootTasks.set(i, filterTask);
+ }
+ }
+
authorizeWithHiveBindings(context, stmtAuthObject, stmtOperation);
} catch (AuthorizationException e) {
executeOnFailureHooks(context, stmtOperation, e);
- String permsRequired = "";
+ StringBuilder permsBuilder = new StringBuilder();
for (String perm : hiveAuthzBinding.getLastQueryPrivilegeErrors()) {
- permsRequired += perm + ";";
+ permsBuilder.append(perm);
+ permsBuilder.append(";");
}
+ String permsRequired = permsBuilder.toString();
SessionState.get().getConf().set(HiveAuthzConf.HIVE_SENTRY_AUTH_ERRORS, permsRequired);
- String msg = HiveAuthzConf.HIVE_SENTRY_PRIVILEGE_ERROR_MESSAGE + "\n Required privileges for this query: "
+ String msgForLog = HiveAuthzConf.HIVE_SENTRY_PRIVILEGE_ERROR_MESSAGE
+ + "\n Required privileges for this query: "
+ permsRequired;
- throw new SemanticException(msg, e);
+ String msgForConsole = HiveAuthzConf.HIVE_SENTRY_PRIVILEGE_ERROR_MESSAGE + "\n "
+ + e.getMessage()+ "\n The required privileges: " + permsRequired;
+ // AuthorizationException is not a real exception, use the info level to record this.
+ LOG.info(msgForLog);
+ throw new SemanticException(msgForConsole, e);
} finally {
hiveAuthzBinding.close();
}
@@ -409,6 +511,14 @@ private void authorizeWithHiveBindings(HiveSemanticAnalyzerHookContext context,
LOG.debug("context.getOutputs() = " + context.getOutputs());
}
+ // Workaround to allow DESCRIBE <table> to be executed with only column-level privileges, while
+ // still authorizing DESCRIBE [EXTENDED|FORMATTED] as table-level.
+ // This is done by treating DESCRIBE <table> the same as SHOW COLUMNS, which only requires
+ // column-level privileges.
+ if (isDescTableBasic) {
+ stmtAuthObject = HiveAuthzPrivilegesMap.getHiveAuthzPrivileges(HiveOperation.SHOWCOLUMNS);
+ }
+
switch (stmtAuthObject.getOperationScope()) {
case SERVER :
@@ -426,6 +536,13 @@ private void authorizeWithHiveBindings(HiveSemanticAnalyzerHookContext context,
outputHierarchy.add(dbHierarchy);
getInputHierarchyFromInputs(inputHierarchy, inputs);
+
+ if (serdeURI != null) {
+ List<DBModelAuthorizable> serdeUriHierarchy = new ArrayList<DBModelAuthorizable>();
+ serdeUriHierarchy.add(hiveAuthzBinding.getAuthServer());
+ serdeUriHierarchy.add(serdeURI);
+ outputHierarchy.add(serdeUriHierarchy);
+ }
break;
case TABLE:
// workaround for add partitions
@@ -453,6 +570,8 @@ private void authorizeWithHiveBindings(HiveSemanticAnalyzerHookContext context,
inputHierarchy.add(externalAuthorizableHierarchy);
}
+
// workaround for DDL statements
// Capture the table name in pre-analyze and include that in the output entity list
if (currOutTab != null) {
@@ -462,6 +581,14 @@ private void authorizeWithHiveBindings(HiveSemanticAnalyzerHookContext context,
externalAuthorizableHierarchy.add(currOutTab);
outputHierarchy.add(externalAuthorizableHierarchy);
}
+
+ if (serdeURI != null) {
+ List<DBModelAuthorizable> serdeUriHierarchy = new ArrayList<DBModelAuthorizable>();
+ serdeUriHierarchy.add(hiveAuthzBinding.getAuthServer());
+ serdeUriHierarchy.add(serdeURI);
+ outputHierarchy.add(serdeUriHierarchy);
+ }
+
break;
case FUNCTION:
/* The 'FUNCTION' privilege scope currently used for
@@ -492,9 +619,9 @@ private void authorizeWithHiveBindings(HiveSemanticAnalyzerHookContext context,
// by default allow connect access to default db
Table currTbl = Table.ALL;
Column currCol = Column.ALL;
- if ((DEFAULT_DATABASE_NAME.equalsIgnoreCase(currDB.getName()) &&
+ if (DEFAULT_DATABASE_NAME.equalsIgnoreCase(currDB.getName()) &&
"false".equalsIgnoreCase(authzConf.
- get(HiveAuthzConf.AuthzConfVars.AUTHZ_RESTRICT_DEFAULT_DB.getVar(), "false")))) {
+ get(HiveAuthzConf.AuthzConfVars.AUTHZ_RESTRICT_DEFAULT_DB.getVar(), "false"))) {
currDB = Database.ALL;
currTbl = Table.SOME;
}
@@ -506,32 +633,34 @@ private void authorizeWithHiveBindings(HiveSemanticAnalyzerHookContext context,
inputHierarchy.add(connectHierarchy);
outputHierarchy.add(connectHierarchy);
break;
-
+ case COLUMN:
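+ // Authorize the specific columns recorded by the compiler for each input; when no
+ // columns were recorded, fall back to requiring access to all columns (Column.ALL).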
+ for (ReadEntity readEntity: inputs) {
+ if (readEntity.getAccessedColumns() != null && !readEntity.getAccessedColumns().isEmpty()) {
+ addColumnHierarchy(inputHierarchy, readEntity);
+ } else {
+ List<DBModelAuthorizable> entityHierarchy = new ArrayList<DBModelAuthorizable>();
+ entityHierarchy.add(hiveAuthzBinding.getAuthServer());
+ entityHierarchy.addAll(getAuthzHierarchyFromEntity(readEntity));
+ entityHierarchy.add(Column.ALL);
+ inputHierarchy.add(entityHierarchy);
+ }
+ }
+ break;
default:
throw new AuthorizationException("Unknown operation scope type " +
stmtAuthObject.getOperationScope().toString());
}
- // validate permission
- hiveAuthzBinding.authorize(stmtOperation, stmtAuthObject, getCurrentSubject(context),
- inputHierarchy, outputHierarchy);
- }
-
- private boolean isUDF(ReadEntity readEntity) {
- return readEntity.getType().equals(Type.FUNCTION);
- }
-
- private void checkUDFWhiteList(String queryUDF) throws AuthorizationException {
- String whiteList = authzConf.get(HiveAuthzConf.AuthzConfVars.AUTHZ_UDF_WHITELIST.getVar());
- if (whiteList == null) {
- return;
- }
- for (String hiveUDF : Splitter.on(",").omitEmptyStrings().trimResults().split(whiteList)) {
- if (queryUDF.equalsIgnoreCase(hiveUDF)) {
- return; // found the given UDF in whitelist
- }
+ HiveAuthzBinding binding = null;
+ try {
+ binding = getHiveBindingWithPrivilegeCache(hiveAuthzBinding, context.getUserName());
+ } catch (SemanticException e) {
+ // Fall back to the original hiveAuthzBinding if the cache-backed binding cannot be created.
+ binding = hiveAuthzBinding;
}
- throw new AuthorizationException("The UDF " + queryUDF + " is not found in the list of allowed UDFs");
+ // validate permission
+ binding.authorize(stmtOperation, stmtAuthObject, getCurrentSubject(context), inputHierarchy,
+ outputHierarchy);
}
private HiveOperation getCurrentHiveStmtOp() {
@@ -557,6 +686,7 @@ private List<DBModelAuthorizable> getAuthzHierarchyFromEntity(Entity entity) {
objectHierarchy.add(new Table(entity.getTable().getTableName()));
break;
case PARTITION:
+ case DUMMYPARTITION:
objectHierarchy.add(new Database(entity.getPartition().getTable().getDbName()));
objectHierarchy.add(new Table(entity.getPartition().getTable().getTableName()));
break;
@@ -618,8 +748,9 @@ private void addColumnHierarchy(List<List<DBModelAuthorizable>> inputHierarchy,
private void getInputHierarchyFromInputs(List<List<DBModelAuthorizable>> inputHierarchy,
Set<ReadEntity> inputs) {
for (ReadEntity readEntity: inputs) {
- // skip the tables/view that are part of expanded view definition.
- if (isChildTabForView(readEntity)) {
+ // skip the tables/view that are part of expanded view definition
+ // skip the Hive generated dummy entities created for queries like 'select <expr>'
+ if (isChildTabForView(readEntity) || isDummyEntity(readEntity)) {
continue;
}
if (readEntity.getAccessedColumns() != null && !readEntity.getAccessedColumns().isEmpty()) {
@@ -678,6 +809,8 @@ public static List<String> filterShowTables(
setOperationType(HiveOperationType.INFO).
build();
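+ // Use a binding backed by a per-user privilege cache so the per-table authorization
+ // below does not re-fetch the user's privileges for every table.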
+ HiveAuthzBinding hiveBindingWithPrivilegeCache = getHiveBindingWithPrivilegeCache(hiveAuthzBinding, userName);
+
for (String tableName : queryResult) {
// if user has privileges on table, add to filtered list, else discard
Table table = new Table(tableName);
@@ -694,14 +827,51 @@ public static List<String> filterShowTables(
inputHierarchy.add(externalAuthorizableHierarchy);
try {
- hiveAuthzBinding.authorize(operation, tableMetaDataPrivilege, subject,
+ // do the authorization by new HiveAuthzBinding with PrivilegeCache
+ hiveBindingWithPrivilegeCache.authorize(operation, tableMetaDataPrivilege, subject,
inputHierarchy, outputHierarchy);
filteredResult.add(table.getName());
} catch (AuthorizationException e) {
// squash the exception, user doesn't have privileges, so the table is
// not added to
// filtered list.
- ;
+ }
+ }
+ return filteredResult;
+ }
+
+ public static List<FieldSchema> filterShowColumns(
+ HiveAuthzBinding hiveAuthzBinding, List<FieldSchema> cols,
+ HiveOperation operation, String userName, String tableName, String dbName)
+ throws SemanticException {
+ List<FieldSchema> filteredResult = new ArrayList<FieldSchema>();
+ Subject subject = new Subject(userName);
+ HiveAuthzPrivileges columnMetaDataPrivilege =
+ HiveAuthzPrivilegesMap.getHiveAuthzPrivileges(HiveOperation.SHOWCOLUMNS);
+ HiveAuthzBinding hiveBindingWithPrivilegeCache = getHiveBindingWithPrivilegeCache(hiveAuthzBinding, userName);
+
+ Database database = new Database(dbName);
+ Table table = new Table(tableName);
+ for (FieldSchema col : cols) {
+ // if user has privileges on column, add to filtered list, else discard
+ List<List<DBModelAuthorizable>> inputHierarchy = new ArrayList<List<DBModelAuthorizable>>();
+ List<List<DBModelAuthorizable>> outputHierarchy = new ArrayList<List<DBModelAuthorizable>>();
+ List<DBModelAuthorizable> externalAuthorizableHierarchy = new ArrayList<DBModelAuthorizable>();
+ externalAuthorizableHierarchy.add(hiveAuthzBinding.getAuthServer());
+ externalAuthorizableHierarchy.add(database);
+ externalAuthorizableHierarchy.add(table);
+ externalAuthorizableHierarchy.add(new Column(col.getName()));
+ inputHierarchy.add(externalAuthorizableHierarchy);
+
+ try {
+ // do the authorization by new HiveAuthzBinding with PrivilegeCache
+ hiveBindingWithPrivilegeCache.authorize(operation, columnMetaDataPrivilege, subject,
+ inputHierarchy, outputHierarchy);
+ filteredResult.add(col);
+ } catch (AuthorizationException e) {
+ // squash the exception, user doesn't have privileges, so the column is
+ // not added to
+ // filtered list.
}
}
return filteredResult;
@@ -712,6 +882,8 @@ public static List<String> filterShowDatabases(
HiveOperation operation, String userName) throws SemanticException {
List<String> filteredResult = new ArrayList<String>();
Subject subject = new Subject(userName);
+ HiveAuthzBinding hiveBindingWithPrivilegeCache = getHiveBindingWithPrivilegeCache(hiveAuthzBinding, userName);
+
HiveAuthzPrivileges anyPrivilege = new HiveAuthzPrivileges.AuthzPrivilegeBuilder().
addInputObjectPriviledge(AuthorizableType.Column, EnumSet.of(DBModelAction.SELECT, DBModelAction.INSERT)).
addInputObjectPriviledge(AuthorizableType.URI, EnumSet.of(DBModelAction.SELECT)).
@@ -724,9 +896,8 @@ public static List filterShowDatabases(
Database database = null;
// if default is not restricted, continue
- if (DEFAULT_DATABASE_NAME.equalsIgnoreCase(dbName) &&
- "false".equalsIgnoreCase(
-hiveAuthzBinding.getAuthzConf().get(
+ if (DEFAULT_DATABASE_NAME.equalsIgnoreCase(dbName) && "false".equalsIgnoreCase(
+ hiveAuthzBinding.getAuthzConf().get(
HiveAuthzConf.AuthzConfVars.AUTHZ_RESTRICT_DEFAULT_DB.getVar(),
"false"))) {
filteredResult.add(DEFAULT_DATABASE_NAME);
@@ -745,14 +916,14 @@ public static List filterShowDatabases(
inputHierarchy.add(externalAuthorizableHierarchy);
try {
- hiveAuthzBinding.authorize(operation, anyPrivilege, subject,
+ // do the authorization by new HiveAuthzBinding with PrivilegeCache
+ hiveBindingWithPrivilegeCache.authorize(operation, anyPrivilege, subject,
inputHierarchy, outputHierarchy);
filteredResult.add(database.getName());
} catch (AuthorizationException e) {
// squash the exception, user doesn't have privileges, so the table is
// not added to
// filtered list.
- ;
}
}
@@ -772,7 +943,7 @@ private boolean isChildTabForView(ReadEntity readEntity) {
if (!readEntity.getType().equals(Type.TABLE) && !readEntity.getType().equals(Type.PARTITION)) {
return false;
}
- if ((readEntity.getParents() != null) && (readEntity.getParents().size() > 0)) {
+ if (readEntity.getParents() != null && readEntity.getParents().size() > 0) {
for (ReadEntity parentEntity : readEntity.getParents()) {
if (!parentEntity.getType().equals(Type.TABLE)) {
return false;
@@ -784,32 +955,16 @@ private boolean isChildTabForView(ReadEntity readEntity) {
}
}
- /**
- * Returns a set of hooks specified in a configuration variable.
- *
- * See getHooks(HiveAuthzConf.AuthzConfVars hookConfVar, Class<T> clazz)
- * @param hookConfVar
- * @return
- * @throws Exception
- */
- private static List<Hook> getHooks(String csHooks) throws Exception {
- return getHooks(csHooks, Hook.class);
- }
-
/**
* Returns the hooks specified in a configuration variable. The hooks are returned in a list in
* the order they were specified in the configuration variable.
*
* @param hookConfVar The configuration variable specifying a comma separated list of the hook
* class names.
- * @param clazz The super type of the hooks.
- * @return A list of the hooks cast as the type specified in clazz, in the order
- * they are listed in the value of hookConfVar
+ * @return A list of the hooks, in the order they are listed in the value of hookConfVar
* @throws Exception
*/
- private static <T extends Hook> List<T> getHooks(String csHooks,
- Class<T> clazz)
- throws Exception {
+ private static List<Hook> getHooks(String csHooks) throws Exception {
List<Hook> hooks = new ArrayList<Hook>();
if (csHooks.isEmpty()) {
@@ -829,4 +984,73 @@ private static <T extends Hook> List<T> getHooks(String csHooks,
return hooks;
}
+
+ // Check if the given entity is identified as dummy by Hive compilers.
+ private boolean isDummyEntity(Entity entity) {
+ return entity.isDummy();
+ }
+
+ // create hiveBinding with PrivilegeCache
+ private static HiveAuthzBinding getHiveBindingWithPrivilegeCache(HiveAuthzBinding hiveAuthzBinding,
+ String userName) throws SemanticException {
+ // get the original HiveAuthzBinding, and get the user's privileges by AuthorizationProvider
+ AuthorizationProvider authProvider = hiveAuthzBinding.getCurrentAuthProvider();
+ Set<String> userPrivileges = authProvider.getPolicyEngine().getPrivileges(
+ authProvider.getGroupMapping().getGroups(userName), hiveAuthzBinding.getActiveRoleSet(),
+ hiveAuthzBinding.getAuthServer());
+
+ // create PrivilegeCache using user's privileges
+ PrivilegeCache privilegeCache = new SimplePrivilegeCache(userPrivileges);
+ try {
+ // create new instance of HiveAuthzBinding whose backend provider should be SimpleCacheProviderBackend
+ return new HiveAuthzBinding(HiveAuthzBinding.HiveHook.HiveServer2, hiveAuthzBinding.getHiveConf(),
+ hiveAuthzBinding.getAuthzConf(), privilegeCache);
+ } catch (Exception e) {
+ LOG.error("Can not create HiveAuthzBinding with privilege cache.");
+ throw new SemanticException(e);
+ }
+ }
+
+ private static boolean hasPrefixMatch(List<String> prefixList, final String str) {
+ for (String prefix : prefixList) {
+ if (str.startsWith(prefix)) {
+ return true;
+ }
+ }
+
+ return false;
+ }
+
+ /**
+ * Set the SerDe URI privileges. If the URI privileges are not configured, serdeURI will be
+ * null and the URI authorization checks will be skipped.
+ */
+ private void setSerdeURI(String serdeClassName) throws SemanticException {
+ if (!serdeURIPrivilegesEnabled) {
+ return;
+ }
+
+ // A whitelisted SerDe JAR can be used by any user. The whitelist check compares Java
+ // package names; the assumption is that the cluster admin will ensure there is no Java
+ // namespace collision.
+ // e.g. org.apache.hadoop.hive.serde2 is used by Hive, and the cluster admin should
+ // ensure no custom SerDe class is introduced under the same namespace.
+ if (!hasPrefixMatch(serdeWhiteList, serdeClassName)) {
+ try {
+ CodeSource serdeSrc =
+ Class.forName(serdeClassName, true, Utilities.getSessionSpecifiedClassLoader())
+ .getProtectionDomain().getCodeSource();
+ if (serdeSrc == null) {
+ throw new SemanticException("Could not resolve the jar for Serde class " + serdeClassName);
+ }
+
+ String serdeJar = serdeSrc.getLocation().getPath();
+ if (serdeJar == null || serdeJar.isEmpty()) {
+ throw new SemanticException("Could not find the jar for Serde class " + serdeClassName + "to validate privileges");
+ }
+
+ serdeURI = parseURI(serdeSrc.getLocation().toString(), true);
+ } catch (ClassNotFoundException e) {
+ throw new SemanticException("Error retrieving Serde class:" + e.getMessage(), e);
+ }
+ }
+ }
}
diff --git a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/HiveAuthzBindingSessionHook.java b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/HiveAuthzBindingSessionHook.java
index 0fa4a87fe..17b900341 100644
--- a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/HiveAuthzBindingSessionHook.java
+++ b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/HiveAuthzBindingSessionHook.java
@@ -57,6 +57,7 @@ public class HiveAuthzBindingSessionHook
ConfVars.HIVE_SECURITY_COMMAND_WHITELIST.varname,
ConfVars.HIVE_AUTHORIZATION_TASK_FACTORY.varname,
ConfVars.HIVE_CAPTURE_TRANSFORM_ENTITY.varname,
+ ConfVars.HIVERELOADABLEJARS.varname,
HiveAuthzConf.HIVE_ACCESS_CONF_URL,
HiveAuthzConf.HIVE_SENTRY_CONF_URL,
HiveAuthzConf.HIVE_ACCESS_SUBJECT_NAME,
@@ -84,7 +85,6 @@ public SentryHiveAuthorizerImpl(HiveAccessController accessController,
@Override
public void applyAuthorizationConfigPolicy(HiveConf conf) {
- return;
}
}
@@ -95,7 +95,7 @@ public void applyAuthorizationConfigPolicy(HiveConf conf) {
* 2. Set additional config properties required for auth
* set HIVE_EXTENDED_ENITITY_CAPTURE = true
* set SCRATCHDIRPERMISSION = 700
- * 3. Add sensetive config parameters to the config restrict list so that they can't be overridden by users
+ * 3. Add sensitive config parameters to the config restrict list so that they can't be overridden by users
*/
@Override
public void run(HiveSessionHookContext sessionHookContext) throws HiveSQLException {
@@ -104,7 +104,11 @@ public void run(HiveSessionHookContext sessionHookContext) throws HiveSQLException {
appendConfVar(sessionConf, ConfVars.SEMANTIC_ANALYZER_HOOK.varname,
SEMANTIC_HOOK);
- sessionConf.setVar(ConfVars.HIVE_SECURITY_COMMAND_WHITELIST, "set");
+ HiveAuthzConf authzConf = HiveAuthzBindingHook.loadAuthzConf(sessionConf);
+ String commandWhitelist =
+ authzConf.get(HiveAuthzConf.HIVE_SENTRY_SECURITY_COMMAND_WHITELIST,
+ HiveAuthzConf.HIVE_SENTRY_SECURITY_COMMAND_WHITELIST_DEFAULT);
+ sessionConf.setVar(ConfVars.HIVE_SECURITY_COMMAND_WHITELIST, commandWhitelist);
sessionConf.setVar(ConfVars.SCRATCHDIRPERMISSION, SCRATCH_DIR_PERMISSIONS);
sessionConf.setBoolVar(ConfVars.HIVE_CAPTURE_TRANSFORM_ENTITY, true);
diff --git a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/SentryHiveAuthorizationTaskFactoryImpl.java b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/SentryHiveAuthorizationTaskFactoryImpl.java
index 5898b7e27..caf32cfa3 100644
--- a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/SentryHiveAuthorizationTaskFactoryImpl.java
+++ b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/SentryHiveAuthorizationTaskFactoryImpl.java
@@ -34,7 +34,6 @@
import org.apache.hadoop.hive.ql.metadata.Hive;
import org.apache.hadoop.hive.ql.parse.ASTNode;
import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
-import org.apache.hadoop.hive.ql.parse.DDLSemanticAnalyzer;
import org.apache.hadoop.hive.ql.parse.HiveParser;
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.hive.ql.parse.authorization.HiveAuthorizationTaskFactory;
@@ -61,7 +60,7 @@ public class SentryHiveAuthorizationTaskFactoryImpl implements HiveAuthorization
private static final Logger LOG = LoggerFactory.getLogger(SentryHiveAuthorizationTaskFactoryImpl.class);
- public SentryHiveAuthorizationTaskFactoryImpl(HiveConf conf, Hive db) {
+ public SentryHiveAuthorizationTaskFactoryImpl(HiveConf conf, Hive db) { //NOPMD
}
@@ -162,7 +161,7 @@ public Task<? extends Serializable> createRevokeTask(ASTNode ast, HashSet<ReadEntity> inputs,
@@ ... @@ public Task<? extends Serializable> createShowGrantTask(ASTNode ast, Path result
PrincipalDesc principalDesc = new PrincipalDesc(principalName, type);
// Partition privileges are not supported by Sentry
- List<String> cols = null;
if (ast.getChildCount() > 1) {
ASTNode child = (ASTNode) ast.getChild(1);
if (child.getToken().getType() == HiveParser.TOK_PRIV_OBJECT_COL) {
privHiveObj = analyzePrivilegeObject(child);
- cols = privHiveObj.getColumns();
- }else {
+ } else {
throw new SemanticException("Unrecognized Token: " + child.getToken().getType());
}
}
diff --git a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/SentryIniPolicyFileFormatter.java b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/SentryIniPolicyFileFormatter.java
new file mode 100644
index 000000000..45747dfdd
--- /dev/null
+++ b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/SentryIniPolicyFileFormatter.java
@@ -0,0 +1,161 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.sentry.binding.hive;
+
+import java.io.File;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.sentry.policy.common.PolicyConstants;
+import org.apache.sentry.provider.common.PolicyFileConstants;
+import org.apache.sentry.provider.common.ProviderBackendContext;
+import org.apache.sentry.provider.file.SimpleFileProviderBackend;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.google.common.base.Charsets;
+import com.google.common.base.Joiner;
+import com.google.common.collect.Lists;
+import com.google.common.collect.Maps;
+import com.google.common.collect.Sets;
+import com.google.common.collect.Table;
+import com.google.common.io.Files;
+
+/**
+ * SentryIniPolicyFileFormatter parses and writes Sentry mapping data files in the ini
+ * format, e.g.:
+ * [groups]
+ * group1=role1
+ * [roles]
+ * role1=server=server1
+ */
+public class SentryIniPolicyFileFormatter implements SentryPolicyFileFormatter {
+
+ private static final Logger LOGGER = LoggerFactory.getLogger(SentryIniPolicyFileFormatter.class);
+
+ private static final String NL = System.getProperty("line.separator", "\n");
+
+ /**
+ * Write the sentry mapping data to ini file.
+ *
+ * @param resourcePath
+ * The path of the output file
+ * @param sentryMappingData
+ * The map for sentry mapping data, eg:
+ * for the following mapping data:
+ * group1=role1,role2
+ * group2=role2,role3
+ * role1=server=server1->db=db1
+ * role2=server=server1->db=db1->table=tbl1,server=server1->db=db1->table=tbl2
+ * role3=server=server1->url=hdfs://localhost/path
+ *
+ * The sentryMappingData will be passed in as:
+ * {
+ * groups={group1=[role1, role2], group2=[role2, role3]},
+ * roles={role1=[server=server1->db=db1],
+ * role2=[server=server1->db=db1->table=tbl1,server=server1->db=db1->table=tbl2],
+ * role3=[server=server1->url=hdfs://localhost/path]
+ * }
+ * }
+ */
+ @Override
+ public void write(String resourcePath, Map<String, Map<String, Set<String>>> sentryMappingData)
+ throws Exception {
+ File destFile = new File(resourcePath);
+ if (destFile.exists() && !destFile.delete()) {
+ throw new IllegalStateException("Unable to delete " + destFile);
+ }
+ String contents = Joiner
+ .on(NL)
+ .join(
+ generateSection(PolicyFileConstants.GROUPS,
+ sentryMappingData.get(PolicyFileConstants.GROUPS)),
+ generateSection(PolicyFileConstants.ROLES,
+ sentryMappingData.get(PolicyFileConstants.ROLES)),
+ "");
+ LOGGER.info("Writing policy file to " + destFile + ":\n" + contents);
+ Files.write(contents, destFile, Charsets.UTF_8);
+ }
+
+ /**
+ * Parse the ini file and return a map with all data.
+ *
+ * @param resourcePath
+ * The path of the input file
+ * @param conf
+ * The configuration info
+ * @return the sentry mapping data as a nested map.
+ */
+ @Override
+ public Map<String, Map<String, Set<String>>> parse(String resourcePath, Configuration conf)
+ throws Exception {
+ Map<String, Map<String, Set<String>>> resultMap = Maps.newHashMap();
+ // SimpleFileProviderBackend is used to parse the ini file
+ SimpleFileProviderBackend policyFileBackend = new SimpleFileProviderBackend(conf, resourcePath);
+ ProviderBackendContext context = new ProviderBackendContext();
+ context.setAllowPerDatabase(true);
+ // parse the ini file
+ policyFileBackend.initialize(context);
+
+ // SimpleFileProviderBackend parses the input file and exposes the data as a Table.
+ Table<String, String, Set<String>> groupRolePrivilegeTable = policyFileBackend
+ .getGroupRolePrivilegeTable();
+ Map<String, Set<String>> groupRolesMap = Maps.newHashMap();
+ Map<String, Set<String>> rolePrivilegesMap = Maps.newHashMap();
+ for (String groupName : groupRolePrivilegeTable.rowKeySet()) {
+ for (String roleName : groupRolePrivilegeTable.columnKeySet()) {
+ // get the roles set for the current groupName
+ Set<String> tempRoles = groupRolesMap.get(groupName);
+ if (tempRoles == null) {
+ tempRoles = Sets.newHashSet();
+ }
+ Set<String> privileges = groupRolePrivilegeTable.get(groupName, roleName);
+ // If no privilege exists for [group, role], the [group, role] info will be
+ // discarded.
+ if (privileges != null) {
+ // update [group, role] mapping data
+ tempRoles.add(roleName);
+ groupRolesMap.put(groupName, tempRoles);
+ // update [role, privilege] mapping data
+ rolePrivilegesMap.put(roleName, privileges);
+ }
+ }
+ }
+ resultMap.put(PolicyFileConstants.GROUPS, groupRolesMap);
+ resultMap.put(PolicyFileConstants.ROLES, rolePrivilegesMap);
+ return resultMap;
+ }
+
+ // generate the ini section according to the mapping data.
+ private String generateSection(String name, Map<String, Set<String>> mappingData) {
+ if (mappingData.isEmpty()) {
+ return "";
+ }
+ List<String> lines = Lists.newArrayList();
+ lines.add("[" + name + "]");
+ for (Map.Entry<String, Set<String>> entry : mappingData.entrySet()) {
+ lines.add(PolicyConstants.KV_JOINER.join(entry.getKey(),
+ PolicyConstants.ROLE_JOINER.join(entry.getValue())));
+ }
+ return Joiner.on(NL).join(lines);
+ }
+
+}
diff --git a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/SentryOnFailureHookContext.java b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/SentryOnFailureHookContext.java
index a38065188..c101a4fa0 100644
--- a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/SentryOnFailureHookContext.java
+++ b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/SentryOnFailureHookContext.java
@@ -38,61 +38,61 @@ public interface SentryOnFailureHookContext {
/**
* @return the command attempted by user
*/
- public String getCommand();
+ String getCommand();
/**
* @return the set of read entities
*/
- public Set<ReadEntity> getInputs();
+ Set<ReadEntity> getInputs();
/**
* @return the set of write entities
*/
- public Set<WriteEntity> getOutputs();
+ Set