HIVE-7673 : Authorization api: missing privilege objects in create table/view (Thejas Nair, reviewed by Jason Dere)

git-svn-id: https://svn.apache.org/repos/asf/hive/trunk@1620949 13f79535-47bb-0310-9956-ffa450edef68
Thejas Nair committed Aug 27, 2014
1 parent c0cfefe commit b71adbd
Showing 200 changed files with 1,108 additions and 186 deletions.
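In short: before this fix, compiling CREATE TABLE or CREATE VIEW handed the authorization API no privilege object for the target database, and the new table's name could reach plugins unqualified. The analyzer changes below register both the database and the db-qualified table/view as outputs. A minimal sketch of what a plugin now receives, using API names from the tests in this commit (the loop itself is illustrative, not part of the patch):

    // Outputs passed to HiveAuthorizer.checkPrivileges(...) after compiling
    // "CREATE TABLE db1.t1 (i INT)" -- one DATABASE object plus one
    // TABLE_OR_VIEW object, both carrying the database name.
    for (HivePrivilegeObject output : outputs) {
      System.out.println(output.getType() + " -> db=" + output.getDbname()
          + " object=" + output.getObjectName());
    }
    // Per the new tests: DATABASE      -> db=db1
    //                    TABLE_OR_VIEW -> db=db1, object=t1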
2 changes: 1 addition & 1 deletion contrib/src/test/results/clientnegative/serde_regex.q.out
@@ -56,7 +56,7 @@ STAGE PLANS:
serde properties:
input.regex ([^ ]*) ([^ ]*) ([^ ]*) (-|\[[^\]]*\]) ([^ "]*|"[^"]*") (-|[0-9]*) (-|[0-9]*)(?: ([^ "]*|"[^"]*") ([^ "]*|"[^"]*"))?
output.format.string %1$s %2$s %3$s %4$s %5$s %6$s %7$s %8$s %9$s
- name: serde_regex
+ name: default.serde_regex

PREHOOK: query: CREATE TABLE serde_regex(
host STRING,
@@ -22,7 +22,7 @@ STAGE PLANS:
columns: key int, value string
input format: org.apache.hadoop.hive.contrib.fileformat.base64.Base64TextInputFormat
output format: org.apache.hadoop.hive.contrib.fileformat.base64.Base64TextOutputFormat
- name: base64_test
+ name: default.base64_test

PREHOOK: query: CREATE TABLE base64_test(key INT, value STRING) STORED AS
INPUTFORMAT 'org.apache.hadoop.hive.contrib.fileformat.base64.Base64TextInputFormat'
2 changes: 1 addition & 1 deletion contrib/src/test/results/clientpositive/serde_regex.q.out
@@ -48,7 +48,7 @@ STAGE PLANS:
serde properties:
input.regex ([^ ]*) ([^ ]*) ([^ ]*) (-|\[[^\]]*\]) ([^ "]*|"[^"]*") (-|[0-9]*) (-|[0-9]*)(?: ([^ "]*|"[^"]*") ([^ "]*|"[^"]*"))?
output.format.string %1$s %2$s %3$s %4$s %5$s %6$s %7$s %8$s %9$s
- name: serde_regex
+ name: default.serde_regex

PREHOOK: query: CREATE TABLE serde_regex(
host STRING,
3 changes: 1 addition & 2 deletions hbase-handler/src/test/results/negative/cascade_dbdrop.q.out
@@ -20,7 +20,7 @@ WITH SERDEPROPERTIES ("hbase.columns.mapping" = ":key,cf:string")
TBLPROPERTIES ("hbase.table.name" = "hbase_table_0")
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:hbasedb
- PREHOOK: Output: hbaseDB@hbaseDB.hbase_table_0
+ PREHOOK: Output: hbaseDB@hbase_table_0
POSTHOOK: query: -- EXCLUDE_HADOOP_MAJOR_VERSIONS(0.20, 0.20S)
-- Hadoop 0.23 changes the behavior FsShell on Exit Codes
-- In Hadoop 0.20
@@ -37,7 +37,6 @@ WITH SERDEPROPERTIES ("hbase.columns.mapping" = ":key,cf:string")
TBLPROPERTIES ("hbase.table.name" = "hbase_table_0")
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:hbasedb
POSTHOOK: Output: [email protected]_table_0
POSTHOOK: Output: hbaseDB@hbase_table_0
Found 3 items
#### A masked pattern was here ####
@@ -21,6 +21,7 @@
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
+ import static org.junit.Assert.fail;
import static org.mockito.Matchers.any;
import static org.mockito.Mockito.reset;
import static org.mockito.Mockito.verify;
@@ -140,6 +141,47 @@ public void testInputAllColumnsUsed() throws HiveAuthzPluginException, HiveAccessControlException,
getSortedList(tableObj.getColumns()));
}

+ @Test
+ public void testCreateTableWithDb() throws HiveAuthzPluginException, HiveAccessControlException,
+     CommandNeedRetryException {
+   final String newTable = "ctTableWithDb";
+   checkCreateViewOrTableWithDb(newTable, "create table " + dbName + "." + newTable + "(i int)");
+ }
+
+ @Test
+ public void testCreateViewWithDb() throws HiveAuthzPluginException, HiveAccessControlException,
+     CommandNeedRetryException {
+   final String newTable = "ctViewWithDb";
+   checkCreateViewOrTableWithDb(newTable, "create table " + dbName + "." + newTable + "(i int)");
+ }
+
+ private void checkCreateViewOrTableWithDb(String newTable, String cmd)
+     throws HiveAuthzPluginException, HiveAccessControlException {
+   reset(mockedAuthorizer);
+   int status = driver.compile(cmd);
+   assertEquals(0, status);
+
+   List<HivePrivilegeObject> outputs = getHivePrivilegeObjectInputs().getRight();
+   assertEquals("num outputs", 2, outputs.size());
+   for (HivePrivilegeObject output : outputs) {
+     switch (output.getType()) {
+     case DATABASE:
+       assertTrue("database name", output.getDbname().equalsIgnoreCase(dbName));
+       break;
+     case TABLE_OR_VIEW:
+       assertTrue("database name", output.getDbname().equalsIgnoreCase(dbName));
+       assertEqualsIgnoreCase("table name", output.getObjectName(), newTable);
+       break;
+     default:
+       fail("Unexpected type : " + output.getType());
+     }
+   }
+ }
+
+ private void assertEqualsIgnoreCase(String msg, String expected, String actual) {
+   assertEquals(msg, expected.toLowerCase(), actual.toLowerCase());
+ }
+
@Test
public void testInputNoColumnsUsed() throws HiveAuthzPluginException, HiveAccessControlException,
CommandNeedRetryException {
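getHivePrivilegeObjectInputs() is assumed here to return the input/output lists captured from the mocked authorizer; it is defined elsewhere in this test class and not shown in this diff. For "create table db1.ctTableWithDb(i int)", the captured outputs the new tests iterate over look roughly like:

    // Captured outputs (order is not asserted, hence the switch on getType()):
    //   HivePrivilegeObject type=DATABASE,      dbname=db1
    //   HivePrivilegeObject type=TABLE_OR_VIEW, dbname=db1, objectName=ctTableWithDb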
@@ -48,6 +48,7 @@
import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluatorFactory;
import org.apache.hadoop.hive.ql.exec.FetchTask;
import org.apache.hadoop.hive.ql.exec.Task;
+ import org.apache.hadoop.hive.ql.exec.Utilities;
import org.apache.hadoop.hive.ql.hooks.LineageInfo;
import org.apache.hadoop.hive.ql.hooks.ReadEntity;
import org.apache.hadoop.hive.ql.hooks.WriteEntity;
@@ -61,7 +62,6 @@
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
import org.apache.hadoop.hive.ql.plan.ListBucketingCtx;
import org.apache.hadoop.hive.ql.plan.PlanUtils;
- import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;
import org.apache.hadoop.hive.serde.serdeConstants;
import org.apache.hadoop.hive.serde2.io.DateWritable;
@@ -317,7 +317,7 @@ public static String[] getQualifiedTableName(ASTNode tabNameNode) throws SemanticException {
return new String[] {dbName, tableName};
}
String tableName = unescapeIdentifier(tabNameNode.getChild(0).getText());
- return new String[]{SessionState.get().getCurrentDatabase(), tableName};
+ return Utilities.getDbTableName(tableName);
}

public static String getDotName(String[] qname) throws SemanticException {
@@ -1218,6 +1218,10 @@ protected Table getTable(String[] qualified) throws SemanticException {
return getTable(qualified[0], qualified[1], true);
}

+ protected Table getTable(String[] qualified, boolean throwException) throws SemanticException {
+   return getTable(qualified[0], qualified[1], throwException);
+ }
+
protected Table getTable(String tblName) throws SemanticException {
return getTable(null, tblName, true);
}
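The key change above is routing single-part names through Utilities.getDbTableName instead of always prefixing the session's current database. The method body is not part of this diff; the following sketch is an assumption, written to match how getQualifiedTableName uses it:

    // Sketch: resolve "t" against the current database, split "db.t",
    // and reject names with more than one dot.
    public static String[] getDbTableName(String dbtable) throws SemanticException {
      if (dbtable == null) {
        return new String[2];
      }
      String[] names = dbtable.split("\\.");
      switch (names.length) {
        case 2:  // already qualified: {db, table}
          return names;
        case 1:  // unqualified: use the session's current database
          return new String[] { SessionState.get().getCurrentDatabase(), dbtable };
        default:
          throw new SemanticException("Invalid table name " + dbtable);
      }
    }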
48 changes: 27 additions & 21 deletions ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
@@ -9416,7 +9416,7 @@ public void analyzeInternal(ASTNode ast) throws SemanticException {
}
viewSelect = child;
// prevent view from referencing itself
- viewsExpanded.add(SessionState.get().getCurrentDatabase() + "." + createVwDesc.getViewName());
+ viewsExpanded.add(createVwDesc.getViewName());
}

// continue analyzing from the child ASTNode.
@@ -9998,7 +9998,9 @@ private Map<String, String> addDefaultProperties(Map<String, String> tblProp) {
*/
private ASTNode analyzeCreateTable(ASTNode ast, QB qb)
throws SemanticException {
- String tableName = getUnescapedName((ASTNode) ast.getChild(0));
+ String[] qualifiedTabName = getQualifiedTableName((ASTNode) ast.getChild(0));
+ String dbDotTab = getDotName(qualifiedTabName);
+
String likeTableName = null;
List<FieldSchema> cols = new ArrayList<FieldSchema>();
List<FieldSchema> partCols = new ArrayList<FieldSchema>();
@@ -10024,7 +10026,7 @@ private ASTNode analyzeCreateTable(ASTNode ast, QB qb)
RowFormatParams rowFormatParams = new RowFormatParams();
StorageFormat storageFormat = new StorageFormat(conf);

- LOG.info("Creating table " + tableName + " position="
+ LOG.info("Creating table " + dbDotTab + " position="
+ ast.getCharPositionInLine());
int numCh = ast.getChildCount();

@@ -10155,7 +10157,7 @@ private ASTNode analyzeCreateTable(ASTNode ast, QB qb)
// check for existence of table
if (ifNotExists) {
try {
- Table table = getTable(tableName, false);
+ Table table = getTable(qualifiedTabName, false);
if (table != null) { // table exists
return null;
}
@@ -10165,11 +10167,7 @@ private ASTNode analyzeCreateTable(ASTNode ast, QB qb)
}
}

- String[] qualified = Hive.getQualifiedNames(tableName);
- String dbName = qualified.length == 1 ? SessionState.get().getCurrentDatabase() : qualified[0];
- Database database = getDatabase(dbName);
- outputs.add(new WriteEntity(database, WriteEntity.WriteType.DDL_SHARED));
- outputs.add(new WriteEntity(new Table(dbName, tableName), WriteEntity.WriteType.DDL_NO_LOCK));
+ addDbAndTabToOutputs(qualifiedTabName);

if (isTemporary) {
if (partCols.size() > 0) {
@@ -10198,7 +10196,7 @@ private ASTNode analyzeCreateTable(ASTNode ast, QB qb)
case CREATE_TABLE: // REGULAR CREATE TABLE DDL
tblProps = addDefaultProperties(tblProps);

- crtTblDesc = new CreateTableDesc(tableName, isExt, isTemporary, cols, partCols,
+ crtTblDesc = new CreateTableDesc(dbDotTab, isExt, isTemporary, cols, partCols,
bucketCols, sortCols, numBuckets, rowFormatParams.fieldDelim,
rowFormatParams.fieldEscape,
rowFormatParams.collItemDelim, rowFormatParams.mapKeyDelim, rowFormatParams.lineDelim,
@@ -10227,7 +10225,7 @@ private ASTNode analyzeCreateTable(ASTNode ast, QB qb)
+ "and source table in CREATE TABLE LIKE is partitioned.");
}
}
- CreateTableLikeDesc crtTblLikeDesc = new CreateTableLikeDesc(tableName, isExt, isTemporary,
+ CreateTableLikeDesc crtTblLikeDesc = new CreateTableLikeDesc(dbDotTab, isExt, isTemporary,
storageFormat.getInputFormat(), storageFormat.getOutputFormat(), location,
storageFormat.getSerde(), storageFormat.getSerdeProps(), tblProps, ifNotExists,
likeTableName);
@@ -10240,21 +10238,20 @@ private ASTNode analyzeCreateTable(ASTNode ast, QB qb)

// Verify that the table does not already exist
try {
- Table dumpTable = db.newTable(tableName);
+ Table dumpTable = db.newTable(dbDotTab);
if (null != db.getTable(dumpTable.getDbName(), dumpTable.getTableName(), false)) {
- throw new SemanticException(ErrorMsg.TABLE_ALREADY_EXISTS.getMsg(tableName));
+ throw new SemanticException(ErrorMsg.TABLE_ALREADY_EXISTS.getMsg(dbDotTab));
}
} catch (HiveException e) {
throw new SemanticException(e);
}

tblProps = addDefaultProperties(tblProps);

- crtTblDesc = new CreateTableDesc(dbName, tableName, isExt, isTemporary, cols, partCols,
-     bucketCols, sortCols, numBuckets, rowFormatParams.fieldDelim,
-     rowFormatParams.fieldEscape,
-     rowFormatParams.collItemDelim, rowFormatParams.mapKeyDelim, rowFormatParams.lineDelim,
-     comment, storageFormat.getInputFormat(),
+ crtTblDesc = new CreateTableDesc(qualifiedTabName[0], dbDotTab, isExt, isTemporary, cols,
+     partCols, bucketCols, sortCols, numBuckets, rowFormatParams.fieldDelim,
+     rowFormatParams.fieldEscape, rowFormatParams.collItemDelim, rowFormatParams.mapKeyDelim,
+     rowFormatParams.lineDelim, comment, storageFormat.getInputFormat(),
storageFormat.getOutputFormat(), location, storageFormat.getSerde(),
storageFormat.getStorageHandler(), storageFormat.getSerdeProps(), tblProps, ifNotExists,
skewedColNames, skewedValues);
@@ -10271,9 +10268,17 @@ private ASTNode analyzeCreateTable(ASTNode ast, QB qb)
return null;
}

+ private void addDbAndTabToOutputs(String[] qualifiedTabName) throws SemanticException {
+   Database database = getDatabase(qualifiedTabName[0]);
+   outputs.add(new WriteEntity(database, WriteEntity.WriteType.DDL_SHARED));
+   outputs.add(new WriteEntity(new Table(qualifiedTabName[0], qualifiedTabName[1]),
+       WriteEntity.WriteType.DDL_NO_LOCK));
+ }
+
private ASTNode analyzeCreateView(ASTNode ast, QB qb)
throws SemanticException {
- String tableName = getUnescapedName((ASTNode) ast.getChild(0));
+ String[] qualTabName = getQualifiedTableName((ASTNode) ast.getChild(0));
+ String dbDotTable = getDotName(qualTabName);
List<FieldSchema> cols = null;
boolean ifNotExists = false;
boolean orReplace = false;
@@ -10283,7 +10288,7 @@ private ASTNode analyzeCreateView(ASTNode ast, QB qb)
Map<String, String> tblProps = null;
List<String> partColNames = null;

- LOG.info("Creating view " + tableName + " position="
+ LOG.info("Creating view " + dbDotTable + " position="
+ ast.getCharPositionInLine());
int numCh = ast.getChildCount();
for (int num = 1; num < numCh; num++) {
@@ -10326,13 +10331,14 @@ private ASTNode analyzeCreateView(ASTNode ast, QB qb)
}

createVwDesc = new CreateViewDesc(
-     tableName, cols, comment, tblProps, partColNames,
+     dbDotTable, cols, comment, tblProps, partColNames,
ifNotExists, orReplace, isAlterViewAs);

unparseTranslator.enable();
rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
createVwDesc), conf));

+ addDbAndTabToOutputs(qualTabName);
return selectStmt;
}

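Net effect in SemanticAnalyzer: every CREATE TABLE, CTAS, CREATE TABLE LIKE, and CREATE VIEW path now funnels through addDbAndTabToOutputs, so the plan always carries a db-qualified pair of write entities. Schematically, for CREATE VIEW db1.v1 the method above records:

    // qualifiedTabName = {"db1", "v1"}
    //   WriteEntity(database "db1", WriteType.DDL_SHARED)         -> lets auth check db ownership
    //   WriteEntity(new Table("db1", "v1"), WriteType.DDL_NO_LOCK) -> the object being created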
17 changes: 13 additions & 4 deletions ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java
@@ -21,6 +21,7 @@
import java.io.Serializable;
import java.util.ArrayList;
import java.util.HashSet;
+ import java.util.Iterator;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;
@@ -227,11 +228,19 @@ public void compile(final ParseContext pCtx, final List<Task<? extends Serializable>>

crtTblDesc.validate(conf);

- // Clear the output for CTAS since we don't need the output from the
- // mapredWork, the
+ // clear the mapredWork output file from outputs for CTAS
// DDLWork at the tail of the chain will have the output
- outputs.clear();

+ Iterator<WriteEntity> outIter = outputs.iterator();
+ while (outIter.hasNext()) {
+   switch (outIter.next().getType()) {
+   case DFS_DIR:
+   case LOCAL_DIR:
+     outIter.remove();
+     break;
+   default:
+     break;
+   }
+ }
Task<? extends Serializable> crtTblTask = TaskFactory.get(new DDLWork(
inputs, outputs, crtTblDesc), conf);

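Where outputs.clear() used to wipe everything — including the new database/table entities — the loop above removes only the query plan's directory sinks. On Java 8+ the same filter could be written with removeIf; a sketch, assuming the Entity.Type constants used in the switch above (the commit itself predates Java 8, hence the explicit Iterator):

    // Drop only filesystem-directory outputs of the CTAS select; keep the
    // DATABASE/TABLE write entities for the trailing DDLWork and for auth.
    outputs.removeIf(e -> e.getType() == Entity.Type.DFS_DIR
        || e.getType() == Entity.Type.LOCAL_DIR);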
2 changes: 1 addition & 1 deletion ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java
@@ -347,7 +347,7 @@ public static TableDesc getTableDesc(CreateTableDesc crtTblDesc, String cols,

if (crtTblDesc.getTableName() != null && crtTblDesc.getDatabaseName() != null) {
properties.setProperty(org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_NAME,
-         crtTblDesc.getDatabaseName() + "." + crtTblDesc.getTableName());
+         crtTblDesc.getTableName());
}

if (crtTblDesc.getTblProps() != null) {
@@ -278,8 +278,9 @@ public HivePrivilegeObjectType getObjectType() {
(SEL_NOGRANT_AR, null));
op2Priv.put(HiveOperationType.SHOW_TBLPROPERTIES, PrivRequirement.newIOPrivRequirement
(SEL_NOGRANT_AR, null));
- op2Priv.put(HiveOperationType.CREATETABLE_AS_SELECT, PrivRequirement.newIOPrivRequirement
-     (SEL_NOGRANT_AR, null));
+ op2Priv.put(HiveOperationType.CREATETABLE_AS_SELECT, PrivRequirement.newPrivRequirementList(
+     new PrivRequirement(SEL_NOGRANT_AR, IOType.INPUT),
+     new PrivRequirement(OWNER_PRIV_AR, HivePrivilegeObjectType.DATABASE)));

// QUERY,LOAD op can contain an insert & overwrite,
// require delete privilege if this is an insert-overwrite
@@ -302,8 +303,9 @@

// for now allow only create-view with 'select with grant'
// the owner will also have select with grant privileges on new view
- op2Priv.put(HiveOperationType.CREATEVIEW, PrivRequirement.newIOPrivRequirement
-     (SEL_GRANT_AR, null));
+ op2Priv.put(HiveOperationType.CREATEVIEW, PrivRequirement.newPrivRequirementList(
+     new PrivRequirement(SEL_GRANT_AR, IOType.INPUT),
+     new PrivRequirement(OWNER_PRIV_AR, HivePrivilegeObjectType.DATABASE)));

op2Priv.put(HiveOperationType.SHOWFUNCTIONS, PrivRequirement.newIOPrivRequirement
(null, null));
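Reading the new requirement lists: the first PrivRequirement scopes the SELECT check to IOType.INPUT (the source tables), while the second ties OWNER_PRIV_AR to the DATABASE-typed output the analyzer now registers. In effect (an interpretation of the mapping above, not code from this commit):

    // CREATETABLE_AS_SELECT: SELECT (no grant option needed) on each input,
    //                        plus ownership of the target database.
    // CREATEVIEW:            SELECT WITH GRANT OPTION on each input,
    //                        plus ownership of the target database.

The two new negative tests below exercise exactly the ownership half of these requirements.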
13 changes: 13 additions & 0 deletions ql/src/test/queries/clientnegative/authorization_create_view.q
@@ -0,0 +1,13 @@
+set hive.test.authz.sstd.hs2.mode=true;
+set hive.security.authorization.manager=org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactoryForTest;
+set hive.security.authenticator.manager=org.apache.hadoop.hive.ql.security.SessionStateConfigUserAuthenticator;
+set hive.security.authorization.enabled=true;
+
+set user.name=user3;
+create database db1;
+use db1;
+create table tab1(i int);
+
+set user.name=user4;
+-- create view should fail as view is being created in db that it does not own
+create view db1.view1(i) as select * from tab1;
14 changes: 14 additions & 0 deletions ql/src/test/queries/clientnegative/authorization_ctas2.q
@@ -0,0 +1,14 @@
+set hive.test.authz.sstd.hs2.mode=true;
+set hive.security.authorization.manager=org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactoryForTest;
+set hive.security.authenticator.manager=org.apache.hadoop.hive.ql.security.SessionStateConfigUserAuthenticator;
+set hive.security.authorization.enabled=true;
+
+set user.name=user_dbowner;
+-- check ctas without db ownership
+create database ctas_auth;
+
+set user.name=user_unauth;
+create table t1(i int);
+use ctas_auth;
+show tables;
+create table t2 as select * from default.t1;
4 changes: 2 additions & 2 deletions ql/src/test/queries/clientpositive/input46.q
@@ -1,6 +1,6 @@
create database if not exists table_in_database_creation;
create table table_in_database_creation.test1 as select * from src limit 1;
-create table `table_in_database_creation.test2` as select * from src limit 1;
+create table `table_in_database_creation`.`test2` as select * from src limit 1;
create table table_in_database_creation.test3 (a string);
-create table `table_in_database_creation.test4` (a string);
+create table `table_in_database_creation`.`test4` (a string);
drop database table_in_database_creation cascade;
@@ -3,4 +3,4 @@ PREHOOK: type: DROPVIEW
POSTHOOK: query: DROP VIEW testView
POSTHOOK: type: DROPVIEW
FAILED: SemanticException [Error 10216]: Cannot ALTER VIEW AS SELECT if view currently does not exist
-The following view does not exist: testView
+The following view does not exist: default.testView