
Commit

HIVE-16989 : Fix some issues identified by lgtm.com (Malcolm Taylor via Ashutosh Chauhan)

Signed-off-by: Ashutosh Chauhan <[email protected]>
malcolmtaylor authored and ashutoshc committed Jul 14, 2017
1 parent adca35a commit 094c1d5
Showing 11 changed files with 181 additions and 56 deletions.
3 changes: 1 addition & 2 deletions beeline/src/java/org/apache/hive/beeline/BeeLine.java
@@ -2182,8 +2182,7 @@ private Driver[] scanDriversOLD(String line) {
       output(getColorBuffer().pad(loc("scanning", f.getAbsolutePath()), 60),
           false);

-      try {
-        ZipFile zf = new ZipFile(f);
+      try (ZipFile zf = new ZipFile(f)) {
         int total = zf.size();
         int index = 0;

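This is the recurring pattern in this commit: lgtm.com flags resources opened in a plain try block, because an exception thrown before close() leaks the handle. A minimal standalone sketch of the before/after (hypothetical helper class, not part of the commit):

    import java.io.IOException;
    import java.util.zip.ZipFile;

    class ZipScanSketch {
      // Leaky: if size() throws, zf is never closed.
      static int countEntriesLeaky(String path) throws IOException {
        ZipFile zf = new ZipFile(path);
        int n = zf.size();
        zf.close(); // skipped whenever size() throws
        return n;
      }

      // Fixed: try-with-resources closes zf on every exit path,
      // exceptional or not (ZipFile implements Closeable since Java 7).
      static int countEntries(String path) throws IOException {
        try (ZipFile zf = new ZipFile(path)) {
          return zf.size();
        }
      }
    }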
@@ -43,8 +43,7 @@ public StaticPermanentFunctionChecker(Configuration conf) {
       LOG.warn("Could not find UDF whitelist in configuration: " + PERMANENT_FUNCTIONS_LIST);
       return;
     }
-    try {
-      BufferedReader r = new BufferedReader(new InputStreamReader(logger.openStream()));
+    try (BufferedReader r = new BufferedReader(new InputStreamReader(logger.openStream()))) {
       String klassName = r.readLine();
       while (klassName != null) {
         try {
@@ -216,8 +216,8 @@ public String getMetaStoreSchemaVersion(MetaStoreConnectionInfo connectionInfo)
       versionQuery = "select t.SCHEMA_VERSION from VERSION t";
     }
     try (Connection metastoreDbConnection =
-        HiveSchemaHelper.getConnectionToMetastore(connectionInfo)) {
-      Statement stmt = metastoreDbConnection.createStatement();
+        HiveSchemaHelper.getConnectionToMetastore(connectionInfo); Statement stmt =
+        metastoreDbConnection.createStatement()) {
       ResultSet res = stmt.executeQuery(versionQuery);
       if (!res.next()) {
         throw new HiveMetaException("Could not find version info in metastore VERSION table.");
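This hunk moves the Statement into the same try-with-resources header as the Connection. Resources declared in one header are closed in reverse declaration order, so the statement is closed before the connection that produced it. A standalone sketch, assuming a hypothetical JDBC URL and table:

    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.ResultSet;
    import java.sql.SQLException;
    import java.sql.Statement;

    class VersionQuerySketch {
      static String readVersion(String jdbcUrl) throws SQLException {
        // Closed automatically in reverse order: res, then stmt, then conn.
        try (Connection conn = DriverManager.getConnection(jdbcUrl);
             Statement stmt = conn.createStatement();
             ResultSet res = stmt.executeQuery("select SCHEMA_VERSION from VERSION")) {
          if (!res.next()) {
            throw new SQLException("Could not find version info in VERSION table.");
          }
          return res.getString(1);
        }
      }
    }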
@@ -1484,16 +1484,17 @@ long generateCompactionQueueId(Statement stmt) throws SQLException, MetaException
     // Get the id for the next entry in the queue
     String s = sqlGenerator.addForUpdateClause("select ncq_next from NEXT_COMPACTION_QUEUE_ID");
     LOG.debug("going to execute query <" + s + ">");
-    ResultSet rs = stmt.executeQuery(s);
-    if (!rs.next()) {
-      throw new IllegalStateException("Transaction tables not properly initiated, " +
-        "no record found in next_compaction_queue_id");
+    try (ResultSet rs = stmt.executeQuery(s)) {
+      if (!rs.next()) {
+        throw new IllegalStateException("Transaction tables not properly initiated, "
+            + "no record found in next_compaction_queue_id");
+      }
+      long id = rs.getLong(1);
+      s = "update NEXT_COMPACTION_QUEUE_ID set ncq_next = " + (id + 1);
+      LOG.debug("Going to execute update <" + s + ">");
+      stmt.executeUpdate(s);
+      return id;
     }
-    long id = rs.getLong(1);
-    s = "update NEXT_COMPACTION_QUEUE_ID set ncq_next = " + (id + 1);
-    LOG.debug("Going to execute update <" + s + ">");
-    stmt.executeUpdate(s);
-    return id;
   }

   @Override
   @RetrySemantics.Idempotent
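Worth noting: return id; now sits inside the try block, which is safe because try-with-resources runs close() after the return value is computed but before control returns to the caller. A tiny demonstration with a hypothetical Probe resource:

    class CloseOrderingSketch {
      static class Probe implements AutoCloseable {
        int value() { return 42; }
        @Override public void close() { System.out.println("closed"); }
      }

      static int read() {
        try (Probe p = new Probe()) {
          return p.value(); // value computed, then close() runs, then caller sees 42
        }
      }

      public static void main(String[] args) {
        System.out.println(read()); // prints "closed", then "42"
      }
    }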
@@ -2863,22 +2864,26 @@ private void heartbeatTxn(Connection dbConn, long txnid)
   private TxnStatus findTxnState(long txnid, Statement stmt) throws SQLException, MetaException {
     String s = "select txn_state from TXNS where txn_id = " + txnid;
     LOG.debug("Going to execute query <" + s + ">");
-    ResultSet rs = stmt.executeQuery(s);
-    if (!rs.next()) {
-      s = sqlGenerator.addLimitClause(1, "1 from COMPLETED_TXN_COMPONENTS where CTC_TXNID = " + txnid);
-      LOG.debug("Going to execute query <" + s + ">");
-      ResultSet rs2 = stmt.executeQuery(s);
-      if(rs2.next()) {
-        return TxnStatus.COMMITTED;
+    try (ResultSet rs = stmt.executeQuery(s)) {
+      if (!rs.next()) {
+        s =
+            sqlGenerator.addLimitClause(1, "1 from COMPLETED_TXN_COMPONENTS where CTC_TXNID = "
+                + txnid);
+        LOG.debug("Going to execute query <" + s + ">");
+        try (ResultSet rs2 = stmt.executeQuery(s)) {
+          if (rs2.next()) {
+            return TxnStatus.COMMITTED;
+          }
+        }
+        // could also check WRITE_SET but that seems overkill
+        return TxnStatus.UNKNOWN;
       }
-      //could also check WRITE_SET but that seems overkill
-      return TxnStatus.UNKNOWN;
-    }
-    char txnState = rs.getString(1).charAt(0);
-    if (txnState == TXN_ABORTED) {
-      return TxnStatus.ABORTED;
+      char txnState = rs.getString(1).charAt(0);
+      if (txnState == TXN_ABORTED) {
+        return TxnStatus.ABORTED;
+      }
+      assert txnState == TXN_OPEN : "we found it in TXNS but it's not ABORTED, so must be OPEN";
     }
-    assert txnState == TXN_OPEN : "we found it in TXNS but it's not ABORTED, so must be OPEN";
     return TxnStatus.OPEN;
   }
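One subtlety in this method: rs and rs2 come from the same Statement, and the JDBC contract says re-executing a Statement automatically closes its previous ResultSet. The code stays correct only because the branch that runs the second query never reads rs again. A minimal illustration, with hypothetical table and column names:

    import java.sql.Connection;
    import java.sql.ResultSet;
    import java.sql.SQLException;
    import java.sql.Statement;

    class OneStatementTwoQueriesSketch {
      static String findState(Connection conn, long id) throws SQLException {
        try (Statement stmt = conn.createStatement()) {
          try (ResultSet rs = stmt.executeQuery("select state from TXNS where id = " + id)) {
            if (!rs.next()) {
              // Re-executing stmt implicitly closes rs; fine here because
              // this branch never touches rs again.
              try (ResultSet rs2 = stmt.executeQuery("select 1 from COMPLETED where id = " + id)) {
                return rs2.next() ? "committed" : "unknown";
              }
            }
            return rs.getString(1);
          }
        }
      }
    }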

@@ -2909,27 +2914,32 @@ private static void ensureValidTxn(Connection dbConn, long txnid, Statement stmt
     // We need to check whether this transaction is valid and open
     String s = "select txn_state from TXNS where txn_id = " + txnid;
     LOG.debug("Going to execute query <" + s + ">");
-    ResultSet rs = stmt.executeQuery(s);
-    if (!rs.next()) {
-      //todo: add LIMIT 1 instead of count - should be more efficient
-      s = "select count(*) from COMPLETED_TXN_COMPONENTS where CTC_TXNID = " + txnid;
-      ResultSet rs2 = stmt.executeQuery(s);
-      //todo: strictly speaking you can commit an empty txn, thus 2nd conjunct is wrong but only
-      //possible for multi-stmt txns
-      boolean alreadyCommitted = rs2.next() && rs2.getInt(1) > 0;
-      LOG.debug("Going to rollback");
-      rollbackDBConn(dbConn);
-      if(alreadyCommitted) {
-        //makes the message more informative - helps to find bugs in client code
-        throw new NoSuchTxnException("Transaction " + JavaUtils.txnIdToString(txnid) + " is already committed.");
-      }
-      throw new NoSuchTxnException("No such transaction " + JavaUtils.txnIdToString(txnid));
-    }
-    if (rs.getString(1).charAt(0) == TXN_ABORTED) {
-      LOG.debug("Going to rollback");
-      rollbackDBConn(dbConn);
-      throw new TxnAbortedException("Transaction " + JavaUtils.txnIdToString(txnid) +
-        " already aborted");//todo: add time of abort, which is not currently tracked. Requires schema change
+    try (ResultSet rs = stmt.executeQuery(s)) {
+      if (!rs.next()) {
+        // todo: add LIMIT 1 instead of count - should be more efficient
+        s = "select count(*) from COMPLETED_TXN_COMPONENTS where CTC_TXNID = " + txnid;
+        try (ResultSet rs2 = stmt.executeQuery(s)) {
+          // todo: strictly speaking you can commit an empty txn, thus 2nd conjunct is wrong but
+          // only possible for multi-stmt txns
+          boolean alreadyCommitted = rs2.next() && rs2.getInt(1) > 0;
+          LOG.debug("Going to rollback");
+          rollbackDBConn(dbConn);
+          if (alreadyCommitted) {
+            // makes the message more informative - helps to find bugs in client code
+            throw new NoSuchTxnException("Transaction " + JavaUtils.txnIdToString(txnid)
+                + " is already committed.");
+          }
+          throw new NoSuchTxnException("No such transaction " + JavaUtils.txnIdToString(txnid));
+        }
+      }
+      if (rs.getString(1).charAt(0) == TXN_ABORTED) {
+        LOG.debug("Going to rollback");
+        rollbackDBConn(dbConn);
+        throw new TxnAbortedException("Transaction " + JavaUtils.txnIdToString(txnid)
+            + " already aborted");// todo: add time of abort, which is not currently tracked.
+                                  // Requires schema change
+      }
     }
   }

@@ -99,6 +99,9 @@ public int hashCode() {

     @Override
     public boolean equals(Object obj) {
+      if (!(obj instanceof ListKeyWrapper)) {
+        return false;
+      }
       Object[] copied_in_hashmap = ((ListKeyWrapper) obj).keys;
       return equalComparer.areEqual(copied_in_hashmap, keys);
     }
@@ -182,6 +185,9 @@ public int hashCode() {

     @Override
     public boolean equals(Object other) {
+      if (!(other instanceof TextKeyWrapper)) {
+        return false;
+      }
       Object obj = ((TextKeyWrapper) other).key;
       Text t1;
       Text t2;
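Without the instanceof guard, handing any other type (or null) to equals() threw ClassCastException instead of returning false, which violates the Object.equals contract that lgtm.com checks. The guard also covers null, since null instanceof T is always false. The pattern, reduced to a hypothetical key class:

    import java.util.Arrays;

    final class KeySketch {
      private final int[] parts;

      KeySketch(int... parts) { this.parts = parts; }

      @Override
      public boolean equals(Object obj) {
        if (!(obj instanceof KeySketch)) {
          return false; // also handles obj == null
        }
        return Arrays.equals(parts, ((KeySketch) obj).parts);
      }

      @Override
      public int hashCode() {
        return Arrays.hashCode(parts); // equal objects must produce equal hashes
      }
    }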
@@ -292,7 +292,7 @@ public int size() {
   }

   public Iterator<Object> iterator() {
-    return listIterator();
+    return super.listIterator();
   }

   public ListIterator<Object> listIterator(int index) {
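The one-line change reads like a guard against mutual recursion: if listIterator() is implemented (or later overridden) in terms of iterator(), then iterator() returning listIterator() loops until StackOverflowError, while super.listIterator() binds to the parent implementation and breaks the cycle. A hypothetical reduction of that failure mode:

    import java.util.AbstractList;
    import java.util.Iterator;
    import java.util.ListIterator;

    // Each method delegates to the other, so calling either overflows the stack.
    class LoopySketch<E> extends AbstractList<E> {
      @Override public E get(int i) { throw new IndexOutOfBoundsException(); }
      @Override public int size() { return 0; }

      @Override
      public Iterator<E> iterator() {
        return listIterator(); // dispatches to the override below
      }

      @Override
      public ListIterator<E> listIterator() {
        return (ListIterator<E>) iterator(); // calls back up: infinite recursion
      }
      // The commit's fix: have iterator() return super.listIterator(), which
      // binds to AbstractList's implementation and breaks the cycle.
    }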
@@ -147,7 +147,6 @@ class RepeatedGroupConverter extends HiveGroupConverter
     private final ConverterParent parent;
     private final int index;
     private final List<Writable> list = new ArrayList<Writable>();
-    private final Map<String, String> metadata = new HashMap<String, String>();


     public RepeatedGroupConverter(GroupType groupType, ConverterParent parent, int index, TypeInfo hiveTypeInfo) {
@@ -66,10 +66,9 @@ public void setDump(DumpType lvl, Long eventFrom, Long eventTo, Path cmRoot) {
   }

   private void loadDumpFromFile() throws SemanticException {
-    try {
+    try (FileSystem fs = dumpFile.getFileSystem(hiveConf); BufferedReader br =
+        new BufferedReader(new InputStreamReader(fs.open(dumpFile)))) {
       // read from dumpfile and instantiate self
-      FileSystem fs = dumpFile.getFileSystem(hiveConf);
-      BufferedReader br = new BufferedReader(new InputStreamReader(fs.open(dumpFile)));
       String line = null;
       if ((line = br.readLine()) != null) {
         String[] lineContents = line.split("\t", 5);
@@ -51,6 +51,7 @@ public TezEdgeProperty(HiveConf hiveConf, EdgeType edgeType, boolean isAutoReduce,
       boolean isSlowStart, int minReducer, int maxReducer, long bytesPerReducer) {
     this(hiveConf, edgeType, -1);
     setAutoReduce(hiveConf, isAutoReduce, minReducer, maxReducer, bytesPerReducer);
+    this.isSlowStart = isSlowStart;
   }

   public void setAutoReduce(HiveConf hiveConf, boolean isAutoReduce, int minReducer,
@@ -60,7 +61,6 @@ public void setAutoReduce(HiveConf hiveConf, boolean isAutoReduce, int minReducer,
     this.maxReducer = maxReducer;
     this.isAutoReduce = isAutoReduce;
     this.inputSizePerReducer = bytesPerReducer;
-    this.isSlowStart = isSlowStart;
   }

   public TezEdgeProperty(EdgeType edgeType) {
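The lgtm finding behind this hunk is a self-assignment: setAutoReduce has no isSlowStart parameter, so this.isSlowStart = isSlowStart; inside it read and wrote the same field, a no-op that silently discarded the constructor argument. Moving the assignment into the constructor, where a real parameter is in scope, makes it meaningful. Distilled into a hypothetical class:

    class EdgeConfigSketch {
      private boolean slowStart;

      EdgeConfigSketch(boolean slowStart) {
        configure();
        this.slowStart = slowStart; // parameter shadows the field: a real assignment
      }

      private void configure() {
        // Before the fix, the equivalent line lived here. With no slowStart
        // parameter in scope it meant this.slowStart = this.slowStart; a
        // no-op that static analysis flags as a useless self-assignment.
      }

      boolean isSlowStart() { return slowStart; }
    }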
@@ -134,7 +134,7 @@ public Object terminatePartial(AggregationBuffer agg) throws HiveException {
   public void merge(AggregationBuffer agg, Object partial)
       throws HiveException {
     MkArrayAggregationBuffer myagg = (MkArrayAggregationBuffer) agg;
-    List<Object> partialResult = (ArrayList<Object>) internalMergeOI.getList(partial);
+    List<Object> partialResult = (List<Object>) internalMergeOI.getList(partial);
     if (partialResult != null) {
       for(Object i : partialResult) {
         putIntoCollection(i, myagg);
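Casting getList's result to List rather than ArrayList matters because the object-inspector interface only promises some List; if a different implementation comes back, the narrower cast fails at runtime. A small sketch of the hazard, using a hypothetical producer:

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;

    class CastWidthSketch {
      // The contract only says "a List" -- the concrete class may vary.
      static Object produce() {
        return Arrays.asList("a", "b"); // an Arrays$ArrayList, not java.util.ArrayList
      }

      public static void main(String[] args) {
        Object partial = produce();
        List<?> ok = (List<?>) partial;   // safe: cast to the interface
        System.out.println(ok.size());    // 2
        ArrayList<?> boom = (ArrayList<?>) partial; // ClassCastException at runtime
      }
    }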
113 changes: 113 additions & 0 deletions ql/src/test/org/apache/hadoop/hive/ql/exec/TestKeyWrapperFactory.java
@@ -0,0 +1,113 @@
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hive.ql.exec;

import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;

import java.util.ArrayList;

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.exec.KeyWrapperFactory.ListKeyWrapper;
import org.apache.hadoop.hive.ql.exec.KeyWrapperFactory.TextKeyWrapper;
import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hadoop.hive.serde2.objectinspector.InspectableObject;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
import org.apache.hadoop.io.Text;
import org.junit.Before;
import org.junit.Test;

public class TestKeyWrapperFactory {
  private KeyWrapperFactory factory;

  @Before
  public void setup() throws Exception {
    SessionState ss = new SessionState(new HiveConf());
    SessionState.setCurrentSessionState(ss);

    ArrayList<Text> col1 = new ArrayList<Text>();
    col1.add(new Text("0"));
    col1.add(new Text("1"));
    col1.add(new Text("2"));
    col1.add(new Text("3"));
    TypeInfo col1Type = TypeInfoFactory.getListTypeInfo(TypeInfoFactory.stringTypeInfo);
    ArrayList<Text> cola = new ArrayList<Text>();
    cola.add(new Text("a"));
    cola.add(new Text("b"));
    cola.add(new Text("c"));
    TypeInfo colaType = TypeInfoFactory.getListTypeInfo(TypeInfoFactory.stringTypeInfo);
    try {
      ArrayList<Object> data = new ArrayList<Object>();
      data.add(col1);
      data.add(cola);
      ArrayList<String> names = new ArrayList<String>();
      names.add("col1");
      names.add("cola");
      ArrayList<TypeInfo> typeInfos = new ArrayList<TypeInfo>();
      typeInfos.add(col1Type);
      typeInfos.add(colaType);
      TypeInfo dataType = TypeInfoFactory.getStructTypeInfo(names, typeInfos);

      InspectableObject r = new InspectableObject();
      ObjectInspector[] oi = new ObjectInspector[1];
      r.o = data;
      oi[0] = TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(dataType);
      try {
        // get an evaluator for a simple field expression
        ExprNodeDesc exprDesc = new ExprNodeColumnDesc(colaType, "cola", "", false);
        ExprNodeEvaluator eval = ExprNodeEvaluatorFactory.get(exprDesc);
        ExprNodeEvaluator[] evals = new ExprNodeEvaluator[1];
        evals[0] = eval;
        ObjectInspector resultOI = eval.initialize(oi[0]);
        ObjectInspector[] resultOIs = new ObjectInspector[1];
        resultOIs[0] = resultOI;
        factory = new KeyWrapperFactory(evals, oi, resultOIs);
      } catch (Throwable e) {
        e.printStackTrace();
        throw e;
      }
    } catch (Throwable e) {
      e.printStackTrace();
      throw new RuntimeException(e);
    }
  }

  @Test
  public void testKeyWrapperEqualsCopy() throws Exception {
    KeyWrapper w1 = factory.getKeyWrapper();
    KeyWrapper w2 = w1.copyKey();
    assertTrue(w1.equals(w2));
  }

  @Test
  public void testDifferentWrapperTypesUnequal() {
    TextKeyWrapper w3 = factory.new TextKeyWrapper(false);
    ListKeyWrapper w4 = factory.new ListKeyWrapper(false);
    assertFalse(w3.equals(w4));
    assertFalse(w4.equals(w3));
  }
}
