HIVE-17812 Move remaining classes that HiveMetaStore depends on. This closes apache#261. (Alan Gates, reviewed by Vihang Karajgaonkar)
Alan Gates committed Nov 2, 2017
1 parent 10aa330 commit c5a9673
Showing 141 changed files with 1,311 additions and 1,220 deletions.
Original file line number Diff line number Diff line change
@@ -34,12 +34,9 @@
import javax.naming.NamingException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.HiveMetaStore.HMSHandler;
import org.apache.hadoop.hive.metastore.IHMSHandler;
import org.apache.hadoop.hive.metastore.MetaStoreEventListener;
import org.apache.hadoop.hive.metastore.api.InvalidOperationException;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
import org.apache.hadoop.hive.metastore.api.Partition;
import org.apache.hadoop.hive.metastore.api.Table;
import org.apache.hadoop.hive.metastore.events.AddPartitionEvent;
@@ -54,6 +51,7 @@
import org.apache.hive.hcatalog.common.HCatConstants;
import org.apache.hive.hcatalog.messaging.HCatEventMessage;
import org.apache.hive.hcatalog.messaging.MessageFactory;
import org.apache.thrift.TException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

@@ -182,7 +180,7 @@ public void onCreateDatabase(CreateDatabaseEvent dbEvent)
// by listening on a topic named "HCAT" and message selector string
// as "HCAT_EVENT = HCAT_ADD_DATABASE"
if (dbEvent.getStatus()) {
String topicName = getTopicPrefix(dbEvent.getHandler().getHiveConf());
String topicName = getTopicPrefix(dbEvent.getIHMSHandler().getConf());
send(messageFactory.buildCreateDatabaseMessage(dbEvent.getDatabase()), topicName);
}
}
@@ -193,7 +191,7 @@ public void onDropDatabase(DropDatabaseEvent dbEvent) throws MetaException {
// by listening on a topic named "HCAT" and message selector string
// as "HCAT_EVENT = HCAT_DROP_DATABASE"
if (dbEvent.getStatus()) {
String topicName = getTopicPrefix(dbEvent.getHandler().getHiveConf());
String topicName = getTopicPrefix(dbEvent.getIHMSHandler().getConf());
send(messageFactory.buildDropDatabaseMessage(dbEvent.getDatabase()), topicName);
}
}
@@ -205,8 +203,8 @@ public void onCreateTable(CreateTableEvent tableEvent) throws MetaException {
// as "HCAT_EVENT = HCAT_ADD_TABLE"
if (tableEvent.getStatus()) {
Table tbl = tableEvent.getTable();
HMSHandler handler = tableEvent.getHandler();
HiveConf conf = handler.getHiveConf();
IHMSHandler handler = tableEvent.getIHMSHandler();
Configuration conf = handler.getConf();
Table newTbl;
try {
newTbl = handler.get_table_core(tbl.getDbName(), tbl.getTableName())
@@ -216,11 +214,7 @@ public void onCreateTable(CreateTableEvent tableEvent) throws MetaException {
getTopicPrefix(conf) + "." + newTbl.getDbName().toLowerCase() + "."
+ newTbl.getTableName().toLowerCase());
handler.alter_table(newTbl.getDbName(), newTbl.getTableName(), newTbl);
} catch (InvalidOperationException e) {
MetaException me = new MetaException(e.toString());
me.initCause(e);
throw me;
} catch (NoSuchObjectException e) {
} catch (TException e) {
MetaException me = new MetaException(e.toString());
me.initCause(e);
throw me;
@@ -258,7 +252,7 @@ public void onAlterTable(AlterTableEvent tableEvent) throws MetaException {
}
// I think this is wrong, the alter table statement should come on the table topic not the
// DB topic - Alan.
String topicName = getTopicPrefix(tableEvent.getHandler().getHiveConf()) + "." +
String topicName = getTopicPrefix(tableEvent.getIHMSHandler().getConf()) + "." +
after.getDbName().toLowerCase();
send(messageFactory.buildAlterTableMessage(before, after), topicName);
}
@@ -288,7 +282,7 @@ public void onDropTable(DropTableEvent tableEvent) throws MetaException {
Table table = tableEvent.getTable();
// I think this is wrong, the drop table statement should come on the table topic not the
// DB topic - Alan.
String topicName = getTopicPrefix(tableEvent.getHandler().getHiveConf()) + "." + table.getDbName().toLowerCase();
String topicName = getTopicPrefix(tableEvent.getIHMSHandler().getConf()) + "." + table.getDbName().toLowerCase();
send(messageFactory.buildDropTableMessage(table), topicName);
}
}
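The hunks above replace tableEvent.getHandler().getHiveConf() with tableEvent.getIHMSHandler().getConf(): listener events now expose the IHMSHandler interface and a plain Hadoop Configuration instead of the concrete HMSHandler and HiveConf, and the separate InvalidOperationException/NoSuchObjectException catches collapse into a single TException catch. A minimal sketch of a custom listener written against the new accessors follows; the class name and the hcat.msgbus.topic.prefix key and default are illustrative assumptions, not part of this commit.

```java
// Illustrative only -- not part of this commit.
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.metastore.IHMSHandler;
import org.apache.hadoop.hive.metastore.MetaStoreEventListener;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.metastore.events.CreateTableEvent;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class TopicLoggingListener extends MetaStoreEventListener {
  private static final Logger LOG = LoggerFactory.getLogger(TopicLoggingListener.class);

  public TopicLoggingListener(Configuration config) {
    super(config);
  }

  @Override
  public void onCreateTable(CreateTableEvent tableEvent) throws MetaException {
    // The event now hands back the IHMSHandler interface rather than the
    // concrete HMSHandler, and its conf is a plain Hadoop Configuration.
    IHMSHandler handler = tableEvent.getIHMSHandler();
    Configuration conf = handler.getConf();
    // Assumed key and default, mirroring the topic-prefix lookup in the hunks above.
    String prefix = conf.get("hcat.msgbus.topic.prefix", "HCAT");
    LOG.info("create_table event, topic prefix {}", prefix);
  }
}
```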
Original file line number Diff line number Diff line change
@@ -64,6 +64,7 @@
import org.apache.hadoop.hive.metastore.api.TxnInfo;
import org.apache.hadoop.hive.metastore.api.TxnState;
import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants;
import org.apache.hadoop.hive.metastore.txn.AcidHouseKeeperService;
import org.apache.hadoop.hive.metastore.txn.TxnDbUtil;
import org.apache.hadoop.hive.ql.CommandNeedRetryException;
import org.apache.hadoop.hive.ql.Driver;
@@ -77,7 +78,6 @@
import org.apache.hadoop.hive.ql.io.orc.RecordReader;
import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hadoop.hive.ql.txn.AcidHouseKeeperService;
import org.apache.hadoop.hive.ql.txn.compactor.Worker;
import org.apache.hadoop.hive.serde.serdeConstants;
import org.apache.hadoop.hive.serde2.objectinspector.StructField;
@@ -740,11 +740,8 @@ public void testTimeOutReaper() throws Exception {
//ensure txn timesout
conf.setTimeVar(HiveConf.ConfVars.HIVE_TXN_TIMEOUT, 1, TimeUnit.MILLISECONDS);
AcidHouseKeeperService houseKeeperService = new AcidHouseKeeperService();
houseKeeperService.start(conf);
while(houseKeeperService.getIsAliveCounter() <= Integer.MIN_VALUE) {
Thread.sleep(100);//make sure it has run at least once
}
houseKeeperService.stop();
houseKeeperService.setConf(conf);
houseKeeperService.run();
try {
//should fail because the TransactionBatch timed out
txnBatch.commit();
@@ -757,12 +754,7 @@
txnBatch.beginNextTransaction();
txnBatch.commit();
txnBatch.beginNextTransaction();
int lastCount = houseKeeperService.getIsAliveCounter();
houseKeeperService.start(conf);
while(houseKeeperService.getIsAliveCounter() <= lastCount) {
Thread.sleep(100);//make sure it has run at least once
}
houseKeeperService.stop();
houseKeeperService.run();
try {
//should fail because the TransactionBatch timed out
txnBatch.commit();
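The test change above reflects AcidHouseKeeperService moving from org.apache.hadoop.hive.ql.txn to org.apache.hadoop.hive.metastore.txn and dropping its start()/stop()/getIsAliveCounter() lifecycle in favor of a plain setConf()/run() pair. A small sketch of driving one reaper pass the new way; the calls are taken from the hunk, while the wrapper class and method are illustrative.

```java
// Illustrative only -- not part of this commit.
import java.util.concurrent.TimeUnit;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.txn.AcidHouseKeeperService;

public class ReaperSketch {
  /** Force open transactions to time out, then reap them once. */
  public static void reapOnce(HiveConf conf) {
    // Make open transactions eligible for the reaper almost immediately.
    conf.setTimeVar(HiveConf.ConfVars.HIVE_TXN_TIMEOUT, 1, TimeUnit.MILLISECONDS);

    // No start()/stop() and no getIsAliveCounter() polling any more:
    // the service is configured, then a single run() performs one pass.
    AcidHouseKeeperService houseKeeperService = new AcidHouseKeeperService();
    houseKeeperService.setConf(conf);
    houseKeeperService.run();
  }
}
```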
Original file line number Diff line number Diff line change
@@ -509,13 +509,13 @@ public void testMetaConfSameHandler() throws Exception {
closingClient.setMetaConf(metaConfKey, "[test pattern modified]");
ConfigChangeEvent event = (ConfigChangeEvent) DummyListener.getLastEvent();
int beforeCloseNotificationEventCounts = DummyListener.notifyList.size();
HiveMetaStore.HMSHandler beforeHandler = event.getHandler();
IHMSHandler beforeHandler = event.getIHMSHandler();
closingClient.close();

Thread.sleep(5 * 1000);
event = (ConfigChangeEvent) DummyListener.getLastEvent();
int afterCloseNotificationEventCounts = DummyListener.notifyList.size();
HiveMetaStore.HMSHandler afterHandler = event.getHandler();
IHMSHandler afterHandler = event.getIHMSHandler();
// Meta-conf cleanup should trigger an event to listener
assertNotSame(beforeCloseNotificationEventCounts, afterCloseNotificationEventCounts);
// Both the handlers should be same
Original file line number Diff line number Diff line change
@@ -24,7 +24,7 @@
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.metastore.HiveMetaStore.HMSHandler;
import org.apache.hadoop.hive.metastore.IHMSHandler;
import org.apache.hadoop.hive.metastore.api.Database;
import org.apache.hadoop.hive.ql.metadata.AuthorizationException;
import org.apache.hadoop.hive.ql.metadata.HiveException;
@@ -201,7 +201,7 @@ private String debugPrivPrint(Privilege[] privileges) {
}

@Override
public void setMetaStoreHandler(HMSHandler handler) {
public void setMetaStoreHandler(IHMSHandler handler) {
debugLog("DHMAP.setMetaStoreHandler");
}

Original file line number Diff line number Diff line change
@@ -20,7 +20,7 @@
import java.util.List;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.metastore.HiveMetaStore.HMSHandler;
import org.apache.hadoop.hive.metastore.IHMSHandler;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.session.SessionState;

@@ -91,7 +91,7 @@ public void setConf(Configuration config) {
}

@Override
public void setMetaStoreHandler(HMSHandler handler) {
public void setMetaStoreHandler(IHMSHandler handler) {
hmap.setMetaStoreHandler(handler);
}

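Both authorization-provider changes above narrow setMetaStoreHandler from the concrete HiveMetaStore.HMSHandler to the IHMSHandler interface; the NotificationListener hunk earlier in this diff also shows get_table_core being called through that interface. A hedged sketch of a handler consumer after the change; everything except the setMetaStoreHandler signature and the get_table_core call is illustrative.

```java
// Illustrative only -- not part of this commit.
import org.apache.hadoop.hive.metastore.IHMSHandler;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
import org.apache.hadoop.hive.metastore.api.Table;

public class HandlerBackedLookup {
  private IHMSHandler handler;

  // Matches the new provider signature: the interface comes in directly,
  // with no cast down to HiveMetaStore.HMSHandler.
  public void setMetaStoreHandler(IHMSHandler handler) {
    this.handler = handler;
  }

  // get_table_core is reachable through IHMSHandler, as the NotificationListener
  // hunk earlier in this diff relies on.
  public Table lookupTable(String dbName, String tableName)
      throws MetaException, NoSuchObjectException {
    return handler.get_table_core(dbName, tableName);
  }
}
```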
Original file line number Diff line number Diff line change
@@ -130,7 +130,10 @@
import org.apache.hadoop.hive.metastore.security.HadoopThriftAuthBridge;
import org.apache.hadoop.hive.metastore.security.MetastoreDelegationTokenManager;
import org.apache.hadoop.hive.metastore.security.TUGIContainingTransport;
import org.apache.hadoop.hive.metastore.txn.AcidHouseKeeperService;
import org.apache.hadoop.hive.metastore.txn.AcidOpenTxnsCounterService;
import org.apache.hadoop.hive.metastore.txn.AcidCompactionHistoryService;
import org.apache.hadoop.hive.metastore.txn.AcidWriteSetService;
import org.apache.hadoop.hive.metastore.txn.TxnStore;
import org.apache.hadoop.hive.metastore.txn.TxnUtils;
import org.apache.hadoop.hive.serde2.Deserializer;
@@ -215,17 +218,7 @@ public TTransport getTransport(TTransport trans) {
}
}

/**
* An ugly interface because everything about this file is ugly. RawStore is threadlocal so this
* thread-local disease propagates everywhere, and FileMetadataManager cannot just get a RawStore
* or handlers to use; it will need to have this method to make thread-local handlers and a
* thread-local RawStore.
*/
public interface ThreadLocalRawStore {
RawStore getMS() throws MetaException;
}

public static class HMSHandler extends FacebookBase implements IHMSHandler, ThreadLocalRawStore {
public static class HMSHandler extends FacebookBase implements IHMSHandler {
public static final Logger LOG = HiveMetaStore.LOG;
private final HiveConf hiveConf; // stores datastore (jpox) properties,
// right now they come from jpox.properties
@@ -406,10 +399,20 @@ public static String getThreadLocalIpAddress() {
return threadLocalIpAddress.get();
}

/**
* Use {@link #getThreadId()} instead.
* @return thread id
*/
@Deprecated
public static Integer get() {
return threadLocalId.get();
}

@Override
public int getThreadId() {
return threadLocalId.get();
}

public HMSHandler(String name) throws MetaException {
this(name, new HiveConf(HMSHandler.class), true);
}
@@ -457,7 +460,8 @@ public HiveConf getHiveConf() {
}
}

List<TransactionalMetaStoreEventListener> getTransactionalListeners() {
@Override
public List<TransactionalMetaStoreEventListener> getTransactionalListeners() {
return transactionalListeners;
}

@@ -537,7 +541,7 @@ public void init() throws MetaException {
cleaner.schedule(new DumpDirCleanerTask(hiveConf), cleanFreq, cleanFreq);
}
expressionProxy = PartFilterExprUtil.createExpressionProxy(hiveConf);
fileMetadataManager = new FileMetadataManager((ThreadLocalRawStore)this, hiveConf);
fileMetadataManager = new FileMetadataManager(this.getMS(), hiveConf);
}

private static String addPrefix(String s) {
@@ -580,6 +584,7 @@ private Map<String, String> getModifiedConf() {
return modifiedConf;
}

@Override
public Warehouse getWh() {
return wh;
}
@@ -1027,14 +1032,7 @@ public Database get_database(final String name) throws NoSuchObjectException, Me
return db;
}

/**
* Equivalent to get_database, but does not write to audit logs, or fire pre-event listners.
* Meant to be used for internal hive classes that don't use the thrift interface.
* @param name
* @return
* @throws NoSuchObjectException
* @throws MetaException
*/
@Override
public Database get_database_core(final String name) throws NoSuchObjectException,
MetaException {
Database db = null;
@@ -2400,16 +2398,7 @@ public List<TableMeta> get_table_meta(String dbnames, String tblNames, List<Stri
return t;
}

/**
* Equivalent of get_table, but does not log audits and fire pre-event listener.
* Meant to be used for calls made by other hive classes, that are not using the
* thrift interface.
* @param dbname
* @param name
* @return Table object
* @throws MetaException
* @throws NoSuchObjectException
*/
@Override
public Table get_table_core(final String dbname, final String name) throws MetaException,
NoSuchObjectException {
Table t = null;
@@ -8059,33 +8048,29 @@ private static void startHouseKeeperService(HiveConf conf) throws Exception {
if(!HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVE_COMPACTOR_INITIATOR_ON)) {
return;
}
startHouseKeeperService(conf, Class.forName("org.apache.hadoop.hive.ql.txn.AcidHouseKeeperService"));
startHouseKeeperService(conf, Class.forName("org.apache.hadoop.hive.ql.txn.AcidCompactionHistoryService"));
startHouseKeeperService(conf, Class.forName("org.apache.hadoop.hive.ql.txn.AcidWriteSetService"));

ThreadPool.initialize(conf);
RunnableConfigurable rc = new AcidOpenTxnsCounterService();
rc.setConf(conf);
ThreadPool.getPool().scheduleAtFixedRate(rc, 100, MetastoreConf.getTimeVar(conf,
MetastoreConf.ConfVars.COUNT_OPEN_TXNS_INTERVAL, TimeUnit.MILLISECONDS),
TimeUnit.MILLISECONDS);

startOneHouseKeeperService(new AcidHouseKeeperService(), conf,
MetastoreConf.getTimeVar(conf, MetastoreConf.ConfVars.TIMEDOUT_TXN_REAPER_INTERVAL,
TimeUnit.MILLISECONDS));
startOneHouseKeeperService(new AcidOpenTxnsCounterService(), conf,
MetastoreConf.getTimeVar(conf, MetastoreConf.ConfVars.COUNT_OPEN_TXNS_INTERVAL,
TimeUnit.MILLISECONDS));
startOneHouseKeeperService(new AcidCompactionHistoryService(), conf,
MetastoreConf.getTimeVar(conf, MetastoreConf.ConfVars.COMPACTOR_HISTORY_REAPER_INTERVAL,
TimeUnit.MILLISECONDS));
startOneHouseKeeperService(new AcidWriteSetService(), conf,
MetastoreConf.getTimeVar(conf, MetastoreConf.ConfVars.WRITE_SET_REAPER_INTERVAL,
TimeUnit.MILLISECONDS));
}
private static void startHouseKeeperService(HiveConf conf, Class<?> c) throws Exception {
//todo: when metastore adds orderly-shutdown logic, houseKeeper.stop()
//should be called form it
HouseKeeperService houseKeeper = (HouseKeeperService)c.newInstance();
try {
houseKeeper.start(conf);
}
catch (Exception ex) {
LOG.error("Failed to start {}" , houseKeeper.getClass() +
". The system will not handle {} " , houseKeeper.getServiceDescription(),
". Root Cause: ", ex);
}

private static void startOneHouseKeeperService(RunnableConfigurable rc, Configuration conf,
long interval) {
rc.setConf(conf);
ThreadPool.getPool().scheduleAtFixedRate(rc, 0, interval, TimeUnit.MILLISECONDS);
}

public static Map<FileMetadataExprType, FileMetadataHandler> createHandlerMap() {
static Map<FileMetadataExprType, FileMetadataHandler> createHandlerMap() {
Map<FileMetadataExprType, FileMetadataHandler> fmHandlers = new HashMap<>();
for (FileMetadataExprType v : FileMetadataExprType.values()) {
switch (v) {
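The final hunk replaces the reflective org.apache.hadoop.hive.ql.txn.* HouseKeeperService startup with RunnableConfigurable tasks scheduled on the shared metastore ThreadPool at MetastoreConf-controlled intervals. A sketch of that scheduling pattern in isolation; the scheduling calls come from the hunk, while the package locations of ThreadPool and RunnableConfigurable and the use of MetastoreConf.newMetastoreConf() are assumptions.

```java
// Illustrative only -- not part of this commit.
import java.util.concurrent.TimeUnit;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.metastore.RunnableConfigurable; // assumed package
import org.apache.hadoop.hive.metastore.ThreadPool;           // assumed package
import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
import org.apache.hadoop.hive.metastore.txn.AcidCompactionHistoryService;

public class HouseKeeperSchedulingSketch {

  // Mirrors the new startOneHouseKeeperService helper: configure the task,
  // then hand it to the shared scheduled thread pool.
  static void scheduleOne(RunnableConfigurable task, Configuration conf, long intervalMs) {
    task.setConf(conf);
    ThreadPool.getPool().scheduleAtFixedRate(task, 0, intervalMs, TimeUnit.MILLISECONDS);
  }

  public static void main(String[] args) {
    Configuration conf = MetastoreConf.newMetastoreConf(); // assumed factory method
    ThreadPool.initialize(conf);
    scheduleOne(new AcidCompactionHistoryService(), conf,
        MetastoreConf.getTimeVar(conf,
            MetastoreConf.ConfVars.COMPACTOR_HISTORY_REAPER_INTERVAL,
            TimeUnit.MILLISECONDS));
  }
}
```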

This file was deleted.

