Skip to content

Commit

Permalink
HIVE-8785 : HiveServer2 LogDivertAppender should be more selective for beeline getLogs (Thejas Nair, reviewed by Gopal V)
Browse files Browse the repository at this point in the history

git-svn-id: https://svn.apache.org/repos/asf/hive/trunk@1637582 13f79535-47bb-0310-9956-ffa450edef68
  • Loading branch information
Thejas Nair committed Nov 8, 2014
1 parent cf4166f commit 4934636
Show file tree
Hide file tree
Showing 10 changed files with 108 additions and 40 deletions.
5 changes: 3 additions & 2 deletions common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
Original file line number Diff line number Diff line change
Expand Up @@ -1529,12 +1529,13 @@ public static enum ConfVars {

// operation log configuration
HIVE_SERVER2_LOGGING_OPERATION_ENABLED("hive.server2.logging.operation.enabled", true,
"When true, HS2 will save operation logs"),
"When true, HS2 will save operation logs and make them available for clients"),
HIVE_SERVER2_LOGGING_OPERATION_LOG_LOCATION("hive.server2.logging.operation.log.location",
"${system:java.io.tmpdir}" + File.separator + "${system:user.name}" + File.separator +
"operation_logs",
"Top level directory where operation logs are stored if logging functionality is enabled"),

HIVE_SERVER2_LOGGING_OPERATION_VERBOSE("hive.server2.logging.operation.verbose", false,
"When true, HS2 operation logs available for clients will be verbose"),
// logging configuration
HIVE_LOG4J_FILE("hive.log4j.file", "",
"Hive log4j configuration file.\n" +
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -529,7 +529,7 @@ public void testEmbeddedBeelineConnection() throws Throwable{
public void testQueryProgress() throws Throwable {
  final String SCRIPT_TEXT = "set hive.support.concurrency = false;\n" +
      "select count(*) from " + tableName + ";\n";
  // Pattern expected in the operation log streamed back to beeline while the
  // query runs (job-submission output). NOTE(review): the duplicate declaration
  // initialized to "Parsing command" was a stale pre-change line and is removed.
  final String EXPECTED_PATTERN = "number of splits";
  testScriptFile(SCRIPT_TEXT, EXPECTED_PATTERN, true, getBaseArgs(miniHS2.getBaseJdbcURL()));
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -51,6 +51,7 @@
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
import org.apache.hadoop.hive.metastore.TableType;
import org.apache.hadoop.hive.ql.exec.UDF;
import org.apache.hadoop.hive.ql.processors.DfsProcessor;
Expand Down Expand Up @@ -105,6 +106,7 @@ public TestJdbcDriver2() {
public static void setUpBeforeClass() throws SQLException, ClassNotFoundException{
Class.forName(driverName);
Connection con1 = getConnection("default");
System.setProperty(ConfVars.HIVE_SERVER2_LOGGING_OPERATION_VERBOSE.varname, "" + true);

Statement stmt1 = con1.createStatement();
assertNotNull("Statement is null", stmt1);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@

package org.apache.hive.service.cli;

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hive.service.cli.thrift.EmbeddedThriftBinaryCLIService;
import org.apache.hive.service.cli.thrift.ThriftCLIService;
import org.apache.hive.service.cli.thrift.ThriftCLIServiceClient;
Expand All @@ -35,6 +36,7 @@ public class TestEmbeddedThriftBinaryCLIService extends CLIServiceTest {
@BeforeClass
// Builds the embedded thrift service once for all tests in this class, then
// wraps it in the thrift client the test cases talk to.
public static void setUpBeforeClass() throws Exception {
service = new EmbeddedThriftBinaryCLIService();
// init() must run before the service is used; it initializes and starts the
// wrapped CLIService with this configuration.
service.init(new HiveConf());
client = new ThriftCLIServiceClient(service);
}

Expand Down
5 changes: 4 additions & 1 deletion jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java
Original file line number Diff line number Diff line change
Expand Up @@ -52,6 +52,7 @@

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.shims.ShimLoader;
import org.apache.hive.jdbc.Utils.JdbcConnectionParams;
import org.apache.hive.service.auth.HiveAuthFactory;
Expand Down Expand Up @@ -143,7 +144,9 @@ public HiveConnection(String uri, Properties info) throws SQLException {
isEmbeddedMode = connParams.isEmbeddedMode();

if (isEmbeddedMode) {
client = new EmbeddedThriftBinaryCLIService();
EmbeddedThriftBinaryCLIService embeddedClient = new EmbeddedThriftBinaryCLIService();
embeddedClient.init(new HiveConf());
client = embeddedClient;
} else {
// extract user/password from JDBC connection properties if its not supplied in the
// connection URL
Expand Down
4 changes: 3 additions & 1 deletion ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezTask.java
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,7 @@
import java.util.Map;
import java.util.Set;

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.ql.Context;
Expand Down Expand Up @@ -170,7 +171,8 @@ public int execute(DriverContext driverContext) {
counters = client.getDAGStatus(statusGetOpts).getDAGCounters();
TezSessionPoolManager.getInstance().returnSession(session);

if (LOG.isInfoEnabled() && counters != null) {
if (LOG.isInfoEnabled() && counters != null
&& conf.getBoolVar(conf, HiveConf.ConfVars.TEZ_EXEC_SUMMARY)) {
for (CounterGroup group: counters) {
LOG.info(group.getDisplayName() +":");
for (TezCounter counter: group) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -18,13 +18,17 @@

package org.apache.hive.service.cli.operation;
import java.io.CharArrayWriter;
import java.util.regex.Pattern;

import org.apache.hadoop.hive.ql.exec.Task;
import org.apache.log4j.Layout;
import org.apache.log4j.Logger;
import org.apache.log4j.WriterAppender;
import org.apache.log4j.spi.Filter;
import org.apache.log4j.spi.LoggingEvent;

import com.google.common.base.Joiner;

/**
* An Appender to divert logs from individual threads to the LogObject they belong to.
*/
Expand All @@ -33,20 +37,29 @@ public class LogDivertAppender extends WriterAppender {
private final OperationManager operationManager;

/**
* A log filter that exclude messages coming from the logger with the given name.
* We apply this filter on the Loggers used by the log diversion stuff, so that
* A log filter that filters messages coming from the logger with the given names.
* It can be used as a white list filter or a black list filter.
* We apply black list filter on the Loggers used by the log diversion stuff, so that
* they don't generate more logs for themselves when they process logs.
* White list filter is used for less verbose log collection
*/
private static class NameExclusionFilter extends Filter {
private String excludeLoggerName = null;
private static class NameFilter extends Filter {
private final Pattern namePattern;
private final boolean excludeMatches;

public NameExclusionFilter(String excludeLoggerName) {
this.excludeLoggerName = excludeLoggerName;
public NameFilter(boolean isExclusionFilter, String [] loggerNames) {
  // Compile a single alternation regex that matches any of the given logger
  // names; decide() tests event logger names against it.
  this.namePattern = Pattern.compile(String.join("|", loggerNames));
  this.excludeMatches = isExclusionFilter;
}

@Override
public int decide(LoggingEvent ev) {
if (ev.getLoggerName().equals(excludeLoggerName)) {
boolean isMatch = namePattern.matcher(ev.getLoggerName()).matches();
if (excludeMatches == isMatch) {
// Deny if this is black-list filter (excludeMatches = true) and it
// matched
// or if this is whitelist filter and it didn't match
return Filter.DENY;
}
return Filter.NEUTRAL;
Expand All @@ -56,21 +69,29 @@ public int decide(LoggingEvent ev) {
/** This is where the log message will go to */
private final CharArrayWriter writer = new CharArrayWriter();

/**
 * Diverts log events into the per-operation log via {@code operationManager}.
 *
 * @param layout           layout used to format diverted events
 * @param operationManager owner of the per-operation logs this appender feeds
 * @param isVerbose        true: capture everything except the log-processing
 *                         classes; false: capture only a whitelist of loggers
 */
public LogDivertAppender(Layout layout, OperationManager operationManager, boolean isVerbose) {
  setLayout(layout);
  setWriter(writer);
  setName("LogDivertAppender");
  this.operationManager = operationManager;

  // NOTE(review): the stale duplicate signature and the three
  // addFilter(new NameExclusionFilter(...)) calls (a class that no longer
  // exists) left over from the pre-change version are removed here.
  if (isVerbose) {
    // Black-list filter: drop messages coming from the log-processing classes
    // themselves, or we'd run an infinite loop.
    String[] exclLoggerNames = { LOG.getName(), OperationLog.class.getName(),
        OperationManager.class.getName() };
    addFilter(new NameFilter(true, exclLoggerNames));
  } else {
    // White-list filter: in non-verbose mode, divert only messages from this
    // select set of loggers.
    String[] inclLoggerNames = { "org.apache.hadoop.mapreduce.JobSubmitter",
        "org.apache.hadoop.mapreduce.Job", "SessionState", Task.class.getName() };
    addFilter(new NameFilter(false, inclLoggerNames));
  }
}

/**
* Overrides WriterAppender.subAppend(), which does the real logging.
* No need to worry about concurrency since log4j calls this synchronously.
* Overrides WriterAppender.subAppend(), which does the real logging. No need
* to worry about concurrency since log4j calls this synchronously.
*/
@Override
protected void subAppend(LoggingEvent event) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -18,8 +18,8 @@

package org.apache.hive.service.cli.operation;

import java.util.Enumeration;
import java.util.ArrayList;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
Expand All @@ -30,16 +30,26 @@
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.metastore.api.Schema;
import org.apache.hive.service.AbstractService;
import org.apache.hive.service.cli.*;
import org.apache.hive.service.cli.FetchOrientation;
import org.apache.hive.service.cli.HiveSQLException;
import org.apache.hive.service.cli.OperationHandle;
import org.apache.hive.service.cli.OperationState;
import org.apache.hive.service.cli.OperationStatus;
import org.apache.hive.service.cli.RowSet;
import org.apache.hive.service.cli.RowSetFactory;
import org.apache.hive.service.cli.TableSchema;
import org.apache.hive.service.cli.session.HiveSession;
import org.apache.log4j.*;
import org.apache.log4j.Appender;
import org.apache.log4j.ConsoleAppender;
import org.apache.log4j.Layout;
import org.apache.log4j.Logger;
import org.apache.log4j.PatternLayout;

/**
* OperationManager.
*
*/
public class OperationManager extends AbstractService {
private static final String DEFAULT_LAYOUT_PATTERN = "%d{yy/MM/dd HH:mm:ss} %p %c{2}: %m%n";
private final Log LOG = LogFactory.getLog(OperationManager.class.getName());

private HiveConf hiveConf;
Expand All @@ -54,7 +64,8 @@ public OperationManager() {
public synchronized void init(HiveConf hiveConf) {
this.hiveConf = hiveConf;
if (hiveConf.getBoolVar(HiveConf.ConfVars.HIVE_SERVER2_LOGGING_OPERATION_ENABLED)) {
initOperationLogCapture();
boolean isVerbose = hiveConf.getBoolVar(HiveConf.ConfVars.HIVE_SERVER2_LOGGING_OPERATION_VERBOSE);
initOperationLogCapture(isVerbose);
} else {
LOG.debug("Operation level logging is turned off");
}
Expand All @@ -73,7 +84,7 @@ public synchronized void stop() {
super.stop();
}

private void initOperationLogCapture() {
private void initOperationLogCapture(boolean isVerbose) {
// There should be a ConsoleAppender. Copy its Layout.
Logger root = Logger.getRootLogger();
Layout layout = null;
Expand All @@ -87,13 +98,19 @@ private void initOperationLogCapture() {
}
}

if (layout == null) {
layout = new PatternLayout(DEFAULT_LAYOUT_PATTERN);
LOG.info("Cannot find a Layout from a ConsoleAppender. Using default Layout pattern.");
}
final String VERBOSE_PATTERN = "%d{yy/MM/dd HH:mm:ss} %p %c{2}: %m%n";
final String NONVERBOSE_PATTERN = "%-5p : %m%n";

if (isVerbose) {
if (layout == null) {
layout = new PatternLayout(VERBOSE_PATTERN);
LOG.info("Cannot find a Layout from a ConsoleAppender. Using default Layout pattern.");
}
} else {
layout = new PatternLayout(NONVERBOSE_PATTERN);
}
// Register another Appender (with the same layout) that talks to us.
Appender ap = new LogDivertAppender(layout, this);
Appender ap = new LogDivertAppender(layout, this, isVerbose);
root.addAppender(ap);
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -33,8 +33,13 @@ public EmbeddedThriftBinaryCLIService() {
super(new CLIService(null));
isEmbedded = true;
HiveConf.setLoadHiveServer2Config(true);
cliService.init(new HiveConf());
}

@Override
// Propagates the caller-supplied configuration to the wrapped CLIService and
// starts it, then lets the superclass complete its own initialization.
// Order matters: the CLIService must be initialized and started before
// super.init() runs.
public synchronized void init(HiveConf hiveConf) {
cliService.init(hiveConf);
cliService.start();
super.init(hiveConf);
}

public ICLIService getService() {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,36 +17,51 @@
*/
package org.apache.hive.service.cli.operation;

import org.junit.Assert;
import java.io.File;

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hive.service.cli.*;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
import org.apache.hive.service.cli.FetchOrientation;
import org.apache.hive.service.cli.FetchType;
import org.apache.hive.service.cli.HiveSQLException;
import org.apache.hive.service.cli.OperationHandle;
import org.apache.hive.service.cli.OperationState;
import org.apache.hive.service.cli.OperationStatus;
import org.apache.hive.service.cli.RowSet;
import org.apache.hive.service.cli.SessionHandle;
import org.apache.hive.service.cli.thrift.EmbeddedThriftBinaryCLIService;
import org.apache.hive.service.cli.thrift.ThriftCLIServiceClient;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;

import java.io.File;

/**
* TestOperationLoggingAPI
* Test the FetchResults of TFetchType.LOG in thrift level.
*/
public class TestOperationLoggingAPI {
private HiveConf hiveConf = new HiveConf();
private String tableName = "testOperationLoggingAPI_table";
private static HiveConf hiveConf;
private final String tableName = "testOperationLoggingAPI_table";
private File dataFile;
private ThriftCLIServiceClient client;
private SessionHandle sessionHandle;
private String sql = "select * from " + tableName;
private String[] expectedLogs = {
private final String sql = "select * from " + tableName;
private final String[] expectedLogs = {
"Parsing command",
"Parse Completed",
"Starting Semantic Analysis",
"Semantic Analysis Completed",
"Starting command"
};

@BeforeClass
// Creates the shared HiveConf with verbose operation logging enabled, so the
// patterns in expectedLogs (parse/semantic-analysis messages) are captured.
public static void setUpBeforeClass() {
  hiveConf = new HiveConf();
  // Use the typed ConfVars accessor rather than the raw
  // Configuration#setBoolean(varname, ...): same effect, but checked against
  // the ConfVars entry instead of a free-form string key.
  hiveConf.setBoolVar(ConfVars.HIVE_SERVER2_LOGGING_OPERATION_VERBOSE, true);
}

/**
* Start embedded mode, open a session, and create a table for cases usage
* @throws Exception
Expand Down Expand Up @@ -247,7 +262,7 @@ private void verifyFetchedLog(RowSet rowSet) {

/**
 * Asserts that every fragment in {@code expectedLogs} occurs somewhere in the
 * fetched log text.
 *
 * @param logs the concatenated operation-log text fetched from the server
 */
private void verifyFetchedLog(String logs) {
  for (String log : expectedLogs) {
    // The message names the missing fragment so a failure is easy to diagnose.
    // NOTE(review): the message-less duplicate of this assertion (a stale
    // pre-change line) is removed.
    Assert.assertTrue("Checking for presence of " + log, logs.contains(log));
  }
}
}

0 comments on commit 4934636

Please sign in to comment.