Skip to content

Commit

Permalink
user can enable spark log
Browse files Browse the repository at this point in the history
  • Loading branch information
allwefantasy committed Jun 1, 2019
1 parent 181378a commit 9d3bd19
Show file tree
Hide file tree
Showing 6 changed files with 38 additions and 7 deletions.
4 changes: 2 additions & 2 deletions dev/docker/log4j.properties
Original file line number Diff line number Diff line change
Expand Up @@ -19,14 +19,14 @@ log4j.rootCategory=INFO, console,file
log4j.appender.console=org.apache.log4j.ConsoleAppender
log4j.appender.console.target=System.err
log4j.appender.console.layout=org.apache.log4j.PatternLayout
log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{1}: %m%n
log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %X{owner} %p %c{1}: %m%n

log4j.appender.file=org.apache.log4j.rolling.RollingFileAppender
log4j.appender.file.File=${REALTIME_LOG_HOME}/mlsql_engine.log
log4j.appender.file.rollingPolicy=org.apache.log4j.rolling.TimeBasedRollingPolicy
log4j.appender.file.rollingPolicy.fileNamePattern=${REALTIME_LOG_HOME}/mlsql_engine.%d.gz
log4j.appender.file.layout=org.apache.log4j.PatternLayout
log4j.appender.file.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{1}: %m%n
log4j.appender.file.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %X{owner} %p %c{1}: %m%n
log4j.appender.file.MaxBackupIndex=5
# Set the default spark-shell log level to WARN. When running the spark-shell, the
# log level for this class is used to overwrite the root logger's log level, so that
Expand Down
2 changes: 2 additions & 0 deletions streamingpro-mlsql/src/main/java/tech/mlsql/MLSQLEnvKey.scala
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,8 @@ object MLSQLEnvKey {

val CONTEXT_STATEMENT_NUM = "context_statement_num"

val REQUEST_CONTEXT_ENABLE_SPARK_LOG = "enableSparkLog"

def realTimeLogHome = {
val item = System.getProperty(REALTIME_LOG_HOME)
if (item == null) "/tmp/__mlsql__/logs"
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@ import org.apache.spark.sql.mlsql.session.{SessionIdentifier, SparkSessionCacheM
import streaming.dsl.ScriptSQLExec
import streaming.log.{Logging, WowLog}
import tech.mlsql.job.JobListener.{JobFinishedEvent, JobStartedEvent}
import tech.mlsql.job.listeners.CleanCacheListener
import tech.mlsql.job.listeners.{CleanCacheListener, EngineMDCLogListener}

import scala.collection.JavaConversions._
import scala.collection.JavaConverters._
Expand Down Expand Up @@ -44,6 +44,7 @@ object JobManager extends Logging {
logInfo(s"JobManager started with initialDelay=${initialDelay} checkTimeInterval=${checkTimeInterval}")
_jobManager = new JobManager(spark, initialDelay, checkTimeInterval)
_jobListeners += new CleanCacheListener
_jobListeners += new EngineMDCLogListener
_jobManager.run
}
}
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,28 @@
package tech.mlsql.job.listeners

import org.slf4j.MDC
import streaming.dsl.ScriptSQLExec
import tech.mlsql.MLSQLEnvKey
import tech.mlsql.job.JobListener

/**
 * 2019-06-01 WilliamZhu([email protected])
 *
 * Job listener that tags every log line produced while a job runs with the
 * job owner, via the SLF4J Mapped Diagnostic Context (the log4j pattern
 * layouts in this commit render it with `%X{owner}`). Tagging is opt-in per
 * request through the `enableSparkLog` user-defined parameter.
 */
class EngineMDCLogListener extends JobListener {

  /**
   * True when the current request context opted in to per-owner spark log
   * tagging. Reads the `enableSparkLog` flag from the script execution
   * context's user-defined parameters; absent means disabled.
   *
   * NOTE(review): `.toBoolean` throws IllegalArgumentException for values
   * other than "true"/"false" — assumes callers only ever pass those; confirm
   * against the REST layer that populates userDefinedParam.
   */
  private def sparkLogEnabled: Boolean = {
    val context = ScriptSQLExec.contextGetOrForTest()
    context.userDefinedParam
      .getOrElse(MLSQLEnvKey.REQUEST_CONTEXT_ENABLE_SPARK_LOG, "false")
      .toBoolean
  }

  override def onJobStarted(event: JobListener.JobStartedEvent): Unit = {
    if (sparkLogEnabled) {
      val context = ScriptSQLExec.contextGetOrForTest()
      // The "[owner] [...]" wrapper is part of the log format contract; keep as-is.
      MDC.put("owner", s"[owner] [${context.owner}]")
    }
  }

  override def onJobFinished(event: JobListener.JobFinishedEvent): Unit = {
    // Always clear the entry, regardless of the enable flag. MDC is
    // thread-local and job threads are pooled: if the flag evaluated true at
    // start but false (or threw) at finish, the previous conditional remove
    // would leak a stale "owner" into log lines of subsequent unrelated jobs.
    // MDC.remove is a harmless no-op when the key was never put.
    MDC.remove("owner")
  }
}
4 changes: 2 additions & 2 deletions streamingpro-mlsql/src/main/resources-local/log4j.properties
Original file line number Diff line number Diff line change
Expand Up @@ -19,13 +19,13 @@ log4j.rootCategory=INFO, console,file
log4j.appender.console=org.apache.log4j.ConsoleAppender
log4j.appender.console.target=System.err
log4j.appender.console.layout=org.apache.log4j.PatternLayout
log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{1}: %m%n
log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %X{owner} %p %c{1}: %m%n
log4j.appender.file=org.apache.log4j.rolling.RollingFileAppender
log4j.appender.file.File=${REALTIME_LOG_HOME}/mlsql_engine.log
log4j.appender.file.rollingPolicy=org.apache.log4j.rolling.TimeBasedRollingPolicy
log4j.appender.file.rollingPolicy.fileNamePattern=${REALTIME_LOG_HOME}/mlsql_engine.%d.gz
log4j.appender.file.layout=org.apache.log4j.PatternLayout
log4j.appender.file.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{1}: %m%n
log4j.appender.file.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %X{owner} %p %c{1}: %m%n
log4j.appender.file.MaxBackupIndex=5
# Set the default spark-shell log level to WARN. When running the spark-shell, the
# log level for this class is used to overwrite the root logger's log level, so that
Expand Down
4 changes: 2 additions & 2 deletions streamingpro-mlsql/src/main/resources-online/log4j.properties
Original file line number Diff line number Diff line change
Expand Up @@ -19,13 +19,13 @@ log4j.rootCategory=INFO,file, console
log4j.appender.console=org.apache.log4j.ConsoleAppender
log4j.appender.console.target=System.err
log4j.appender.console.layout=org.apache.log4j.PatternLayout
log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{1}: %m%n
log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %X{owner} %p %c{1}: %m%n
log4j.appender.file=org.apache.log4j.rolling.RollingFileAppender
log4j.appender.file.File=${REALTIME_LOG_HOME}/mlsql_engine.log
log4j.appender.file.rollingPolicy=org.apache.log4j.rolling.TimeBasedRollingPolicy
log4j.appender.file.rollingPolicy.fileNamePattern=${REALTIME_LOG_HOME}/mlsql_engine.%d.gz
log4j.appender.file.layout=org.apache.log4j.PatternLayout
log4j.appender.file.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{1}: %m%n
log4j.appender.file.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %X{owner} %p %c{1}: %m%n
log4j.appender.file.MaxBackupIndex=5
# Set the default spark-shell log level to WARN. When running the spark-shell, the
# log level for this class is used to overwrite the root logger's log level, so that
Expand Down

0 comments on commit 9d3bd19

Please sign in to comment.