Skip to content

Commit

Permalink
fix Spark 2.0.0 Logging package compatibility issue
Browse files Browse the repository at this point in the history
  • Loading branch information
allwefantasy committed Sep 16, 2016
1 parent 660d50f commit 26b9550
Show file tree
Hide file tree
Showing 3 changed files with 8 additions and 11 deletions.
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
package org.apache.spark.util

import com.google.common.cache.{CacheBuilder, CacheLoader}
import org.apache.spark.Logging

import streaming.common.CodeTemplates

import scala.collection.mutable.ArrayBuffer
Expand All @@ -18,7 +18,7 @@ trait StreamingProGenerateClass {
def execute(rawLine: String): Map[String, Any]
}

object ScalaSourceCodeCompiler extends Logging {
object ScalaSourceCodeCompiler {

private val scriptCache = CacheBuilder.newBuilder()
.maximumSize(100)
Expand All @@ -40,7 +40,7 @@ object ScalaSourceCodeCompiler extends Logging {
val result = compileCode(wrapper)
val endTime = System.nanoTime()
def timeMs: Double = (endTime - startTime).toDouble / 1000000
logInfo(s"Code generated in $timeMs ms")

result.asInstanceOf[StreamingProGenerateClass]
}
})
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,6 @@ import java.util.{List => JList, Map => JMap}
import net.csdn.ServiceFramwork
import net.csdn.bootstrap.Application
import net.csdn.common.logging.Loggers
import org.apache.spark.Logging
import serviceframework.dispatcher.StrategyDispatcher
import streaming.common.zk.{ZKClient, ZkRegister}
import streaming.common.{ParamsUtil, SQLContextHolder, SparkCompatibility}
Expand All @@ -21,7 +20,7 @@ import scala.collection.mutable.ArrayBuffer
*/


class PlatformManager extends Logging{
class PlatformManager {
self =>
val config = new AtomicReference[ParamsUtil]()

Expand Down Expand Up @@ -134,7 +133,7 @@ class PlatformManager extends Logging{
PlatformManager.setLastInstantiatedContext(self)
}

object PlatformManager extends Logging{
object PlatformManager {
private val INSTANTIATION_LOCK = new Object()

/**
Expand Down Expand Up @@ -197,7 +196,6 @@ object PlatformManager extends Logging{
case _ => SparkStreamingRuntime.getOrCreate(tempParams)
}
if (SQLContextHolder.sqlContextHolder == null) {
logInfo("....create sqlContextHolder.....")
SQLContextHolder.setActive(createSQLContextHolder(tempParams, runtime))
tempParams.put("_sqlContextHolder_", SQLContextHolder.getOrCreate())
}
Expand Down
Original file line number Diff line number Diff line change
@@ -1,6 +1,5 @@
package org.apache.spark.streaming

import org.apache.spark.Logging
import org.apache.spark.util.ManualClock
import org.scalatest._
import serviceframework.dispatcher.StrategyDispatcher
Expand All @@ -10,7 +9,7 @@ import streaming.core.strategy.platform.{PlatformManager, SparkRuntime, SparkStr
/**
* 8/29/16 WilliamZhu([email protected])
*/
trait BasicStreamingOperation extends FlatSpec with Logging with Matchers {
trait BasicStreamingOperation extends FlatSpec with Matchers {

def manualClock(streamingContext: StreamingContext) = {
streamingContext.scheduler.clock.asInstanceOf[ManualClock]
Expand All @@ -28,7 +27,7 @@ trait BasicStreamingOperation extends FlatSpec with Logging with Matchers {
runtime.destroyRuntime(false, true)
} catch {
case e: Exception =>
logError("Error stopping StreamingContext", e)
e.printStackTrace()
}
}
}
Expand All @@ -45,7 +44,7 @@ trait BasicStreamingOperation extends FlatSpec with Logging with Matchers {
runtime.destroyRuntime(false, true)
} catch {
case e: Exception =>
logError("Error stopping StreamingContext", e)
e.printStackTrace()
}
}
}
Expand Down

0 comments on commit 26b9550

Please sign in to comment.