diff --git a/app/com/linkedin/drelephant/spark/fetchers/SparkLogClient.scala b/app/com/linkedin/drelephant/spark/fetchers/SparkLogClient.scala
index 90a2eb9a2..2bdefa15c 100644
--- a/app/com/linkedin/drelephant/spark/fetchers/SparkLogClient.scala
+++ b/app/com/linkedin/drelephant/spark/fetchers/SparkLogClient.scala
@@ -19,8 +19,7 @@ package com.linkedin.drelephant.spark.fetchers
 import java.io.InputStream
 import java.security.PrivilegedAction
 
-import scala.async.Async
-import scala.concurrent.{ExecutionContext, Future}
+import scala.concurrent.{ExecutionContext, Future, blocking}
 import scala.io.Source
 
 import com.linkedin.drelephant.security.HadoopSecurity
@@ -39,7 +38,6 @@ import org.json4s.jackson.JsonMethods
  */
 class SparkLogClient(hadoopConfiguration: Configuration, sparkConf: SparkConf, eventLogUri: Option[String]) {
   import SparkLogClient._
-  import Async.async
 
   private val logger: Logger = Logger.getLogger(classOf[SparkLogClient])
 
@@ -64,8 +62,9 @@ class SparkLogClient(hadoopConfiguration: Configuration, sparkConf: SparkConf, e
     val (eventLogPath, eventLogCodec) =
       sparkUtils.pathAndCodecforEventLog(sparkConf, eventLogFileSystem, baseEventLogPath, appId, attemptId)
 
-    async {
-      sparkUtils.withEventLog(eventLogFileSystem, eventLogPath, eventLogCodec)(findDerivedData(_))
+    Future { blocking {
+      sparkUtils.withEventLog(eventLogFileSystem, eventLogPath, eventLogCodec)(findDerivedData(_))
+    }
     }
   }
 }
diff --git a/app/com/linkedin/drelephant/spark/fetchers/SparkRestClient.scala b/app/com/linkedin/drelephant/spark/fetchers/SparkRestClient.scala
index 7a39454cd..04233868d 100644
--- a/app/com/linkedin/drelephant/spark/fetchers/SparkRestClient.scala
+++ b/app/com/linkedin/drelephant/spark/fetchers/SparkRestClient.scala
@@ -79,7 +79,6 @@
   ): Future[SparkRestDerivedData] = {
     val (applicationInfo, attemptTarget) = getApplicationMetaData(appId)
 
-    // Limit the scope of async.
     Future {
       blocking {
         val futureJobDatas = Future {
@@ -140,8 +139,6 @@ class SparkRestClient(sparkConf: SparkConf) {
 
     val applicationInfo = getApplicationInfo(appTarget)
 
-    // These are pure and cannot fail, therefore it is safe to have
-    // them outside of the async block.
     val lastAttemptId = applicationInfo.attempts.maxBy {
       _.startTime
     }.attemptId
diff --git a/project/Dependencies.scala b/project/Dependencies.scala
index acf786ff5..4afa41f9e 100644
--- a/project/Dependencies.scala
+++ b/project/Dependencies.scala
@@ -90,10 +90,9 @@ object Dependencies {
     "io.dropwizard.metrics" % "metrics-healthchecks" % "3.1.2",
     "org.mockito" % "mockito-core" % "1.10.19" exclude ("org.hamcrest", "hamcrest-core"),
     "org.jmockit" % "jmockit" % "1.23" % Test,
-    "org.scala-lang.modules" %% "scala-async" % "0.9.5",
     "org.apache.httpcomponents" % "httpclient" % "4.5.2",
     "org.apache.httpcomponents" % "httpcore" % "4.4.4",
-    "org.scalatest" %% "scalatest" % "3.0.0" % Test,
+    "org.scalatest" %% "scalatest" % "3.0.0" % Test,
     "com.h2database" % "h2" % "1.4.196" % Test
   ) :+ sparkExclusion
 
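
Note: the patch replaces the scala-async macro (async { ... }) with the plain scala.concurrent idiom Future { blocking { ... } }, which lets the scala-async dependency be dropped from Dependencies.scala. A minimal sketch of the idiom, separate from the patch itself (BlockingFutureSketch, readEventLog, and fetchLog are hypothetical stand-ins for the HDFS read that sparkUtils.withEventLog performs):

    import scala.concurrent.{ExecutionContext, Future, blocking}

    object BlockingFutureSketch {
      // Hypothetical stand-in for a blocking call such as an HDFS event-log read.
      private def readEventLog(path: String): String = {
        Thread.sleep(100) // simulate blocking I/O
        s"contents of $path"
      }

      // Future schedules the body on the implicit ExecutionContext;
      // blocking marks the enclosed region as potentially thread-blocking.
      def fetchLog(path: String)(implicit ec: ExecutionContext): Future[String] =
        Future {
          blocking {
            readEventLog(path)
          }
        }
    }

The blocking marker is why this is more than a cosmetic swap: async { ... } wrapped its body in a Future without signaling that the body blocks, whereas blocking lets a BlockContext-aware pool such as scala.concurrent.ExecutionContext.global spawn compensating threads instead of letting blocked tasks starve the pool.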