Skip to content

Commit

Permalink
LIVY-225. Fix two issues running integration test with Spark 2.0.
Browse files Browse the repository at this point in the history
1. Create a dummy jar to let Spark yarn#client correctly launch the application.
2. Since `SQLContext` is deprecated in Spark 2.0, additional information is printed out when it is created. Change the regex pattern accordingly.

Closes apache#213
  • Loading branch information
alex-the-man authored Oct 28, 2016
1 parent 5e8474e commit e7e929f
Show file tree
Hide file tree
Showing 2 changed files with 9 additions and 3 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@
package com.cloudera.livy.test.framework

import java.io._
import java.nio.file.{Files, Paths}
import javax.servlet.http.HttpServletResponse

import scala.concurrent.duration._
Expand Down Expand Up @@ -230,9 +231,14 @@ class MiniCluster(config: Map[String, String]) extends Cluster with MiniClusterU

// When running a real Spark cluster, don't set the classpath.
val extraCp = if (!isRealSpark()) {
val dummyJar = Files.createTempFile(Paths.get(tempDir.toURI), "dummy", "jar").toFile
Map(
SparkLauncher.DRIVER_EXTRA_CLASSPATH -> childClasspath,
SparkLauncher.EXECUTOR_EXTRA_CLASSPATH -> childClasspath)
SparkLauncher.EXECUTOR_EXTRA_CLASSPATH -> childClasspath,
// Used for Spark 2.0. In yarn mode, Spark 2.0 uploads the specified jars to the
// distributed cache; if none are specified, it falls back to the jars folder. Since
// the jars folder does not exist here, it would throw an exception.
"spark.yarn.jars" -> dummyJar.getAbsolutePath)
} else {
Map()
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -37,8 +37,8 @@ class InteractiveIT extends BaseIntegrationTestSuite {
s.run("""sc.getConf.get("spark.executor.instances")""").verifyResult("res1: String = 1")
s.run("sqlContext").verifyResult(startsWith("res2: org.apache.spark.sql.hive.HiveContext"))
s.run("val sql = new org.apache.spark.sql.SQLContext(sc)").verifyResult(
startsWith("sql: org.apache.spark.sql.SQLContext = org.apache.spark.sql.SQLContext"))

".*" + Pattern.quote(
"sql: org.apache.spark.sql.SQLContext = org.apache.spark.sql.SQLContext") + ".*")
s.run("abcde").verifyError(evalue = ".*?:[0-9]+: error: not found: value abcde.*")
s.run("throw new IllegalStateException()")
.verifyError(evalue = ".*java\\.lang\\.IllegalStateException.*")
Expand Down

0 comments on commit e7e929f

Please sign in to comment.