Skip to content

Commit

Permalink
[SPARK-12641] Remove unused code related to Hadoop 0.23
Browse files Browse the repository at this point in the history
Currently we don't support Hadoop 0.23, but there is some code related to it, so let's clean it up.

Author: Kousuke Saruta <[email protected]>

Closes apache#10590 from sarutak/SPARK-12641.
  • Loading branch information
sarutak authored and rxin committed Jan 5, 2016
1 parent 53beddc commit 8eb2dc7
Showing 1 changed file with 3 additions and 10 deletions.
13 changes: 3 additions & 10 deletions core/src/main/scala/org/apache/spark/util/Utils.scala
Original file line number Diff line number Diff line change
Expand Up @@ -662,9 +662,7 @@ private[spark] object Utils extends Logging {

private[spark] def isRunningInYarnContainer(conf: SparkConf): Boolean = {
// These environment variables are set by YARN.
// For Hadoop 0.23.X, we check for YARN_LOCAL_DIRS (we use this below in getYarnLocalDirs())
// For Hadoop 2.X, we check for CONTAINER_ID.
conf.getenv("CONTAINER_ID") != null || conf.getenv("YARN_LOCAL_DIRS") != null
conf.getenv("CONTAINER_ID") != null
}

/**
Expand Down Expand Up @@ -740,17 +738,12 @@ private[spark] object Utils extends Logging {
logError(s"Failed to create local root dir in $root. Ignoring this directory.")
None
}
}.toArray
}
}

/** Get the Yarn approved local directories. */
private def getYarnLocalDirs(conf: SparkConf): String = {
// Hadoop 0.23 and 2.x have different Environment variable names for the
// local dirs, so lets check both. We assume one of the 2 is set.
// LOCAL_DIRS => 2.X, YARN_LOCAL_DIRS => 0.23.X
val localDirs = Option(conf.getenv("YARN_LOCAL_DIRS"))
.getOrElse(Option(conf.getenv("LOCAL_DIRS"))
.getOrElse(""))
val localDirs = Option(conf.getenv("LOCAL_DIRS")).getOrElse("")

if (localDirs.isEmpty) {
throw new Exception("Yarn Local dirs can't be empty")
Expand Down

0 comments on commit 8eb2dc7

Please sign in to comment.