Spark dependency resolution based on version (linkedin#25)
tglstory authored and akshayrai committed Apr 27, 2016
1 parent 3cb34de commit 3729db4
Showing 1 changed file with 16 additions and 7 deletions.
23 changes: 16 additions & 7 deletions project/Dependencies.scala
@@ -41,6 +41,21 @@ object Dependencies {
     sparkVersion = System.getProperties.getProperty(SPARK_VERSION)
   }
 
+  val sparkExclusion = if (sparkVersion >= "1.5.0") {
+    "org.apache.spark" % "spark-core_2.10" % sparkVersion excludeAll(
+      ExclusionRule(organization = "com.typesafe.akka"),
+      ExclusionRule(organization = "org.apache.avro"),
+      ExclusionRule(organization = "org.apache.hadoop"),
+      ExclusionRule(organization = "net.razorvine")
+    )
+  } else {
+    "org.apache.spark" % "spark-core_2.10" % sparkVersion excludeAll(
+      ExclusionRule(organization = "org.apache.avro"),
+      ExclusionRule(organization = "org.apache.hadoop"),
+      ExclusionRule(organization = "net.razorvine")
+    )
+  }
+
   // Dependency coordinates
   var requiredDep = Seq(
     "com.google.code.gson" % "gson" % gsonVersion,
@@ -52,16 +67,10 @@ object Dependencies {
     "org.apache.hadoop" % "hadoop-common" % hadoopVersion % "compileonly",
     "org.apache.hadoop" % "hadoop-common" % hadoopVersion % Test,
     "org.apache.hadoop" % "hadoop-hdfs" % hadoopVersion % "compileonly",
-    // TODO: Cleanup Spark dependencies
-    "org.apache.spark" % "spark-core_2.10" % sparkVersion excludeAll(
-      ExclusionRule(organization = "org.apache.avro"),
-      ExclusionRule(organization = "org.apache.hadoop"),
-      ExclusionRule(organization = "net.razorvine")
-    ),
     "org.codehaus.jackson" % "jackson-mapper-asl" % jacksonMapperAslVersion,
     "org.jsoup" % "jsoup" % jsoupVersion,
     "org.mockito" % "mockito-core" % "1.10.19"
-  )
+  ) :+ sparkExclusion
 
   var dependencies = Seq(javaJdbc, javaEbean, cache)
   dependencies ++= requiredDep
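
For context, a minimal standalone sketch of the pattern this commit introduces: the Spark version is read from a JVM system property in the sbt build, the spark-core dependency is chosen with a larger exclusion list for 1.5.0 and later, and the result is appended to the dependency list with :+. The property key "spark.version" and the fallback value below are illustrative assumptions; the real build uses the SPARK_VERSION constant and surrounding code that this diff does not show.

import sbt._

object SparkDependencySketch {
  // Hypothetical property key and fallback; the actual build reads
  // System.getProperties via the SPARK_VERSION constant defined elsewhere.
  val sparkVersion: String = sys.props.getOrElse("spark.version", "1.4.0")

  // Version-gated dependency, mirroring the diff above. Note that >= on
  // strings is a lexicographic comparison, the same comparison the commit uses.
  val sparkExclusion: ModuleID = if (sparkVersion >= "1.5.0") {
    "org.apache.spark" % "spark-core_2.10" % sparkVersion excludeAll(
      ExclusionRule(organization = "com.typesafe.akka"),
      ExclusionRule(organization = "org.apache.avro"),
      ExclusionRule(organization = "org.apache.hadoop"),
      ExclusionRule(organization = "net.razorvine")
    )
  } else {
    "org.apache.spark" % "spark-core_2.10" % sparkVersion excludeAll(
      ExclusionRule(organization = "org.apache.avro"),
      ExclusionRule(organization = "org.apache.hadoop"),
      ExclusionRule(organization = "net.razorvine")
    )
  }

  // The commit appends the selected ModuleID to the existing Seq with :+ .
  val requiredDep: Seq[ModuleID] = Seq.empty[ModuleID] :+ sparkExclusion
}

With that in place, the Spark flavor could be picked per build invocation, e.g. sbt -Dspark.version=1.6.1 compile, again assuming "spark.version" is the key the SPARK_VERSION constant refers to.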