Skip to content

Commit

Permalink
Test against Spark 1.3.0 and 1.5.0-rc2 in Travis.
Browse files Browse the repository at this point in the history
  • Loading branch information
JoshRosen committed Aug 28, 2015
1 parent 173f288 commit e776048
Show file tree
Hide file tree
Showing 5 changed files with 159 additions and 102 deletions.
37 changes: 29 additions & 8 deletions .travis.yml
Original file line number Diff line number Diff line change
@@ -1,13 +1,34 @@
language: scala
scala:
- 2.10.4
jdk:
- openjdk7
- openjdk6
sudo: false
cache:
directories:
- $HOME/.ivy2
matrix:
include:
# Spark 1.3.0
- jdk: openjdk6
scala: 2.10.4
env: TEST_SPARK_VERSION="1.3.0"
- jdk: openjdk6
scala: 2.11.6
env: TEST_SPARK_VERSION="1.3.0"
# Spark 1.4.0
- jdk: openjdk6
scala: 2.11.6
env: TEST_SPARK_VERSION="1.4.0"
- jdk: openjdk7
scala: 2.11.6
env: TEST_SPARK_VERSION="1.4.0"
# Spark 1.5.0
# TODO: after 1.5.0 is released, update this to use the released version
- jdk: openjdk7
scala: 2.10.4
env: TEST_SPARK_VERSION="1.5.0-rc2"
- jdk: openjdk7
scala: 2.11.6
env: TEST_SPARK_VERSION="1.5.0-rc2"
script:
- sbt -jvm-opts travis/jvmopts.compile compile
- sbt -jvm-opts travis/jvmopts.test coverage test
- sbt -jvm-opts travis/jvmopts.test scalastyle
- sbt ++$TRAVIS_SCALA_VERSION -Dspark.testVersion=$TEST_SPARK_VERSION coverage test
- sbt ++$TRAVIS_SCALA_VERSION scalastyle
after_success:
- bash <(curl -s https://codecov.io/bash)
38 changes: 24 additions & 14 deletions build.sbt
Original file line number Diff line number Diff line change
Expand Up @@ -6,22 +6,40 @@ organization := "com.databricks"

scalaVersion := "2.11.6"

parallelExecution in Test := false
spName := "databricks/spark-csv"

crossScalaVersions := Seq("2.10.4", "2.11.6")

libraryDependencies += "org.apache.commons" % "commons-csv" % "1.1"
sparkVersion := "1.4.0"

libraryDependencies += "com.univocity" % "univocity-parsers" % "1.5.1"
val testSparkVersion = settingKey[String]("The version of Spark to test against.")

libraryDependencies += "org.slf4j" % "slf4j-api" % "1.7.5" % "provided"
testSparkVersion := sys.props.get("spark.testVersion").getOrElse(sparkVersion.value)

resolvers ++= Seq(
"Apache Staging" at "https://repository.apache.org/content/repositories/staging/",
"Typesafe" at "http://repo.typesafe.com/typesafe/releases",
"Local Maven Repository" at "file://"+Path.userHome.absolutePath+"/.m2/repository"
)

// TODO: remove once Spark 1.5.0 is released.
resolvers += "Spark 1.5.0 RC2 Staging" at "https://repository.apache.org/content/repositories/orgapachespark-1141"

sparkComponents := Seq("core", "sql")

libraryDependencies ++= Seq(
"org.apache.commons" % "commons-csv" % "1.1",
"com.univocity" % "univocity-parsers" % "1.5.1",
"org.slf4j" % "slf4j-api" % "1.7.5" % "provided",
"org.scalatest" %% "scalatest" % "2.2.1" % "test",
"com.novocode" % "junit-interface" % "0.9" % "test"
)

libraryDependencies ++= Seq(
"org.apache.spark" %% "spark-core" % testSparkVersion.value % "test" force(),
"org.apache.spark" %% "spark-sql" % testSparkVersion.value % "test" force()
)

publishMavenStyle := true

spAppendScalaVersion := true
Expand Down Expand Up @@ -57,17 +75,9 @@ pomExtra := (
</developer>
</developers>)

spName := "databricks/spark-csv"

sparkVersion := "1.4.0"

sparkComponents += "sql"

libraryDependencies += "org.scalatest" %% "scalatest" % "2.2.1" % "test"

libraryDependencies += "com.novocode" % "junit-interface" % "0.9" % "test"
parallelExecution in Test := false

ScoverageSbtPlugin.ScoverageKeys.coverageHighlighting := {
if (scalaBinaryVersion.value == "2.10") false
else false
else true
}
7 changes: 3 additions & 4 deletions src/test/java/com/databricks/spark/csv/JavaCsvSuite.java
Original file line number Diff line number Diff line change
Expand Up @@ -2,15 +2,14 @@

import java.io.File;
import java.util.HashMap;
import java.util.Random;

import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;

import org.apache.spark.SparkContext;
import org.apache.spark.sql.*;
import org.apache.spark.sql.test.TestSQLContext$;

public class JavaCsvSuite {
private transient SQLContext sqlContext;
Expand All @@ -22,12 +21,12 @@ public class JavaCsvSuite {

@Before
public void setUp() {
// Trigger static initializer of TestData
sqlContext = TestSQLContext$.MODULE$;
sqlContext = new SQLContext(new SparkContext("local[2]", "JavaCsvSuite"));
}

@After
public void tearDown() {
sqlContext.sparkContext().stop();
sqlContext = null;
}

Expand Down
Loading

0 comments on commit e776048

Please sign in to comment.