Skip to content

Commit

Permalink
[SPARK-15424][SPARK-15437][SPARK-14807][SQL] Revert Create a hivecontext-compatibility module
Browse files Browse the repository at this point in the history

## What changes were proposed in this pull request?
I initially asked to create a hivecontext-compatibility module to put the HiveContext there. But we are so close to the Spark 2.0 release, and there is only a single class in it. Having an entire module for a single class seems like overkill and only makes things more inconvenient.

## How was this patch tested?
Tests were moved.

Author: Reynold Xin <[email protected]>

Closes apache#13207 from rxin/SPARK-15424.
  • Loading branch information
rxin committed May 21, 2016
1 parent 021c197 commit 45b7557
Show file tree
Hide file tree
Showing 7 changed files with 6 additions and 77 deletions.
2 changes: 1 addition & 1 deletion dev/run-tests.py
Original file line number Diff line number Diff line change
Expand Up @@ -110,7 +110,7 @@ def determine_modules_to_test(changed_modules):
['graphx', 'examples']
>>> x = [x.name for x in determine_modules_to_test([modules.sql])]
>>> x # doctest: +NORMALIZE_WHITESPACE
['sql', 'hive', 'mllib', 'examples', 'hive-thriftserver', 'hivecontext-compatibility',
['sql', 'hive', 'mllib', 'examples', 'hive-thriftserver',
'pyspark-sql', 'sparkr', 'pyspark-mllib', 'pyspark-ml']
"""
modules_to_test = set()
Expand Down
12 changes: 0 additions & 12 deletions dev/sparktestsupport/modules.py
Original file line number Diff line number Diff line change
Expand Up @@ -158,18 +158,6 @@ def __hash__(self):
)


hivecontext_compatibility = Module(
name="hivecontext-compatibility",
dependencies=[hive],
source_file_regexes=[
"sql/hivecontext-compatibility/",
],
sbt_test_goals=[
"hivecontext-compatibility/test"
]
)


sketch = Module(
name="sketch",
dependencies=[tags],
Expand Down
1 change: 0 additions & 1 deletion pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -100,7 +100,6 @@
<module>sql/catalyst</module>
<module>sql/core</module>
<module>sql/hive</module>
<module>sql/hivecontext-compatibility</module>
<module>assembly</module>
<module>external/flume</module>
<module>external/flume-sink</module>
Expand Down
6 changes: 3 additions & 3 deletions project/SparkBuild.scala
Original file line number Diff line number Diff line change
Expand Up @@ -39,8 +39,8 @@ object BuildCommons {

private val buildLocation = file(".").getAbsoluteFile.getParentFile

val sqlProjects@Seq(catalyst, sql, hive, hiveThriftServer, hiveCompatibility) = Seq(
"catalyst", "sql", "hive", "hive-thriftserver", "hivecontext-compatibility"
val sqlProjects@Seq(catalyst, sql, hive, hiveThriftServer) = Seq(
"catalyst", "sql", "hive", "hive-thriftserver"
).map(ProjectRef(buildLocation, _))

val streamingProjects@Seq(
Expand Down Expand Up @@ -339,7 +339,7 @@ object SparkBuild extends PomBuild {

val mimaProjects = allProjects.filterNot { x =>
Seq(
spark, hive, hiveThriftServer, hiveCompatibility, catalyst, repl, networkCommon, networkShuffle, networkYarn,
spark, hive, hiveThriftServer, catalyst, repl, networkCommon, networkShuffle, networkYarn,
unsafe, tags, sketch, mllibLocal
).contains(x)
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@ package org.apache.spark.sql.hive

import org.scalatest.BeforeAndAfterEach

import org.apache.spark.{SparkContext, SparkFunSuite}
import org.apache.spark.{SparkConf, SparkContext, SparkFunSuite}


class HiveContextCompatibilitySuite extends SparkFunSuite with BeforeAndAfterEach {
Expand All @@ -29,7 +29,7 @@ class HiveContextCompatibilitySuite extends SparkFunSuite with BeforeAndAfterEac

override def beforeAll(): Unit = {
super.beforeAll()
sc = new SparkContext("local[4]", "test")
sc = SparkContext.getOrCreate(new SparkConf().setMaster("local").setAppName("test"))
HiveUtils.newTemporaryConfiguration(useInMemoryDerby = true).foreach { case (k, v) =>
sc.hadoopConfiguration.set(k, v)
}
Expand All @@ -47,7 +47,6 @@ class HiveContextCompatibilitySuite extends SparkFunSuite with BeforeAndAfterEac

override def afterAll(): Unit = {
try {
sc.stop()
sc = null
hc = null
} finally {
Expand Down
57 changes: 0 additions & 57 deletions sql/hivecontext-compatibility/pom.xml

This file was deleted.

0 comments on commit 45b7557

Please sign in to comment.