[SPARK-3873][TESTS] Import ordering fixes.
Author: Marcelo Vanzin <[email protected]>

Closes apache#10582 from vanzin/SPARK-3873-tests.
Marcelo Vanzin committed Jan 6, 2016
1 parent 7a375bb commit b3ba1be
Showing 281 changed files with 517 additions and 575 deletions.
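
Every hunk below applies the same convention, so it is worth stating once. As far as it can be read off the changes themselves, the expected layout is: java/javax imports first, then scala, then third-party libraries, then org.apache.spark, with one blank line between groups and entries (including the names inside braces) sorted alphabetically. A minimal reference sketch, not a file from this commit, although every import in it appears somewhere in the diff:

// Reference sketch only: group order is java/javax, scala, third-party,
// org.apache.spark; blank line between groups; alphabetical within a group
// and within braces.
import java.io.File
import java.util.concurrent.{Executors, TimeUnit}

import scala.collection.mutable
import scala.util.{Random, Try}

import com.google.common.io.Files
import org.mockito.Mockito.{mock, spy, verify, when}
import org.scalatest.BeforeAndAfter

import org.apache.spark.rdd.{RDD, ReliableRDDCheckpointData}
import org.apache.spark.util.Utils
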
10 changes: 3 additions & 7 deletions core/src/test/scala/org/apache/spark/ContextCleanerSuite.scala
@@ -24,18 +24,14 @@ import scala.language.existentials
 import scala.util.Random
 
 import org.scalatest.BeforeAndAfter
-import org.scalatest.concurrent.PatienceConfiguration
 import org.scalatest.concurrent.Eventually._
+import org.scalatest.concurrent.PatienceConfiguration
 import org.scalatest.time.SpanSugar._
 
-import org.apache.spark.rdd.{ReliableRDDCheckpointData, RDD}
-import org.apache.spark.storage._
+import org.apache.spark.rdd.{RDD, ReliableRDDCheckpointData}
 import org.apache.spark.shuffle.hash.HashShuffleManager
 import org.apache.spark.shuffle.sort.SortShuffleManager
-import org.apache.spark.storage.BroadcastBlockId
-import org.apache.spark.storage.RDDBlockId
-import org.apache.spark.storage.ShuffleBlockId
-import org.apache.spark.storage.ShuffleIndexBlockId
+import org.apache.spark.storage._
 
 /**
  * An abstract base class for context cleaner tests, which sets up a context with a config
@@ -20,6 +20,7 @@ package org.apache.spark
 import scala.collection.mutable
 
 import org.scalatest.{BeforeAndAfter, PrivateMethodTester}
+
 import org.apache.spark.executor.TaskMetrics
 import org.apache.spark.scheduler._
 import org.apache.spark.scheduler.cluster.ExecutorInfo
4 changes: 2 additions & 2 deletions core/src/test/scala/org/apache/spark/FailureSuite.scala
@@ -17,10 +17,10 @@
 
 package org.apache.spark
 
-import org.apache.spark.util.NonSerializable
-
 import java.io.{IOException, NotSerializableException, ObjectInputStream}
 
+import org.apache.spark.util.NonSerializable
+
 // Common state shared by FailureSuite-launched tasks. We use a global object
 // for this because any local variables used in the task closures will rightfully
 // be copied for each task, so there's no other way for them to share state.
4 changes: 2 additions & 2 deletions core/src/test/scala/org/apache/spark/FileServerSuite.scala
@@ -27,10 +27,10 @@ import org.apache.commons.lang3.RandomUtils
 
 import org.apache.spark.util.Utils
 
-import SSLSampleConfigs._
-
 class FileServerSuite extends SparkFunSuite with LocalSparkContext {
 
+  import SSLSampleConfigs._
+
   @transient var tmpDir: File = _
   @transient var tmpFile: File = _
   @transient var tmpJarUrl: String = _
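
The FileServerSuite hunk just above does something the other hunks don't: rather than reordering `import SSLSampleConfigs._`, it moves the import inside the class. A plausible reading is that this import is relative to the enclosing `org.apache.spark` package, so it has no natural place among the absolute import groups; scoping it to the class keeps the top-level list uniform. A self-contained sketch of the pattern, with hypothetical names standing in for the real helper:

package org.apache.spark

// Hypothetical stand-in for the real SSLSampleConfigs helper object.
object SampleConfigs {
  val sampleConfPath = "ssl.conf"
}

class FileServerSuiteLike {
  // A class-scoped import is visible to every member below it, and it no
  // longer competes with the file's absolute imports for ordering.
  import SampleConfigs._

  def configPath: String = sampleConfPath
}
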
9 changes: 4 additions & 5 deletions core/src/test/scala/org/apache/spark/FileSuite.scala
@@ -21,17 +21,16 @@ import java.io.{File, FileWriter}
 
 import scala.io.Source
 
-import org.apache.spark.input.PortableDataStream
-import org.apache.spark.storage.StorageLevel
-
 import org.apache.hadoop.io._
 import org.apache.hadoop.io.compress.DefaultCodec
-import org.apache.hadoop.mapred.{JobConf, FileAlreadyExistsException, FileSplit, TextInputFormat, TextOutputFormat}
+import org.apache.hadoop.mapred.{FileAlreadyExistsException, FileSplit, JobConf, TextInputFormat, TextOutputFormat}
 import org.apache.hadoop.mapreduce.Job
 import org.apache.hadoop.mapreduce.lib.input.{FileSplit => NewFileSplit, TextInputFormat => NewTextInputFormat}
 import org.apache.hadoop.mapreduce.lib.output.{TextOutputFormat => NewTextOutputFormat}
 
-import org.apache.spark.rdd.{NewHadoopRDD, HadoopRDD}
+import org.apache.spark.input.PortableDataStream
+import org.apache.spark.rdd.{HadoopRDD, NewHadoopRDD}
+import org.apache.spark.storage.StorageLevel
 import org.apache.spark.util.Utils
 
 class FileSuite extends SparkFunSuite with LocalSparkContext {
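
The FileSuite hunk keeps the `=> NewFileSplit`-style rename imports untouched; only the brace contents and the group placement change. For readers unfamiliar with the syntax, a small sketch, deliberately unrelated to Hadoop, of how a rename import lets two same-named classes coexist in one file:

import java.util.Arrays
import java.util.{List => JList}

object RenameImportDemo {
  def main(args: Array[String]): Unit = {
    // java.util.List is usable under its alias...
    val javaList: JList[Int] = Arrays.asList(1, 2, 3)
    // ...while scala.List keeps the bare name.
    val scalaList: List[Int] = List(4, 5, 6)
    println(javaList.size + scalaList.size)
  }
}
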
@@ -25,13 +25,13 @@ import scala.concurrent.Await
 import scala.concurrent.duration._
 import scala.language.postfixOps
 
-import org.scalatest.{BeforeAndAfterEach, PrivateMethodTester}
-import org.mockito.Mockito.{mock, spy, verify, when}
 import org.mockito.Matchers
 import org.mockito.Matchers._
+import org.mockito.Mockito.{mock, spy, verify, when}
+import org.scalatest.{BeforeAndAfterEach, PrivateMethodTester}
 
 import org.apache.spark.executor.TaskMetrics
-import org.apache.spark.rpc.{RpcCallContext, RpcEndpoint, RpcEnv, RpcEndpointRef}
+import org.apache.spark.rpc.{RpcCallContext, RpcEndpoint, RpcEndpointRef, RpcEnv}
 import org.apache.spark.scheduler._
 import org.apache.spark.scheduler.cluster.CoarseGrainedClusterMessages._
 import org.apache.spark.scheduler.cluster.CoarseGrainedSchedulerBackend
@@ -17,7 +17,7 @@
 
 package org.apache.spark
 
-import _root_.io.netty.util.internal.logging.{Slf4JLoggerFactory, InternalLoggerFactory}
+import _root_.io.netty.util.internal.logging.{InternalLoggerFactory, Slf4JLoggerFactory}
 import org.scalatest.BeforeAndAfterAll
 import org.scalatest.BeforeAndAfterEach
 import org.scalatest.Suite
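
The `_root_` prefix in the hunk above is unrelated to the ordering fix but easy to misread. Inside `package org.apache.spark`, a bare `import io.netty...` would resolve against the nested `org.apache.spark.io` package, so the import is anchored at the absolute root of the package hierarchy. A minimal, self-contained reproduction with hypothetical packages:

// Without _root_, the bare name `io` inside package a.b resolves to the
// sibling package a.b.io, not the top-level io package.
package a.b.io { object Marker { val origin = "nested a.b.io" } }

package io { object Marker { val origin = "top-level io" } }

package a.b {
  object RootImportDemo {
    import _root_.io.Marker // forces resolution from the root
    def main(args: Array[String]): Unit = println(Marker.origin) // "top-level io"
  }
}
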
@@ -19,10 +19,10 @@ package org.apache.spark
 
 import scala.collection.mutable.ArrayBuffer
 
-import org.mockito.Mockito._
 import org.mockito.Matchers.{any, isA}
+import org.mockito.Mockito._
 
-import org.apache.spark.rpc.{RpcAddress, RpcEndpointRef, RpcCallContext, RpcEnv}
+import org.apache.spark.rpc.{RpcAddress, RpcCallContext, RpcEndpointRef, RpcEnv}
 import org.apache.spark.scheduler.{CompressedMapStatus, MapStatus}
 import org.apache.spark.shuffle.FetchFailedException
 import org.apache.spark.storage.{BlockManagerId, ShuffleBlockId}
3 changes: 2 additions & 1 deletion core/src/test/scala/org/apache/spark/SSLOptionsSuite.scala
@@ -21,9 +21,10 @@ import java.io.File
 import javax.net.ssl.SSLContext
 
 import com.google.common.io.Files
-import org.apache.spark.util.Utils
 import org.scalatest.BeforeAndAfterAll
 
+import org.apache.spark.util.Utils
+
 class SSLOptionsSuite extends SparkFunSuite with BeforeAndAfterAll {
 
   test("test resolving property file as spark conf ") {
6 changes: 3 additions & 3 deletions core/src/test/scala/org/apache/spark/ShuffleSuite.scala
@@ -17,17 +17,17 @@
 
 package org.apache.spark
 
-import java.util.concurrent.{Callable, Executors, ExecutorService, CyclicBarrier}
+import java.util.concurrent.{Callable, CyclicBarrier, Executors, ExecutorService}
 
 import org.scalatest.Matchers
 
 import org.apache.spark.ShuffleSuite.NonJavaSerializableClass
 import org.apache.spark.memory.TaskMemoryManager
 import org.apache.spark.rdd.{CoGroupedRDD, OrderedRDDFunctions, RDD, ShuffledRDD, SubtractedRDD}
-import org.apache.spark.scheduler.{MyRDD, MapStatus, SparkListener, SparkListenerTaskEnd}
+import org.apache.spark.scheduler.{MapStatus, MyRDD, SparkListener, SparkListenerTaskEnd}
 import org.apache.spark.serializer.KryoSerializer
 import org.apache.spark.shuffle.ShuffleWriter
-import org.apache.spark.storage.{ShuffleDataBlockId, ShuffleBlockId}
+import org.apache.spark.storage.{ShuffleBlockId, ShuffleDataBlockId}
 import org.apache.spark.util.MutablePair
 
 abstract class ShuffleSuite extends SparkFunSuite with Matchers with LocalSparkContext {
@@ -26,8 +26,8 @@ import org.apache.commons.io.filefilter.TrueFileFilter
 import org.scalatest.BeforeAndAfterAll
 
 import org.apache.spark.rdd.ShuffledRDD
-import org.apache.spark.shuffle.sort.SortShuffleManager
 import org.apache.spark.serializer.{JavaSerializer, KryoSerializer}
+import org.apache.spark.shuffle.sort.SortShuffleManager
 import org.apache.spark.util.Utils
 
 class SortShuffleSuite extends ShuffleSuite with BeforeAndAfterAll {
9 changes: 5 additions & 4 deletions core/src/test/scala/org/apache/spark/SparkConfSuite.scala
@@ -17,17 +17,18 @@
 
 package org.apache.spark
 
-import java.util.concurrent.{TimeUnit, Executors}
+import java.util.concurrent.{Executors, TimeUnit}
 
 import scala.collection.JavaConverters._
 import scala.concurrent.duration._
 import scala.language.postfixOps
-import scala.util.{Try, Random}
+import scala.util.{Random, Try}
 
+import com.esotericsoftware.kryo.Kryo
+
 import org.apache.spark.network.util.ByteUnit
 import org.apache.spark.serializer.{KryoRegistrator, KryoSerializer}
-import org.apache.spark.util.{RpcUtils, ResetSystemProperties}
-import com.esotericsoftware.kryo.Kryo
+import org.apache.spark.util.{ResetSystemProperties, RpcUtils}
 
 class SparkConfSuite extends SparkFunSuite with LocalSparkContext with ResetSystemProperties {
   test("Test byteString conversion") {
@@ -18,6 +18,7 @@
 package org.apache.spark
 
 import org.scalatest.Assertions
+
 import org.apache.spark.storage.StorageLevel
 
 class SparkContextInfoSuite extends SparkFunSuite with LocalSparkContext {
@@ -19,11 +19,11 @@ package org.apache.spark
 
 import org.scalatest.PrivateMethodTester
 
-import org.apache.spark.util.Utils
 import org.apache.spark.scheduler.{SchedulerBackend, TaskScheduler, TaskSchedulerImpl}
 import org.apache.spark.scheduler.cluster.{SimrSchedulerBackend, SparkDeploySchedulerBackend}
 import org.apache.spark.scheduler.cluster.mesos.{CoarseMesosSchedulerBackend, MesosSchedulerBackend}
 import org.apache.spark.scheduler.local.LocalBackend
+import org.apache.spark.util.Utils
 
 class SparkContextSchedulerCreationSuite
   extends SparkFunSuite with LocalSparkContext with PrivateMethodTester with Logging {
10 changes: 5 additions & 5 deletions core/src/test/scala/org/apache/spark/SparkContextSuite.scala
@@ -20,18 +20,18 @@ package org.apache.spark
 import java.io.File
 import java.util.concurrent.TimeUnit
 
+import scala.concurrent.Await
+import scala.concurrent.duration.Duration
+
 import com.google.common.base.Charsets._
 import com.google.common.io.Files
-
 import org.apache.hadoop.io.{BytesWritable, LongWritable, Text}
 import org.apache.hadoop.mapred.TextInputFormat
 import org.apache.hadoop.mapreduce.lib.input.{TextInputFormat => NewTextInputFormat}
-import org.apache.spark.util.Utils
-
-import scala.concurrent.Await
-import scala.concurrent.duration.Duration
 import org.scalatest.Matchers._
 
+import org.apache.spark.util.Utils
+
 class SparkContextSuite extends SparkFunSuite with LocalSparkContext {
 
   test("Only one SparkContext may be active at a time") {
5 changes: 2 additions & 3 deletions core/src/test/scala/org/apache/spark/ThreadingSuite.scala
@@ -17,9 +17,8 @@
 
 package org.apache.spark
 
-import java.util.concurrent.{TimeUnit, Semaphore}
-import java.util.concurrent.atomic.AtomicBoolean
-import java.util.concurrent.atomic.AtomicInteger
+import java.util.concurrent.{Semaphore, TimeUnit}
+import java.util.concurrent.atomic.{AtomicBoolean, AtomicInteger}
 
 import org.apache.spark.scheduler._
 
@@ -17,9 +17,9 @@
 
 package org.apache.spark.api.python
 
-import scala.io.Source
+import java.io.{File, PrintWriter}
 
-import java.io.{PrintWriter, File}
+import scala.io.Source
 
 import org.scalatest.Matchers
 
@@ -20,9 +20,9 @@ package org.apache.spark.deploy
 import java.io.File
 import java.util.Date
 
+import org.apache.spark.{SecurityManager, SparkConf}
 import org.apache.spark.deploy.master.{ApplicationInfo, DriverInfo, WorkerInfo}
 import org.apache.spark.deploy.worker.{DriverRunner, ExecutorRunner}
-import org.apache.spark.{SecurityManager, SparkConf}
 
 private[deploy] object DeployTestUtils {
   def createAppDesc(): ApplicationDescription = {
@@ -24,10 +24,8 @@ import java.util.jar.Manifest
 
 import scala.collection.mutable.ArrayBuffer
 
-import com.google.common.io.{Files, ByteStreams}
-
+import com.google.common.io.{ByteStreams, Files}
 import org.apache.commons.io.FileUtils
-
 import org.apache.ivy.core.settings.IvySettings
 
 import org.apache.spark.TestUtils.{createCompiledClass, JavaSourceFromString}
@@ -23,10 +23,10 @@ import com.fasterxml.jackson.core.JsonParseException
 import org.json4s._
 import org.json4s.jackson.JsonMethods
 
+import org.apache.spark.{JsonTestUtils, SparkFunSuite}
 import org.apache.spark.deploy.DeployMessages.{MasterStateResponse, WorkerStateResponse}
 import org.apache.spark.deploy.master.{ApplicationInfo, RecoveryState}
 import org.apache.spark.deploy.worker.ExecutorRunner
-import org.apache.spark.{JsonTestUtils, SparkFunSuite}
 
 class JsonProtocolSuite extends SparkFunSuite with JsonTestUtils {
 
@@ -22,9 +22,9 @@ import java.net.URL
 import scala.collection.mutable
 import scala.io.Source
 
-import org.apache.spark.scheduler.cluster.ExecutorInfo
-import org.apache.spark.scheduler.{SparkListenerExecutorAdded, SparkListener}
 import org.apache.spark.{LocalSparkContext, SparkConf, SparkContext, SparkFunSuite}
+import org.apache.spark.scheduler.{SparkListener, SparkListenerExecutorAdded}
+import org.apache.spark.scheduler.cluster.ExecutorInfo
 import org.apache.spark.util.SparkConfWithEnv
 
 class LogUrlsStandaloneSuite extends SparkFunSuite with LocalSparkContext {
@@ -17,10 +17,10 @@
 
 package org.apache.spark.deploy
 
-import java.io.{PrintStream, OutputStream, File}
+import java.io.{File, OutputStream, PrintStream}
 import java.net.URI
-import java.util.jar.Attributes.Name
 import java.util.jar.{JarFile, Manifest}
+import java.util.jar.Attributes.Name
 import java.util.zip.ZipFile
 
 import scala.collection.JavaConverters._
@@ -17,14 +17,14 @@
 
 package org.apache.spark.deploy
 
-import java.io.{File, PrintStream, OutputStream}
+import java.io.{File, OutputStream, PrintStream}
 
 import scala.collection.mutable.ArrayBuffer
-import org.scalatest.BeforeAndAfterAll
 
 import org.apache.ivy.core.module.descriptor.MDArtifact
 import org.apache.ivy.core.settings.IvySettings
 import org.apache.ivy.plugins.resolver.{AbstractResolver, FileSystemResolver, IBiblioResolver}
+import org.scalatest.BeforeAndAfterAll
 
 import org.apache.spark.SparkFunSuite
 import org.apache.spark.deploy.SparkSubmitUtils.MavenCoordinate
@@ -23,8 +23,8 @@ import java.net.URI
 import java.util.concurrent.TimeUnit
 import java.util.zip.{ZipInputStream, ZipOutputStream}
 
-import scala.io.Source
 import scala.concurrent.duration._
+import scala.io.Source
 import scala.language.postfixOps
 
 import com.google.common.base.Charsets
@@ -25,7 +25,7 @@ import org.apache.curator.test.TestingServer
 
 import org.apache.spark.{SecurityManager, SparkConf, SparkFunSuite}
 import org.apache.spark.rpc.{RpcEndpoint, RpcEnv}
-import org.apache.spark.serializer.{Serializer, JavaSerializer}
+import org.apache.spark.serializer.{JavaSerializer, Serializer}
 import org.apache.spark.util.Utils
 
 class PersistenceEngineSuite extends SparkFunSuite {
@@ -23,11 +23,11 @@ import scala.io.Source
 import scala.language.postfixOps
 
 import org.json4s.jackson.JsonMethods._
-import org.json4s.JsonAST.{JNothing, JString, JInt}
+import org.json4s.JsonAST.{JInt, JNothing, JString}
 import org.mockito.Mockito.{mock, when}
 import org.scalatest.BeforeAndAfter
 
-import org.apache.spark.{SparkConf, SecurityManager, SparkFunSuite}
+import org.apache.spark.{SecurityManager, SparkConf, SparkFunSuite}
 import org.apache.spark.deploy.DeployMessages.MasterStateResponse
 import org.apache.spark.deploy.DeployTestUtils._
 import org.apache.spark.deploy.master._
@@ -24,16 +24,16 @@ import javax.servlet.http.HttpServletResponse
 import scala.collection.mutable
 
 import com.google.common.base.Charsets
-import org.scalatest.BeforeAndAfterEach
 import org.json4s.JsonAST._
 import org.json4s.jackson.JsonMethods._
+import org.scalatest.BeforeAndAfterEach
 
 import org.apache.spark._
-import org.apache.spark.rpc._
-import org.apache.spark.util.Utils
-import org.apache.spark.deploy.DeployMessages._
 import org.apache.spark.deploy.{SparkSubmit, SparkSubmitArguments}
+import org.apache.spark.deploy.DeployMessages._
 import org.apache.spark.deploy.master.DriverState._
+import org.apache.spark.rpc._
+import org.apache.spark.util.Utils
 
 /**
  * Tests for the REST application submission protocol used in standalone cluster mode.
@@ -17,10 +17,11 @@
 
 package org.apache.spark.deploy.worker
 
+import org.scalatest.{Matchers, PrivateMethodTester}
+
 import org.apache.spark.{SecurityManager, SparkConf, SparkFunSuite}
 import org.apache.spark.deploy.Command
 import org.apache.spark.util.Utils
-import org.scalatest.{Matchers, PrivateMethodTester}
 
 class CommandUtilsSuite extends SparkFunSuite with Matchers with PrivateMethodTester {
 
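
These test-side fixes exist so that an automated check can be turned on; the SPARK-3873 umbrella is about enforcing import order mechanically rather than in review. The real rule lives in Spark's style-checker configuration, not in this commit, so the following is only a rough, self-contained sketch of the kind of check involved: classify each import into a group, then require the group sequence to be monotonic and each group internally sorted.

object ImportOrderSketch {
  // Group indices mirror the layout the hunks above converge on:
  // 0 = java/javax, 1 = scala, 2 = third-party, 3 = org.apache.spark.
  private def groupOf(path: String): Int =
    if (path.startsWith("java.") || path.startsWith("javax.")) 0
    else if (path.startsWith("scala.")) 1
    else if (path.startsWith("org.apache.spark.")) 3
    else 2

  /** Returns a complaint for each adjacent pair of imports that is out of order. */
  def check(lines: Seq[String]): Seq[String] = {
    val imports = lines.map(_.trim)
      .filter(_.startsWith("import "))
      .map(_.stripPrefix("import "))
    imports.sliding(2).collect {
      case Seq(a, b) if groupOf(a) > groupOf(b) =>
        s"'$b' belongs in an earlier group than '$a'"
      case Seq(a, b) if groupOf(a) == groupOf(b) && a > b =>
        s"'$b' should be sorted before '$a'"
    }.toSeq
  }

  def main(args: Array[String]): Unit =
    check(Seq(
      "import org.apache.spark.util.Utils",
      "import java.io.File"
    )).foreach(println)
}

Run against the two-line input in main, the sketch prints a single complaint that java.io.File belongs in an earlier group. The real checker also enforces the blank line between groups, which this sketch omits.
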
… (the remaining changed files, out of 281 total, are not rendered here)
