[SQL] remove unnecessary import in spark-sql
Author: Jacky Li <[email protected]>

Closes apache#3630 from jackylk/remove and squashes the following commits:

150e7e0 [Jacky Li] remove unnecessary import
jackylk authored and JoshRosen committed Dec 9, 2014
1 parent cda94d1 commit 9443843
Showing 5 changed files with 3 additions and 9 deletions.
@@ -18,7 +18,7 @@
package org.apache.spark.sql

import java.util.{Map => JMap, List => JList}
-import java.io.StringWriter
+

import scala.collection.JavaConversions._

@@ -19,13 +19,11 @@ package org.apache.spark.sql.execution

import org.apache.spark.annotation.DeveloperApi
import org.apache.spark.rdd.RDD
-import org.apache.spark.sql.{DataType, StructType, Row, SQLContext}
+import org.apache.spark.sql.{StructType, Row, SQLContext}
import org.apache.spark.sql.catalyst.ScalaReflection
-import org.apache.spark.sql.catalyst.ScalaReflection.Schema
import org.apache.spark.sql.catalyst.analysis.MultiInstanceRelation
import org.apache.spark.sql.catalyst.expressions.{Attribute, GenericMutableRow}
import org.apache.spark.sql.catalyst.plans.logical.{LogicalPlan, Statistics}
-import org.apache.spark.sql.catalyst.types.UserDefinedType

/**
 * :: DeveloperApi ::
@@ -18,7 +18,6 @@
package org.apache.spark.sql.execution

import org.apache.spark.annotation.DeveloperApi
-import org.apache.spark.sql.SQLContext
import org.apache.spark.sql.catalyst.trees._
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.plans.physical._
@@ -22,14 +22,11 @@ import org.apache.spark.Logging
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.SQLContext
import org.apache.spark.sql.catalyst.{ScalaReflection, trees}
-import org.apache.spark.sql.catalyst.analysis.MultiInstanceRelation
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.expressions.codegen._
import org.apache.spark.sql.catalyst.plans.QueryPlan
import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
import org.apache.spark.sql.catalyst.plans.physical._


object SparkPlan {
  protected[sql] val currentContext = new ThreadLocal[SQLContext]()
}
@@ -23,7 +23,7 @@ import scala.collection.JavaConverters._

import org.apache.spark.sql._
import org.apache.spark.sql.api.java.{DataType => JDataType, StructField => JStructField,
-  MetadataBuilder => JMetaDataBuilder, UDTWrappers, JavaToScalaUDTWrapper}
+  MetadataBuilder => JMetaDataBuilder, UDTWrappers}
import org.apache.spark.sql.api.java.{DecimalType => JDecimalType}
import org.apache.spark.sql.catalyst.types.decimal.Decimal
import org.apache.spark.sql.catalyst.ScalaReflection

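Editorial aside, not part of the commit: unused imports like the ones removed above can be surfaced by the Scala compiler itself rather than found by hand. The sbt fragment below is a minimal sketch, assuming a Scala 2.11-or-later toolchain; the option names are standard scalac flags, not settings taken from Spark's build.

// build.sbt (illustrative sketch)
// Warn about imports that are never used, so stray imports are caught
// at compile time instead of lingering until a cleanup commit.
scalacOptions ++= Seq(
  "-Ywarn-unused-import", // Scala 2.11/2.12; on Scala 2.13 use "-Wunused:imports"
  "-Xfatal-warnings"      // optional: turn those warnings into build failures
)

With -Xfatal-warnings enabled, an import such as java.io.StringWriter fails the build as soon as it stops being referenced.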