[SPARK-4250] [SQL] Fix bug of constant null value mapping to ConstantObjectInspector

Author: Cheng Hao <[email protected]>

Closes apache#3114 from chenghao-intel/constant_null_oi and squashes the following commits:

e603bda [Cheng Hao] fix the bug of null value for primitive types
50a13ba [Cheng Hao] fix the timezone issue
f54f369 [Cheng Hao] fix bug of constant null value for ObjectInspector
chenghao-intel authored and marmbrus committed Nov 11, 2014
1 parent d793d80 commit fa77783
Showing 11 changed files with 199 additions and 86 deletions.
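Background on the bug being fixed: before this patch, toInspector matched a constant Literal on the runtime class of its value (for example, case Literal(value: Int, IntegerType)). A Scala type pattern never matches null, so a constant NULL produced by an expression such as IF(TRUE, CAST(NULL AS INT), CAST(1 AS INT)) fell through to the sys.error fallback instead of yielding a constant ObjectInspector. A minimal, dependency-free Scala sketch of that pitfall (illustrative only, not Spark code):

object NullLiteralPitfall extends App {
  // A type pattern such as `case v: Integer` does not match null, so a null
  // constant falls through -- the same shape of failure the old toInspector hit.
  def describe(value: Any): String = value match {
    case v: java.lang.Integer => s"int constant: $v"
    case _                    => "unmatched: null never satisfies a type pattern"
  }

  println(describe(1))    // int constant: 1
  println(describe(null)) // unmatched: null never satisfies a type pattern
}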
@@ -88,6 +88,7 @@ private[hive] trait HiveInspectors {
* @return convert the data into catalyst type
*/
def unwrap(data: Any, oi: ObjectInspector): Any = oi match {
case _ if data == null => null
case hvoi: HiveVarcharObjectInspector =>
if (data == null) null else hvoi.getPrimitiveJavaObject(data).getValue
case hdoi: HiveDecimalObjectInspector =>
@@ -250,46 +251,53 @@
}

def toInspector(expr: Expression): ObjectInspector = expr match {
case Literal(value: String, StringType) =>
HiveShim.getPrimitiveWritableConstantObjectInspector(value)
case Literal(value: Int, IntegerType) =>
HiveShim.getPrimitiveWritableConstantObjectInspector(value)
case Literal(value: Double, DoubleType) =>
HiveShim.getPrimitiveWritableConstantObjectInspector(value)
case Literal(value: Boolean, BooleanType) =>
HiveShim.getPrimitiveWritableConstantObjectInspector(value)
case Literal(value: Long, LongType) =>
HiveShim.getPrimitiveWritableConstantObjectInspector(value)
case Literal(value: Float, FloatType) =>
HiveShim.getPrimitiveWritableConstantObjectInspector(value)
case Literal(value: Short, ShortType) =>
HiveShim.getPrimitiveWritableConstantObjectInspector(value)
case Literal(value: Byte, ByteType) =>
HiveShim.getPrimitiveWritableConstantObjectInspector(value)
case Literal(value: Array[Byte], BinaryType) =>
HiveShim.getPrimitiveWritableConstantObjectInspector(value)
case Literal(value: java.sql.Date, DateType) =>
HiveShim.getPrimitiveWritableConstantObjectInspector(value)
case Literal(value: java.sql.Timestamp, TimestampType) =>
HiveShim.getPrimitiveWritableConstantObjectInspector(value)
case Literal(value: BigDecimal, DecimalType()) =>
HiveShim.getPrimitiveWritableConstantObjectInspector(value)
case Literal(value: Decimal, DecimalType()) =>
HiveShim.getPrimitiveWritableConstantObjectInspector(value.toBigDecimal)
case Literal(value, StringType) =>
HiveShim.getStringWritableConstantObjectInspector(value)
case Literal(value, IntegerType) =>
HiveShim.getIntWritableConstantObjectInspector(value)
case Literal(value, DoubleType) =>
HiveShim.getDoubleWritableConstantObjectInspector(value)
case Literal(value, BooleanType) =>
HiveShim.getBooleanWritableConstantObjectInspector(value)
case Literal(value, LongType) =>
HiveShim.getLongWritableConstantObjectInspector(value)
case Literal(value, FloatType) =>
HiveShim.getFloatWritableConstantObjectInspector(value)
case Literal(value, ShortType) =>
HiveShim.getShortWritableConstantObjectInspector(value)
case Literal(value, ByteType) =>
HiveShim.getByteWritableConstantObjectInspector(value)
case Literal(value, BinaryType) =>
HiveShim.getBinaryWritableConstantObjectInspector(value)
case Literal(value, DateType) =>
HiveShim.getDateWritableConstantObjectInspector(value)
case Literal(value, TimestampType) =>
HiveShim.getTimestampWritableConstantObjectInspector(value)
case Literal(value, DecimalType()) =>
HiveShim.getDecimalWritableConstantObjectInspector(value)
case Literal(_, NullType) =>
HiveShim.getPrimitiveNullWritableConstantObjectInspector
case Literal(value: Seq[_], ArrayType(dt, _)) =>
case Literal(value, ArrayType(dt, _)) =>
val listObjectInspector = toInspector(dt)
val list = new java.util.ArrayList[Object]()
value.foreach(v => list.add(wrap(v, listObjectInspector)))
ObjectInspectorFactory.getStandardConstantListObjectInspector(listObjectInspector, list)
case Literal(map: Map[_, _], MapType(keyType, valueType, _)) =>
val value = new java.util.HashMap[Object, Object]()
if (value == null) {
ObjectInspectorFactory.getStandardConstantListObjectInspector(listObjectInspector, null)
} else {
val list = new java.util.ArrayList[Object]()
value.asInstanceOf[Seq[_]].foreach(v => list.add(wrap(v, listObjectInspector)))
ObjectInspectorFactory.getStandardConstantListObjectInspector(listObjectInspector, list)
}
case Literal(value, MapType(keyType, valueType, _)) =>
val keyOI = toInspector(keyType)
val valueOI = toInspector(valueType)
map.foreach (entry => value.put(wrap(entry._1, keyOI), wrap(entry._2, valueOI)))
ObjectInspectorFactory.getStandardConstantMapObjectInspector(keyOI, valueOI, value)
case Literal(_, dt) => sys.error(s"Hive doesn't support the constant type [$dt].")
if (value == null) {
ObjectInspectorFactory.getStandardConstantMapObjectInspector(keyOI, valueOI, null)
} else {
val map = new java.util.HashMap[Object, Object]()
value.asInstanceOf[Map[_, _]].foreach (entry => {
map.put(wrap(entry._1, keyOI), wrap(entry._2, valueOI))
})
ObjectInspectorFactory.getStandardConstantMapObjectInspector(keyOI, valueOI, map)
}
case _ => toInspector(expr.dataType)
}

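Reading the hunk above: each per-type literal case (for example, case Literal(value: Int, IntegerType)) becomes a case that matches on the Catalyst data type alone and delegates to a type-named, Any-taking HiveShim helper, and the array and map constant cases now test the literal value for null before building the backing Java collection; the small hunk at the top of the file likewise makes unwrap short-circuit when the incoming data is null. A minimal sketch of the guard pattern with a hypothetical helper name, constantOrNull, which is not part of the Spark API:

// constantOrNull is a made-up name; it only illustrates the shape of the new
// null checks: box the constant when it is non-null, otherwise pass null
// through so the constant ObjectInspector can carry a null value.
def constantOrNull[A, B >: Null](value: Any)(box: A => B): B =
  if (value == null) null else box(value.asInstanceOf[A])

// constantOrNull[Int, java.lang.Integer](1)(Int.box)    == 1
// constantOrNull[Int, java.lang.Integer](null)(Int.box) == null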
@@ -0,0 +1 @@
1 NULL 1 NULL 1.0 NULL true NULL 1 NULL 1.0 NULL 1 NULL 1 NULL 1 NULL 1970-01-01 NULL 1969-12-31 16:00:00.001 NULL 1 NULL
@@ -0,0 +1 @@
There is no documentation for function 'if'
@@ -0,0 +1 @@
There is no documentation for function 'if'
Empty file.
@@ -0,0 +1 @@
1 1 1 1 NULL 2
Empty file.
@@ -0,0 +1 @@
128 1.1 ABC 12.3
@@ -18,6 +18,9 @@
package org.apache.spark.sql.hive.execution

import java.io.File
import java.util.{Locale, TimeZone}

import org.scalatest.BeforeAndAfter

import scala.util.Try

@@ -28,14 +31,59 @@ import org.apache.spark.sql.catalyst.plans.logical.Project
import org.apache.spark.sql.hive._
import org.apache.spark.sql.hive.test.TestHive
import org.apache.spark.sql.hive.test.TestHive._
import org.apache.spark.sql.{Row, SchemaRDD}
import org.apache.spark.sql.{SQLConf, Row, SchemaRDD}

case class TestData(a: Int, b: String)

/**
* A set of test cases expressed in Hive QL that are not covered by the tests included in the hive distribution.
*/
class HiveQuerySuite extends HiveComparisonTest {
class HiveQuerySuite extends HiveComparisonTest with BeforeAndAfter {
private val originalTimeZone = TimeZone.getDefault
private val originalLocale = Locale.getDefault

override def beforeAll() {
TestHive.cacheTables = true
// Timezone is fixed to America/Los_Angeles for those timezone sensitive tests (timestamp_*)
TimeZone.setDefault(TimeZone.getTimeZone("America/Los_Angeles"))
// Add Locale setting
Locale.setDefault(Locale.US)
}

override def afterAll() {
TestHive.cacheTables = false
TimeZone.setDefault(originalTimeZone)
Locale.setDefault(originalLocale)
}

createQueryTest("constant null testing",
"""SELECT
|IF(FALSE, CAST(NULL AS STRING), CAST(1 AS STRING)) AS COL1,
|IF(TRUE, CAST(NULL AS STRING), CAST(1 AS STRING)) AS COL2,
|IF(FALSE, CAST(NULL AS INT), CAST(1 AS INT)) AS COL3,
|IF(TRUE, CAST(NULL AS INT), CAST(1 AS INT)) AS COL4,
|IF(FALSE, CAST(NULL AS DOUBLE), CAST(1 AS DOUBLE)) AS COL5,
|IF(TRUE, CAST(NULL AS DOUBLE), CAST(1 AS DOUBLE)) AS COL6,
|IF(FALSE, CAST(NULL AS BOOLEAN), CAST(1 AS BOOLEAN)) AS COL7,
|IF(TRUE, CAST(NULL AS BOOLEAN), CAST(1 AS BOOLEAN)) AS COL8,
|IF(FALSE, CAST(NULL AS BIGINT), CAST(1 AS BIGINT)) AS COL9,
|IF(TRUE, CAST(NULL AS BIGINT), CAST(1 AS BIGINT)) AS COL10,
|IF(FALSE, CAST(NULL AS FLOAT), CAST(1 AS FLOAT)) AS COL11,
|IF(TRUE, CAST(NULL AS FLOAT), CAST(1 AS FLOAT)) AS COL12,
|IF(FALSE, CAST(NULL AS SMALLINT), CAST(1 AS SMALLINT)) AS COL13,
|IF(TRUE, CAST(NULL AS SMALLINT), CAST(1 AS SMALLINT)) AS COL14,
|IF(FALSE, CAST(NULL AS TINYINT), CAST(1 AS TINYINT)) AS COL15,
|IF(TRUE, CAST(NULL AS TINYINT), CAST(1 AS TINYINT)) AS COL16,
|IF(FALSE, CAST(NULL AS BINARY), CAST("1" AS BINARY)) AS COL17,
|IF(TRUE, CAST(NULL AS BINARY), CAST("1" AS BINARY)) AS COL18,
|IF(FALSE, CAST(NULL AS DATE), CAST("1970-01-01" AS DATE)) AS COL19,
|IF(TRUE, CAST(NULL AS DATE), CAST("1970-01-01" AS DATE)) AS COL20,
|IF(FALSE, CAST(NULL AS TIMESTAMP), CAST(1 AS TIMESTAMP)) AS COL21,
|IF(TRUE, CAST(NULL AS TIMESTAMP), CAST(1 AS TIMESTAMP)) AS COL22,
|IF(FALSE, CAST(NULL AS DECIMAL), CAST(1 AS DECIMAL)) AS COL23,
|IF(TRUE, CAST(NULL AS DECIMAL), CAST(1 AS DECIMAL)) AS COL24
|FROM src LIMIT 1""".stripMargin)

createQueryTest("constant array",
"""
|SELECT sort_array(
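In the test-suite hunk above, the JVM default time zone is pinned to America/Los_Angeles and the default locale to Locale.US for the whole suite (and restored in afterAll), so the DATE and TIMESTAMP columns of the new "constant null testing" golden answer render consistently in US Pacific time (hence 1969-12-31 16:00:00.001 rather than a UTC rendering). The committed test goes through createQueryTest and its golden file; a hedged sketch of checking one of these columns directly, assuming the TestHive sql helper imported in this file and a populated src table, e.g. inside a test body:

import org.apache.spark.sql.hive.test.TestHive._

val row = sql(
  "SELECT IF(TRUE, CAST(NULL AS INT), CAST(1 AS INT)) FROM src LIMIT 1").collect().head
assert(row.isNullAt(0), "a constant NULL branch should come back as a SQL NULL")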
@@ -57,54 +57,74 @@ private[hive] object HiveShim {
new TableDesc(serdeClass, inputFormatClass, outputFormatClass, properties)
}

def getPrimitiveWritableConstantObjectInspector(value: String): ObjectInspector =
def getStringWritableConstantObjectInspector(value: Any): ObjectInspector =
PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
PrimitiveCategory.STRING, new hadoopIo.Text(value))
PrimitiveCategory.STRING,
if (value == null) null else new hadoopIo.Text(value.asInstanceOf[String]))

def getPrimitiveWritableConstantObjectInspector(value: Int): ObjectInspector =
def getIntWritableConstantObjectInspector(value: Any): ObjectInspector =
PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
PrimitiveCategory.INT, new hadoopIo.IntWritable(value))
PrimitiveCategory.INT,
if (value == null) null else new hadoopIo.IntWritable(value.asInstanceOf[Int]))

def getPrimitiveWritableConstantObjectInspector(value: Double): ObjectInspector =
def getDoubleWritableConstantObjectInspector(value: Any): ObjectInspector =
PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
PrimitiveCategory.DOUBLE, new hiveIo.DoubleWritable(value))
PrimitiveCategory.DOUBLE,
if (value == null) null else new hiveIo.DoubleWritable(value.asInstanceOf[Double]))

def getPrimitiveWritableConstantObjectInspector(value: Boolean): ObjectInspector =
def getBooleanWritableConstantObjectInspector(value: Any): ObjectInspector =
PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
PrimitiveCategory.BOOLEAN, new hadoopIo.BooleanWritable(value))
PrimitiveCategory.BOOLEAN,
if (value == null) null else new hadoopIo.BooleanWritable(value.asInstanceOf[Boolean]))

def getPrimitiveWritableConstantObjectInspector(value: Long): ObjectInspector =
def getLongWritableConstantObjectInspector(value: Any): ObjectInspector =
PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
PrimitiveCategory.LONG, new hadoopIo.LongWritable(value))
PrimitiveCategory.LONG,
if (value == null) null else new hadoopIo.LongWritable(value.asInstanceOf[Long]))

def getPrimitiveWritableConstantObjectInspector(value: Float): ObjectInspector =
def getFloatWritableConstantObjectInspector(value: Any): ObjectInspector =
PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
PrimitiveCategory.FLOAT, new hadoopIo.FloatWritable(value))
PrimitiveCategory.FLOAT,
if (value == null) null else new hadoopIo.FloatWritable(value.asInstanceOf[Float]))

def getPrimitiveWritableConstantObjectInspector(value: Short): ObjectInspector =
def getShortWritableConstantObjectInspector(value: Any): ObjectInspector =
PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
PrimitiveCategory.SHORT, new hiveIo.ShortWritable(value))
PrimitiveCategory.SHORT,
if (value == null) null else new hiveIo.ShortWritable(value.asInstanceOf[Short]))

def getPrimitiveWritableConstantObjectInspector(value: Byte): ObjectInspector =
def getByteWritableConstantObjectInspector(value: Any): ObjectInspector =
PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
PrimitiveCategory.BYTE, new hiveIo.ByteWritable(value))
PrimitiveCategory.BYTE,
if (value == null) null else new hiveIo.ByteWritable(value.asInstanceOf[Byte]))

def getPrimitiveWritableConstantObjectInspector(value: Array[Byte]): ObjectInspector =
def getBinaryWritableConstantObjectInspector(value: Any): ObjectInspector =
PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
PrimitiveCategory.BINARY, new hadoopIo.BytesWritable(value))
PrimitiveCategory.BINARY,
if (value == null) null else new hadoopIo.BytesWritable(value.asInstanceOf[Array[Byte]]))

def getPrimitiveWritableConstantObjectInspector(value: java.sql.Date): ObjectInspector =
def getDateWritableConstantObjectInspector(value: Any): ObjectInspector =
PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
PrimitiveCategory.DATE, new hiveIo.DateWritable(value))
PrimitiveCategory.DATE,
if (value == null) null else new hiveIo.DateWritable(value.asInstanceOf[java.sql.Date]))

def getPrimitiveWritableConstantObjectInspector(value: java.sql.Timestamp): ObjectInspector =
def getTimestampWritableConstantObjectInspector(value: Any): ObjectInspector =
PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
PrimitiveCategory.TIMESTAMP, new hiveIo.TimestampWritable(value))

def getPrimitiveWritableConstantObjectInspector(value: BigDecimal): ObjectInspector =
PrimitiveCategory.TIMESTAMP,
if (value == null) {
null
} else {
new hiveIo.TimestampWritable(value.asInstanceOf[java.sql.Timestamp])
})

def getDecimalWritableConstantObjectInspector(value: Any): ObjectInspector =
PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
PrimitiveCategory.DECIMAL,
new hiveIo.HiveDecimalWritable(HiveShim.createDecimal(value.underlying())))
if (value == null) {
null
} else {
new hiveIo.HiveDecimalWritable(
HiveShim.createDecimal(value.asInstanceOf[Decimal].toBigDecimal.underlying()))
})

def getPrimitiveNullWritableConstantObjectInspector: ObjectInspector =
PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
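The Hive 0.12 shim above replaces the per-type overloads of getPrimitiveWritableConstantObjectInspector with one Any-taking helper per SQL type: after the HiveInspectors change, toInspector no longer knows the constant's static type (its value may be null), so a typed overload could not even be selected. Each helper builds the Writable only for non-null input and otherwise hands the factory a null, which is what lets a constant ObjectInspector carry a NULL value. A sketch of the new calling convention in isolation, assuming the Hive 0.12 and Hadoop classes this shim already imports (aliased here as hadoopIo, as in the shim):

import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory
import org.apache.hadoop.{io => hadoopIo}

// One Any-typed helper per SQL type; a null value flows through as a null Writable.
def getIntWritableConstantObjectInspector(value: Any): ObjectInspector =
  PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
    PrimitiveCategory.INT,
    if (value == null) null else new hadoopIo.IntWritable(value.asInstanceOf[Int]))

// getIntWritableConstantObjectInspector(1)    -> constant inspector holding 1
// getIntWritableConstantObjectInspector(null) -> constant inspector holding a null value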
@@ -56,54 +56,86 @@ private[hive] object HiveShim {
new TableDesc(inputFormatClass, outputFormatClass, properties)
}

def getPrimitiveWritableConstantObjectInspector(value: String): ObjectInspector =
def getStringWritableConstantObjectInspector(value: Any): ObjectInspector =
PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
TypeInfoFactory.stringTypeInfo, new hadoopIo.Text(value))
TypeInfoFactory.stringTypeInfo,
if (value == null) null else new hadoopIo.Text(value.asInstanceOf[String]))

def getPrimitiveWritableConstantObjectInspector(value: Int): ObjectInspector =
def getIntWritableConstantObjectInspector(value: Any): ObjectInspector =
PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
TypeInfoFactory.intTypeInfo, new hadoopIo.IntWritable(value))
TypeInfoFactory.intTypeInfo,
if (value == null) null else new hadoopIo.IntWritable(value.asInstanceOf[Int]))

def getPrimitiveWritableConstantObjectInspector(value: Double): ObjectInspector =
def getDoubleWritableConstantObjectInspector(value: Any): ObjectInspector =
PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
TypeInfoFactory.doubleTypeInfo, new hiveIo.DoubleWritable(value))
TypeInfoFactory.doubleTypeInfo, if (value == null) {
null
} else {
new hiveIo.DoubleWritable(value.asInstanceOf[Double])
})

def getPrimitiveWritableConstantObjectInspector(value: Boolean): ObjectInspector =
def getBooleanWritableConstantObjectInspector(value: Any): ObjectInspector =
PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
TypeInfoFactory.booleanTypeInfo, new hadoopIo.BooleanWritable(value))
TypeInfoFactory.booleanTypeInfo, if (value == null) {
null
} else {
new hadoopIo.BooleanWritable(value.asInstanceOf[Boolean])
})

def getPrimitiveWritableConstantObjectInspector(value: Long): ObjectInspector =
def getLongWritableConstantObjectInspector(value: Any): ObjectInspector =
PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
TypeInfoFactory.longTypeInfo, new hadoopIo.LongWritable(value))
TypeInfoFactory.longTypeInfo,
if (value == null) null else new hadoopIo.LongWritable(value.asInstanceOf[Long]))

def getPrimitiveWritableConstantObjectInspector(value: Float): ObjectInspector =
def getFloatWritableConstantObjectInspector(value: Any): ObjectInspector =
PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
TypeInfoFactory.floatTypeInfo, new hadoopIo.FloatWritable(value))
TypeInfoFactory.floatTypeInfo, if (value == null) {
null
} else {
new hadoopIo.FloatWritable(value.asInstanceOf[Float])
})

def getPrimitiveWritableConstantObjectInspector(value: Short): ObjectInspector =
def getShortWritableConstantObjectInspector(value: Any): ObjectInspector =
PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
TypeInfoFactory.shortTypeInfo, new hiveIo.ShortWritable(value))
TypeInfoFactory.shortTypeInfo,
if (value == null) null else new hiveIo.ShortWritable(value.asInstanceOf[Short]))

def getPrimitiveWritableConstantObjectInspector(value: Byte): ObjectInspector =
def getByteWritableConstantObjectInspector(value: Any): ObjectInspector =
PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
TypeInfoFactory.byteTypeInfo, new hiveIo.ByteWritable(value))
TypeInfoFactory.byteTypeInfo,
if (value == null) null else new hiveIo.ByteWritable(value.asInstanceOf[Byte]))

def getPrimitiveWritableConstantObjectInspector(value: Array[Byte]): ObjectInspector =
def getBinaryWritableConstantObjectInspector(value: Any): ObjectInspector =
PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
TypeInfoFactory.binaryTypeInfo, new hadoopIo.BytesWritable(value))
TypeInfoFactory.binaryTypeInfo, if (value == null) {
null
} else {
new hadoopIo.BytesWritable(value.asInstanceOf[Array[Byte]])
})

def getPrimitiveWritableConstantObjectInspector(value: java.sql.Date): ObjectInspector =
def getDateWritableConstantObjectInspector(value: Any): ObjectInspector =
PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
TypeInfoFactory.dateTypeInfo, new hiveIo.DateWritable(value))
TypeInfoFactory.dateTypeInfo,
if (value == null) null else new hiveIo.DateWritable(value.asInstanceOf[java.sql.Date]))

def getPrimitiveWritableConstantObjectInspector(value: java.sql.Timestamp): ObjectInspector =
def getTimestampWritableConstantObjectInspector(value: Any): ObjectInspector =
PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
TypeInfoFactory.timestampTypeInfo, new hiveIo.TimestampWritable(value))
TypeInfoFactory.timestampTypeInfo, if (value == null) {
null
} else {
new hiveIo.TimestampWritable(value.asInstanceOf[java.sql.Timestamp])
})

def getPrimitiveWritableConstantObjectInspector(value: BigDecimal): ObjectInspector =
def getDecimalWritableConstantObjectInspector(value: Any): ObjectInspector =
PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
TypeInfoFactory.decimalTypeInfo,
new hiveIo.HiveDecimalWritable(HiveShim.createDecimal(value.underlying())))
if (value == null) {
null
} else {
// TODO precise, scale?
new hiveIo.HiveDecimalWritable(
HiveShim.createDecimal(value.asInstanceOf[Decimal].toBigDecimal.underlying()))
})

def getPrimitiveNullWritableConstantObjectInspector: ObjectInspector =
PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
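The Hive 0.13.1 shim mirrors the 0.12 changes but keys the factory on TypeInfoFactory type infos rather than PrimitiveCategory values. Its decimal helper converts Catalyst's Decimal via toBigDecimal to a scala BigDecimal and then via underlying() to the java.math.BigDecimal that HiveShim.createDecimal consumes; the TODO comment flags that precision and scale are not yet propagated. A JDK-only sketch of just the BigDecimal hop (Decimal, createDecimal and HiveDecimalWritable are not reproduced here):

val asScala: BigDecimal = BigDecimal("12.3")            // what Decimal.toBigDecimal yields
val asJava: java.math.BigDecimal = asScala.underlying() // what HiveShim.createDecimal consumes
assert(asJava.precision == 3 && asJava.scale == 1)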
