diff --git a/common/utils/src/main/resources/error/error-classes.json b/common/utils/src/main/resources/error/error-classes.json index b30b1d60bb4aa..a89f0ea267279 100644 --- a/common/utils/src/main/resources/error/error-classes.json +++ b/common/utils/src/main/resources/error/error-classes.json @@ -870,7 +870,7 @@ }, "UNEXPECTED_INPUT_TYPE" : { "message" : [ - "Parameter <paramIndex> requires the <requiredType> type, however <inputSql> has the type <inputType>." + "The <paramIndex> parameter requires the <requiredType> type, however <inputSql> has the type <inputType>." ] }, "UNEXPECTED_NULL" : { diff --git a/docs/sql-error-conditions-datatype-mismatch-error-class.md b/docs/sql-error-conditions-datatype-mismatch-error-class.md index 25d367b13668a..1d18836ac9e77 100644 --- a/docs/sql-error-conditions-datatype-mismatch-error-class.md +++ b/docs/sql-error-conditions-datatype-mismatch-error-class.md @@ -221,7 +221,7 @@ class `<className>` not found. ## UNEXPECTED_INPUT_TYPE -Parameter `<paramIndex>` requires the `<requiredType>` type, however `<inputSql>` has the type `<inputType>`. +The `<paramIndex>` parameter requires the `<requiredType>` type, however `<inputSql>` has the type `<inputType>`. 
## UNEXPECTED_NULL diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/CheckAnalysis.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/CheckAnalysis.scala index 3b1663b4c54cc..89bed0518027e 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/CheckAnalysis.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/CheckAnalysis.scala @@ -567,12 +567,7 @@ trait CheckAnalysis extends PredicateHelper with LookupCatalog with QueryErrorsB case e @ (_: Union | _: SetOperation) if operator.children.length > 1 => def dataTypes(plan: LogicalPlan): Seq[DataType] = plan.output.map(_.dataType) - def ordinalNumber(i: Int): String = i match { - case 0 => "first" - case 1 => "second" - case 2 => "third" - case i => s"${i + 1}th" - } + val ref = dataTypes(operator.children.head) operator.children.tail.zipWithIndex.foreach { case (child, ti) => // Check the number of columns diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/CallMethodViaReflection.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/CallMethodViaReflection.scala index a0e4039479c12..c42b54222f171 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/CallMethodViaReflection.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/CallMethodViaReflection.scala @@ -107,7 +107,7 @@ case class CallMethodViaReflection( DataTypeMismatch( errorSubClass = "UNEXPECTED_INPUT_TYPE", messageParameters = Map( - "paramIndex" -> (idx + 1).toString, + "paramIndex" -> ordinalNumber(idx), "requiredType" -> toSQLType( TypeCollection(BooleanType, ByteType, ShortType, IntegerType, LongType, FloatType, DoubleType, StringType)), diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/ExpectsInputTypes.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/ExpectsInputTypes.scala 
index 9e656c06091f2..1a4a0271c54be 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/ExpectsInputTypes.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/ExpectsInputTypes.scala @@ -57,7 +57,7 @@ object ExpectsInputTypes extends QueryErrorsBase { DataTypeMismatch( errorSubClass = "UNEXPECTED_INPUT_TYPE", messageParameters = Map( - "paramIndex" -> (idx + 1).toString, + "paramIndex" -> ordinalNumber(idx), "requiredType" -> toSQLType(expected), "inputSql" -> toSQLExpr(input), "inputType" -> toSQLType(input.dataType))) diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/UnwrapUDT.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/UnwrapUDT.scala index d6b754a297d5d..249e3955a81f4 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/UnwrapUDT.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/UnwrapUDT.scala @@ -37,7 +37,7 @@ case class UnwrapUDT(child: Expression) extends UnaryExpression with NonSQLExpre DataTypeMismatch( errorSubClass = "UNEXPECTED_INPUT_TYPE", messageParameters = Map( - "paramIndex" -> "1", + "paramIndex" -> ordinalNumber(0), "requiredType" -> toSQLType("UserDefinedType"), "inputSql" -> toSQLExpr(child), "inputType" -> toSQLType(child.dataType))) diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/ApproxCountDistinctForIntervals.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/ApproxCountDistinctForIntervals.scala index b5133945bc0d0..a468153b57c5a 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/ApproxCountDistinctForIntervals.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/ApproxCountDistinctForIntervals.scala @@ -104,7 +104,7 @@ case class ApproxCountDistinctForIntervals( DataTypeMismatch( errorSubClass 
= "UNEXPECTED_INPUT_TYPE", messageParameters = Map( - "paramIndex" -> "2", + "paramIndex" -> ordinalNumber(1), "requiredType" -> s"ARRAY OF $requiredElemTypes", "inputSql" -> toSQLExpr(endpointsExpression), "inputType" -> toSQLType(inputType))) diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/bitmapExpressions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/bitmapExpressions.scala index 5c7ef5cde5b1a..ecbcd34170ffe 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/bitmapExpressions.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/bitmapExpressions.scala @@ -116,7 +116,7 @@ case class BitmapCount(child: Expression) DataTypeMismatch( errorSubClass = "UNEXPECTED_INPUT_TYPE", messageParameters = Map( - "paramIndex" -> "0", + "paramIndex" -> ordinalNumber(0), "requiredType" -> toSQLType(BinaryType), "inputSql" -> toSQLExpr(child), "inputType" -> toSQLType(child.dataType) @@ -261,7 +261,7 @@ case class BitmapOrAgg(child: Expression, DataTypeMismatch( errorSubClass = "UNEXPECTED_INPUT_TYPE", messageParameters = Map( - "paramIndex" -> "0", + "paramIndex" -> ordinalNumber(0), "requiredType" -> toSQLType(BinaryType), "inputSql" -> toSQLExpr(child), "inputType" -> toSQLType(child.dataType) diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/collectionOperations.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/collectionOperations.scala index 92dd79fd59e02..a090bdf2bebf6 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/collectionOperations.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/collectionOperations.scala @@ -836,7 +836,7 @@ case class MapFromEntries(child: Expression) DataTypeMismatch( errorSubClass = "UNEXPECTED_INPUT_TYPE", messageParameters = Map( - "paramIndex" -> "1", + "paramIndex" -> ordinalNumber(0), 
"requiredType" -> s"${toSQLType(ArrayType)} of pair ${toSQLType(StructType)}", "inputSql" -> toSQLExpr(child), "inputType" -> toSQLType(child.dataType) @@ -1066,7 +1066,7 @@ case class SortArray(base: Expression, ascendingOrder: Expression) DataTypeMismatch( errorSubClass = "UNEXPECTED_INPUT_TYPE", messageParameters = Map( - "paramIndex" -> "2", + "paramIndex" -> ordinalNumber(1), "requiredType" -> toSQLType(BooleanType), "inputSql" -> toSQLExpr(ascendingOrder), "inputType" -> toSQLType(ascendingOrder.dataType)) @@ -1084,7 +1084,7 @@ case class SortArray(base: Expression, ascendingOrder: Expression) DataTypeMismatch( errorSubClass = "UNEXPECTED_INPUT_TYPE", messageParameters = Map( - "paramIndex" -> "1", + "paramIndex" -> ordinalNumber(0), "requiredType" -> toSQLType(ArrayType), "inputSql" -> toSQLExpr(base), "inputType" -> toSQLType(base.dataType)) @@ -1320,7 +1320,7 @@ case class ArrayContains(left: Expression, right: Expression) DataTypeMismatch( errorSubClass = "UNEXPECTED_INPUT_TYPE", messageParameters = Map( - "paramIndex" -> "1", + "paramIndex" -> ordinalNumber(0), "requiredType" -> toSQLType(ArrayType), "inputSql" -> toSQLExpr(left), "inputType" -> toSQLType(left.dataType)) @@ -1427,7 +1427,7 @@ trait ArrayPendBase extends RuntimeReplaceable DataTypeMismatch( errorSubClass = "UNEXPECTED_INPUT_TYPE", messageParameters = Map( - "paramIndex" -> "0", + "paramIndex" -> ordinalNumber(0), "requiredType" -> toSQLType(ArrayType), "inputSql" -> toSQLExpr(left), "inputType" -> toSQLType(left.dataType) @@ -2221,7 +2221,7 @@ case class ArrayPosition(left: Expression, right: Expression) DataTypeMismatch( errorSubClass = "UNEXPECTED_INPUT_TYPE", messageParameters = Map( - "paramIndex" -> "1", + "paramIndex" -> ordinalNumber(0), "requiredType" -> toSQLType(ArrayType), "inputSql" -> toSQLExpr(left), "inputType" -> toSQLType(left.dataType)) @@ -2381,7 +2381,7 @@ case class ElementAt( DataTypeMismatch( errorSubClass = "UNEXPECTED_INPUT_TYPE", messageParameters = Map( - 
"paramIndex" -> "2", + "paramIndex" -> ordinalNumber(1), "requiredType" -> toSQLType(IntegerType), "inputSql" -> toSQLExpr(right), "inputType" -> toSQLType(right.dataType)) @@ -2400,7 +2400,7 @@ case class ElementAt( DataTypeMismatch( errorSubClass = "UNEXPECTED_INPUT_TYPE", messageParameters = Map( - "paramIndex" -> "1", + "paramIndex" -> ordinalNumber(0), "requiredType" -> toSQLType(TypeCollection(ArrayType, MapType)), "inputSql" -> toSQLExpr(left), "inputType" -> toSQLType(left.dataType)) @@ -2606,7 +2606,7 @@ case class Concat(children: Seq[Expression]) extends ComplexTypeMergingExpressio DataTypeMismatch( errorSubClass = "UNEXPECTED_INPUT_TYPE", messageParameters = Map( - "paramIndex" -> (idx + 1).toString, + "paramIndex" -> ordinalNumber(idx), "requiredType" -> toSQLType(TypeCollection(allowedTypes: _*)), "inputSql" -> toSQLExpr(e), "inputType" -> toSQLType(e.dataType)) @@ -2823,7 +2823,7 @@ case class Flatten(child: Expression) extends UnaryExpression with NullIntoleran DataTypeMismatch( errorSubClass = "UNEXPECTED_INPUT_TYPE", messageParameters = Map( - "paramIndex" -> "1", + "paramIndex" -> ordinalNumber(0), "requiredType" -> s"${toSQLType(ArrayType)} of ${toSQLType(ArrayType)}", "inputSql" -> toSQLExpr(child), "inputType" -> toSQLType(child.dataType)) @@ -4747,7 +4747,7 @@ case class ArrayInsert( DataTypeMismatch( errorSubClass = "UNEXPECTED_INPUT_TYPE", messageParameters = Map( - "paramIndex" -> "2", + "paramIndex" -> ordinalNumber(1), "requiredType" -> toSQLType(IntegerType), "inputSql" -> toSQLExpr(second), "inputType" -> toSQLType(second.dataType)) diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/complexTypeCreator.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/complexTypeCreator.scala index 1693c6b21484a..332a49f78ab98 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/complexTypeCreator.scala +++ 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/complexTypeCreator.scala @@ -696,7 +696,7 @@ case class UpdateFields(structExpr: Expression, fieldOps: Seq[StructFieldsOperat DataTypeMismatch( errorSubClass = "UNEXPECTED_INPUT_TYPE", messageParameters = Map( - "paramIndex" -> "1", + "paramIndex" -> ordinalNumber(0), "requiredType" -> toSQLType(StructType), "inputSql" -> toSQLExpr(structExpr), "inputType" -> toSQLType(structExpr.dataType)) diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/conditionalExpressions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/conditionalExpressions.scala index ad79fc1047043..046d4cbcd5be3 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/conditionalExpressions.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/conditionalExpressions.scala @@ -24,7 +24,7 @@ import org.apache.spark.sql.catalyst.expressions.codegen._ import org.apache.spark.sql.catalyst.expressions.codegen.Block._ import org.apache.spark.sql.catalyst.trees.TernaryLike import org.apache.spark.sql.catalyst.trees.TreePattern.{CASE_WHEN, IF, TreePattern} -import org.apache.spark.sql.catalyst.util.TypeUtils.{toSQLExpr, toSQLId, toSQLType} +import org.apache.spark.sql.catalyst.util.TypeUtils.{ordinalNumber, toSQLExpr, toSQLId, toSQLType} import org.apache.spark.sql.types._ import org.apache.spark.util.ArrayImplicits._ @@ -70,7 +70,7 @@ case class If(predicate: Expression, trueValue: Expression, falseValue: Expressi DataTypeMismatch( errorSubClass = "UNEXPECTED_INPUT_TYPE", messageParameters = Map( - "paramIndex" -> "1", + "paramIndex" -> ordinalNumber(0), "requiredType" -> toSQLType(BooleanType), "inputSql" -> toSQLExpr(predicate), "inputType" -> toSQLType(predicate.dataType) @@ -201,7 +201,7 @@ case class CaseWhen( DataTypeMismatch( errorSubClass = "UNEXPECTED_INPUT_TYPE", messageParameters = Map( - "paramIndex" -> (index + 
1).toString, + "paramIndex" -> ordinalNumber(index), "requiredType" -> toSQLType(BooleanType), "inputSql" -> toSQLExpr(branches(index)._1), "inputType" -> toSQLType(branches(index)._1.dataType) diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/generators.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/generators.scala index b4be09f333d5d..2cc88a25f465d 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/generators.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/generators.scala @@ -172,7 +172,7 @@ case class Stack(children: Seq[Expression]) extends Generator { DataTypeMismatch( errorSubClass = "UNEXPECTED_INPUT_TYPE", messageParameters = Map( - "paramIndex" -> "1", + "paramIndex" -> ordinalNumber(0), "requiredType" -> toSQLType(IntegerType), "inputSql" -> toSQLExpr(children.head), "inputType" -> toSQLType(children.head.dataType)) @@ -342,7 +342,7 @@ abstract class ExplodeBase extends UnaryExpression with CollectionGenerator with DataTypeMismatch( errorSubClass = "UNEXPECTED_INPUT_TYPE", messageParameters = Map( - "paramIndex" -> "1", + "paramIndex" -> ordinalNumber(0), "requiredType" -> toSQLType(TypeCollection(ArrayType, MapType)), "inputSql" -> toSQLExpr(child), "inputType" -> toSQLType(child.dataType)) @@ -557,7 +557,7 @@ case class Inline(child: Expression) extends UnaryExpression with CollectionGene DataTypeMismatch( errorSubClass = "UNEXPECTED_INPUT_TYPE", messageParameters = Map( - "paramIndex" -> "1", + "paramIndex" -> ordinalNumber(0), "requiredType" -> toSQLType("ARRAY"), "inputSql" -> toSQLExpr(child), "inputType" -> toSQLType(child.dataType)) diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/higherOrderFunctions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/higherOrderFunctions.scala index aa1f6159def8a..8c2b427e583c0 100644 --- 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/higherOrderFunctions.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/higherOrderFunctions.scala @@ -427,7 +427,7 @@ case class ArraySort( DataTypeMismatch( errorSubClass = "UNEXPECTED_INPUT_TYPE", messageParameters = Map( - "paramIndex" -> "1", + "paramIndex" -> ordinalNumber(0), "requiredType" -> toSQLType(ArrayType), "inputSql" -> toSQLExpr(argument), "inputType" -> toSQLType(argument.dataType) @@ -840,7 +840,7 @@ case class ArrayAggregate( DataTypeMismatch( errorSubClass = "UNEXPECTED_INPUT_TYPE", messageParameters = Map( - "paramIndex" -> "3", + "paramIndex" -> ordinalNumber(2), "requiredType" -> toSQLType(zero.dataType), "inputSql" -> toSQLExpr(merge), "inputType" -> toSQLType(merge.dataType))) diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/stringExpressions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/stringExpressions.scala index 259556826ad92..82b5f628578e7 100755 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/stringExpressions.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/stringExpressions.scala @@ -299,7 +299,7 @@ case class Elt( return DataTypeMismatch( errorSubClass = "UNEXPECTED_INPUT_TYPE", messageParameters = Map( - "paramIndex" -> "1", + "paramIndex" -> ordinalNumber(0), "requiredType" -> toSQLType(IntegerType), "inputSql" -> toSQLExpr(indexExpr), "inputType" -> toSQLType(indexType))) @@ -308,10 +308,10 @@ case class Elt( return DataTypeMismatch( errorSubClass = "UNEXPECTED_INPUT_TYPE", messageParameters = Map( - "paramIndex" -> "2...", + "paramIndex" -> (ordinalNumber(1) + "..."), "requiredType" -> (toSQLType(StringType) + " or " + toSQLType(BinaryType)), - "inputSql" -> inputExprs.map(toSQLExpr(_)).mkString(","), - "inputType" -> inputTypes.map(toSQLType(_)).mkString(",") + "inputSql" -> 
inputExprs.map(toSQLExpr).mkString(","), + "inputType" -> inputTypes.map(toSQLType).mkString(",") ) ) } diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/windowExpressions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/windowExpressions.scala index ca3c9b064aaa6..00711332350cf 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/windowExpressions.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/windowExpressions.scala @@ -867,7 +867,7 @@ case class NTile(buckets: Expression) extends RowNumberLike with SizeBasedWindow DataTypeMismatch( errorSubClass = "UNEXPECTED_INPUT_TYPE", messageParameters = Map( - "paramIndex" -> "1", + "paramIndex" -> ordinalNumber(0), "requiredType" -> toSQLType(IntegerType), "inputSql" -> toSQLExpr(buckets), "inputType" -> toSQLType(buckets.dataType)) diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/TypeUtils.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/TypeUtils.scala index 95c27ccea87ca..9feb49965687b 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/TypeUtils.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/TypeUtils.scala @@ -78,7 +78,7 @@ object TypeUtils extends QueryErrorsBase { DataTypeMismatch( errorSubClass = "UNEXPECTED_INPUT_TYPE", messageParameters = Map( - "paramIndex" -> "1", + "paramIndex" -> ordinalNumber(0), "requiredType" -> Seq(NumericType, AnsiIntervalType).map(toSQLType).mkString(" or "), "inputSql" -> toSQLExpr(input), "inputType" -> toSQLType(other))) diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryErrorsBase.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryErrorsBase.scala index 26600117a0c54..b18937257bae5 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryErrorsBase.scala +++ 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryErrorsBase.scala @@ -70,5 +70,12 @@ private[sql] trait QueryErrorsBase extends DataTypeErrorsBase { else l.sql case l => l.sql } + + def ordinalNumber(i: Int): String = i match { + case 0 => "first" + case 1 => "second" + case 2 => "third" + case i => s"${i + 1}th" + } } diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisErrorSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisErrorSuite.scala index f50e188806eee..46d7261a747dc 100644 --- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisErrorSuite.scala +++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisErrorSuite.scala @@ -162,7 +162,7 @@ class AnalysisErrorSuite extends AnalysisTest with DataTypeErrorsBase { errorClass = "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE", messageParameters = Map( "sqlExpr" -> "\"testfunction(NULL)\"", - "paramIndex" -> "1", + "paramIndex" -> "first", "inputSql" -> "\"NULL\"", "inputType" -> "\"DATE\"", "requiredType" -> "\"INT\"")) @@ -174,7 +174,7 @@ class AnalysisErrorSuite extends AnalysisTest with DataTypeErrorsBase { errorClass = "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE", messageParameters = Map( "sqlExpr" -> "\"testfunction(NULL, NULL)\"", - "paramIndex" -> "2", + "paramIndex" -> "second", "inputSql" -> "\"NULL\"", "inputType" -> "\"DATE\"", "requiredType" -> "\"INT\"")) @@ -186,7 +186,7 @@ class AnalysisErrorSuite extends AnalysisTest with DataTypeErrorsBase { errorClass = "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE", messageParameters = Map( "sqlExpr" -> "\"testfunction(NULL, NULL)\"", - "paramIndex" -> "1", + "paramIndex" -> "first", "inputSql" -> "\"NULL\"", "inputType" -> "\"DATE\"", "requiredType" -> "\"INT\"")) @@ -389,7 +389,7 @@ class AnalysisErrorSuite extends AnalysisTest with DataTypeErrorsBase { errorClass = "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE", messageParameters = Map( 
"sqlExpr" -> "\"nth_value(b, true)\"", - "paramIndex" -> "2", + "paramIndex" -> "second", "inputSql" -> "\"true\"", "inputType" -> "\"BOOLEAN\"", "requiredType" -> "\"INT\"")) @@ -1198,7 +1198,7 @@ class AnalysisErrorSuite extends AnalysisTest with DataTypeErrorsBase { expectedErrorClass = "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE", expectedMessageParameters = Map( "sqlExpr" -> sqlExpr, - "paramIndex" -> "1", + "paramIndex" -> "first", "inputSql" -> inputSql, "inputType" -> inputType, "requiredType" -> "(\"INT\" or \"BIGINT\")"), diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisSuite.scala index 0231079dc48a0..3c628d35dcdb8 100644 --- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisSuite.scala +++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisSuite.scala @@ -1351,7 +1351,7 @@ class AnalysisSuite extends AnalysisTest with Matchers { expectedErrorClass = "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE", expectedMessageParameters = Map( "sqlExpr" -> "\"mean(c)\"", - "paramIndex" -> "1", + "paramIndex" -> "first", "inputSql" -> "\"c\"", "inputType" -> "\"BOOLEAN\"", "requiredType" -> "\"NUMERIC\" or \"ANSI INTERVAL\""), @@ -1370,7 +1370,7 @@ class AnalysisSuite extends AnalysisTest with Matchers { expectedErrorClass = "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE", expectedMessageParameters = Map( "sqlExpr" -> "\"mean(c)\"", - "paramIndex" -> "1", + "paramIndex" -> "first", "inputSql" -> "\"c\"", "inputType" -> "\"BOOLEAN\"", "requiredType" -> "\"NUMERIC\" or \"ANSI INTERVAL\""), @@ -1388,7 +1388,7 @@ class AnalysisSuite extends AnalysisTest with Matchers { expectedErrorClass = "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE", expectedMessageParameters = Map( "sqlExpr" -> "\"abs(c)\"", - "paramIndex" -> "1", + "paramIndex" -> "first", "inputSql" -> "\"c\"", "inputType" -> "\"BOOLEAN\"", "requiredType" -> 
@@ -1408,7 +1408,7 @@ class AnalysisSuite extends AnalysisTest with Matchers { expectedErrorClass = "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE", expectedMessageParameters = Map( "sqlExpr" -> "\"abs(c)\"", - "paramIndex" -> "1", + "paramIndex" -> "first", "inputSql" -> "\"c\"", "inputType" -> "\"BOOLEAN\"", "requiredType" -> diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/ExpressionTypeCheckingSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/ExpressionTypeCheckingSuite.scala index 1137996a809fa..4b58755e13ef6 100644 --- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/ExpressionTypeCheckingSuite.scala +++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/ExpressionTypeCheckingSuite.scala @@ -96,7 +96,7 @@ class ExpressionTypeCheckingSuite extends SparkFunSuite with SQLHelper with Quer errorClass = "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE", parameters = Map( "sqlExpr" -> "\"~stringField\"", - "paramIndex" -> "1", + "paramIndex" -> ordinalNumber(0), "inputSql" -> "\"stringField\"", "inputType" -> "\"STRING\"", "requiredType" -> "\"INTEGRAL\"")) @@ -340,7 +340,7 @@ class ExpressionTypeCheckingSuite extends SparkFunSuite with SQLHelper with Quer DataTypeMismatch( errorSubClass = "UNEXPECTED_INPUT_TYPE", messageParameters = Map( - "paramIndex" -> "1", + "paramIndex" -> ordinalNumber(0), "requiredType" -> toSQLType(BooleanType), "inputSql" -> "\"1\"", "inputType" -> "\"INT\"" @@ -385,7 +385,7 @@ class ExpressionTypeCheckingSuite extends SparkFunSuite with SQLHelper with Quer DataTypeMismatch( errorSubClass = "UNEXPECTED_INPUT_TYPE", messageParameters = Map( - "paramIndex" -> "2", + "paramIndex" -> ordinalNumber(1), "requiredType" -> "\"BOOLEAN\"", "inputSql" -> "\"2\"", "inputType" -> "\"INT\"" @@ -429,7 +429,7 @@ class ExpressionTypeCheckingSuite extends SparkFunSuite with SQLHelper with Quer errorClass = "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE", parameters = Map( 
"sqlExpr" -> "\"sum(booleanField)\"", - "paramIndex" -> "1", + "paramIndex" -> ordinalNumber(0), "inputSql" -> "\"booleanField\"", "inputType" -> "\"BOOLEAN\"", "requiredType" -> "\"NUMERIC\" or \"ANSI INTERVAL\"")) @@ -440,7 +440,7 @@ class ExpressionTypeCheckingSuite extends SparkFunSuite with SQLHelper with Quer errorClass = "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE", parameters = Map( "sqlExpr" -> "\"avg(booleanField)\"", - "paramIndex" -> "1", + "paramIndex" -> ordinalNumber(0), "inputSql" -> "\"booleanField\"", "inputType" -> "\"BOOLEAN\"", "requiredType" -> "\"NUMERIC\" or \"ANSI INTERVAL\"")) @@ -507,7 +507,7 @@ class ExpressionTypeCheckingSuite extends SparkFunSuite with SQLHelper with Quer errorClass = "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE", parameters = Map( "sqlExpr" -> "\"explode(intField)\"", - "paramIndex" -> "1", + "paramIndex" -> ordinalNumber(0), "inputSql" -> "\"intField\"", "inputType" -> "\"INT\"", "requiredType" -> "(\"ARRAY\" or \"MAP\")")) @@ -519,7 +519,7 @@ class ExpressionTypeCheckingSuite extends SparkFunSuite with SQLHelper with Quer errorClass = "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE", parameters = Map( "sqlExpr" -> "\"posexplode(intField)\"", - "paramIndex" -> "1", + "paramIndex" -> ordinalNumber(0), "inputSql" -> "\"intField\"", "inputType" -> "\"INT\"", "requiredType" -> "(\"ARRAY\" or \"MAP\")") @@ -613,7 +613,7 @@ class ExpressionTypeCheckingSuite extends SparkFunSuite with SQLHelper with Quer errorClass = "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE", parameters = Map( "sqlExpr" -> "\"round(intField, booleanField)\"", - "paramIndex" -> "2", + "paramIndex" -> ordinalNumber(1), "inputSql" -> "\"booleanField\"", "inputType" -> "\"BOOLEAN\"", "requiredType" -> "\"INT\"")) @@ -624,7 +624,7 @@ class ExpressionTypeCheckingSuite extends SparkFunSuite with SQLHelper with Quer errorClass = "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE", parameters = Map( "sqlExpr" -> "\"round(intField, mapField)\"", - "paramIndex" -> "2", + "paramIndex" -> 
ordinalNumber(1), "inputSql" -> "\"mapField\"", "inputType" -> "\"MAP\"", "requiredType" -> "\"INT\"")) @@ -635,7 +635,7 @@ class ExpressionTypeCheckingSuite extends SparkFunSuite with SQLHelper with Quer errorClass = "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE", parameters = Map( "sqlExpr" -> "\"round(booleanField, intField)\"", - "paramIndex" -> "1", + "paramIndex" -> ordinalNumber(0), "inputSql" -> "\"booleanField\"", "inputType" -> "\"BOOLEAN\"", "requiredType" -> "\"NUMERIC\"")) @@ -659,7 +659,7 @@ class ExpressionTypeCheckingSuite extends SparkFunSuite with SQLHelper with Quer errorClass = "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE", parameters = Map( "sqlExpr" -> "\"bround(intField, booleanField)\"", - "paramIndex" -> "2", + "paramIndex" -> ordinalNumber(1), "inputSql" -> "\"booleanField\"", "inputType" -> "\"BOOLEAN\"", "requiredType" -> "\"INT\"")) @@ -670,7 +670,7 @@ class ExpressionTypeCheckingSuite extends SparkFunSuite with SQLHelper with Quer errorClass = "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE", parameters = Map( "sqlExpr" -> "\"bround(intField, mapField)\"", - "paramIndex" -> "2", + "paramIndex" -> ordinalNumber(1), "inputSql" -> "\"mapField\"", "inputType" -> "\"MAP\"", "requiredType" -> "\"INT\"")) @@ -681,7 +681,7 @@ class ExpressionTypeCheckingSuite extends SparkFunSuite with SQLHelper with Quer errorClass = "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE", parameters = Map( "sqlExpr" -> "\"bround(booleanField, intField)\"", - "paramIndex" -> "1", + "paramIndex" -> ordinalNumber(0), "inputSql" -> "\"booleanField\"", "inputType" -> "\"BOOLEAN\"", "requiredType" -> "\"NUMERIC\"")) diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CallMethodViaReflectionSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CallMethodViaReflectionSuite.scala index 29865885b8b76..77fdb33e515fc 100644 --- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CallMethodViaReflectionSuite.scala +++ 
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CallMethodViaReflectionSuite.scala @@ -24,6 +24,7 @@ import org.apache.spark.sql.AnalysisException import org.apache.spark.sql.catalyst.analysis.TypeCheckResult.DataTypeMismatch import org.apache.spark.sql.catalyst.expressions.Cast.toSQLType import org.apache.spark.sql.catalyst.expressions.codegen.GenerateUnsafeProjection +import org.apache.spark.sql.catalyst.util.TypeUtils.ordinalNumber import org.apache.spark.sql.types._ /** A static class for testing purpose. */ @@ -140,7 +141,7 @@ class CallMethodViaReflectionSuite extends SparkFunSuite with ExpressionEvalHelp DataTypeMismatch( errorSubClass = "UNEXPECTED_INPUT_TYPE", messageParameters = Map( - "paramIndex" -> "3", + "paramIndex" -> ordinalNumber(2), "requiredType" -> toSQLType( TypeCollection(BooleanType, ByteType, ShortType, IntegerType, LongType, FloatType, DoubleType, StringType)), diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CollectionExpressionsSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CollectionExpressionsSuite.scala index 69db277793895..133e27c5b0a66 100644 --- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CollectionExpressionsSuite.scala +++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CollectionExpressionsSuite.scala @@ -32,6 +32,7 @@ import org.apache.spark.sql.catalyst.analysis.TypeCheckResult.DataTypeMismatch import org.apache.spark.sql.catalyst.util.{DateTimeTestUtils, DateTimeUtils} import org.apache.spark.sql.catalyst.util.DateTimeTestUtils.{outstandingZoneIds, LA, UTC} import org.apache.spark.sql.catalyst.util.IntervalUtils._ +import org.apache.spark.sql.catalyst.util.TypeUtils.ordinalNumber import org.apache.spark.sql.errors.DataTypeErrorsBase import org.apache.spark.sql.internal.SQLConf import org.apache.spark.sql.types._ @@ -411,7 +412,7 @@ class CollectionExpressionsSuite DataTypeMismatch( 
errorSubClass = "UNEXPECTED_INPUT_TYPE", messageParameters = Map( - "paramIndex" -> "1", + "paramIndex" -> ordinalNumber(0), "inputSql" -> "\"1\"", "inputType" -> "\"INT\"", "requiredType" -> "\"ARRAY\" of pair \"STRUCT\"" diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ComplexTypeSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ComplexTypeSuite.scala index a6de0b092cdbc..5f135e46a3775 100644 --- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ComplexTypeSuite.scala +++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ComplexTypeSuite.scala @@ -25,6 +25,7 @@ import org.apache.spark.sql.catalyst.analysis.TypeCheckResult.DataTypeMismatch import org.apache.spark.sql.catalyst.dsl.expressions._ import org.apache.spark.sql.catalyst.expressions.codegen.CodegenContext import org.apache.spark.sql.catalyst.util._ +import org.apache.spark.sql.catalyst.util.TypeUtils.ordinalNumber import org.apache.spark.sql.internal.SQLConf import org.apache.spark.sql.types._ import org.apache.spark.unsafe.types.UTF8String @@ -541,7 +542,7 @@ class ComplexTypeSuite extends SparkFunSuite with ExpressionEvalHelper { DataTypeMismatch( errorSubClass = "UNEXPECTED_INPUT_TYPE", messageParameters = Map( - "paramIndex" -> "1", + "paramIndex" -> ordinalNumber(0), "requiredType" -> "\"STRING\"", "inputSql" -> "\"NULL\"", "inputType" -> "\"VOID\"" @@ -552,7 +553,7 @@ class ComplexTypeSuite extends SparkFunSuite with ExpressionEvalHelper { DataTypeMismatch( errorSubClass = "UNEXPECTED_INPUT_TYPE", messageParameters = Map( - "paramIndex" -> "2", + "paramIndex" -> ordinalNumber(1), "requiredType" -> "\"STRING\"", "inputSql" -> "\"NULL\"", "inputType" -> "\"VOID\"" @@ -564,7 +565,7 @@ class ComplexTypeSuite extends SparkFunSuite with ExpressionEvalHelper { DataTypeMismatch( errorSubClass = "UNEXPECTED_INPUT_TYPE", messageParameters = Map( - "paramIndex" -> "2", + "paramIndex" -> 
ordinalNumber(1), "requiredType" -> "\"STRING\"", "inputSql" -> "\"NULL\"", "inputType" -> "\"VOID\"" @@ -575,7 +576,7 @@ class ComplexTypeSuite extends SparkFunSuite with ExpressionEvalHelper { DataTypeMismatch( errorSubClass = "UNEXPECTED_INPUT_TYPE", messageParameters = Map( - "paramIndex" -> "1", + "paramIndex" -> ordinalNumber(0), "requiredType" -> "\"STRING\"", "inputSql" -> "\"NULL\"", "inputType" -> "\"VOID\"" diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/MiscExpressionsSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/MiscExpressionsSuite.scala index 28da02a68f7f8..1f37886f44258 100644 --- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/MiscExpressionsSuite.scala +++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/MiscExpressionsSuite.scala @@ -23,6 +23,7 @@ import scala.util.Random import org.apache.spark.SparkFunSuite import org.apache.spark.sql.catalyst.analysis.TypeCheckResult.DataTypeMismatch +import org.apache.spark.sql.catalyst.util.TypeUtils.ordinalNumber import org.apache.spark.sql.types._ class MiscExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper { @@ -45,7 +46,7 @@ class MiscExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper { DataTypeMismatch( errorSubClass = "UNEXPECTED_INPUT_TYPE", messageParameters = Map( - "paramIndex" -> "2", + "paramIndex" -> ordinalNumber(1), "requiredType" -> "\"MAP\"", "inputSql" -> "\"map(errorMessage, 5)\"", "inputType" -> "\"MAP\"" diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/PredicateSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/PredicateSuite.scala index 55e0dd2179458..a0c75b703ade4 100644 --- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/PredicateSuite.scala +++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/PredicateSuite.scala @@ -29,6 
+29,7 @@ import org.apache.spark.sql.catalyst.encoders.ExamplePointUDT import org.apache.spark.sql.catalyst.expressions.codegen.CodegenContext import org.apache.spark.sql.catalyst.parser.CatalystSqlParser import org.apache.spark.sql.catalyst.util.{ArrayData, GenericArrayData} +import org.apache.spark.sql.catalyst.util.TypeUtils.ordinalNumber import org.apache.spark.sql.internal.SQLConf import org.apache.spark.sql.types._ @@ -541,7 +542,7 @@ class PredicateSuite extends SparkFunSuite with ExpressionEvalHelper { case TypeCheckResult.DataTypeMismatch(errorSubClass, messageParameters) => assert(errorSubClass === "UNEXPECTED_INPUT_TYPE") assert(messageParameters === Map( - "paramIndex" -> "1", + "paramIndex" -> ordinalNumber(0), "requiredType" -> "\"BOOLEAN\"", "inputSql" -> "\"NULL\"", "inputType" -> "\"INT\"")) diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/StringExpressionsSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/StringExpressionsSuite.scala index 98f33e2099945..a41e69f880b25 100644 --- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/StringExpressionsSuite.scala +++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/StringExpressionsSuite.scala @@ -69,7 +69,7 @@ class StringExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper { DataTypeMismatch( errorSubClass = "UNEXPECTED_INPUT_TYPE", messageParameters = Map( - "paramIndex" -> "1", + "paramIndex" -> ordinalNumber(0), "requiredType" -> "(\"STRING\" or \"BINARY\" or \"ARRAY\")", "inputSql" -> "\"1\"", "inputType" -> "\"INT\"" @@ -177,7 +177,7 @@ class StringExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper { DataTypeMismatch( errorSubClass = "UNEXPECTED_INPUT_TYPE", messageParameters = Map( - "paramIndex" -> "2...", + "paramIndex" -> (ordinalNumber(1) + "..."), "requiredType" -> "\"STRING\" or \"BINARY\"", "inputSql" -> "\"2\"", "inputType" -> "\"INT\"" @@ -765,7 +765,7 
@@ class StringExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper { DataTypeMismatch( errorSubClass = "UNEXPECTED_INPUT_TYPE", messageParameters = Map( - "paramIndex" -> "1", + "paramIndex" -> ordinalNumber(0), "requiredType" -> "(\"STRING\" or \"BINARY\")", "inputSql" -> "\"1\"", "inputType" -> "\"INT\"" @@ -777,7 +777,7 @@ class StringExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper { DataTypeMismatch( errorSubClass = "UNEXPECTED_INPUT_TYPE", messageParameters = Map( - "paramIndex" -> "2", + "paramIndex" -> ordinalNumber(1), "requiredType" -> "(\"STRING\" or \"BINARY\")", "inputSql" -> "\"2\"", "inputType" -> "\"INT\"" @@ -1903,14 +1903,14 @@ class StringExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper { assert(ParseUrl(Seq(Literal("1"), Literal(2))).checkInputDataTypes() == DataTypeMismatch( errorSubClass = "UNEXPECTED_INPUT_TYPE", messageParameters = Map( - "paramIndex" -> "2", + "paramIndex" -> ordinalNumber(1), "requiredType" -> "\"STRING\"", "inputSql" -> "\"2\"", "inputType" -> "\"INT\""))) assert(ParseUrl(Seq(Literal(1), Literal("2"))).checkInputDataTypes() == DataTypeMismatch( errorSubClass = "UNEXPECTED_INPUT_TYPE", messageParameters = Map( - "paramIndex" -> "1", + "paramIndex" -> ordinalNumber(0), "requiredType" -> "\"STRING\"", "inputSql" -> "\"1\"", "inputType" -> "\"INT\""))) @@ -1918,7 +1918,7 @@ class StringExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper { Literal(3))).checkInputDataTypes() == DataTypeMismatch( errorSubClass = "UNEXPECTED_INPUT_TYPE", messageParameters = Map( - "paramIndex" -> "3", + "paramIndex" -> ordinalNumber(2), "requiredType" -> "\"STRING\"", "inputSql" -> "\"3\"", "inputType" -> "\"INT\""))) @@ -2023,7 +2023,7 @@ class StringExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper { DataTypeMismatch( errorSubClass = "UNEXPECTED_INPUT_TYPE", messageParameters = Map( - "paramIndex" -> "1", + "paramIndex" -> ordinalNumber(0), "requiredType" -> 
toSQLType(IntegerType), "inputSql" -> toSQLExpr(indexExpr2), "inputType" -> toSQLType(indexExpr2.dataType) @@ -2039,7 +2039,7 @@ class StringExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper { DataTypeMismatch( errorSubClass = "UNEXPECTED_INPUT_TYPE", messageParameters = Map( - "paramIndex" -> "2...", + "paramIndex" -> (ordinalNumber(1) + "..."), "requiredType" -> (toSQLType(StringType) + " or " + toSQLType(BinaryType)), "inputSql" -> inputExpr3.map(toSQLExpr(_)).mkString(","), "inputType" -> inputExpr3.map(expr => toSQLType(expr.dataType)).mkString(",") diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/UnwrapUDTExpressionSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/UnwrapUDTExpressionSuite.scala index d1b13a4bec991..292f6bc96cd68 100644 --- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/UnwrapUDTExpressionSuite.scala +++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/UnwrapUDTExpressionSuite.scala @@ -20,6 +20,7 @@ package org.apache.spark.sql.catalyst.expressions import org.apache.spark.SparkFunSuite import org.apache.spark.sql.catalyst.analysis.TypeCheckResult.DataTypeMismatch import org.apache.spark.sql.catalyst.expressions.Cast.toSQLType +import org.apache.spark.sql.catalyst.util.TypeUtils.ordinalNumber import org.apache.spark.sql.types.BooleanType class UnwrapUDTExpressionSuite extends SparkFunSuite with ExpressionEvalHelper { @@ -31,7 +32,7 @@ class UnwrapUDTExpressionSuite extends SparkFunSuite with ExpressionEvalHelper { DataTypeMismatch( errorSubClass = "UNEXPECTED_INPUT_TYPE", messageParameters = Map( - "paramIndex" -> "1", + "paramIndex" -> ordinalNumber(0), "requiredType" -> toSQLType("UserDefinedType"), "inputSql" -> "\"false\"", "inputType" -> "\"BOOLEAN\""))) diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/aggregate/AggregateExpressionSuite.scala 
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/aggregate/AggregateExpressionSuite.scala index 91b580282eca8..15c85b1f83b25 100644 --- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/aggregate/AggregateExpressionSuite.scala +++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/aggregate/AggregateExpressionSuite.scala @@ -21,6 +21,7 @@ import org.apache.spark.SparkFunSuite import org.apache.spark.sql.catalyst.analysis.{TypeCheckResult, UnresolvedAttribute} import org.apache.spark.sql.catalyst.analysis.TypeCheckResult.DataTypeMismatch import org.apache.spark.sql.catalyst.expressions.{Add, AttributeSet, Literal} +import org.apache.spark.sql.catalyst.util.TypeUtils.ordinalNumber class AggregateExpressionSuite extends SparkFunSuite { @@ -37,7 +38,7 @@ class AggregateExpressionSuite extends SparkFunSuite { DataTypeMismatch( errorSubClass = "UNEXPECTED_INPUT_TYPE", messageParameters = Map( - "paramIndex" -> "1", + "paramIndex" -> ordinalNumber(0), "requiredType" -> "\"DOUBLE\"", "inputSql" -> "\"a\"", "inputType" -> "\"STRING\"" @@ -48,7 +49,7 @@ class AggregateExpressionSuite extends SparkFunSuite { DataTypeMismatch( errorSubClass = "UNEXPECTED_INPUT_TYPE", messageParameters = Map( - "paramIndex" -> "2", + "paramIndex" -> ordinalNumber(1), "requiredType" -> "\"DOUBLE\"", "inputSql" -> "\"b\"", "inputType" -> "\"STRING\"" @@ -59,7 +60,7 @@ class AggregateExpressionSuite extends SparkFunSuite { DataTypeMismatch( errorSubClass = "UNEXPECTED_INPUT_TYPE", messageParameters = Map( - "paramIndex" -> "2", + "paramIndex" -> ordinalNumber(1), "requiredType" -> "\"DOUBLE\"", "inputSql" -> "\"ARRAY(0)\"", "inputType" -> "\"ARRAY\"" @@ -75,7 +76,7 @@ class AggregateExpressionSuite extends SparkFunSuite { DataTypeMismatch( errorSubClass = "UNEXPECTED_INPUT_TYPE", messageParameters = Map( - "paramIndex" -> "1", + "paramIndex" -> ordinalNumber(0), "requiredType" -> "\"DOUBLE\"", "inputSql" -> "\"a\"", "inputType" -> 
"\"STRING\"" @@ -86,7 +87,7 @@ class AggregateExpressionSuite extends SparkFunSuite { DataTypeMismatch( errorSubClass = "UNEXPECTED_INPUT_TYPE", messageParameters = Map( - "paramIndex" -> "2", + "paramIndex" -> ordinalNumber(1), "requiredType" -> "\"DOUBLE\"", "inputSql" -> "\"b\"", "inputType" -> "\"STRING\"" @@ -97,7 +98,7 @@ class AggregateExpressionSuite extends SparkFunSuite { DataTypeMismatch( errorSubClass = "UNEXPECTED_INPUT_TYPE", messageParameters = Map( - "paramIndex" -> "2", + "paramIndex" -> ordinalNumber(1), "requiredType" -> "\"DOUBLE\"", "inputSql" -> "\"ARRAY(0)\"", "inputType" -> "\"ARRAY\"" @@ -113,7 +114,7 @@ class AggregateExpressionSuite extends SparkFunSuite { DataTypeMismatch( errorSubClass = "UNEXPECTED_INPUT_TYPE", messageParameters = Map( - "paramIndex" -> "1", + "paramIndex" -> ordinalNumber(0), "requiredType" -> "\"DOUBLE\"", "inputSql" -> "\"a\"", "inputType" -> "\"STRING\"" @@ -124,7 +125,7 @@ class AggregateExpressionSuite extends SparkFunSuite { DataTypeMismatch( errorSubClass = "UNEXPECTED_INPUT_TYPE", messageParameters = Map( - "paramIndex" -> "2", + "paramIndex" -> ordinalNumber(1), "requiredType" -> "\"DOUBLE\"", "inputSql" -> "\"b\"", "inputType" -> "\"STRING\"" @@ -135,7 +136,7 @@ class AggregateExpressionSuite extends SparkFunSuite { DataTypeMismatch( errorSubClass = "UNEXPECTED_INPUT_TYPE", messageParameters = Map( - "paramIndex" -> "2", + "paramIndex" -> ordinalNumber(1), "requiredType" -> "\"DOUBLE\"", "inputSql" -> "\"ARRAY(0)\"", "inputType" -> "\"ARRAY\"" diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/aggregate/ApproxCountDistinctForIntervalsSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/aggregate/ApproxCountDistinctForIntervalsSuite.scala index efc90716e31ad..656f8b161e17f 100644 --- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/aggregate/ApproxCountDistinctForIntervalsSuite.scala +++ 
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/aggregate/ApproxCountDistinctForIntervalsSuite.scala @@ -42,7 +42,7 @@ class ApproxCountDistinctForIntervalsSuite extends SparkFunSuite { DataTypeMismatch( errorSubClass = "UNEXPECTED_INPUT_TYPE", messageParameters = Map( - "paramIndex" -> "1", + "paramIndex" -> ordinalNumber(0), "requiredType" -> ("(\"NUMERIC\" or \"TIMESTAMP\" or \"DATE\" or \"TIMESTAMP_NTZ\"" + " or \"INTERVAL YEAR TO MONTH\" or \"INTERVAL DAY TO SECOND\")"), "inputSql" -> "\"a\"", @@ -59,7 +59,7 @@ class ApproxCountDistinctForIntervalsSuite extends SparkFunSuite { DataTypeMismatch( errorSubClass = "UNEXPECTED_INPUT_TYPE", messageParameters = Map( - "paramIndex" -> "2", + "paramIndex" -> ordinalNumber(1), "requiredType" -> "\"ARRAY\"", "inputSql" -> "\"0.5\"", "inputType" -> "\"DOUBLE\"" @@ -91,7 +91,7 @@ class ApproxCountDistinctForIntervalsSuite extends SparkFunSuite { DataTypeMismatch( errorSubClass = "UNEXPECTED_INPUT_TYPE", messageParameters = Map( - "paramIndex" -> "2", + "paramIndex" -> ordinalNumber(1), "requiredType" -> "ARRAY OF (\"NUMERIC\" or \"DATE\" or \"TIMESTAMP\" or \"TIMESTAMP_NTZ\" or \"ANSI INTERVAL\")", "inputSql" -> "\"array(foobar)\"", "inputType" -> "\"ARRAY\""))) diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/aggregate/ApproximatePercentileSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/aggregate/ApproximatePercentileSuite.scala index c460e06e3c76f..48dd7764f5ad4 100644 --- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/aggregate/ApproximatePercentileSuite.scala +++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/aggregate/ApproximatePercentileSuite.scala @@ -379,7 +379,7 @@ class ApproximatePercentileSuite extends SparkFunSuite { DataTypeMismatch( errorSubClass = "UNEXPECTED_INPUT_TYPE", messageParameters = Map( - "paramIndex" -> "2", + "paramIndex" -> ordinalNumber(1), 
"requiredType" -> "(\"DOUBLE\" or \"ARRAY\")", "inputSql" -> toSQLExpr(percentageExpression), "inputType" -> "\"ARRAY\"") @@ -399,7 +399,7 @@ class ApproximatePercentileSuite extends SparkFunSuite { DataTypeMismatch( errorSubClass = "UNEXPECTED_INPUT_TYPE", messageParameters = Map( - "paramIndex" -> "3", + "paramIndex" -> ordinalNumber(2), "requiredType" -> "\"INTEGRAL\"", "inputSql" -> toSQLExpr(Literal(acc)), "inputType" -> toSQLType(Literal(acc).dataType) diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/aggregate/PercentileSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/aggregate/PercentileSuite.scala index 85928ef84d829..389c757eefb63 100644 --- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/aggregate/PercentileSuite.scala +++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/aggregate/PercentileSuite.scala @@ -174,7 +174,7 @@ class PercentileSuite extends SparkFunSuite { DataTypeMismatch( errorSubClass = "UNEXPECTED_INPUT_TYPE", messageParameters = Map( - "paramIndex" -> "1", + "paramIndex" -> ordinalNumber(0), "requiredType" -> ("(\"NUMERIC\" or \"INTERVAL DAY TO SECOND\" " + "or \"INTERVAL YEAR TO MONTH\")"), "inputSql" -> "\"a\"", @@ -197,7 +197,7 @@ class PercentileSuite extends SparkFunSuite { DataTypeMismatch( errorSubClass = "UNEXPECTED_INPUT_TYPE", messageParameters = Map( - "paramIndex" -> "1", + "paramIndex" -> ordinalNumber(0), "requiredType" -> ("(\"NUMERIC\" or \"INTERVAL DAY TO SECOND\" " + "or \"INTERVAL YEAR TO MONTH\")"), "inputSql" -> "\"a\"", @@ -216,7 +216,7 @@ class PercentileSuite extends SparkFunSuite { DataTypeMismatch( errorSubClass = "UNEXPECTED_INPUT_TYPE", messageParameters = Map( - "paramIndex" -> "3", + "paramIndex" -> ordinalNumber(2), "requiredType" -> "\"INTEGRAL\"", "inputSql" -> "\"frq\"", "inputType" -> toSQLType(frequencyType) @@ -296,7 +296,7 @@ class PercentileSuite extends SparkFunSuite { DataTypeMismatch( 
errorSubClass = "UNEXPECTED_INPUT_TYPE", messageParameters = Map( - "paramIndex" -> "2", + "paramIndex" -> ordinalNumber(1), "requiredType" -> "\"DOUBLE\"", "inputSql" -> toSQLExpr(percentage), "inputType" -> toSQLType(dataType) @@ -346,7 +346,7 @@ class PercentileSuite extends SparkFunSuite { DataTypeMismatch( errorSubClass = "UNEXPECTED_INPUT_TYPE", messageParameters = Map( - "paramIndex" -> "2", + "paramIndex" -> ordinalNumber(1), "requiredType" -> "\"ARRAY\"", "inputSql" -> toSQLExpr(percentageExpression), "inputType" -> "\"ARRAY\"" diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/ansi/array.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/ansi/array.sql.out index f4fd42d6adea3..1ab4b03dcb266 100644 --- a/sql/core/src/test/resources/sql-tests/analyzer-results/ansi/array.sql.out +++ b/sql/core/src/test/resources/sql-tests/analyzer-results/ansi/array.sql.out @@ -177,7 +177,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"1\"", "inputType" : "\"STRING\"", - "paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "\"BOOLEAN\"", "sqlExpr" : "\"sort_array(array(b, d), 1)\"" }, @@ -201,7 +201,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"CAST(NULL AS BOOLEAN)\"", "inputType" : "\"BOOLEAN\"", - "paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "\"BOOLEAN\"", "sqlExpr" : "\"sort_array(array(b, d), CAST(NULL AS BOOLEAN))\"" }, @@ -360,7 +360,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"map(a, 1, b, 2)\"", "inputType" : "\"MAP\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"ARRAY\"", "sqlExpr" : "\"array_size(map(a, 1, b, 2))\"" }, @@ -574,7 +574,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"id\"", "inputType" : "\"INT\"", - "paramIndex" : "1", + "paramIndex" : "first", 
"requiredType" : "\"ARRAY\"", "sqlExpr" : "\"array_compact(id)\"" }, diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/ansi/date.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/ansi/date.sql.out index c231f3997b05a..7acfc9277679e 100644 --- a/sql/core/src/test/resources/sql-tests/analyzer-results/ansi/date.sql.out +++ b/sql/core/src/test/resources/sql-tests/analyzer-results/ansi/date.sql.out @@ -290,7 +290,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"1\"", "inputType" : "\"BIGINT\"", - "paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")", "sqlExpr" : "\"date_add(2011-11-11, 1)\"" }, @@ -314,7 +314,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"1.0\"", "inputType" : "\"DECIMAL(2,1)\"", - "paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")", "sqlExpr" : "\"date_add(2011-11-11, 1.0)\"" }, @@ -338,7 +338,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"10.0\"", "inputType" : "\"DOUBLE\"", - "paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")", "sqlExpr" : "\"date_add(2011-11-11, 10.0)\"" }, @@ -428,7 +428,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"1\"", "inputType" : "\"BIGINT\"", - "paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")", "sqlExpr" : "\"date_sub(2011-11-11, 1)\"" }, @@ -452,7 +452,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"1.0\"", "inputType" : "\"DECIMAL(2,1)\"", - "paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")", "sqlExpr" : "\"date_sub(2011-11-11, 1.0)\"" }, @@ -476,7 +476,7 
@@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"10.0\"", "inputType" : "\"DOUBLE\"", - "paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")", "sqlExpr" : "\"date_sub(2011-11-11, 10.0)\"" }, @@ -581,7 +581,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"10.0\"", "inputType" : "\"DOUBLE\"", - "paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")", "sqlExpr" : "\"date_add(DATE '2011-11-11', 10.0)\"" }, @@ -665,7 +665,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"1\"", "inputType" : "\"DATE\"", - "paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")", "sqlExpr" : "\"date_add(DATE '2011-11-11', 1)\"" }, @@ -689,7 +689,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"DATE '2011-11-11'\"", "inputType" : "\"DATE\"", - "paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")", "sqlExpr" : "\"date_add(1, DATE '2011-11-11')\"" }, diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/ansi/interval.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/ansi/interval.sql.out index 2e2a07beb7176..a0e8ff468f309 100644 --- a/sql/core/src/test/resources/sql-tests/analyzer-results/ansi/interval.sql.out +++ b/sql/core/src/test/resources/sql-tests/analyzer-results/ansi/interval.sql.out @@ -1691,7 +1691,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"INTERVAL '2-2' YEAR TO MONTH\"", "inputType" : "\"INTERVAL YEAR TO MONTH\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", "sqlExpr" : "\"INTERVAL '2-2' YEAR TO MONTH + INTERVAL '3' 
DAY\"" }, @@ -1715,7 +1715,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"INTERVAL '2-2' YEAR TO MONTH\"", "inputType" : "\"INTERVAL YEAR TO MONTH\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", "sqlExpr" : "\"INTERVAL '2-2' YEAR TO MONTH + INTERVAL '3' DAY\"" }, @@ -1739,7 +1739,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"INTERVAL '2-2' YEAR TO MONTH\"", "inputType" : "\"INTERVAL YEAR TO MONTH\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", "sqlExpr" : "\"INTERVAL '2-2' YEAR TO MONTH + (- INTERVAL '3' DAY)\"" }, @@ -1785,7 +1785,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"1\"", "inputType" : "\"INT\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", "sqlExpr" : "\"1 + (- INTERVAL '02' SECOND)\"" }, @@ -1831,7 +1831,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"1\"", "inputType" : "\"INT\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", "sqlExpr" : "\"1 + INTERVAL '02' SECOND\"" }, diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/ansi/literals.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/ansi/literals.sql.out index 83d0ff3f2edf7..d0e26f698fba0 100644 --- a/sql/core/src/test/resources/sql-tests/analyzer-results/ansi/literals.sql.out +++ b/sql/core/src/test/resources/sql-tests/analyzer-results/ansi/literals.sql.out @@ -488,7 +488,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"DATE '1999-01-01'\"", "inputType" : "\"DATE\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" 
: "(\"NUMERIC\" or \"INTERVAL DAY TO SECOND\" or \"INTERVAL YEAR TO MONTH\" or \"INTERVAL\")", "sqlExpr" : "\"(+ DATE '1999-01-01')\"" }, @@ -512,7 +512,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"TIMESTAMP '1999-01-01 00:00:00'\"", "inputType" : "\"TIMESTAMP\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "(\"NUMERIC\" or \"INTERVAL DAY TO SECOND\" or \"INTERVAL YEAR TO MONTH\" or \"INTERVAL\")", "sqlExpr" : "\"(+ TIMESTAMP '1999-01-01 00:00:00')\"" }, @@ -543,7 +543,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"map(1, 2)\"", "inputType" : "\"MAP\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "(\"NUMERIC\" or \"INTERVAL DAY TO SECOND\" or \"INTERVAL YEAR TO MONTH\" or \"INTERVAL\")", "sqlExpr" : "\"(+ map(1, 2))\"" }, @@ -567,7 +567,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"array(1, 2)\"", "inputType" : "\"ARRAY\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "(\"NUMERIC\" or \"INTERVAL DAY TO SECOND\" or \"INTERVAL YEAR TO MONTH\" or \"INTERVAL\")", "sqlExpr" : "\"(+ array(1, 2))\"" }, @@ -591,7 +591,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"named_struct(a, 1, b, spark)\"", "inputType" : "\"STRUCT\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "(\"NUMERIC\" or \"INTERVAL DAY TO SECOND\" or \"INTERVAL YEAR TO MONTH\" or \"INTERVAL\")", "sqlExpr" : "\"(+ named_struct(a, 1, b, spark))\"" }, @@ -615,7 +615,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"X'01'\"", "inputType" : "\"BINARY\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "(\"NUMERIC\" or \"INTERVAL DAY TO SECOND\" or \"INTERVAL YEAR TO MONTH\" or \"INTERVAL\")", "sqlExpr" : "\"(+ X'01')\"" }, @@ -639,7 +639,7 @@ 
org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"DATE '1999-01-01'\"", "inputType" : "\"DATE\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "(\"NUMERIC\" or \"INTERVAL DAY TO SECOND\" or \"INTERVAL YEAR TO MONTH\" or \"INTERVAL\")", "sqlExpr" : "\"(- DATE '1999-01-01')\"" }, @@ -663,7 +663,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"TIMESTAMP '1999-01-01 00:00:00'\"", "inputType" : "\"TIMESTAMP\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "(\"NUMERIC\" or \"INTERVAL DAY TO SECOND\" or \"INTERVAL YEAR TO MONTH\" or \"INTERVAL\")", "sqlExpr" : "\"(- TIMESTAMP '1999-01-01 00:00:00')\"" }, @@ -687,7 +687,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"X'2379ACFE'\"", "inputType" : "\"BINARY\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "(\"NUMERIC\" or \"INTERVAL DAY TO SECOND\" or \"INTERVAL YEAR TO MONTH\" or \"INTERVAL\")", "sqlExpr" : "\"(- X'2379ACFE')\"" }, diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/ansi/try_arithmetic.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/ansi/try_arithmetic.sql.out index bbc07c22805a6..ef17f6b50b90a 100644 --- a/sql/core/src/test/resources/sql-tests/analyzer-results/ansi/try_arithmetic.sql.out +++ b/sql/core/src/test/resources/sql-tests/analyzer-results/ansi/try_arithmetic.sql.out @@ -141,7 +141,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"INTERVAL '2' YEAR\"", "inputType" : "\"INTERVAL YEAR\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", "sqlExpr" : "\"INTERVAL '2' YEAR + INTERVAL '02' SECOND\"" }, diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/array.sql.out 
b/sql/core/src/test/resources/sql-tests/analyzer-results/array.sql.out index c26bb210b0fff..67c9bb8d992ca 100644 --- a/sql/core/src/test/resources/sql-tests/analyzer-results/array.sql.out +++ b/sql/core/src/test/resources/sql-tests/analyzer-results/array.sql.out @@ -177,7 +177,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"1\"", "inputType" : "\"STRING\"", - "paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "\"BOOLEAN\"", "sqlExpr" : "\"sort_array(array(b, d), 1)\"" }, @@ -201,7 +201,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"CAST(NULL AS BOOLEAN)\"", "inputType" : "\"BOOLEAN\"", - "paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "\"BOOLEAN\"", "sqlExpr" : "\"sort_array(array(b, d), CAST(NULL AS BOOLEAN))\"" }, @@ -360,7 +360,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"map(a, 1, b, 2)\"", "inputType" : "\"MAP\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"ARRAY\"", "sqlExpr" : "\"array_size(map(a, 1, b, 2))\"" }, @@ -574,7 +574,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"id\"", "inputType" : "\"INT\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"ARRAY\"", "sqlExpr" : "\"array_compact(id)\"" }, diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/bitwise.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/bitwise.sql.out index 086622789d432..8220aa4bd25bd 100644 --- a/sql/core/src/test/resources/sql-tests/analyzer-results/bitwise.sql.out +++ b/sql/core/src/test/resources/sql-tests/analyzer-results/bitwise.sql.out @@ -135,7 +135,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"bit count\"", "inputType" : "\"STRING\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : 
"(\"INTEGRAL\" or \"BOOLEAN\")", "sqlExpr" : "\"bit_count(bit count)\"" }, @@ -159,7 +159,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"a\"", "inputType" : "\"STRING\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "(\"INTEGRAL\" or \"BOOLEAN\")", "sqlExpr" : "\"bit_count(a)\"" }, diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/date.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/date.sql.out index a7b3dfdd32ca3..3d20b9641f99d 100644 --- a/sql/core/src/test/resources/sql-tests/analyzer-results/date.sql.out +++ b/sql/core/src/test/resources/sql-tests/analyzer-results/date.sql.out @@ -290,7 +290,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"1\"", "inputType" : "\"BIGINT\"", - "paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")", "sqlExpr" : "\"date_add(2011-11-11, 1)\"" }, @@ -314,7 +314,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"1.0\"", "inputType" : "\"DECIMAL(2,1)\"", - "paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")", "sqlExpr" : "\"date_add(2011-11-11, 1.0)\"" }, @@ -338,7 +338,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"10.0\"", "inputType" : "\"DOUBLE\"", - "paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")", "sqlExpr" : "\"date_add(2011-11-11, 10.0)\"" }, @@ -434,7 +434,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"1\"", "inputType" : "\"BIGINT\"", - "paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")", "sqlExpr" : "\"date_sub(2011-11-11, 1)\"" }, @@ -458,7 +458,7 @@ 
org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"1.0\"", "inputType" : "\"DECIMAL(2,1)\"", - "paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")", "sqlExpr" : "\"date_sub(2011-11-11, 1.0)\"" }, @@ -482,7 +482,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"10.0\"", "inputType" : "\"DOUBLE\"", - "paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")", "sqlExpr" : "\"date_sub(2011-11-11, 10.0)\"" }, @@ -550,7 +550,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"int_str\"", "inputType" : "\"STRING\"", - "paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")", "sqlExpr" : "\"date_add(2011-11-11, int_str)\"" }, @@ -574,7 +574,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"int_str\"", "inputType" : "\"STRING\"", - "paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")", "sqlExpr" : "\"date_sub(2011-11-11, int_str)\"" }, @@ -620,7 +620,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"10.0\"", "inputType" : "\"DOUBLE\"", - "paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")", "sqlExpr" : "\"date_add(DATE '2011-11-11', 10.0)\"" }, @@ -668,7 +668,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"2001-09-28\"", "inputType" : "\"DOUBLE\"", - "paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")", "sqlExpr" : "\"date_sub(DATE '2001-10-01', 2001-09-28)\"" }, @@ -716,7 +716,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" 
: "\"date_str\"", "inputType" : "\"DOUBLE\"", - "paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")", "sqlExpr" : "\"date_sub(DATE '2001-09-28', date_str)\"" }, @@ -740,7 +740,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"1\"", "inputType" : "\"DOUBLE\"", - "paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")", "sqlExpr" : "\"date_add(DATE '2011-11-11', 1)\"" }, @@ -764,7 +764,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"1\"", "inputType" : "\"DOUBLE\"", - "paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")", "sqlExpr" : "\"date_add(DATE '2011-11-11', 1)\"" }, diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/datetime-legacy.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/datetime-legacy.sql.out index 1879148f556d7..12a15cc9b8967 100644 --- a/sql/core/src/test/resources/sql-tests/analyzer-results/datetime-legacy.sql.out +++ b/sql/core/src/test/resources/sql-tests/analyzer-results/datetime-legacy.sql.out @@ -290,7 +290,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"1\"", "inputType" : "\"BIGINT\"", - "paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")", "sqlExpr" : "\"date_add(2011-11-11, 1)\"" }, @@ -314,7 +314,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"1.0\"", "inputType" : "\"DECIMAL(2,1)\"", - "paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")", "sqlExpr" : "\"date_add(2011-11-11, 1.0)\"" }, @@ -338,7 +338,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"10.0\"", "inputType" : "\"DOUBLE\"", - 
"paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")", "sqlExpr" : "\"date_add(2011-11-11, 10.0)\"" }, @@ -434,7 +434,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"1\"", "inputType" : "\"BIGINT\"", - "paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")", "sqlExpr" : "\"date_sub(2011-11-11, 1)\"" }, @@ -458,7 +458,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"1.0\"", "inputType" : "\"DECIMAL(2,1)\"", - "paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")", "sqlExpr" : "\"date_sub(2011-11-11, 1.0)\"" }, @@ -482,7 +482,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"10.0\"", "inputType" : "\"DOUBLE\"", - "paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")", "sqlExpr" : "\"date_sub(2011-11-11, 10.0)\"" }, @@ -550,7 +550,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"int_str\"", "inputType" : "\"STRING\"", - "paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")", "sqlExpr" : "\"date_add(2011-11-11, int_str)\"" }, @@ -574,7 +574,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"int_str\"", "inputType" : "\"STRING\"", - "paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")", "sqlExpr" : "\"date_sub(2011-11-11, int_str)\"" }, @@ -620,7 +620,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"10.0\"", "inputType" : "\"DOUBLE\"", - "paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")", "sqlExpr" : 
"\"date_add(DATE '2011-11-11', 10.0)\"" }, @@ -668,7 +668,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"2001-09-28\"", "inputType" : "\"DOUBLE\"", - "paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")", "sqlExpr" : "\"date_sub(DATE '2001-10-01', 2001-09-28)\"" }, @@ -716,7 +716,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"date_str\"", "inputType" : "\"DOUBLE\"", - "paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")", "sqlExpr" : "\"date_sub(DATE '2001-09-28', date_str)\"" }, @@ -740,7 +740,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"1\"", "inputType" : "\"DOUBLE\"", - "paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")", "sqlExpr" : "\"date_add(DATE '2011-11-11', 1)\"" }, @@ -764,7 +764,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"1\"", "inputType" : "\"DOUBLE\"", - "paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")", "sqlExpr" : "\"date_add(DATE '2011-11-11', 1)\"" }, @@ -1589,7 +1589,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"2011-11-11 11:11:10\"", "inputType" : "\"STRING\"", - "paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", "sqlExpr" : "\"(TIMESTAMP '2011-11-11 11:11:11' - 2011-11-11 11:11:10)\"" }, @@ -1613,7 +1613,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"2011-11-11 11:11:11\"", "inputType" : "\"STRING\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", "sqlExpr" : "\"(2011-11-11 
11:11:11 - TIMESTAMP '2011-11-11 11:11:10')\"" }, @@ -1657,7 +1657,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"str\"", "inputType" : "\"STRING\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", "sqlExpr" : "\"(str - TIMESTAMP '2011-11-11 11:11:11')\"" }, @@ -1681,7 +1681,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"str\"", "inputType" : "\"STRING\"", - "paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", "sqlExpr" : "\"(TIMESTAMP '2011-11-11 11:11:11' - str)\"" }, diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/group-by.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/group-by.sql.out index 88517449760d9..229d0cb6faf25 100644 --- a/sql/core/src/test/resources/sql-tests/analyzer-results/group-by.sql.out +++ b/sql/core/src/test/resources/sql-tests/analyzer-results/group-by.sql.out @@ -635,7 +635,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"1\"", "inputType" : "\"INT\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"BOOLEAN\"", "sqlExpr" : "\"every(1)\"" }, @@ -659,7 +659,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"1\"", "inputType" : "\"SMALLINT\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"BOOLEAN\"", "sqlExpr" : "\"some(1)\"" }, @@ -683,7 +683,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"1\"", "inputType" : "\"BIGINT\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"BOOLEAN\"", "sqlExpr" : "\"any(1)\"" }, @@ -707,7 +707,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"true\"", "inputType" : "\"STRING\"", - 
"paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"BOOLEAN\"", "sqlExpr" : "\"every(true)\"" }, @@ -731,7 +731,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"1.0\"", "inputType" : "\"DECIMAL(2,1)\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"BOOLEAN\"", "sqlExpr" : "\"bool_and(1.0)\"" }, @@ -755,7 +755,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"1.0\"", "inputType" : "\"DOUBLE\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"BOOLEAN\"", "sqlExpr" : "\"bool_or(1.0)\"" }, diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/hll.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/hll.sql.out index cd6fcc7c459b8..23701fb092f70 100644 --- a/sql/core/src/test/resources/sql-tests/analyzer-results/hll.sql.out +++ b/sql/core/src/test/resources/sql-tests/analyzer-results/hll.sql.out @@ -87,7 +87,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"col\"", "inputType" : "\"ARRAY\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "(\"INT\" or \"BIGINT\" or \"STRING\" or \"BINARY\")", "sqlExpr" : "\"hll_sketch_agg(col, 12)\"" }, @@ -170,7 +170,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"1\"", "inputType" : "\"INT\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"BINARY\"", "sqlExpr" : "\"hll_union(1, 2, false)\"" }, diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/interval.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/interval.sql.out index 54d6a5fd85e2a..b726de307f2a6 100644 --- a/sql/core/src/test/resources/sql-tests/analyzer-results/interval.sql.out +++ b/sql/core/src/test/resources/sql-tests/analyzer-results/interval.sql.out @@ -1691,7 +1691,7 @@ 
org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"INTERVAL '2-2' YEAR TO MONTH\"", "inputType" : "\"INTERVAL YEAR TO MONTH\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", "sqlExpr" : "\"INTERVAL '2-2' YEAR TO MONTH + INTERVAL '3' DAY\"" }, @@ -1715,7 +1715,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"INTERVAL '2-2' YEAR TO MONTH\"", "inputType" : "\"INTERVAL YEAR TO MONTH\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", "sqlExpr" : "\"INTERVAL '2-2' YEAR TO MONTH + INTERVAL '3' DAY\"" }, @@ -1739,7 +1739,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"INTERVAL '2-2' YEAR TO MONTH\"", "inputType" : "\"INTERVAL YEAR TO MONTH\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", "sqlExpr" : "\"INTERVAL '2-2' YEAR TO MONTH + (- INTERVAL '3' DAY)\"" }, @@ -1785,7 +1785,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"1\"", "inputType" : "\"INT\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", "sqlExpr" : "\"1 + (- INTERVAL '02' SECOND)\"" }, @@ -1831,7 +1831,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"1\"", "inputType" : "\"INT\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", "sqlExpr" : "\"1 + INTERVAL '02' SECOND\"" }, diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/json-functions.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/json-functions.sql.out index 5d83e3195be1a..48b3bb07ef37b 100644 --- 
a/sql/core/src/test/resources/sql-tests/analyzer-results/json-functions.sql.out +++ b/sql/core/src/test/resources/sql-tests/analyzer-results/json-functions.sql.out @@ -494,7 +494,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"2\"", "inputType" : "\"INT\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"STRING\"", "sqlExpr" : "\"json_array_length(2)\"" }, @@ -627,7 +627,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"200\"", "inputType" : "\"INT\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"STRING\"", "sqlExpr" : "\"json_object_keys(200)\"" }, diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/literals.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/literals.sql.out index 83d0ff3f2edf7..d0e26f698fba0 100644 --- a/sql/core/src/test/resources/sql-tests/analyzer-results/literals.sql.out +++ b/sql/core/src/test/resources/sql-tests/analyzer-results/literals.sql.out @@ -488,7 +488,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"DATE '1999-01-01'\"", "inputType" : "\"DATE\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "(\"NUMERIC\" or \"INTERVAL DAY TO SECOND\" or \"INTERVAL YEAR TO MONTH\" or \"INTERVAL\")", "sqlExpr" : "\"(+ DATE '1999-01-01')\"" }, @@ -512,7 +512,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"TIMESTAMP '1999-01-01 00:00:00'\"", "inputType" : "\"TIMESTAMP\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "(\"NUMERIC\" or \"INTERVAL DAY TO SECOND\" or \"INTERVAL YEAR TO MONTH\" or \"INTERVAL\")", "sqlExpr" : "\"(+ TIMESTAMP '1999-01-01 00:00:00')\"" }, @@ -543,7 +543,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"map(1, 2)\"", "inputType" : "\"MAP\"", - "paramIndex" : "1", 
+ "paramIndex" : "first", "requiredType" : "(\"NUMERIC\" or \"INTERVAL DAY TO SECOND\" or \"INTERVAL YEAR TO MONTH\" or \"INTERVAL\")", "sqlExpr" : "\"(+ map(1, 2))\"" }, @@ -567,7 +567,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"array(1, 2)\"", "inputType" : "\"ARRAY\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "(\"NUMERIC\" or \"INTERVAL DAY TO SECOND\" or \"INTERVAL YEAR TO MONTH\" or \"INTERVAL\")", "sqlExpr" : "\"(+ array(1, 2))\"" }, @@ -591,7 +591,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"named_struct(a, 1, b, spark)\"", "inputType" : "\"STRUCT\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "(\"NUMERIC\" or \"INTERVAL DAY TO SECOND\" or \"INTERVAL YEAR TO MONTH\" or \"INTERVAL\")", "sqlExpr" : "\"(+ named_struct(a, 1, b, spark))\"" }, @@ -615,7 +615,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"X'01'\"", "inputType" : "\"BINARY\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "(\"NUMERIC\" or \"INTERVAL DAY TO SECOND\" or \"INTERVAL YEAR TO MONTH\" or \"INTERVAL\")", "sqlExpr" : "\"(+ X'01')\"" }, @@ -639,7 +639,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"DATE '1999-01-01'\"", "inputType" : "\"DATE\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "(\"NUMERIC\" or \"INTERVAL DAY TO SECOND\" or \"INTERVAL YEAR TO MONTH\" or \"INTERVAL\")", "sqlExpr" : "\"(- DATE '1999-01-01')\"" }, @@ -663,7 +663,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"TIMESTAMP '1999-01-01 00:00:00'\"", "inputType" : "\"TIMESTAMP\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "(\"NUMERIC\" or \"INTERVAL DAY TO SECOND\" or \"INTERVAL YEAR TO MONTH\" or \"INTERVAL\")", "sqlExpr" : "\"(- TIMESTAMP 
'1999-01-01 00:00:00')\"" }, @@ -687,7 +687,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"X'2379ACFE'\"", "inputType" : "\"BINARY\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "(\"NUMERIC\" or \"INTERVAL DAY TO SECOND\" or \"INTERVAL YEAR TO MONTH\" or \"INTERVAL\")", "sqlExpr" : "\"(- X'2379ACFE')\"" }, diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/mask-functions.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/mask-functions.sql.out index b9f0bde76b40e..dba6a8293c9fa 100644 --- a/sql/core/src/test/resources/sql-tests/analyzer-results/mask-functions.sql.out +++ b/sql/core/src/test/resources/sql-tests/analyzer-results/mask-functions.sql.out @@ -456,7 +456,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"replaceArg\"", "inputType" : "\"INT\"", - "paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "\"STRING\"", "sqlExpr" : "\"mask(c1, replaceArg, x, n, NULL)\"" }, diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/random.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/random.sql.out index e0c051e0a9349..3cacbdc141053 100644 --- a/sql/core/src/test/resources/sql-tests/analyzer-results/random.sql.out +++ b/sql/core/src/test/resources/sql-tests/analyzer-results/random.sql.out @@ -33,7 +33,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"1.0\"", "inputType" : "\"DECIMAL(2,1)\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "(\"INT\" or \"BIGINT\")", "sqlExpr" : "\"rand(1.0)\"" }, @@ -81,7 +81,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"1\"", "inputType" : "\"STRING\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "(\"INT\" or \"BIGINT\")", "sqlExpr" : "\"rand(1)\"" }, diff --git 
a/sql/core/src/test/resources/sql-tests/analyzer-results/table-valued-functions.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/table-valued-functions.sql.out index 9ee8d741aed32..765de505d9a29 100644 --- a/sql/core/src/test/resources/sql-tests/analyzer-results/table-valued-functions.sql.out +++ b/sql/core/src/test/resources/sql-tests/analyzer-results/table-valued-functions.sql.out @@ -266,7 +266,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"NULL\"", "inputType" : "\"VOID\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "(\"ARRAY\" or \"MAP\")", "sqlExpr" : "\"explode(NULL)\"" }, @@ -290,7 +290,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"NULL\"", "inputType" : "\"VOID\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "(\"ARRAY\" or \"MAP\")", "sqlExpr" : "\"explode(NULL)\"" }, @@ -314,7 +314,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"1\"", "inputType" : "\"INT\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "(\"ARRAY\" or \"MAP\")", "sqlExpr" : "\"explode(1)\"" }, @@ -481,7 +481,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"NULL\"", "inputType" : "\"VOID\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"ARRAY\"", "sqlExpr" : "\"inline(NULL)\"" }, @@ -595,7 +595,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"1\"", "inputType" : "\"INT\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "(\"ARRAY\" or \"MAP\")", "sqlExpr" : "\"posexplode(1)\"" }, diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/timestamp.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/timestamp.sql.out index bee762b33b331..4a48795e4cca5 100644 --- 
a/sql/core/src/test/resources/sql-tests/analyzer-results/timestamp.sql.out +++ b/sql/core/src/test/resources/sql-tests/analyzer-results/timestamp.sql.out @@ -558,7 +558,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"2011-11-11 11:11:10\"", "inputType" : "\"STRING\"", - "paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", "sqlExpr" : "\"(TIMESTAMP '2011-11-11 11:11:11' - 2011-11-11 11:11:10)\"" }, @@ -582,7 +582,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"2011-11-11 11:11:11\"", "inputType" : "\"STRING\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", "sqlExpr" : "\"(2011-11-11 11:11:11 - TIMESTAMP '2011-11-11 11:11:10')\"" }, @@ -626,7 +626,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"str\"", "inputType" : "\"STRING\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", "sqlExpr" : "\"(str - TIMESTAMP '2011-11-11 11:11:11')\"" }, @@ -650,7 +650,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"str\"", "inputType" : "\"STRING\"", - "paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", "sqlExpr" : "\"(TIMESTAMP '2011-11-11 11:11:11' - str)\"" }, diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/timestampNTZ/timestamp.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/timestampNTZ/timestamp.sql.out index b204dacd192c9..4aa1ffcbc31db 100644 --- a/sql/core/src/test/resources/sql-tests/analyzer-results/timestampNTZ/timestamp.sql.out +++ b/sql/core/src/test/resources/sql-tests/analyzer-results/timestampNTZ/timestamp.sql.out @@ -560,7 +560,7 @@ 
org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"2011-11-11 11:11:10\"", "inputType" : "\"STRING\"", - "paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", "sqlExpr" : "\"(TIMESTAMP_NTZ '2011-11-11 11:11:11' - 2011-11-11 11:11:10)\"" }, @@ -584,7 +584,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"2011-11-11 11:11:11\"", "inputType" : "\"STRING\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", "sqlExpr" : "\"(2011-11-11 11:11:11 - TIMESTAMP_NTZ '2011-11-11 11:11:10')\"" }, @@ -628,7 +628,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"str\"", "inputType" : "\"STRING\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", "sqlExpr" : "\"(str - TIMESTAMP_NTZ '2011-11-11 11:11:11')\"" }, @@ -652,7 +652,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"str\"", "inputType" : "\"STRING\"", - "paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", "sqlExpr" : "\"(TIMESTAMP_NTZ '2011-11-11 11:11:11' - str)\"" }, diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/try_arithmetic.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/try_arithmetic.sql.out index bbc07c22805a6..ef17f6b50b90a 100644 --- a/sql/core/src/test/resources/sql-tests/analyzer-results/try_arithmetic.sql.out +++ b/sql/core/src/test/resources/sql-tests/analyzer-results/try_arithmetic.sql.out @@ -141,7 +141,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"INTERVAL '2' YEAR\"", "inputType" : "\"INTERVAL YEAR\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : 
"\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", "sqlExpr" : "\"INTERVAL '2' YEAR + INTERVAL '02' SECOND\"" }, diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/try_reflect.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/try_reflect.sql.out index 0b816cecf1a05..7b395b9e0a05a 100644 --- a/sql/core/src/test/resources/sql-tests/analyzer-results/try_reflect.sql.out +++ b/sql/core/src/test/resources/sql-tests/analyzer-results/try_reflect.sql.out @@ -110,7 +110,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"2.5\"", "inputType" : "\"DECIMAL(2,1)\"", - "paramIndex" : "3", + "paramIndex" : "third", "requiredType" : "(\"BOOLEAN\" or \"TINYINT\" or \"SMALLINT\" or \"INT\" or \"BIGINT\" or \"FLOAT\" or \"DOUBLE\" or \"STRING\")", "sqlExpr" : "\"reflect(java.lang.Math, round, 2.5)\"" }, diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/typeCoercion/native/dateTimeOperations.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/typeCoercion/native/dateTimeOperations.sql.out index 4e5884d1219b5..a50c797e78c20 100644 --- a/sql/core/src/test/resources/sql-tests/analyzer-results/typeCoercion/native/dateTimeOperations.sql.out +++ b/sql/core/src/test/resources/sql-tests/analyzer-results/typeCoercion/native/dateTimeOperations.sql.out @@ -17,7 +17,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"CAST(1 AS TINYINT)\"", "inputType" : "\"TINYINT\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", "sqlExpr" : "\"CAST(1 AS TINYINT) + INTERVAL '2' DAY\"" }, @@ -41,7 +41,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"CAST(1 AS SMALLINT)\"", "inputType" : "\"SMALLINT\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", "sqlExpr" : "\"CAST(1 AS 
SMALLINT) + INTERVAL '2' DAY\"" }, @@ -65,7 +65,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"CAST(1 AS INT)\"", "inputType" : "\"INT\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", "sqlExpr" : "\"CAST(1 AS INT) + INTERVAL '2' DAY\"" }, @@ -89,7 +89,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"CAST(1 AS BIGINT)\"", "inputType" : "\"BIGINT\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", "sqlExpr" : "\"CAST(1 AS BIGINT) + INTERVAL '2' DAY\"" }, @@ -113,7 +113,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"CAST(1 AS FLOAT)\"", "inputType" : "\"FLOAT\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", "sqlExpr" : "\"CAST(1 AS FLOAT) + INTERVAL '2' DAY\"" }, @@ -137,7 +137,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"CAST(1 AS DOUBLE)\"", "inputType" : "\"DOUBLE\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", "sqlExpr" : "\"CAST(1 AS DOUBLE) + INTERVAL '2' DAY\"" }, @@ -161,7 +161,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"CAST(1 AS DECIMAL(10,0))\"", "inputType" : "\"DECIMAL(10,0)\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", "sqlExpr" : "\"CAST(1 AS DECIMAL(10,0)) + INTERVAL '2' DAY\"" }, @@ -199,7 +199,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"CAST(1 AS BINARY)\"", "inputType" : "\"BINARY\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"(TIMESTAMP OR TIMESTAMP 
WITHOUT TIME ZONE)\"", "sqlExpr" : "\"CAST(1 AS BINARY) + INTERVAL '2' DAY\"" }, @@ -223,7 +223,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"CAST(1 AS BOOLEAN)\"", "inputType" : "\"BOOLEAN\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", "sqlExpr" : "\"CAST(1 AS BOOLEAN) + INTERVAL '2' DAY\"" }, @@ -261,7 +261,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"CAST(1 AS TINYINT)\"", "inputType" : "\"TINYINT\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", "sqlExpr" : "\"CAST(1 AS TINYINT) + INTERVAL '2' DAY\"" }, @@ -285,7 +285,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"CAST(1 AS SMALLINT)\"", "inputType" : "\"SMALLINT\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", "sqlExpr" : "\"CAST(1 AS SMALLINT) + INTERVAL '2' DAY\"" }, @@ -309,7 +309,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"CAST(1 AS INT)\"", "inputType" : "\"INT\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", "sqlExpr" : "\"CAST(1 AS INT) + INTERVAL '2' DAY\"" }, @@ -333,7 +333,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"CAST(1 AS BIGINT)\"", "inputType" : "\"BIGINT\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", "sqlExpr" : "\"CAST(1 AS BIGINT) + INTERVAL '2' DAY\"" }, @@ -357,7 +357,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"CAST(1 AS FLOAT)\"", "inputType" : "\"FLOAT\"", - "paramIndex" : "1", + "paramIndex" : "first", 
"requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", "sqlExpr" : "\"CAST(1 AS FLOAT) + INTERVAL '2' DAY\"" }, @@ -381,7 +381,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"CAST(1 AS DOUBLE)\"", "inputType" : "\"DOUBLE\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", "sqlExpr" : "\"CAST(1 AS DOUBLE) + INTERVAL '2' DAY\"" }, @@ -405,7 +405,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"CAST(1 AS DECIMAL(10,0))\"", "inputType" : "\"DECIMAL(10,0)\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", "sqlExpr" : "\"CAST(1 AS DECIMAL(10,0)) + INTERVAL '2' DAY\"" }, @@ -443,7 +443,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"CAST(1 AS BINARY)\"", "inputType" : "\"BINARY\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", "sqlExpr" : "\"CAST(1 AS BINARY) + INTERVAL '2' DAY\"" }, @@ -467,7 +467,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"CAST(1 AS BOOLEAN)\"", "inputType" : "\"BOOLEAN\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", "sqlExpr" : "\"CAST(1 AS BOOLEAN) + INTERVAL '2' DAY\"" }, @@ -505,7 +505,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"CAST(1 AS TINYINT)\"", "inputType" : "\"TINYINT\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", "sqlExpr" : "\"CAST(1 AS TINYINT) + (- INTERVAL '2' DAY)\"" }, @@ -529,7 +529,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"CAST(1 AS SMALLINT)\"", 
"inputType" : "\"SMALLINT\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", "sqlExpr" : "\"CAST(1 AS SMALLINT) + (- INTERVAL '2' DAY)\"" }, @@ -553,7 +553,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"CAST(1 AS INT)\"", "inputType" : "\"INT\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", "sqlExpr" : "\"CAST(1 AS INT) + (- INTERVAL '2' DAY)\"" }, @@ -577,7 +577,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"CAST(1 AS BIGINT)\"", "inputType" : "\"BIGINT\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", "sqlExpr" : "\"CAST(1 AS BIGINT) + (- INTERVAL '2' DAY)\"" }, @@ -601,7 +601,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"CAST(1 AS FLOAT)\"", "inputType" : "\"FLOAT\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", "sqlExpr" : "\"CAST(1 AS FLOAT) + (- INTERVAL '2' DAY)\"" }, @@ -625,7 +625,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"CAST(1 AS DOUBLE)\"", "inputType" : "\"DOUBLE\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", "sqlExpr" : "\"CAST(1 AS DOUBLE) + (- INTERVAL '2' DAY)\"" }, @@ -649,7 +649,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"CAST(1 AS DECIMAL(10,0))\"", "inputType" : "\"DECIMAL(10,0)\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", "sqlExpr" : "\"CAST(1 AS DECIMAL(10,0)) + (- INTERVAL '2' DAY)\"" }, @@ -687,7 +687,7 @@ 
org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"CAST(1 AS BINARY)\"", "inputType" : "\"BINARY\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", "sqlExpr" : "\"CAST(1 AS BINARY) + (- INTERVAL '2' DAY)\"" }, @@ -711,7 +711,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"CAST(1 AS BOOLEAN)\"", "inputType" : "\"BOOLEAN\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", "sqlExpr" : "\"CAST(1 AS BOOLEAN) + (- INTERVAL '2' DAY)\"" }, diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/typeCoercion/native/decimalPrecision.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/typeCoercion/native/decimalPrecision.sql.out index 5fd5c1fa57406..eebe370666edb 100644 --- a/sql/core/src/test/resources/sql-tests/analyzer-results/typeCoercion/native/decimalPrecision.sql.out +++ b/sql/core/src/test/resources/sql-tests/analyzer-results/typeCoercion/native/decimalPrecision.sql.out @@ -501,7 +501,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"CAST(1 AS DECIMAL(3,0))\"", "inputType" : "\"DECIMAL(3,0)\"", - "paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")", "sqlExpr" : "\"date_add(CAST(2017-12-11 09:30:00 AS DATE), CAST(1 AS DECIMAL(3,0)))\"" }, @@ -525,7 +525,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"CAST(1 AS DECIMAL(5,0))\"", "inputType" : "\"DECIMAL(5,0)\"", - "paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")", "sqlExpr" : "\"date_add(CAST(2017-12-11 09:30:00 AS DATE), CAST(1 AS DECIMAL(5,0)))\"" }, @@ -549,7 +549,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { 
"inputSql" : "\"CAST(1 AS DECIMAL(10,0))\"", "inputType" : "\"DECIMAL(10,0)\"", - "paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")", "sqlExpr" : "\"date_add(CAST(2017-12-11 09:30:00 AS DATE), CAST(1 AS DECIMAL(10,0)))\"" }, @@ -573,7 +573,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"CAST(1 AS DECIMAL(20,0))\"", "inputType" : "\"DECIMAL(20,0)\"", - "paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")", "sqlExpr" : "\"date_add(CAST(2017-12-11 09:30:00 AS DATE), CAST(1 AS DECIMAL(20,0)))\"" }, @@ -1213,7 +1213,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"CAST(1 AS DECIMAL(3,0))\"", "inputType" : "\"DECIMAL(3,0)\"", - "paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")", "sqlExpr" : "\"date_add(CAST(2017-12-11 09:30:00 AS DATE), CAST(1 AS DECIMAL(3,0)))\"" }, @@ -1237,7 +1237,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"CAST(1 AS DECIMAL(5,0))\"", "inputType" : "\"DECIMAL(5,0)\"", - "paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")", "sqlExpr" : "\"date_add(CAST(2017-12-11 09:30:00 AS DATE), CAST(1 AS DECIMAL(5,0)))\"" }, @@ -1261,7 +1261,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"CAST(1 AS DECIMAL(10,0))\"", "inputType" : "\"DECIMAL(10,0)\"", - "paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")", "sqlExpr" : "\"date_add(CAST(2017-12-11 09:30:00 AS DATE), CAST(1 AS DECIMAL(10,0)))\"" }, @@ -1285,7 +1285,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"CAST(1 AS DECIMAL(20,0))\"", "inputType" : "\"DECIMAL(20,0)\"", - 
"paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")", "sqlExpr" : "\"date_add(CAST(2017-12-11 09:30:00 AS DATE), CAST(1 AS DECIMAL(20,0)))\"" }, @@ -1705,7 +1705,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"CAST(1 AS DECIMAL(3,0))\"", "inputType" : "\"DECIMAL(3,0)\"", - "paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", "sqlExpr" : "\"(CAST(2017-12-11 09:30:00.0 AS TIMESTAMP) - CAST(1 AS DECIMAL(3,0)))\"" }, @@ -1729,7 +1729,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"CAST(1 AS DECIMAL(5,0))\"", "inputType" : "\"DECIMAL(5,0)\"", - "paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", "sqlExpr" : "\"(CAST(2017-12-11 09:30:00.0 AS TIMESTAMP) - CAST(1 AS DECIMAL(5,0)))\"" }, @@ -1753,7 +1753,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"CAST(1 AS DECIMAL(10,0))\"", "inputType" : "\"DECIMAL(10,0)\"", - "paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", "sqlExpr" : "\"(CAST(2017-12-11 09:30:00.0 AS TIMESTAMP) - CAST(1 AS DECIMAL(10,0)))\"" }, @@ -1777,7 +1777,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"CAST(1 AS DECIMAL(20,0))\"", "inputType" : "\"DECIMAL(20,0)\"", - "paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", "sqlExpr" : "\"(CAST(2017-12-11 09:30:00.0 AS TIMESTAMP) - CAST(1 AS DECIMAL(20,0)))\"" }, @@ -1801,7 +1801,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"CAST(1 AS DECIMAL(3,0))\"", "inputType" : "\"DECIMAL(3,0)\"", - "paramIndex" : "2", + "paramIndex" : "second", "requiredType" : 
"(\"INT\" or \"SMALLINT\" or \"TINYINT\")", "sqlExpr" : "\"date_sub(CAST(2017-12-11 09:30:00 AS DATE), CAST(1 AS DECIMAL(3,0)))\"" }, @@ -1825,7 +1825,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"CAST(1 AS DECIMAL(5,0))\"", "inputType" : "\"DECIMAL(5,0)\"", - "paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")", "sqlExpr" : "\"date_sub(CAST(2017-12-11 09:30:00 AS DATE), CAST(1 AS DECIMAL(5,0)))\"" }, @@ -1849,7 +1849,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"CAST(1 AS DECIMAL(10,0))\"", "inputType" : "\"DECIMAL(10,0)\"", - "paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")", "sqlExpr" : "\"date_sub(CAST(2017-12-11 09:30:00 AS DATE), CAST(1 AS DECIMAL(10,0)))\"" }, @@ -1873,7 +1873,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"CAST(1 AS DECIMAL(20,0))\"", "inputType" : "\"DECIMAL(20,0)\"", - "paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")", "sqlExpr" : "\"date_sub(CAST(2017-12-11 09:30:00 AS DATE), CAST(1 AS DECIMAL(20,0)))\"" }, @@ -2425,7 +2425,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"CAST(1 AS DECIMAL(3,0))\"", "inputType" : "\"DECIMAL(3,0)\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", "sqlExpr" : "\"(CAST(1 AS DECIMAL(3,0)) - CAST(2017-12-11 09:30:00.0 AS TIMESTAMP))\"" }, @@ -2449,7 +2449,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"CAST(1 AS DECIMAL(5,0))\"", "inputType" : "\"DECIMAL(5,0)\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", "sqlExpr" : "\"(CAST(1 AS DECIMAL(5,0)) 
- CAST(2017-12-11 09:30:00.0 AS TIMESTAMP))\"" }, @@ -2473,7 +2473,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"CAST(1 AS DECIMAL(10,0))\"", "inputType" : "\"DECIMAL(10,0)\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", "sqlExpr" : "\"(CAST(1 AS DECIMAL(10,0)) - CAST(2017-12-11 09:30:00.0 AS TIMESTAMP))\"" }, @@ -2497,7 +2497,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"CAST(1 AS DECIMAL(20,0))\"", "inputType" : "\"DECIMAL(20,0)\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", "sqlExpr" : "\"(CAST(1 AS DECIMAL(20,0)) - CAST(2017-12-11 09:30:00.0 AS TIMESTAMP))\"" }, @@ -2521,7 +2521,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"CAST(1 AS DECIMAL(3,0))\"", "inputType" : "\"DECIMAL(3,0)\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"DATE\"", "sqlExpr" : "\"(CAST(1 AS DECIMAL(3,0)) - CAST(2017-12-11 09:30:00 AS DATE))\"" }, @@ -2545,7 +2545,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"CAST(1 AS DECIMAL(5,0))\"", "inputType" : "\"DECIMAL(5,0)\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"DATE\"", "sqlExpr" : "\"(CAST(1 AS DECIMAL(5,0)) - CAST(2017-12-11 09:30:00 AS DATE))\"" }, @@ -2569,7 +2569,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"CAST(1 AS DECIMAL(10,0))\"", "inputType" : "\"DECIMAL(10,0)\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"DATE\"", "sqlExpr" : "\"(CAST(1 AS DECIMAL(10,0)) - CAST(2017-12-11 09:30:00 AS DATE))\"" }, @@ -2593,7 +2593,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"CAST(1 AS DECIMAL(20,0))\"", 
"inputType" : "\"DECIMAL(20,0)\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"DATE\"", "sqlExpr" : "\"(CAST(1 AS DECIMAL(20,0)) - CAST(2017-12-11 09:30:00 AS DATE))\"" }, diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/typeCoercion/native/promoteStrings.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/typeCoercion/native/promoteStrings.sql.out index dad6e815e93d7..0a32a7eaac474 100644 --- a/sql/core/src/test/resources/sql-tests/analyzer-results/typeCoercion/native/promoteStrings.sql.out +++ b/sql/core/src/test/resources/sql-tests/analyzer-results/typeCoercion/native/promoteStrings.sql.out @@ -171,7 +171,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"1\"", "inputType" : "\"DOUBLE\"", - "paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")", "sqlExpr" : "\"date_add(CAST(2017-12-11 09:30:00 AS DATE), 1)\"" }, @@ -327,7 +327,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"1\"", "inputType" : "\"STRING\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", "sqlExpr" : "\"(1 - CAST(2017-12-11 09:30:00.0 AS TIMESTAMP))\"" }, @@ -1209,7 +1209,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"1\"", "inputType" : "\"DOUBLE\"", - "paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")", "sqlExpr" : "\"date_add(CAST(2017-12-11 09:30:00 AS DATE), 1)\"" }, @@ -1354,7 +1354,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"1\"", "inputType" : "\"STRING\"", - "paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", "sqlExpr" : "\"(CAST(2017-12-11 09:30:00.0 AS TIMESTAMP) - 1)\"" }, @@ -1378,7 
+1378,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"1\"", "inputType" : "\"DOUBLE\"", - "paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")", "sqlExpr" : "\"date_sub(CAST(2017-12-11 09:30:00 AS DATE), 1)\"" }, diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/udf/udf-group-by.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/udf/udf-group-by.sql.out index 3765acad36b64..12b903477b6c4 100644 --- a/sql/core/src/test/resources/sql-tests/analyzer-results/udf/udf-group-by.sql.out +++ b/sql/core/src/test/resources/sql-tests/analyzer-results/udf/udf-group-by.sql.out @@ -554,7 +554,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"udf(1)\"", "inputType" : "\"INT\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"BOOLEAN\"", "sqlExpr" : "\"every(udf(1))\"" }, @@ -578,7 +578,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"udf(1)\"", "inputType" : "\"SMALLINT\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"BOOLEAN\"", "sqlExpr" : "\"some(udf(1))\"" }, @@ -602,7 +602,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"udf(1)\"", "inputType" : "\"BIGINT\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"BOOLEAN\"", "sqlExpr" : "\"any(udf(1))\"" }, @@ -626,7 +626,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"true\"", "inputType" : "\"STRING\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"BOOLEAN\"", "sqlExpr" : "\"every(true)\"" }, diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/xml-functions.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/xml-functions.sql.out index 3fbc8d19b94a6..05b8eed46d1d6 100644 --- 
a/sql/core/src/test/resources/sql-tests/analyzer-results/xml-functions.sql.out +++ b/sql/core/src/test/resources/sql-tests/analyzer-results/xml-functions.sql.out @@ -23,7 +23,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"array(named_struct(a, 1, b, 2))\"", "inputType" : "\"ARRAY>\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"STRUCT\"", "sqlExpr" : "\"to_xml(array(named_struct(a, 1, b, 2)))\"" }, @@ -47,7 +47,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"map(a, 1)\"", "inputType" : "\"MAP\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"STRUCT\"", "sqlExpr" : "\"to_xml(map(a, 1))\"" }, diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/array.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/array.sql.out index 49e18411ffa37..c0f78f45db413 100644 --- a/sql/core/src/test/resources/sql-tests/results/ansi/array.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/ansi/array.sql.out @@ -134,7 +134,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"1\"", "inputType" : "\"STRING\"", - "paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "\"BOOLEAN\"", "sqlExpr" : "\"sort_array(array(b, d), 1)\"" }, @@ -160,7 +160,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"CAST(NULL AS BOOLEAN)\"", "inputType" : "\"BOOLEAN\"", - "paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "\"BOOLEAN\"", "sqlExpr" : "\"sort_array(array(b, d), CAST(NULL AS BOOLEAN))\"" }, @@ -456,7 +456,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"map(a, 1, b, 2)\"", "inputType" : "\"MAP\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"ARRAY\"", "sqlExpr" : "\"array_size(map(a, 1, b, 2))\"" }, @@ -711,7 +711,7 @@ 
org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"id\"", "inputType" : "\"INT\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"ARRAY\"", "sqlExpr" : "\"array_compact(id)\"" }, diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/date.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/date.sql.out index 507df161e8b45..b1b26b2f74ad1 100644 --- a/sql/core/src/test/resources/sql-tests/results/ansi/date.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/ansi/date.sql.out @@ -384,7 +384,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"1\"", "inputType" : "\"BIGINT\"", - "paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")", "sqlExpr" : "\"date_add(2011-11-11, 1)\"" }, @@ -410,7 +410,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"1.0\"", "inputType" : "\"DECIMAL(2,1)\"", - "paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")", "sqlExpr" : "\"date_add(2011-11-11, 1.0)\"" }, @@ -436,7 +436,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"10.0\"", "inputType" : "\"DOUBLE\"", - "paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")", "sqlExpr" : "\"date_add(2011-11-11, 10.0)\"" }, @@ -559,7 +559,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"1\"", "inputType" : "\"BIGINT\"", - "paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")", "sqlExpr" : "\"date_sub(2011-11-11, 1)\"" }, @@ -585,7 +585,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"1.0\"", "inputType" : "\"DECIMAL(2,1)\"", - "paramIndex" : "2", + 
"paramIndex" : "second", "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")", "sqlExpr" : "\"date_sub(2011-11-11, 1.0)\"" }, @@ -611,7 +611,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"10.0\"", "inputType" : "\"DOUBLE\"", - "paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")", "sqlExpr" : "\"date_sub(2011-11-11, 10.0)\"" }, @@ -734,7 +734,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"10.0\"", "inputType" : "\"DOUBLE\"", - "paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")", "sqlExpr" : "\"date_add(DATE '2011-11-11', 10.0)\"" }, @@ -840,7 +840,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"1\"", "inputType" : "\"DATE\"", - "paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")", "sqlExpr" : "\"date_add(DATE '2011-11-11', 1)\"" }, @@ -866,7 +866,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"DATE '2011-11-11'\"", "inputType" : "\"DATE\"", - "paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")", "sqlExpr" : "\"date_add(1, DATE '2011-11-11')\"" }, diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out index b0d128e967a6d..2a8de35039fef 100644 --- a/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out @@ -2108,7 +2108,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"INTERVAL '2-2' YEAR TO MONTH\"", "inputType" : "\"INTERVAL YEAR TO MONTH\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : 
"\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", "sqlExpr" : "\"INTERVAL '2-2' YEAR TO MONTH + INTERVAL '3' DAY\"" }, @@ -2134,7 +2134,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"INTERVAL '2-2' YEAR TO MONTH\"", "inputType" : "\"INTERVAL YEAR TO MONTH\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", "sqlExpr" : "\"INTERVAL '2-2' YEAR TO MONTH + INTERVAL '3' DAY\"" }, @@ -2160,7 +2160,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"INTERVAL '2-2' YEAR TO MONTH\"", "inputType" : "\"INTERVAL YEAR TO MONTH\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", "sqlExpr" : "\"INTERVAL '2-2' YEAR TO MONTH + (- INTERVAL '3' DAY)\"" }, @@ -2210,7 +2210,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"1\"", "inputType" : "\"INT\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", "sqlExpr" : "\"1 + (- INTERVAL '02' SECOND)\"" }, @@ -2260,7 +2260,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"1\"", "inputType" : "\"INT\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", "sqlExpr" : "\"1 + INTERVAL '02' SECOND\"" }, diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/literals.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/literals.sql.out index 6e2c8a65206ef..b79a329b62abb 100644 --- a/sql/core/src/test/resources/sql-tests/results/ansi/literals.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/ansi/literals.sql.out @@ -549,7 +549,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"DATE '1999-01-01'\"", "inputType" : 
"\"DATE\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "(\"NUMERIC\" or \"INTERVAL DAY TO SECOND\" or \"INTERVAL YEAR TO MONTH\" or \"INTERVAL\")", "sqlExpr" : "\"(+ DATE '1999-01-01')\"" }, @@ -575,7 +575,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"TIMESTAMP '1999-01-01 00:00:00'\"", "inputType" : "\"TIMESTAMP\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "(\"NUMERIC\" or \"INTERVAL DAY TO SECOND\" or \"INTERVAL YEAR TO MONTH\" or \"INTERVAL\")", "sqlExpr" : "\"(+ TIMESTAMP '1999-01-01 00:00:00')\"" }, @@ -609,7 +609,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"map(1, 2)\"", "inputType" : "\"MAP\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "(\"NUMERIC\" or \"INTERVAL DAY TO SECOND\" or \"INTERVAL YEAR TO MONTH\" or \"INTERVAL\")", "sqlExpr" : "\"(+ map(1, 2))\"" }, @@ -635,7 +635,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"array(1, 2)\"", "inputType" : "\"ARRAY\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "(\"NUMERIC\" or \"INTERVAL DAY TO SECOND\" or \"INTERVAL YEAR TO MONTH\" or \"INTERVAL\")", "sqlExpr" : "\"(+ array(1, 2))\"" }, @@ -661,7 +661,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"named_struct(a, 1, b, spark)\"", "inputType" : "\"STRUCT\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "(\"NUMERIC\" or \"INTERVAL DAY TO SECOND\" or \"INTERVAL YEAR TO MONTH\" or \"INTERVAL\")", "sqlExpr" : "\"(+ named_struct(a, 1, b, spark))\"" }, @@ -687,7 +687,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"X'01'\"", "inputType" : "\"BINARY\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "(\"NUMERIC\" or \"INTERVAL DAY TO SECOND\" or \"INTERVAL YEAR TO MONTH\" or 
\"INTERVAL\")", "sqlExpr" : "\"(+ X'01')\"" }, @@ -713,7 +713,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"DATE '1999-01-01'\"", "inputType" : "\"DATE\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "(\"NUMERIC\" or \"INTERVAL DAY TO SECOND\" or \"INTERVAL YEAR TO MONTH\" or \"INTERVAL\")", "sqlExpr" : "\"(- DATE '1999-01-01')\"" }, @@ -739,7 +739,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"TIMESTAMP '1999-01-01 00:00:00'\"", "inputType" : "\"TIMESTAMP\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "(\"NUMERIC\" or \"INTERVAL DAY TO SECOND\" or \"INTERVAL YEAR TO MONTH\" or \"INTERVAL\")", "sqlExpr" : "\"(- TIMESTAMP '1999-01-01 00:00:00')\"" }, @@ -765,7 +765,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"X'2379ACFE'\"", "inputType" : "\"BINARY\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "(\"NUMERIC\" or \"INTERVAL DAY TO SECOND\" or \"INTERVAL YEAR TO MONTH\" or \"INTERVAL\")", "sqlExpr" : "\"(- X'2379ACFE')\"" }, diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/try_arithmetic.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/try_arithmetic.sql.out index 414198b19645d..adb6550e80830 100644 --- a/sql/core/src/test/resources/sql-tests/results/ansi/try_arithmetic.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/ansi/try_arithmetic.sql.out @@ -217,7 +217,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"INTERVAL '2' YEAR\"", "inputType" : "\"INTERVAL YEAR\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", "sqlExpr" : "\"INTERVAL '2' YEAR + INTERVAL '02' SECOND\"" }, diff --git a/sql/core/src/test/resources/sql-tests/results/array.sql.out 
b/sql/core/src/test/resources/sql-tests/results/array.sql.out index e568f5fa7796d..12635385bbb77 100644 --- a/sql/core/src/test/resources/sql-tests/results/array.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/array.sql.out @@ -134,7 +134,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"1\"", "inputType" : "\"STRING\"", - "paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "\"BOOLEAN\"", "sqlExpr" : "\"sort_array(array(b, d), 1)\"" }, @@ -160,7 +160,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"CAST(NULL AS BOOLEAN)\"", "inputType" : "\"BOOLEAN\"", - "paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "\"BOOLEAN\"", "sqlExpr" : "\"sort_array(array(b, d), CAST(NULL AS BOOLEAN))\"" }, @@ -337,7 +337,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"map(a, 1, b, 2)\"", "inputType" : "\"MAP\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"ARRAY\"", "sqlExpr" : "\"array_size(map(a, 1, b, 2))\"" }, @@ -592,7 +592,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"id\"", "inputType" : "\"INT\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"ARRAY\"", "sqlExpr" : "\"array_compact(id)\"" }, diff --git a/sql/core/src/test/resources/sql-tests/results/bitwise.sql.out b/sql/core/src/test/resources/sql-tests/results/bitwise.sql.out index a828a0642177f..2c8b733004aac 100644 --- a/sql/core/src/test/resources/sql-tests/results/bitwise.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/bitwise.sql.out @@ -155,7 +155,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"bit count\"", "inputType" : "\"STRING\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "(\"INTEGRAL\" or \"BOOLEAN\")", "sqlExpr" : "\"bit_count(bit count)\"" 
}, @@ -181,7 +181,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"a\"", "inputType" : "\"STRING\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "(\"INTEGRAL\" or \"BOOLEAN\")", "sqlExpr" : "\"bit_count(a)\"" }, diff --git a/sql/core/src/test/resources/sql-tests/results/date.sql.out b/sql/core/src/test/resources/sql-tests/results/date.sql.out index bae2b42912225..c46c200ff026f 100644 --- a/sql/core/src/test/resources/sql-tests/results/date.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/date.sql.out @@ -339,7 +339,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"1\"", "inputType" : "\"BIGINT\"", - "paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")", "sqlExpr" : "\"date_add(2011-11-11, 1)\"" }, @@ -365,7 +365,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"1.0\"", "inputType" : "\"DECIMAL(2,1)\"", - "paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")", "sqlExpr" : "\"date_add(2011-11-11, 1.0)\"" }, @@ -391,7 +391,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"10.0\"", "inputType" : "\"DOUBLE\"", - "paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")", "sqlExpr" : "\"date_add(2011-11-11, 10.0)\"" }, @@ -504,7 +504,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"1\"", "inputType" : "\"BIGINT\"", - "paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")", "sqlExpr" : "\"date_sub(2011-11-11, 1)\"" }, @@ -530,7 +530,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"1.0\"", "inputType" : "\"DECIMAL(2,1)\"", - "paramIndex" 
: "2", + "paramIndex" : "second", "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")", "sqlExpr" : "\"date_sub(2011-11-11, 1.0)\"" }, @@ -556,7 +556,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"10.0\"", "inputType" : "\"DOUBLE\"", - "paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")", "sqlExpr" : "\"date_sub(2011-11-11, 10.0)\"" }, @@ -637,7 +637,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"int_str\"", "inputType" : "\"STRING\"", - "paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")", "sqlExpr" : "\"date_add(2011-11-11, int_str)\"" }, @@ -663,7 +663,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"int_str\"", "inputType" : "\"STRING\"", - "paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")", "sqlExpr" : "\"date_sub(2011-11-11, int_str)\"" }, @@ -705,7 +705,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"10.0\"", "inputType" : "\"DOUBLE\"", - "paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")", "sqlExpr" : "\"date_add(DATE '2011-11-11', 10.0)\"" }, @@ -763,7 +763,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"2001-09-28\"", "inputType" : "\"DOUBLE\"", - "paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")", "sqlExpr" : "\"date_sub(DATE '2001-10-01', 2001-09-28)\"" }, @@ -821,7 +821,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"date_str\"", "inputType" : "\"DOUBLE\"", - "paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "(\"INT\" or \"SMALLINT\" or 
\"TINYINT\")", "sqlExpr" : "\"date_sub(DATE '2001-09-28', date_str)\"" }, @@ -847,7 +847,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"1\"", "inputType" : "\"DOUBLE\"", - "paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")", "sqlExpr" : "\"date_add(DATE '2011-11-11', 1)\"" }, @@ -873,7 +873,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"1\"", "inputType" : "\"DOUBLE\"", - "paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")", "sqlExpr" : "\"date_add(DATE '2011-11-11', 1)\"" }, diff --git a/sql/core/src/test/resources/sql-tests/results/datetime-legacy.sql.out b/sql/core/src/test/resources/sql-tests/results/datetime-legacy.sql.out index 8b8a09ae3f1fc..7353df600dd4f 100644 --- a/sql/core/src/test/resources/sql-tests/results/datetime-legacy.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/datetime-legacy.sql.out @@ -339,7 +339,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"1\"", "inputType" : "\"BIGINT\"", - "paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")", "sqlExpr" : "\"date_add(2011-11-11, 1)\"" }, @@ -365,7 +365,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"1.0\"", "inputType" : "\"DECIMAL(2,1)\"", - "paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")", "sqlExpr" : "\"date_add(2011-11-11, 1.0)\"" }, @@ -391,7 +391,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"10.0\"", "inputType" : "\"DOUBLE\"", - "paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")", "sqlExpr" : "\"date_add(2011-11-11, 10.0)\"" }, @@ -504,7 +504,7 @@ 
org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"1\"", "inputType" : "\"BIGINT\"", - "paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")", "sqlExpr" : "\"date_sub(2011-11-11, 1)\"" }, @@ -530,7 +530,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"1.0\"", "inputType" : "\"DECIMAL(2,1)\"", - "paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")", "sqlExpr" : "\"date_sub(2011-11-11, 1.0)\"" }, @@ -556,7 +556,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"10.0\"", "inputType" : "\"DOUBLE\"", - "paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")", "sqlExpr" : "\"date_sub(2011-11-11, 10.0)\"" }, @@ -637,7 +637,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"int_str\"", "inputType" : "\"STRING\"", - "paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")", "sqlExpr" : "\"date_add(2011-11-11, int_str)\"" }, @@ -663,7 +663,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"int_str\"", "inputType" : "\"STRING\"", - "paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")", "sqlExpr" : "\"date_sub(2011-11-11, int_str)\"" }, @@ -705,7 +705,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"10.0\"", "inputType" : "\"DOUBLE\"", - "paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")", "sqlExpr" : "\"date_add(DATE '2011-11-11', 10.0)\"" }, @@ -763,7 +763,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"2001-09-28\"", 
"inputType" : "\"DOUBLE\"", - "paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")", "sqlExpr" : "\"date_sub(DATE '2001-10-01', 2001-09-28)\"" }, @@ -821,7 +821,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"date_str\"", "inputType" : "\"DOUBLE\"", - "paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")", "sqlExpr" : "\"date_sub(DATE '2001-09-28', date_str)\"" }, @@ -847,7 +847,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"1\"", "inputType" : "\"DOUBLE\"", - "paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")", "sqlExpr" : "\"date_add(DATE '2011-11-11', 1)\"" }, @@ -873,7 +873,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"1\"", "inputType" : "\"DOUBLE\"", - "paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")", "sqlExpr" : "\"date_add(DATE '2011-11-11', 1)\"" }, @@ -1835,7 +1835,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"2011-11-11 11:11:10\"", "inputType" : "\"STRING\"", - "paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", "sqlExpr" : "\"(TIMESTAMP '2011-11-11 11:11:11' - 2011-11-11 11:11:10)\"" }, @@ -1861,7 +1861,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"2011-11-11 11:11:11\"", "inputType" : "\"STRING\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", "sqlExpr" : "\"(2011-11-11 11:11:11 - TIMESTAMP '2011-11-11 11:11:10')\"" }, @@ -1911,7 +1911,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"str\"", 
"inputType" : "\"STRING\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", "sqlExpr" : "\"(str - TIMESTAMP '2011-11-11 11:11:11')\"" }, @@ -1937,7 +1937,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"str\"", "inputType" : "\"STRING\"", - "paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", "sqlExpr" : "\"(TIMESTAMP '2011-11-11 11:11:11' - str)\"" }, diff --git a/sql/core/src/test/resources/sql-tests/results/group-by.sql.out b/sql/core/src/test/resources/sql-tests/results/group-by.sql.out index e9addb9631536..3e94c62b7c782 100644 --- a/sql/core/src/test/resources/sql-tests/results/group-by.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/group-by.sql.out @@ -557,7 +557,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"1\"", "inputType" : "\"INT\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"BOOLEAN\"", "sqlExpr" : "\"every(1)\"" }, @@ -583,7 +583,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"1\"", "inputType" : "\"SMALLINT\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"BOOLEAN\"", "sqlExpr" : "\"some(1)\"" }, @@ -609,7 +609,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"1\"", "inputType" : "\"BIGINT\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"BOOLEAN\"", "sqlExpr" : "\"any(1)\"" }, @@ -635,7 +635,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"true\"", "inputType" : "\"STRING\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"BOOLEAN\"", "sqlExpr" : "\"every(true)\"" }, @@ -661,7 +661,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { 
"inputSql" : "\"1.0\"", "inputType" : "\"DECIMAL(2,1)\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"BOOLEAN\"", "sqlExpr" : "\"bool_and(1.0)\"" }, @@ -687,7 +687,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"1.0\"", "inputType" : "\"DOUBLE\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"BOOLEAN\"", "sqlExpr" : "\"bool_or(1.0)\"" }, diff --git a/sql/core/src/test/resources/sql-tests/results/hll.sql.out b/sql/core/src/test/resources/sql-tests/results/hll.sql.out index adf6c40d1bd82..ea9dc8e8af172 100644 --- a/sql/core/src/test/resources/sql-tests/results/hll.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/hll.sql.out @@ -84,7 +84,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"col\"", "inputType" : "\"ARRAY\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "(\"INT\" or \"BIGINT\" or \"STRING\" or \"BINARY\")", "sqlExpr" : "\"hll_sketch_agg(col, 12)\"" }, @@ -201,7 +201,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"1\"", "inputType" : "\"INT\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"BINARY\"", "sqlExpr" : "\"hll_union(1, 2, false)\"" }, diff --git a/sql/core/src/test/resources/sql-tests/results/interval.sql.out b/sql/core/src/test/resources/sql-tests/results/interval.sql.out index 296681b844d6e..f66c1c34695f6 100644 --- a/sql/core/src/test/resources/sql-tests/results/interval.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/interval.sql.out @@ -1921,7 +1921,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"INTERVAL '2-2' YEAR TO MONTH\"", "inputType" : "\"INTERVAL YEAR TO MONTH\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", "sqlExpr" : "\"INTERVAL '2-2' YEAR TO MONTH 
+ INTERVAL '3' DAY\"" }, @@ -1947,7 +1947,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"INTERVAL '2-2' YEAR TO MONTH\"", "inputType" : "\"INTERVAL YEAR TO MONTH\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", "sqlExpr" : "\"INTERVAL '2-2' YEAR TO MONTH + INTERVAL '3' DAY\"" }, @@ -1973,7 +1973,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"INTERVAL '2-2' YEAR TO MONTH\"", "inputType" : "\"INTERVAL YEAR TO MONTH\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", "sqlExpr" : "\"INTERVAL '2-2' YEAR TO MONTH + (- INTERVAL '3' DAY)\"" }, @@ -2023,7 +2023,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"1\"", "inputType" : "\"INT\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", "sqlExpr" : "\"1 + (- INTERVAL '02' SECOND)\"" }, @@ -2073,7 +2073,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"1\"", "inputType" : "\"INT\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", "sqlExpr" : "\"1 + INTERVAL '02' SECOND\"" }, diff --git a/sql/core/src/test/resources/sql-tests/results/json-functions.sql.out b/sql/core/src/test/resources/sql-tests/results/json-functions.sql.out index 99e65fa1f5ff3..2232e32077e23 100644 --- a/sql/core/src/test/resources/sql-tests/results/json-functions.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/json-functions.sql.out @@ -565,7 +565,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"2\"", "inputType" : "\"INT\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"STRING\"", "sqlExpr" : 
"\"json_array_length(2)\"" }, @@ -713,7 +713,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"200\"", "inputType" : "\"INT\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"STRING\"", "sqlExpr" : "\"json_object_keys(200)\"" }, diff --git a/sql/core/src/test/resources/sql-tests/results/literals.sql.out b/sql/core/src/test/resources/sql-tests/results/literals.sql.out index 6e2c8a65206ef..b79a329b62abb 100644 --- a/sql/core/src/test/resources/sql-tests/results/literals.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/literals.sql.out @@ -549,7 +549,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"DATE '1999-01-01'\"", "inputType" : "\"DATE\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "(\"NUMERIC\" or \"INTERVAL DAY TO SECOND\" or \"INTERVAL YEAR TO MONTH\" or \"INTERVAL\")", "sqlExpr" : "\"(+ DATE '1999-01-01')\"" }, @@ -575,7 +575,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"TIMESTAMP '1999-01-01 00:00:00'\"", "inputType" : "\"TIMESTAMP\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "(\"NUMERIC\" or \"INTERVAL DAY TO SECOND\" or \"INTERVAL YEAR TO MONTH\" or \"INTERVAL\")", "sqlExpr" : "\"(+ TIMESTAMP '1999-01-01 00:00:00')\"" }, @@ -609,7 +609,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"map(1, 2)\"", "inputType" : "\"MAP\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "(\"NUMERIC\" or \"INTERVAL DAY TO SECOND\" or \"INTERVAL YEAR TO MONTH\" or \"INTERVAL\")", "sqlExpr" : "\"(+ map(1, 2))\"" }, @@ -635,7 +635,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"array(1, 2)\"", "inputType" : "\"ARRAY\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "(\"NUMERIC\" or \"INTERVAL DAY TO 
SECOND\" or \"INTERVAL YEAR TO MONTH\" or \"INTERVAL\")", "sqlExpr" : "\"(+ array(1, 2))\"" }, @@ -661,7 +661,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"named_struct(a, 1, b, spark)\"", "inputType" : "\"STRUCT\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "(\"NUMERIC\" or \"INTERVAL DAY TO SECOND\" or \"INTERVAL YEAR TO MONTH\" or \"INTERVAL\")", "sqlExpr" : "\"(+ named_struct(a, 1, b, spark))\"" }, @@ -687,7 +687,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"X'01'\"", "inputType" : "\"BINARY\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "(\"NUMERIC\" or \"INTERVAL DAY TO SECOND\" or \"INTERVAL YEAR TO MONTH\" or \"INTERVAL\")", "sqlExpr" : "\"(+ X'01')\"" }, @@ -713,7 +713,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"DATE '1999-01-01'\"", "inputType" : "\"DATE\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "(\"NUMERIC\" or \"INTERVAL DAY TO SECOND\" or \"INTERVAL YEAR TO MONTH\" or \"INTERVAL\")", "sqlExpr" : "\"(- DATE '1999-01-01')\"" }, @@ -739,7 +739,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"TIMESTAMP '1999-01-01 00:00:00'\"", "inputType" : "\"TIMESTAMP\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "(\"NUMERIC\" or \"INTERVAL DAY TO SECOND\" or \"INTERVAL YEAR TO MONTH\" or \"INTERVAL\")", "sqlExpr" : "\"(- TIMESTAMP '1999-01-01 00:00:00')\"" }, @@ -765,7 +765,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"X'2379ACFE'\"", "inputType" : "\"BINARY\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "(\"NUMERIC\" or \"INTERVAL DAY TO SECOND\" or \"INTERVAL YEAR TO MONTH\" or \"INTERVAL\")", "sqlExpr" : "\"(- X'2379ACFE')\"" }, diff --git 
a/sql/core/src/test/resources/sql-tests/results/mask-functions.sql.out b/sql/core/src/test/resources/sql-tests/results/mask-functions.sql.out index 3b5922bc565c5..fdaad7168b844 100644 --- a/sql/core/src/test/resources/sql-tests/results/mask-functions.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/mask-functions.sql.out @@ -492,7 +492,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"replaceArg\"", "inputType" : "\"INT\"", - "paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "\"STRING\"", "sqlExpr" : "\"mask(c1, replaceArg, x, n, NULL)\"" }, diff --git a/sql/core/src/test/resources/sql-tests/results/random.sql.out b/sql/core/src/test/resources/sql-tests/results/random.sql.out index 8a182a0646cac..16984de3ff257 100644 --- a/sql/core/src/test/resources/sql-tests/results/random.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/random.sql.out @@ -43,7 +43,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"1.0\"", "inputType" : "\"DECIMAL(2,1)\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "(\"INT\" or \"BIGINT\")", "sqlExpr" : "\"rand(1.0)\"" }, @@ -101,7 +101,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"1\"", "inputType" : "\"STRING\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "(\"INT\" or \"BIGINT\")", "sqlExpr" : "\"rand(1)\"" }, diff --git a/sql/core/src/test/resources/sql-tests/results/table-valued-functions.sql.out b/sql/core/src/test/resources/sql-tests/results/table-valued-functions.sql.out index 358217d3b1900..55f96ce954168 100644 --- a/sql/core/src/test/resources/sql-tests/results/table-valued-functions.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/table-valued-functions.sql.out @@ -311,7 +311,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"NULL\"", 
"inputType" : "\"VOID\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "(\"ARRAY\" or \"MAP\")", "sqlExpr" : "\"explode(NULL)\"" }, @@ -337,7 +337,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"NULL\"", "inputType" : "\"VOID\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "(\"ARRAY\" or \"MAP\")", "sqlExpr" : "\"explode(NULL)\"" }, @@ -363,7 +363,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"1\"", "inputType" : "\"INT\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "(\"ARRAY\" or \"MAP\")", "sqlExpr" : "\"explode(1)\"" }, @@ -540,7 +540,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"NULL\"", "inputType" : "\"VOID\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"ARRAY\"", "sqlExpr" : "\"inline(NULL)\"" }, @@ -659,7 +659,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"1\"", "inputType" : "\"INT\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "(\"ARRAY\" or \"MAP\")", "sqlExpr" : "\"posexplode(1)\"" }, diff --git a/sql/core/src/test/resources/sql-tests/results/timestamp.sql.out b/sql/core/src/test/resources/sql-tests/results/timestamp.sql.out index 3128c5dd359ba..0e0b014a3b161 100644 --- a/sql/core/src/test/resources/sql-tests/results/timestamp.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/timestamp.sql.out @@ -638,7 +638,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"2011-11-11 11:11:10\"", "inputType" : "\"STRING\"", - "paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", "sqlExpr" : "\"(TIMESTAMP '2011-11-11 11:11:11' - 2011-11-11 11:11:10)\"" }, @@ -664,7 +664,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException 
"messageParameters" : { "inputSql" : "\"2011-11-11 11:11:11\"", "inputType" : "\"STRING\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", "sqlExpr" : "\"(2011-11-11 11:11:11 - TIMESTAMP '2011-11-11 11:11:10')\"" }, @@ -714,7 +714,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"str\"", "inputType" : "\"STRING\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", "sqlExpr" : "\"(str - TIMESTAMP '2011-11-11 11:11:11')\"" }, @@ -740,7 +740,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"str\"", "inputType" : "\"STRING\"", - "paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", "sqlExpr" : "\"(TIMESTAMP '2011-11-11 11:11:11' - str)\"" }, diff --git a/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp.sql.out b/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp.sql.out index 24edf1a357713..102935c90f9c1 100644 --- a/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp.sql.out @@ -638,7 +638,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"2011-11-11 11:11:10\"", "inputType" : "\"STRING\"", - "paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", "sqlExpr" : "\"(TIMESTAMP_NTZ '2011-11-11 11:11:11' - 2011-11-11 11:11:10)\"" }, @@ -664,7 +664,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"2011-11-11 11:11:11\"", "inputType" : "\"STRING\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", "sqlExpr" : "\"(2011-11-11 
11:11:11 - TIMESTAMP_NTZ '2011-11-11 11:11:10')\"" }, @@ -714,7 +714,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"str\"", "inputType" : "\"STRING\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", "sqlExpr" : "\"(str - TIMESTAMP_NTZ '2011-11-11 11:11:11')\"" }, @@ -740,7 +740,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"str\"", "inputType" : "\"STRING\"", - "paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", "sqlExpr" : "\"(TIMESTAMP_NTZ '2011-11-11 11:11:11' - str)\"" }, diff --git a/sql/core/src/test/resources/sql-tests/results/try_arithmetic.sql.out b/sql/core/src/test/resources/sql-tests/results/try_arithmetic.sql.out index c706a26078926..fa83652da0edc 100644 --- a/sql/core/src/test/resources/sql-tests/results/try_arithmetic.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/try_arithmetic.sql.out @@ -171,7 +171,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"INTERVAL '2' YEAR\"", "inputType" : "\"INTERVAL YEAR\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", "sqlExpr" : "\"INTERVAL '2' YEAR + INTERVAL '02' SECOND\"" }, diff --git a/sql/core/src/test/resources/sql-tests/results/try_reflect.sql.out b/sql/core/src/test/resources/sql-tests/results/try_reflect.sql.out index 13da0edca9898..8b2c4f0ed737a 100644 --- a/sql/core/src/test/resources/sql-tests/results/try_reflect.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/try_reflect.sql.out @@ -132,7 +132,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"2.5\"", "inputType" : "\"DECIMAL(2,1)\"", - "paramIndex" : "3", + "paramIndex" : "third", "requiredType" : "(\"BOOLEAN\" or \"TINYINT\" 
or \"SMALLINT\" or \"INT\" or \"BIGINT\" or \"FLOAT\" or \"DOUBLE\" or \"STRING\")", "sqlExpr" : "\"reflect(java.lang.Math, round, 2.5)\"" }, diff --git a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/dateTimeOperations.sql.out b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/dateTimeOperations.sql.out index 72231bd169ed4..10cceabca0525 100644 --- a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/dateTimeOperations.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/dateTimeOperations.sql.out @@ -19,7 +19,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"CAST(1 AS TINYINT)\"", "inputType" : "\"TINYINT\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", "sqlExpr" : "\"CAST(1 AS TINYINT) + INTERVAL '2' DAY\"" }, @@ -45,7 +45,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"CAST(1 AS SMALLINT)\"", "inputType" : "\"SMALLINT\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", "sqlExpr" : "\"CAST(1 AS SMALLINT) + INTERVAL '2' DAY\"" }, @@ -71,7 +71,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"CAST(1 AS INT)\"", "inputType" : "\"INT\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", "sqlExpr" : "\"CAST(1 AS INT) + INTERVAL '2' DAY\"" }, @@ -97,7 +97,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"CAST(1 AS BIGINT)\"", "inputType" : "\"BIGINT\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", "sqlExpr" : "\"CAST(1 AS BIGINT) + INTERVAL '2' DAY\"" }, @@ -123,7 +123,7 @@ 
org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"CAST(1 AS FLOAT)\"", "inputType" : "\"FLOAT\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", "sqlExpr" : "\"CAST(1 AS FLOAT) + INTERVAL '2' DAY\"" }, @@ -149,7 +149,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"CAST(1 AS DOUBLE)\"", "inputType" : "\"DOUBLE\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", "sqlExpr" : "\"CAST(1 AS DOUBLE) + INTERVAL '2' DAY\"" }, @@ -175,7 +175,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"CAST(1 AS DECIMAL(10,0))\"", "inputType" : "\"DECIMAL(10,0)\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", "sqlExpr" : "\"CAST(1 AS DECIMAL(10,0)) + INTERVAL '2' DAY\"" }, @@ -217,7 +217,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"CAST(1 AS BINARY)\"", "inputType" : "\"BINARY\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", "sqlExpr" : "\"CAST(1 AS BINARY) + INTERVAL '2' DAY\"" }, @@ -243,7 +243,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"CAST(1 AS BOOLEAN)\"", "inputType" : "\"BOOLEAN\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", "sqlExpr" : "\"CAST(1 AS BOOLEAN) + INTERVAL '2' DAY\"" }, @@ -285,7 +285,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"CAST(1 AS TINYINT)\"", "inputType" : "\"TINYINT\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", "sqlExpr" : 
"\"CAST(1 AS TINYINT) + INTERVAL '2' DAY\"" }, @@ -311,7 +311,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"CAST(1 AS SMALLINT)\"", "inputType" : "\"SMALLINT\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", "sqlExpr" : "\"CAST(1 AS SMALLINT) + INTERVAL '2' DAY\"" }, @@ -337,7 +337,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"CAST(1 AS INT)\"", "inputType" : "\"INT\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", "sqlExpr" : "\"CAST(1 AS INT) + INTERVAL '2' DAY\"" }, @@ -363,7 +363,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"CAST(1 AS BIGINT)\"", "inputType" : "\"BIGINT\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", "sqlExpr" : "\"CAST(1 AS BIGINT) + INTERVAL '2' DAY\"" }, @@ -389,7 +389,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"CAST(1 AS FLOAT)\"", "inputType" : "\"FLOAT\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", "sqlExpr" : "\"CAST(1 AS FLOAT) + INTERVAL '2' DAY\"" }, @@ -415,7 +415,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"CAST(1 AS DOUBLE)\"", "inputType" : "\"DOUBLE\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", "sqlExpr" : "\"CAST(1 AS DOUBLE) + INTERVAL '2' DAY\"" }, @@ -441,7 +441,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"CAST(1 AS DECIMAL(10,0))\"", "inputType" : "\"DECIMAL(10,0)\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"(TIMESTAMP OR 
TIMESTAMP WITHOUT TIME ZONE)\"", "sqlExpr" : "\"CAST(1 AS DECIMAL(10,0)) + INTERVAL '2' DAY\"" }, @@ -483,7 +483,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"CAST(1 AS BINARY)\"", "inputType" : "\"BINARY\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", "sqlExpr" : "\"CAST(1 AS BINARY) + INTERVAL '2' DAY\"" }, @@ -509,7 +509,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"CAST(1 AS BOOLEAN)\"", "inputType" : "\"BOOLEAN\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", "sqlExpr" : "\"CAST(1 AS BOOLEAN) + INTERVAL '2' DAY\"" }, @@ -551,7 +551,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"CAST(1 AS TINYINT)\"", "inputType" : "\"TINYINT\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", "sqlExpr" : "\"CAST(1 AS TINYINT) + (- INTERVAL '2' DAY)\"" }, @@ -577,7 +577,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"CAST(1 AS SMALLINT)\"", "inputType" : "\"SMALLINT\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", "sqlExpr" : "\"CAST(1 AS SMALLINT) + (- INTERVAL '2' DAY)\"" }, @@ -603,7 +603,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"CAST(1 AS INT)\"", "inputType" : "\"INT\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", "sqlExpr" : "\"CAST(1 AS INT) + (- INTERVAL '2' DAY)\"" }, @@ -629,7 +629,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"CAST(1 AS BIGINT)\"", "inputType" : "\"BIGINT\"", - "paramIndex" : "1", + 
"paramIndex" : "first", "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", "sqlExpr" : "\"CAST(1 AS BIGINT) + (- INTERVAL '2' DAY)\"" }, @@ -655,7 +655,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"CAST(1 AS FLOAT)\"", "inputType" : "\"FLOAT\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", "sqlExpr" : "\"CAST(1 AS FLOAT) + (- INTERVAL '2' DAY)\"" }, @@ -681,7 +681,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"CAST(1 AS DOUBLE)\"", "inputType" : "\"DOUBLE\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", "sqlExpr" : "\"CAST(1 AS DOUBLE) + (- INTERVAL '2' DAY)\"" }, @@ -707,7 +707,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"CAST(1 AS DECIMAL(10,0))\"", "inputType" : "\"DECIMAL(10,0)\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", "sqlExpr" : "\"CAST(1 AS DECIMAL(10,0)) + (- INTERVAL '2' DAY)\"" }, @@ -749,7 +749,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"CAST(1 AS BINARY)\"", "inputType" : "\"BINARY\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", "sqlExpr" : "\"CAST(1 AS BINARY) + (- INTERVAL '2' DAY)\"" }, @@ -775,7 +775,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"CAST(1 AS BOOLEAN)\"", "inputType" : "\"BOOLEAN\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", "sqlExpr" : "\"CAST(1 AS BOOLEAN) + (- INTERVAL '2' DAY)\"" }, diff --git a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/decimalPrecision.sql.out 
b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/decimalPrecision.sql.out index 834bb6b296eb9..b23e57c470fe4 100644 --- a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/decimalPrecision.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/decimalPrecision.sql.out @@ -435,7 +435,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"CAST(1 AS DECIMAL(3,0))\"", "inputType" : "\"DECIMAL(3,0)\"", - "paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")", "sqlExpr" : "\"date_add(CAST(2017-12-11 09:30:00 AS DATE), CAST(1 AS DECIMAL(3,0)))\"" }, @@ -461,7 +461,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"CAST(1 AS DECIMAL(5,0))\"", "inputType" : "\"DECIMAL(5,0)\"", - "paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")", "sqlExpr" : "\"date_add(CAST(2017-12-11 09:30:00 AS DATE), CAST(1 AS DECIMAL(5,0)))\"" }, @@ -487,7 +487,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"CAST(1 AS DECIMAL(10,0))\"", "inputType" : "\"DECIMAL(10,0)\"", - "paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")", "sqlExpr" : "\"date_add(CAST(2017-12-11 09:30:00 AS DATE), CAST(1 AS DECIMAL(10,0)))\"" }, @@ -513,7 +513,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"CAST(1 AS DECIMAL(20,0))\"", "inputType" : "\"DECIMAL(20,0)\"", - "paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")", "sqlExpr" : "\"date_add(CAST(2017-12-11 09:30:00 AS DATE), CAST(1 AS DECIMAL(20,0)))\"" }, @@ -1083,7 +1083,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"CAST(1 AS DECIMAL(3,0))\"", 
"inputType" : "\"DECIMAL(3,0)\"", - "paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")", "sqlExpr" : "\"date_add(CAST(2017-12-11 09:30:00 AS DATE), CAST(1 AS DECIMAL(3,0)))\"" }, @@ -1109,7 +1109,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"CAST(1 AS DECIMAL(5,0))\"", "inputType" : "\"DECIMAL(5,0)\"", - "paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")", "sqlExpr" : "\"date_add(CAST(2017-12-11 09:30:00 AS DATE), CAST(1 AS DECIMAL(5,0)))\"" }, @@ -1135,7 +1135,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"CAST(1 AS DECIMAL(10,0))\"", "inputType" : "\"DECIMAL(10,0)\"", - "paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")", "sqlExpr" : "\"date_add(CAST(2017-12-11 09:30:00 AS DATE), CAST(1 AS DECIMAL(10,0)))\"" }, @@ -1161,7 +1161,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"CAST(1 AS DECIMAL(20,0))\"", "inputType" : "\"DECIMAL(20,0)\"", - "paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")", "sqlExpr" : "\"date_add(CAST(2017-12-11 09:30:00 AS DATE), CAST(1 AS DECIMAL(20,0)))\"" }, @@ -1507,7 +1507,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"CAST(1 AS DECIMAL(3,0))\"", "inputType" : "\"DECIMAL(3,0)\"", - "paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", "sqlExpr" : "\"(CAST(2017-12-11 09:30:00.0 AS TIMESTAMP) - CAST(1 AS DECIMAL(3,0)))\"" }, @@ -1533,7 +1533,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"CAST(1 AS DECIMAL(5,0))\"", "inputType" : "\"DECIMAL(5,0)\"", - "paramIndex" : "2", + "paramIndex" : "second", 
"requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", "sqlExpr" : "\"(CAST(2017-12-11 09:30:00.0 AS TIMESTAMP) - CAST(1 AS DECIMAL(5,0)))\"" }, @@ -1559,7 +1559,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"CAST(1 AS DECIMAL(10,0))\"", "inputType" : "\"DECIMAL(10,0)\"", - "paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", "sqlExpr" : "\"(CAST(2017-12-11 09:30:00.0 AS TIMESTAMP) - CAST(1 AS DECIMAL(10,0)))\"" }, @@ -1585,7 +1585,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"CAST(1 AS DECIMAL(20,0))\"", "inputType" : "\"DECIMAL(20,0)\"", - "paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", "sqlExpr" : "\"(CAST(2017-12-11 09:30:00.0 AS TIMESTAMP) - CAST(1 AS DECIMAL(20,0)))\"" }, @@ -1611,7 +1611,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"CAST(1 AS DECIMAL(3,0))\"", "inputType" : "\"DECIMAL(3,0)\"", - "paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")", "sqlExpr" : "\"date_sub(CAST(2017-12-11 09:30:00 AS DATE), CAST(1 AS DECIMAL(3,0)))\"" }, @@ -1637,7 +1637,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"CAST(1 AS DECIMAL(5,0))\"", "inputType" : "\"DECIMAL(5,0)\"", - "paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")", "sqlExpr" : "\"date_sub(CAST(2017-12-11 09:30:00 AS DATE), CAST(1 AS DECIMAL(5,0)))\"" }, @@ -1663,7 +1663,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"CAST(1 AS DECIMAL(10,0))\"", "inputType" : "\"DECIMAL(10,0)\"", - "paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")", "sqlExpr" : 
"\"date_sub(CAST(2017-12-11 09:30:00 AS DATE), CAST(1 AS DECIMAL(10,0)))\"" }, @@ -1689,7 +1689,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"CAST(1 AS DECIMAL(20,0))\"", "inputType" : "\"DECIMAL(20,0)\"", - "paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")", "sqlExpr" : "\"date_sub(CAST(2017-12-11 09:30:00 AS DATE), CAST(1 AS DECIMAL(20,0)))\"" }, @@ -2163,7 +2163,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"CAST(1 AS DECIMAL(3,0))\"", "inputType" : "\"DECIMAL(3,0)\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", "sqlExpr" : "\"(CAST(1 AS DECIMAL(3,0)) - CAST(2017-12-11 09:30:00.0 AS TIMESTAMP))\"" }, @@ -2189,7 +2189,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"CAST(1 AS DECIMAL(5,0))\"", "inputType" : "\"DECIMAL(5,0)\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", "sqlExpr" : "\"(CAST(1 AS DECIMAL(5,0)) - CAST(2017-12-11 09:30:00.0 AS TIMESTAMP))\"" }, @@ -2215,7 +2215,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"CAST(1 AS DECIMAL(10,0))\"", "inputType" : "\"DECIMAL(10,0)\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", "sqlExpr" : "\"(CAST(1 AS DECIMAL(10,0)) - CAST(2017-12-11 09:30:00.0 AS TIMESTAMP))\"" }, @@ -2241,7 +2241,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"CAST(1 AS DECIMAL(20,0))\"", "inputType" : "\"DECIMAL(20,0)\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", "sqlExpr" : "\"(CAST(1 AS DECIMAL(20,0)) - CAST(2017-12-11 09:30:00.0 AS 
TIMESTAMP))\"" }, @@ -2267,7 +2267,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"CAST(1 AS DECIMAL(3,0))\"", "inputType" : "\"DECIMAL(3,0)\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"DATE\"", "sqlExpr" : "\"(CAST(1 AS DECIMAL(3,0)) - CAST(2017-12-11 09:30:00 AS DATE))\"" }, @@ -2293,7 +2293,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"CAST(1 AS DECIMAL(5,0))\"", "inputType" : "\"DECIMAL(5,0)\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"DATE\"", "sqlExpr" : "\"(CAST(1 AS DECIMAL(5,0)) - CAST(2017-12-11 09:30:00 AS DATE))\"" }, @@ -2319,7 +2319,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"CAST(1 AS DECIMAL(10,0))\"", "inputType" : "\"DECIMAL(10,0)\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"DATE\"", "sqlExpr" : "\"(CAST(1 AS DECIMAL(10,0)) - CAST(2017-12-11 09:30:00 AS DATE))\"" }, @@ -2345,7 +2345,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"CAST(1 AS DECIMAL(20,0))\"", "inputType" : "\"DECIMAL(20,0)\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"DATE\"", "sqlExpr" : "\"(CAST(1 AS DECIMAL(20,0)) - CAST(2017-12-11 09:30:00 AS DATE))\"" }, diff --git a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/promoteStrings.sql.out b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/promoteStrings.sql.out index 8b87d9228bcad..5005d682e1927 100644 --- a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/promoteStrings.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/promoteStrings.sql.out @@ -155,7 +155,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"1\"", "inputType" : "\"DOUBLE\"", - "paramIndex" : "2", + "paramIndex" : 
"second", "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")", "sqlExpr" : "\"date_add(CAST(2017-12-11 09:30:00 AS DATE), 1)\"" }, @@ -293,7 +293,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"1\"", "inputType" : "\"STRING\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", "sqlExpr" : "\"(1 - CAST(2017-12-11 09:30:00.0 AS TIMESTAMP))\"" }, @@ -1095,7 +1095,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"1\"", "inputType" : "\"DOUBLE\"", - "paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")", "sqlExpr" : "\"date_add(CAST(2017-12-11 09:30:00 AS DATE), 1)\"" }, @@ -1225,7 +1225,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"1\"", "inputType" : "\"STRING\"", - "paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "\"(TIMESTAMP OR TIMESTAMP WITHOUT TIME ZONE)\"", "sqlExpr" : "\"(CAST(2017-12-11 09:30:00.0 AS TIMESTAMP) - 1)\"" }, @@ -1251,7 +1251,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"1\"", "inputType" : "\"DOUBLE\"", - "paramIndex" : "2", + "paramIndex" : "second", "requiredType" : "(\"INT\" or \"SMALLINT\" or \"TINYINT\")", "sqlExpr" : "\"date_sub(CAST(2017-12-11 09:30:00 AS DATE), 1)\"" }, diff --git a/sql/core/src/test/resources/sql-tests/results/udf/udf-group-by.sql.out b/sql/core/src/test/resources/sql-tests/results/udf/udf-group-by.sql.out index e1737de2b6700..ce70c91d8d06e 100644 --- a/sql/core/src/test/resources/sql-tests/results/udf/udf-group-by.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/udf/udf-group-by.sql.out @@ -451,7 +451,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"udf(1)\"", "inputType" : "\"INT\"", - "paramIndex" : "1", + 
"paramIndex" : "first", "requiredType" : "\"BOOLEAN\"", "sqlExpr" : "\"every(udf(1))\"" }, @@ -477,7 +477,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"udf(1)\"", "inputType" : "\"SMALLINT\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"BOOLEAN\"", "sqlExpr" : "\"some(udf(1))\"" }, @@ -503,7 +503,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"udf(1)\"", "inputType" : "\"BIGINT\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"BOOLEAN\"", "sqlExpr" : "\"any(udf(1))\"" }, @@ -529,7 +529,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"true\"", "inputType" : "\"STRING\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"BOOLEAN\"", "sqlExpr" : "\"every(true)\"" }, diff --git a/sql/core/src/test/resources/sql-tests/results/xml-functions.sql.out b/sql/core/src/test/resources/sql-tests/results/xml-functions.sql.out index 0194a676ad3b4..b03b7d2e70c7c 100644 --- a/sql/core/src/test/resources/sql-tests/results/xml-functions.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/xml-functions.sql.out @@ -27,7 +27,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"array(named_struct(a, 1, b, 2))\"", "inputType" : "\"ARRAY>\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"STRUCT\"", "sqlExpr" : "\"to_xml(array(named_struct(a, 1, b, 2)))\"" }, @@ -53,7 +53,7 @@ org.apache.spark.sql.catalyst.ExtendedAnalysisException "messageParameters" : { "inputSql" : "\"map(a, 1)\"", "inputType" : "\"MAP\"", - "paramIndex" : "1", + "paramIndex" : "first", "requiredType" : "\"STRUCT\"", "sqlExpr" : "\"to_xml(map(a, 1))\"" }, diff --git a/sql/core/src/test/scala/org/apache/spark/sql/BitmapExpressionsQuerySuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/BitmapExpressionsQuerySuite.scala 
index 74c744635ecc7..0778599d54f49 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/BitmapExpressionsQuerySuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/BitmapExpressionsQuerySuite.scala @@ -217,7 +217,7 @@ class BitmapExpressionsQuerySuite extends QueryTest with SharedSparkSession { errorClass = "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE", parameters = Map( "sqlExpr" -> "\"bitmap_count(a)\"", - "paramIndex" -> "0", + "paramIndex" -> "first", "requiredType" -> "\"BINARY\"", "inputSql" -> "\"a\"", "inputType" -> "\"INT\"" @@ -239,7 +239,7 @@ class BitmapExpressionsQuerySuite extends QueryTest with SharedSparkSession { errorClass = "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE", parameters = Map( "sqlExpr" -> "\"bitmap_or_agg(a)\"", - "paramIndex" -> "0", + "paramIndex" -> "first", "requiredType" -> "\"BINARY\"", "inputSql" -> "\"a\"", "inputType" -> "\"INT\"" diff --git a/sql/core/src/test/scala/org/apache/spark/sql/CollationSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/CollationSuite.scala index 13888272cad33..6daf89173f405 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/CollationSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/CollationSuite.scala @@ -107,7 +107,7 @@ class CollationSuite extends QueryTest with SharedSparkSession { sqlState = "42K09", parameters = Map( "sqlExpr" -> "\"collate(1)\"", - "paramIndex" -> "1", + "paramIndex" -> "first", "inputSql" -> "\"1\"", "inputType" -> "\"INT\"", "requiredType" -> "\"STRING\""), diff --git a/sql/core/src/test/scala/org/apache/spark/sql/ColumnExpressionSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/ColumnExpressionSuite.scala index 733a2287ed803..936bcc21b763d 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/ColumnExpressionSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/ColumnExpressionSuite.scala @@ -1058,7 +1058,7 @@ class ColumnExpressionSuite extends QueryTest with SharedSparkSession { errorClass = 
"DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE", parameters = Map( "sqlExpr" -> "\"update_fields(key, WithField(2))\"", - "paramIndex" -> "1", + "paramIndex" -> "first", "inputSql" -> "\"key\"", "inputType" -> "\"INT\"", "requiredType" -> "\"STRUCT\""), @@ -1106,7 +1106,7 @@ class ColumnExpressionSuite extends QueryTest with SharedSparkSession { errorClass = "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE", parameters = Map( "sqlExpr" -> "\"update_fields(a.b, WithField(2))\"", - "paramIndex" -> "1", + "paramIndex" -> "first", "inputSql" -> "\"a.b\"", "inputType" -> "\"INT\"", "requiredType" -> "\"STRUCT\""), @@ -1857,7 +1857,7 @@ class ColumnExpressionSuite extends QueryTest with SharedSparkSession { errorClass = "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE", parameters = Map( "sqlExpr" -> "\"update_fields(key, dropfield())\"", - "paramIndex" -> "1", + "paramIndex" -> "first", "inputSql" -> "\"key\"", "inputType" -> "\"INT\"", "requiredType" -> "\"STRUCT\""), @@ -1897,7 +1897,7 @@ class ColumnExpressionSuite extends QueryTest with SharedSparkSession { errorClass = "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE", parameters = Map( "sqlExpr" -> "\"update_fields(a.b, dropfield())\"", - "paramIndex" -> "1", + "paramIndex" -> "first", "inputSql" -> "\"a.b\"", "inputType" -> "\"INT\"", "requiredType" -> "\"STRUCT\""), diff --git a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameAggregateSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameAggregateSuite.scala index 5c7cf874bd793..15dffb3696dd1 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameAggregateSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameAggregateSuite.scala @@ -1156,7 +1156,7 @@ class DataFrameAggregateSuite extends QueryTest sqlState = None, parameters = Map( "sqlExpr" -> "\"count_if(x)\"", - "paramIndex" -> "1", + "paramIndex" -> "first", "inputSql" -> "\"x\"", "inputType" -> "\"STRING\"", "requiredType" -> "\"BOOLEAN\""), @@ -1319,7 +1319,7 @@ class 
DataFrameAggregateSuite extends QueryTest errorClass = "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE", parameters = Map( "sqlExpr" -> "\"col[a]\"", - "paramIndex" -> "2", + "paramIndex" -> "second", "inputSql" -> "\"a\"", "inputType" -> "\"STRING\"", "requiredType" -> "\"INTEGRAL\""), @@ -1970,7 +1970,7 @@ class DataFrameAggregateSuite extends QueryTest errorClass = "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE", parameters = Map( "sqlExpr" -> "\"hll_sketch_agg(value, text)\"", - "paramIndex" -> "2", + "paramIndex" -> "second", "inputSql" -> "\"text\"", "inputType" -> "\"STRING\"", "requiredType" -> "\"INT\"" @@ -1999,7 +1999,7 @@ class DataFrameAggregateSuite extends QueryTest errorClass = "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE", parameters = Map( "sqlExpr" -> "\"hll_union_agg(sketch, Hll_4)\"", - "paramIndex" -> "2", + "paramIndex" -> "second", "inputSql" -> "\"Hll_4\"", "inputType" -> "\"STRING\"", "requiredType" -> "\"BOOLEAN\"" diff --git a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameFunctionsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameFunctionsSuite.scala index ed75cb48a4bf0..e42f397cbfc29 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameFunctionsSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameFunctionsSuite.scala @@ -167,7 +167,7 @@ class DataFrameFunctionsSuite extends QueryTest with SharedSparkSession { errorClass = "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE", parameters = Map( "sqlExpr" -> "\"map_from_arrays(k, v)\"", - "paramIndex" -> "1", + "paramIndex" -> "first", "requiredType" -> "\"ARRAY\"", "inputSql" -> "\"k\"", "inputType" -> "\"INT\"" @@ -767,7 +767,7 @@ class DataFrameFunctionsSuite extends QueryTest with SharedSparkSession { errorClass = "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE", parameters = Map( "sqlExpr" -> """"array_sort\(a, lambdafunction\(`-`\(x_\d+, y_\d+\), x_\d+, y_\d+\)\)"""", - "paramIndex" -> "1", + "paramIndex" -> "first", "requiredType" -> "\"ARRAY\"", "inputSql" 
-> "\"a\"", "inputType" -> "\"INT\"" @@ -834,7 +834,7 @@ class DataFrameFunctionsSuite extends QueryTest with SharedSparkSession { errorClass = "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE", parameters = Map( "sqlExpr" -> "\"sort_array(a, true)\"", - "paramIndex" -> "1", + "paramIndex" -> "first", "requiredType" -> "\"ARRAY\"", "inputSql" -> "\"a\"", "inputType" -> "\"STRING\"" @@ -871,7 +871,7 @@ class DataFrameFunctionsSuite extends QueryTest with SharedSparkSession { sqlState = None, parameters = Map( "sqlExpr" -> "\"array_sort(a, lambdafunction((IF(((left IS NULL) AND (right IS NULL)), 0, (IF((left IS NULL), 1, (IF((right IS NULL), -1, (IF((left < right), -1, (IF((left > right), 1, 0)))))))))), left, right))\"", - "paramIndex" -> "1", + "paramIndex" -> "first", "inputSql" -> "\"a\"", "inputType" -> "\"STRING\"", "requiredType" -> "\"ARRAY\""), @@ -1414,7 +1414,7 @@ class DataFrameFunctionsSuite extends QueryTest with SharedSparkSession { errorClass = "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE", parameters = Map( "sqlExpr" -> "\"map_from_entries(a)\"", - "paramIndex" -> "1", + "paramIndex" -> "first", "inputSql" -> "\"a\"", "inputType" -> "\"INT\"", "requiredType" -> "\"ARRAY\" of pair \"STRUCT\"" @@ -1555,7 +1555,7 @@ class DataFrameFunctionsSuite extends QueryTest with SharedSparkSession { errorClass = "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE", parameters = Map( "sqlExpr" -> "\"array_contains(a string, foo)\"", - "paramIndex" -> "1", + "paramIndex" -> "first", "requiredType" -> "\"ARRAY\"", "inputSql" -> "\"a string\"", "inputType" -> "\"STRING\"" @@ -1710,7 +1710,7 @@ class DataFrameFunctionsSuite extends QueryTest with SharedSparkSession { errorClass = "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE", parameters = Map( "sqlExpr" -> "\"array_join(x, 1)\"", - "paramIndex" -> "2", + "paramIndex" -> "second", "inputSql" -> "\"1\"", "inputType" -> "\"INT\"", "requiredType" -> "\"STRING\"" @@ -1724,7 +1724,7 @@ class DataFrameFunctionsSuite extends QueryTest with 
SharedSparkSession { errorClass = "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE", parameters = Map( "sqlExpr" -> "\"array_join(x, , , 1)\"", - "paramIndex" -> "3", + "paramIndex" -> "third", "inputSql" -> "\"1\"", "inputType" -> "\"INT\"", "requiredType" -> "\"STRING\"" @@ -1984,7 +1984,7 @@ class DataFrameFunctionsSuite extends QueryTest with SharedSparkSession { errorClass = "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE", parameters = Map( "sqlExpr" -> "\"reverse(struct(1, a))\"", - "paramIndex" -> "1", + "paramIndex" -> "first", "inputSql" -> "\"struct(1, a)\"", "inputType" -> "\"STRUCT\"", "requiredType" -> "(\"STRING\" or \"ARRAY\")" @@ -1999,7 +1999,7 @@ class DataFrameFunctionsSuite extends QueryTest with SharedSparkSession { errorClass = "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE", parameters = Map( "sqlExpr" -> "\"reverse(map(1, a))\"", - "paramIndex" -> "1", + "paramIndex" -> "first", "inputSql" -> "\"map(1, a)\"", "inputType" -> "\"MAP\"", "requiredType" -> "(\"STRING\" or \"ARRAY\")" @@ -2109,7 +2109,7 @@ class DataFrameFunctionsSuite extends QueryTest with SharedSparkSession { errorClass = "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE", parameters = Map( "sqlExpr" -> "\"array_position(_1, _2)\"", - "paramIndex" -> "1", + "paramIndex" -> "first", "requiredType" -> "\"ARRAY\"", "inputSql" -> "\"_1\"", "inputType" -> "\"STRING\"" @@ -2197,7 +2197,7 @@ class DataFrameFunctionsSuite extends QueryTest with SharedSparkSession { errorClass = "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE", parameters = Map( "sqlExpr" -> "\"element_at(_1, _2)\"", - "paramIndex" -> "1", + "paramIndex" -> "first", "inputSql" -> "\"_1\"", "inputType" -> "\"STRING\"", "requiredType" -> "(\"ARRAY\" or \"MAP\")" @@ -2227,7 +2227,7 @@ class DataFrameFunctionsSuite extends QueryTest with SharedSparkSession { errorClass = "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE", parameters = Map( "sqlExpr" -> "\"element_at(array(a, b), 1)\"", - "paramIndex" -> "2", + "paramIndex" -> "second", "inputSql" -> "\"1\"", 
"inputType" -> "\"BIGINT\"", "requiredType" -> "\"INT\"" @@ -2551,7 +2551,7 @@ class DataFrameFunctionsSuite extends QueryTest with SharedSparkSession { errorClass = "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE", parameters = Map( "sqlExpr" -> "\"concat(map(1, 2), map(3, 4))\"", - "paramIndex" -> "1", + "paramIndex" -> "first", "requiredType" -> "(\"STRING\" or \"BINARY\" or \"ARRAY\")", "inputSql" -> "\"map(1, 2)\"", "inputType" -> "\"MAP\"" @@ -2662,7 +2662,7 @@ class DataFrameFunctionsSuite extends QueryTest with SharedSparkSession { errorClass = "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE", parameters = Map( "sqlExpr" -> "\"flatten(arr)\"", - "paramIndex" -> "1", + "paramIndex" -> "first", "inputSql" -> "\"arr\"", "inputType" -> "\"ARRAY\"", "requiredType" -> "\"ARRAY\" of \"ARRAY\"" @@ -2677,7 +2677,7 @@ class DataFrameFunctionsSuite extends QueryTest with SharedSparkSession { errorClass = "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE", parameters = Map( "sqlExpr" -> "\"flatten(i)\"", - "paramIndex" -> "1", + "paramIndex" -> "first", "inputSql" -> "\"i\"", "inputType" -> "\"INT\"", "requiredType" -> "\"ARRAY\" of \"ARRAY\"" @@ -2692,7 +2692,7 @@ class DataFrameFunctionsSuite extends QueryTest with SharedSparkSession { errorClass = "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE", parameters = Map( "sqlExpr" -> "\"flatten(s)\"", - "paramIndex" -> "1", + "paramIndex" -> "first", "inputSql" -> "\"s\"", "inputType" -> "\"STRING\"", "requiredType" -> "\"ARRAY\" of \"ARRAY\"" @@ -2707,7 +2707,7 @@ class DataFrameFunctionsSuite extends QueryTest with SharedSparkSession { errorClass = "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE", parameters = Map( "sqlExpr" -> "\"flatten(NULL)\"", - "paramIndex" -> "1", + "paramIndex" -> "first", "inputSql" -> "\"NULL\"", "inputType" -> "\"VOID\"", "requiredType" -> "\"ARRAY\" of \"ARRAY\"" @@ -2803,7 +2803,7 @@ class DataFrameFunctionsSuite extends QueryTest with SharedSparkSession { errorClass = "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE", parameters = 
Map( "sqlExpr" -> "\"array_repeat(a, b)\"", - "paramIndex" -> "2", + "paramIndex" -> "second", "inputSql" -> "\"b\"", "inputType" -> "\"STRING\"", "requiredType" -> "\"INT\"" @@ -2818,7 +2818,7 @@ class DataFrameFunctionsSuite extends QueryTest with SharedSparkSession { errorClass = "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE", parameters = Map( "sqlExpr" -> "\"array_repeat(a, 1)\"", - "paramIndex" -> "2", + "paramIndex" -> "second", "inputSql" -> "\"1\"", "inputType" -> "\"STRING\"", "requiredType" -> "\"INT\"" @@ -2833,7 +2833,7 @@ class DataFrameFunctionsSuite extends QueryTest with SharedSparkSession { errorClass = "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE", parameters = Map( "sqlExpr" -> "\"array_repeat(a, 1.0)\"", - "paramIndex" -> "2", + "paramIndex" -> "second", "inputSql" -> "\"1.0\"", "inputType" -> "\"DECIMAL(2,1)\"", "requiredType" -> "\"INT\"" @@ -2883,7 +2883,7 @@ class DataFrameFunctionsSuite extends QueryTest with SharedSparkSession { }, errorClass = "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE", parameters = Map( - "paramIndex" -> "0", + "paramIndex" -> "first", "sqlExpr" -> "\"array_prepend(_1, _2)\"", "inputSql" -> "\"_1\"", "inputType" -> "\"STRING\"", @@ -3675,7 +3675,7 @@ class DataFrameFunctionsSuite extends QueryTest with SharedSparkSession { sqlState = None, parameters = Map( "sqlExpr" -> "\"transform(i, lambdafunction(x, x))\"", - "paramIndex" -> "1", + "paramIndex" -> "first", "inputSql" -> "\"i\"", "inputType" -> "\"INT\"", "requiredType" -> "\"ARRAY\""), @@ -3773,7 +3773,7 @@ class DataFrameFunctionsSuite extends QueryTest with SharedSparkSession { sqlState = None, parameters = Map( "sqlExpr" -> "\"map_filter(i, lambdafunction((k > v), k, v))\"", - "paramIndex" -> "1", + "paramIndex" -> "first", "inputSql" -> "\"i\"", "inputType" -> "\"INT\"", "requiredType" -> "\"MAP\""), @@ -3790,7 +3790,7 @@ class DataFrameFunctionsSuite extends QueryTest with SharedSparkSession { matchPVals = true, parameters = Map( "sqlExpr" -> """"map_filter\(i, 
lambdafunction\(`>`\(x_\d+, y_\d+\), x_\d+, y_\d+\)\)"""", - "paramIndex" -> "1", + "paramIndex" -> "first", "inputSql" -> "\"i\"", "inputType" -> "\"INT\"", "requiredType" -> "\"MAP\""), @@ -3958,7 +3958,7 @@ class DataFrameFunctionsSuite extends QueryTest with SharedSparkSession { sqlState = None, parameters = Map( "sqlExpr" -> "\"filter(i, lambdafunction(x, x))\"", - "paramIndex" -> "1", + "paramIndex" -> "first", "inputSql" -> "\"i\"", "inputType" -> "\"INT\"", "requiredType" -> "\"ARRAY\""), @@ -3975,7 +3975,7 @@ class DataFrameFunctionsSuite extends QueryTest with SharedSparkSession { matchPVals = true, parameters = Map( "sqlExpr" -> """"filter\(i, lambdafunction\(x_\d+, x_\d+\)\)"""", - "paramIndex" -> "1", + "paramIndex" -> "first", "inputSql" -> "\"i\"", "inputType" -> "\"INT\"", "requiredType" -> "\"ARRAY\""), @@ -3989,7 +3989,7 @@ class DataFrameFunctionsSuite extends QueryTest with SharedSparkSession { errorClass = "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE", parameters = Map( "sqlExpr" -> "\"filter(s, lambdafunction(namedlambdavariable(), namedlambdavariable()))\"", - "paramIndex" -> "2", + "paramIndex" -> "second", "inputSql" -> "\"lambdafunction(namedlambdavariable(), namedlambdavariable())\"", "inputType" -> "\"STRING\"", "requiredType" -> "\"BOOLEAN\""), @@ -4005,7 +4005,7 @@ class DataFrameFunctionsSuite extends QueryTest with SharedSparkSession { errorClass = "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE", parameters = Map( "sqlExpr" -> "\"filter(s, lambdafunction(namedlambdavariable(), namedlambdavariable()))\"", - "paramIndex" -> "2", + "paramIndex" -> "second", "inputSql" -> "\"lambdafunction(namedlambdavariable(), namedlambdavariable())\"", "inputType" -> "\"STRING\"", "requiredType" -> "\"BOOLEAN\""), @@ -4146,7 +4146,7 @@ class DataFrameFunctionsSuite extends QueryTest with SharedSparkSession { sqlState = None, parameters = Map( "sqlExpr" -> "\"exists(i, lambdafunction(x, x))\"", - "paramIndex" -> "1", + "paramIndex" -> "first", "inputSql" -> 
"\"i\"", "inputType" -> "\"INT\"", "requiredType" -> "\"ARRAY\""), @@ -4163,7 +4163,7 @@ class DataFrameFunctionsSuite extends QueryTest with SharedSparkSession { matchPVals = true, parameters = Map( "sqlExpr" -> """"exists\(i, lambdafunction\(x_\d+, x_\d+\)\)"""", - "paramIndex" -> "1", + "paramIndex" -> "first", "inputSql" -> "\"i\"", "inputType" -> "\"INT\"", "requiredType" -> "\"ARRAY\""), @@ -4177,7 +4177,7 @@ class DataFrameFunctionsSuite extends QueryTest with SharedSparkSession { errorClass = "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE", parameters = Map( "sqlExpr" -> "\"exists(s, lambdafunction(namedlambdavariable(), namedlambdavariable()))\"", - "paramIndex" -> "2", + "paramIndex" -> "second", "inputSql" -> "\"lambdafunction(namedlambdavariable(), namedlambdavariable())\"", "inputType" -> "\"STRING\"", "requiredType" -> "\"BOOLEAN\""), @@ -4194,7 +4194,7 @@ class DataFrameFunctionsSuite extends QueryTest with SharedSparkSession { errorClass = "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE", parameters = Map( "sqlExpr" -> "\"exists(s, lambdafunction(namedlambdavariable(), namedlambdavariable()))\"", - "paramIndex" -> "2", + "paramIndex" -> "second", "inputSql" -> "\"lambdafunction(namedlambdavariable(), namedlambdavariable())\"", "inputType" -> "\"STRING\"", "requiredType" -> "\"BOOLEAN\""), @@ -4347,7 +4347,7 @@ class DataFrameFunctionsSuite extends QueryTest with SharedSparkSession { sqlState = None, parameters = Map( "sqlExpr" -> "\"forall(i, lambdafunction(x, x))\"", - "paramIndex" -> "1", + "paramIndex" -> "first", "inputSql" -> "\"i\"", "inputType" -> "\"INT\"", "requiredType" -> "\"ARRAY\""), @@ -4364,7 +4364,7 @@ class DataFrameFunctionsSuite extends QueryTest with SharedSparkSession { matchPVals = true, parameters = Map( "sqlExpr" -> """"forall\(i, lambdafunction\(x_\d+, x_\d+\)\)"""", - "paramIndex" -> "1", + "paramIndex" -> "first", "inputSql" -> "\"i\"", "inputType" -> "\"INT\"", "requiredType" -> "\"ARRAY\""), @@ -4378,7 +4378,7 @@ class 
DataFrameFunctionsSuite extends QueryTest with SharedSparkSession { errorClass = "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE", parameters = Map( "sqlExpr" -> "\"forall(s, lambdafunction(namedlambdavariable(), namedlambdavariable()))\"", - "paramIndex" -> "2", + "paramIndex" -> "second", "inputSql" -> "\"lambdafunction(namedlambdavariable(), namedlambdavariable())\"", "inputType" -> "\"STRING\"", "requiredType" -> "\"BOOLEAN\""), @@ -4394,7 +4394,7 @@ class DataFrameFunctionsSuite extends QueryTest with SharedSparkSession { errorClass = "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE", parameters = Map( "sqlExpr" -> "\"forall(s, lambdafunction(namedlambdavariable(), namedlambdavariable()))\"", - "paramIndex" -> "2", + "paramIndex" -> "second", "inputSql" -> "\"lambdafunction(namedlambdavariable(), namedlambdavariable())\"", "inputType" -> "\"STRING\"", "requiredType" -> "\"BOOLEAN\""), @@ -4632,7 +4632,7 @@ class DataFrameFunctionsSuite extends QueryTest with SharedSparkSession { sqlState = None, parameters = Map( "sqlExpr" -> s""""$agg(i, 0, lambdafunction(x, acc, x), lambdafunction(id, id))"""", - "paramIndex" -> "1", + "paramIndex" -> "first", "inputSql" -> "\"i\"", "inputType" -> "\"INT\"", "requiredType" -> "\"ARRAY\""), @@ -4651,7 +4651,7 @@ class DataFrameFunctionsSuite extends QueryTest with SharedSparkSession { matchPVals = true, parameters = Map( "sqlExpr" -> """"aggregate\(i, 0, lambdafunction\(y_\d+, x_\d+, y_\d+\), lambdafunction\(x_\d+, x_\d+\)\)"""", - "paramIndex" -> "1", + "paramIndex" -> "first", "inputSql" -> "\"i\"", "inputType" -> "\"INT\"", "requiredType" -> "\"ARRAY\""), @@ -4668,7 +4668,7 @@ class DataFrameFunctionsSuite extends QueryTest with SharedSparkSession { errorClass = "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE", parameters = Map( "sqlExpr" -> s""""$agg(s, 0, lambdafunction(namedlambdavariable(), namedlambdavariable(), namedlambdavariable()), lambdafunction(namedlambdavariable(), namedlambdavariable()))"""", - "paramIndex" -> "3", + "paramIndex" 
-> "third", "inputSql" -> "\"lambdafunction(namedlambdavariable(), namedlambdavariable(), namedlambdavariable())\"", "inputType" -> "\"STRING\"", "requiredType" -> "\"INT\"" @@ -4688,7 +4688,7 @@ class DataFrameFunctionsSuite extends QueryTest with SharedSparkSession { errorClass = "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE", parameters = Map( "sqlExpr" -> """"aggregate(s, 0, lambdafunction(namedlambdavariable(), namedlambdavariable(), namedlambdavariable()), lambdafunction(namedlambdavariable(), namedlambdavariable()))"""", - "paramIndex" -> "3", + "paramIndex" -> "third", "inputSql" -> "\"lambdafunction(namedlambdavariable(), namedlambdavariable(), namedlambdavariable())\"", "inputType" -> "\"STRING\"", "requiredType" -> "\"INT\"" @@ -4813,7 +4813,7 @@ class DataFrameFunctionsSuite extends QueryTest with SharedSparkSession { sqlState = None, parameters = Map( "sqlExpr" -> "\"map_zip_with(i, mis, lambdafunction(concat(x, y, z), x, y, z))\"", - "paramIndex" -> "1", + "paramIndex" -> "first", "inputSql" -> "\"i\"", "inputType" -> "\"INT\"", "requiredType" -> "\"MAP\""), context = ExpectedContext( @@ -4830,7 +4830,7 @@ class DataFrameFunctionsSuite extends QueryTest with SharedSparkSession { matchPVals = true, parameters = Map( "sqlExpr" -> """"map_zip_with\(i, mis, lambdafunction\(concat\(x_\d+, y_\d+, z_\d+\), x_\d+, y_\d+, z_\d+\)\)"""", - "paramIndex" -> "1", + "paramIndex" -> "first", "inputSql" -> "\"i\"", "inputType" -> "\"INT\"", "requiredType" -> "\"MAP\""), queryContext = Array( @@ -4845,7 +4845,7 @@ class DataFrameFunctionsSuite extends QueryTest with SharedSparkSession { sqlState = None, parameters = Map( "sqlExpr" -> "\"map_zip_with(mis, i, lambdafunction(concat(x, y, z), x, y, z))\"", - "paramIndex" -> "2", + "paramIndex" -> "second", "inputSql" -> "\"i\"", "inputType" -> "\"INT\"", "requiredType" -> "\"MAP\""), context = ExpectedContext( @@ -4862,7 +4862,7 @@ class DataFrameFunctionsSuite extends QueryTest with SharedSparkSession { matchPVals = true, 
parameters = Map( "sqlExpr" -> """"map_zip_with\(mis, i, lambdafunction\(concat\(x_\d+, y_\d+, z_\d+\), x_\d+, y_\d+, z_\d+\)\)"""", - "paramIndex" -> "2", + "paramIndex" -> "second", "inputSql" -> "\"i\"", "inputType" -> "\"INT\"", "requiredType" -> "\"MAP\""), queryContext = Array( @@ -5037,7 +5037,7 @@ class DataFrameFunctionsSuite extends QueryTest with SharedSparkSession { sqlState = None, parameters = Map( "sqlExpr" -> "\"transform_keys(j, lambdafunction((k + 1), k, v))\"", - "paramIndex" -> "1", + "paramIndex" -> "first", "inputSql" -> "\"j\"", "inputType" -> "\"ARRAY\"", "requiredType" -> "\"MAP\""), @@ -5297,7 +5297,7 @@ class DataFrameFunctionsSuite extends QueryTest with SharedSparkSession { sqlState = None, parameters = Map( "sqlExpr" -> "\"transform_values(x, lambdafunction((k + 1), k, v))\"", - "paramIndex" -> "1", + "paramIndex" -> "first", "inputSql" -> "\"x\"", "inputType" -> "\"ARRAY\"", "requiredType" -> "\"MAP\""), @@ -5315,7 +5315,7 @@ class DataFrameFunctionsSuite extends QueryTest with SharedSparkSession { parameters = Map( "sqlExpr" -> """"transform_values\(x, lambdafunction\(`\+`\(x_\d+, 1\), x_\d+, y_\d+\)\)"""", - "paramIndex" -> "1", + "paramIndex" -> "first", "inputSql" -> "\"x\"", "inputType" -> "\"ARRAY\"", "requiredType" -> "\"MAP\""), @@ -5441,7 +5441,7 @@ class DataFrameFunctionsSuite extends QueryTest with SharedSparkSession { sqlState = None, parameters = Map( "sqlExpr" -> "\"zip_with(i, a2, lambdafunction(x, acc, x))\"", - "paramIndex" -> "1", + "paramIndex" -> "first", "inputSql" -> "\"i\"", "inputType" -> "\"INT\"", "requiredType" -> "\"ARRAY\""), @@ -5459,7 +5459,7 @@ class DataFrameFunctionsSuite extends QueryTest with SharedSparkSession { parameters = Map( "sqlExpr" -> """"zip_with\(i, a2, lambdafunction\(y_\d+, x_\d+, y_\d+\)\)"""", - "paramIndex" -> "1", + "paramIndex" -> "first", "inputSql" -> "\"i\"", "inputType" -> "\"INT\"", "requiredType" -> "\"ARRAY\""), @@ -5838,7 +5838,7 @@ class DataFrameFunctionsSuite extends 
QueryTest with SharedSparkSession { errorClass = "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE", parameters = Map( "sqlExpr" -> "\"array_compact(a)\"", - "paramIndex" -> "1", + "paramIndex" -> "first", "requiredType" -> "\"ARRAY\"", "inputSql" -> "\"a\"", "inputType" -> "\"INT\"" @@ -5912,7 +5912,7 @@ class DataFrameFunctionsSuite extends QueryTest with SharedSparkSession { errorClass = "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE", parameters = Map( "sqlExpr" -> "\"array_append(a, b)\"", - "paramIndex" -> "0", + "paramIndex" -> "first", "requiredType" -> "\"ARRAY\"", "inputSql" -> "\"a\"", "inputType" -> "\"STRING\"" diff --git a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala index 810c66bdc37d0..8ef95e6fd129b 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala @@ -352,7 +352,7 @@ class DataFrameSuite extends QueryTest errorClass = "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE", parameters = Map( "sqlExpr" -> "\"explode(csv)\"", - "paramIndex" -> "1", + "paramIndex" -> "first", "inputSql"-> "\"csv\"", "inputType" -> "\"STRING\"", "requiredType" -> "(\"ARRAY\" or \"MAP\")"), diff --git a/sql/core/src/test/scala/org/apache/spark/sql/GeneratorFunctionSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/GeneratorFunctionSuite.scala index 25287cce368a7..da93c70a5b074 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/GeneratorFunctionSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/GeneratorFunctionSuite.scala @@ -62,7 +62,7 @@ class GeneratorFunctionSuite extends QueryTest with SharedSparkSession { errorClass = "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE", parameters = Map( "sqlExpr" -> "\"stack(1.1, 1, 2, 3)\"", - "paramIndex" -> "1", + "paramIndex" -> "first", "inputSql" -> "\"1.1\"", "inputType" -> "\"DECIMAL(2,1)\"", "requiredType" -> "\"INT\""), @@ -290,7 +290,7 @@ class 
GeneratorFunctionSuite extends QueryTest with SharedSparkSession { errorClass = "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE", parameters = Map( "sqlExpr" -> "\"inline(array())\"", - "paramIndex" -> "1", + "paramIndex" -> "first", "inputSql" -> "\"array()\"", "inputType" -> "\"ARRAY\"", "requiredType" -> "\"ARRAY\""), diff --git a/sql/core/src/test/scala/org/apache/spark/sql/StringFunctionsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/StringFunctionsSuite.scala index 38a6b9a50272b..db157f667e412 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/StringFunctionsSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/StringFunctionsSuite.scala @@ -787,7 +787,7 @@ class StringFunctionsSuite extends QueryTest with SharedSparkSession { sqlState = None, parameters = Map( "sqlExpr" -> "\"regexp_replace(collect_list(1), 1, 2, 1)\"", - "paramIndex" -> "1", + "paramIndex" -> "first", "inputSql" -> "\"collect_list(1)\"", "inputType" -> "\"ARRAY\"", "requiredType" -> "\"STRING\""), diff --git a/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2FunctionSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2FunctionSuite.scala index 6481a3f3a8910..95bdb2543e376 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2FunctionSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2FunctionSuite.scala @@ -470,9 +470,9 @@ class DataSourceV2FunctionSuite extends DatasourceV2SQLBase { checkAnswer(sql(s"SELECT testcat.ns.$name(42, 58L)"), Row(100) :: Nil) val paramIndex = name match { - case "add" => "1" - case "add2" => "2" - case "add3" => "1" + case "add" => "first" + case "add2" => "second" + case "add3" => "first" } // can't cast date time interval to long @@ -640,7 +640,7 @@ class DataSourceV2FunctionSuite extends DatasourceV2SQLBase { errorClass = "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE", parameters = Map( "sqlExpr" -> "\"v2aggregator(col1)\"", - "paramIndex" 
-> "1", + "paramIndex" -> "first", "inputSql" -> "\"col1\"", "inputType" -> "\"INTERVAL DAY\"", "requiredType" -> "\"DECIMAL(38,18)\"" diff --git a/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala index cfa17bdf1e0d1..f21c0c2b52fab 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala @@ -3057,7 +3057,7 @@ class DataSourceV2SQLSuiteV1Filter sqlState = None, parameters = Map( "sqlExpr" -> "\"abs(true)\"", - "paramIndex" -> "1", + "paramIndex" -> "first", "inputSql" -> "\"true\"", "inputType" -> "\"BOOLEAN\"", "requiredType" ->