Skip to content

Commit

Permalink
Revert "[SPARK-42702][SPARK-42623][SQL] Support parameterized query in subquery and CTE"
Browse files Browse the repository at this point in the history

This reverts commit a780703.
  • Loading branch information
HyukjinKwon committed Mar 10, 2023
1 parent a780703 commit b36966f
Show file tree
Hide file tree
Showing 11 changed files with 86 additions and 247 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,7 @@ import org.apache.spark.connect.proto.ExecutePlanResponse.SqlCommandResult
import org.apache.spark.connect.proto.Parse.ParseFormat
import org.apache.spark.sql.{Column, Dataset, Encoders, SparkSession}
import org.apache.spark.sql.catalyst.{expressions, AliasIdentifier, FunctionIdentifier}
import org.apache.spark.sql.catalyst.analysis.{GlobalTempView, LocalTempView, MultiAlias, ParameterizedQuery, UnresolvedAlias, UnresolvedAttribute, UnresolvedExtractValue, UnresolvedFunction, UnresolvedRegex, UnresolvedRelation, UnresolvedStar}
import org.apache.spark.sql.catalyst.analysis.{GlobalTempView, LocalTempView, MultiAlias, UnresolvedAlias, UnresolvedAttribute, UnresolvedExtractValue, UnresolvedFunction, UnresolvedRegex, UnresolvedRelation, UnresolvedStar}
import org.apache.spark.sql.catalyst.encoders.ExpressionEncoder
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.parser.{CatalystSqlParser, ParseException, ParserUtils}
Expand Down Expand Up @@ -209,12 +209,8 @@ class SparkConnectPlanner(val session: SparkSession) {
private def transformSql(sql: proto.SQL): LogicalPlan = {
val args = sql.getArgsMap.asScala.toMap
val parser = session.sessionState.sqlParser
val parsedPlan = parser.parsePlan(sql.getQuery)
if (args.nonEmpty) {
ParameterizedQuery(parsedPlan, args.mapValues(parser.parseExpression).toMap)
} else {
parsedPlan
}
val parsedArgs = args.mapValues(parser.parseExpression).toMap
Parameter.bind(parser.parsePlan(sql.getQuery), parsedArgs)
}

private def transformSubqueryAlias(alias: proto.SubqueryAlias): LogicalPlan = {
Expand Down
5 changes: 0 additions & 5 deletions core/src/main/resources/error/error-classes.json
Original file line number Diff line number Diff line change
Expand Up @@ -1730,11 +1730,6 @@
"Pandas user defined aggregate function in the PIVOT clause."
]
},
"PARAMETER_MARKER_IN_UNEXPECTED_STATEMENT" : {
"message" : [
"Parameter markers in unexpected statement: <statement>. Parameter markers must only be used in a query, or DML statement."
]
},
"PIVOT_AFTER_GROUP_BY" : {
"message" : [
"PIVOT clause following a GROUP BY clause. Consider pushing the GROUP BY into a subquery."
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -265,7 +265,6 @@ class Analyzer(override val catalogManager: CatalogManager) extends RuleExecutor
// at the beginning of analysis.
OptimizeUpdateFields,
CTESubstitution,
BindParameters,
WindowsSubstitution,
EliminateUnions,
SubstituteUnresolvedOrdinals),
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -336,7 +336,7 @@ trait CheckAnalysis extends PredicateHelper with LookupCatalog with QueryErrorsB
case p: Parameter =>
p.failAnalysis(
errorClass = "UNBOUND_SQL_PARAMETER",
messageParameters = Map("name" -> p.name))
messageParameters = Map("name" -> toSQLId(p.name)))

case _ =>
})
Expand Down

This file was deleted.

Original file line number Diff line number Diff line change
@@ -0,0 +1,64 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.apache.spark.sql.catalyst.expressions

import org.apache.spark.SparkException
import org.apache.spark.sql.catalyst.analysis.AnalysisErrorAt
import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
import org.apache.spark.sql.catalyst.trees.TreePattern.{PARAMETER, TreePattern}
import org.apache.spark.sql.errors.QueryErrorsBase
import org.apache.spark.sql.types.DataType

/**
 * The expression represents a named parameter that should be replaced by a literal.
 *
 * @param name The identifier of the parameter without the marker.
 */
case class Parameter(name: String) extends LeafExpression with Unevaluable {
  // A parameter is only a placeholder: by definition it can never be resolved.
  override lazy val resolved: Boolean = false

  // Any attempt to query a property that only a bound (substituted) expression
  // can provide is a programming error, so surface it as an internal error.
  private def failUnbound(methodName: String): Nothing = {
    throw SparkException.internalError(
      s"Cannot call `$methodName()` of the unbound parameter `$name`.")
  }

  override def dataType: DataType = failUnbound("dataType")

  override def nullable: Boolean = failUnbound("nullable")

  // Tag the node so plan traversals can prune subtrees without parameters.
  final override val nodePatterns: Seq[TreePattern] = Seq(PARAMETER)
}


/**
 * Finds all named parameters in the given plan and substitutes them by literals of `args` values.
 */
object Parameter extends QueryErrorsBase {
  /**
   * Replaces every [[Parameter]] in `plan` whose name appears in `args` by the
   * corresponding argument expression.
   *
   * @param plan the parsed logical plan that may contain [[Parameter]] nodes
   * @param args a map from parameter name to its value; every value must be a
   *             [[Literal]], otherwise an `INVALID_SQL_ARG` analysis error is raised
   * @return the plan with all matched parameters substituted; parameters whose
   *         names are absent from `args` are left in place (caught later by
   *         the `UNBOUND_SQL_PARAMETER` analysis check)
   */
  def bind(plan: LogicalPlan, args: Map[String, Expression]): LogicalPlan = {
    if (args.nonEmpty) {
      // Fail fast on the first non-literal argument. `find` short-circuits,
      // unlike the `filter(...).headOption` form which scans the whole map.
      args.find(!_._2.isInstanceOf[Literal]).foreach { case (name, expr) =>
        expr.failAnalysis(
          errorClass = "INVALID_SQL_ARG",
          messageParameters = Map("name" -> toSQLId(name)))
      }
      // Prune traversal to subtrees that actually contain PARAMETER nodes.
      plan.transformAllExpressionsWithPruning(_.containsPattern(PARAMETER)) {
        case Parameter(name) if args.contains(name) => args(name)
      }
    } else {
      plan
    }
  }
}
Original file line number Diff line number Diff line change
Expand Up @@ -73,7 +73,6 @@ object TreePattern extends Enumeration {
val OR: Value = Value
val OUTER_REFERENCE: Value = Value
val PARAMETER: Value = Value
val PARAMETERIZED_QUERY: Value = Value
val PIVOT: Value = Value
val PLAN_EXPRESSION: Value = Value
val PYTHON_UDF: Value = Value
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -1346,21 +1346,17 @@ class AnalysisSuite extends AnalysisTest with Matchers {
}

test("SPARK-41271: bind named parameters to literals") {
CTERelationDef.curId.set(0)
val actual1 = ParameterizedQuery(
child = parsePlan("WITH a AS (SELECT 1 c) SELECT * FROM a LIMIT :limitA"),
args = Map("limitA" -> Literal(10))).analyze
CTERelationDef.curId.set(0)
val expected1 = parsePlan("WITH a AS (SELECT 1 c) SELECT * FROM a LIMIT 10").analyze
comparePlans(actual1, expected1)
comparePlans(
Parameter.bind(
plan = parsePlan("SELECT * FROM a LIMIT :limitA"),
args = Map("limitA" -> Literal(10))),
parsePlan("SELECT * FROM a LIMIT 10"))
// Ignore unused arguments
CTERelationDef.curId.set(0)
val actual2 = ParameterizedQuery(
child = parsePlan("WITH a AS (SELECT 1 c) SELECT c FROM a WHERE c < :param2"),
args = Map("param1" -> Literal(10), "param2" -> Literal(20))).analyze
CTERelationDef.curId.set(0)
val expected2 = parsePlan("WITH a AS (SELECT 1 c) SELECT c FROM a WHERE c < 20").analyze
comparePlans(actual2, expected2)
comparePlans(
Parameter.bind(
plan = parsePlan("SELECT c FROM a WHERE c < :param2"),
args = Map("param1" -> Literal(10), "param2" -> Literal(20))),
parsePlan("SELECT c FROM a WHERE c < 20"))
}

test("SPARK-41489: type of filter expression should be a bool") {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@ package org.apache.spark.sql.catalyst.parser

import org.apache.spark.SparkThrowable
import org.apache.spark.sql.catalyst.{FunctionIdentifier, TableIdentifier}
import org.apache.spark.sql.catalyst.analysis.{AnalysisTest, Parameter, RelationTimeTravel, UnresolvedAlias, UnresolvedAttribute, UnresolvedFunction, UnresolvedGenerator, UnresolvedInlineTable, UnresolvedRelation, UnresolvedStar, UnresolvedSubqueryColumnAliases, UnresolvedTableValuedFunction, UnresolvedTVFAliases}
import org.apache.spark.sql.catalyst.analysis.{AnalysisTest, RelationTimeTravel, UnresolvedAlias, UnresolvedAttribute, UnresolvedFunction, UnresolvedGenerator, UnresolvedInlineTable, UnresolvedRelation, UnresolvedStar, UnresolvedSubqueryColumnAliases, UnresolvedTableValuedFunction, UnresolvedTVFAliases}
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.expressions.aggregate.{PercentileCont, PercentileDisc}
import org.apache.spark.sql.catalyst.plans._
Expand Down
12 changes: 4 additions & 8 deletions sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala
Original file line number Diff line number Diff line change
Expand Up @@ -35,9 +35,9 @@ import org.apache.spark.rdd.RDD
import org.apache.spark.scheduler.{SparkListener, SparkListenerApplicationEnd}
import org.apache.spark.sql.catalog.Catalog
import org.apache.spark.sql.catalyst._
import org.apache.spark.sql.catalyst.analysis.{ParameterizedQuery, UnresolvedRelation}
import org.apache.spark.sql.catalyst.analysis.UnresolvedRelation
import org.apache.spark.sql.catalyst.encoders._
import org.apache.spark.sql.catalyst.expressions.AttributeReference
import org.apache.spark.sql.catalyst.expressions.{AttributeReference, Parameter}
import org.apache.spark.sql.catalyst.plans.logical.{LocalRelation, Range}
import org.apache.spark.sql.catalyst.util.CharVarcharUtils
import org.apache.spark.sql.connector.ExternalCommandRunner
Expand Down Expand Up @@ -623,12 +623,8 @@ class SparkSession private(
val tracker = new QueryPlanningTracker
val plan = tracker.measurePhase(QueryPlanningTracker.PARSING) {
val parser = sessionState.sqlParser
val parsedPlan = parser.parsePlan(sqlText)
if (args.nonEmpty) {
ParameterizedQuery(parsedPlan, args.mapValues(parser.parseExpression).toMap)
} else {
parsedPlan
}
val parsedArgs = args.mapValues(parser.parseExpression).toMap
Parameter.bind(parser.parsePlan(sqlText), parsedArgs)
}
Dataset.ofRows(self, plan, tracker)
}
Expand Down
Loading

0 comments on commit b36966f

Please sign in to comment.