Skip to content

Commit

Permalink
rollback to spark 2.3
Browse files Browse the repository at this point in the history
  • Loading branch information
binglihub committed Feb 21, 2019
1 parent 9217af0 commit 8eb0377
Show file tree
Hide file tree
Showing 7 changed files with 13 additions and 13 deletions.
2 changes: 1 addition & 1 deletion .travis.yml
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@ matrix:
include:
- jdk: openjdk8
scala: 2.11.12
env: SPARK_VERSION="2.4.0" INTEGRATION_TESTS="true"
env: SPARK_VERSION="2.3.0" INTEGRATION_TESTS="true"
script:
- "./dev/run-tests-travis.sh"
after_success:
Expand Down
4 changes: 2 additions & 2 deletions build.sbt
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@ import sbtrelease.ReleasePlugin.autoImport._

import scala.util.Properties

val sparkVersion = "2.4.0"
val sparkVersion = "2.3.0"
val testSparkVersion = sys.props.get("spark.testVersion").getOrElse(sparkVersion)

lazy val ItTest = config("it") extend(Test)
Expand All @@ -34,7 +34,7 @@ lazy val root = Project("spark-snowflake", file("."))
.settings(
name := "spark-snowflake",
organization := "net.snowflake",
version := "2.4.13-spark_2.4",
version := "2.4.13-spark_2.3",
scalaVersion := sys.props.getOrElse("SPARK_SCALA_VERSION", default = "2.11.12"),
crossScalaVersions := Seq("2.11.12"),
javacOptions ++= Seq("-source", "1.8", "-target", "1.8"),
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,7 @@ object SnowflakeConnectorUtils {
/**
* Check the Spark version; if it matches SUPPORT_SPARK_VERSION, enable pushdown, otherwise disable it.
*/
private val SUPPORT_SPARK_VERSION = "2.4"
private val SUPPORT_SPARK_VERSION = "2.3"

def checkVersionAndEnablePushdown(session: SparkSession): Boolean =
if(session.version.startsWith(SUPPORT_SPARK_VERSION)){
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -40,7 +40,7 @@ private[querygeneration] case class QueryHelper(
p.map(e =>
colSet.find(c => c.exprId == e.exprId) match {
case Some(a) =>
AttributeReference(a.name, a.dataType, a.nullable, a.metadata)(a.exprId)
AttributeReference(a.name, a.dataType, a.nullable, a.metadata)(a.exprId, None)
case None => e
}))
.map(p => renameColumns(p, alias))
Expand All @@ -63,7 +63,7 @@ private[querygeneration] case class QueryHelper(

val outputWithQualifier = output.map(
a =>
AttributeReference(a.name, a.dataType, a.nullable, a.metadata)(a.exprId, Seq[String](alias)))
AttributeReference(a.name, a.dataType, a.nullable, a.metadata)(a.exprId, Some(alias)))

val sourceStatement: SnowflakeSQLStatement =
if(children.nonEmpty)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@ private[querygeneration] abstract sealed class SnowflakeQuery {
} else col.name

Alias(Cast(col, col.dataType), orig_name)(col.exprId,
Seq.empty[String],
None,
Some(col.metadata))
}.map(_.toAttribute)
}
Expand Down
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
package net.snowflake.spark.snowflake.pushdowns.querygeneration

import net.snowflake.spark.snowflake.{ConstantString, SnowflakeSQLStatement}
import org.apache.spark.sql.catalyst.expressions.{Ascii, Attribute, Concat, Expression, Like, Lower, StringLPad, StringRPad, StringTranslate, StringTrim, StringTrimLeft, StringTrimRight, Substring, Upper}
import org.apache.spark.sql.catalyst.expressions.{StringReverse, Ascii, Attribute, Concat, Expression, Like, Lower, StringLPad, StringRPad, StringTranslate, StringTrim, StringTrimLeft, StringTrimRight, Substring, Upper}

/** Extractor for boolean expressions (return true or false). */
private[querygeneration] object StringStatement {
Expand All @@ -23,7 +23,7 @@ private[querygeneration] object StringStatement {
Option(
expr match {
case _: Ascii | _: Lower | _: Substring | _: StringLPad |
_: StringRPad | _: StringTranslate |
_: StringRPad | _: StringTranslate | _: StringReverse |
_: StringTrim | _: StringTrimLeft | _: StringTrimRight |
_: Substring | _: Upper =>
ConstantString(expr.prettyName.toUpperCase) +
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -48,7 +48,7 @@ package object querygeneration {
}

/** Qualifies identifiers with the alias of the subquery to which they belong */
private[querygeneration] final def qualifiedAttribute(alias: Seq[String],
private[querygeneration] final def qualifiedAttribute(alias: Option[String],
name: String): String = {
val str = if(alias.isEmpty) ""
else alias.map(wrap).mkString(".") + "."
Expand All @@ -59,7 +59,7 @@ package object querygeneration {
}

private[querygeneration] final def qualifiedAttributeStatement(
alias: Seq[String],
alias: Option[String],
name: String
): SnowflakeSQLStatement =
ConstantString(qualifiedAttribute(alias, name)) !
Expand Down Expand Up @@ -118,9 +118,9 @@ package object querygeneration {

expr match {
case a @ Alias(child: Expression, name: String) =>
Alias(child, altName)(a.exprId, Seq.empty[String], Some(metadata))
Alias(child, altName)(a.exprId, None, Some(metadata))
case _ =>
Alias(expr, altName)(expr.exprId, Seq.empty[String], Some(metadata))
Alias(expr, altName)(expr.exprId, None, Some(metadata))
}
}
}
Expand Down

0 comments on commit 8eb0377

Please sign in to comment.