forked from byzer-org/byzer-lang
-
Notifications
You must be signed in to change notification settings - Fork 0
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
- Loading branch information
1 parent
5da742a
commit 8e0cd42
Showing
15 changed files
with
201 additions
and
349 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
18 changes: 0 additions & 18 deletions
18
streamingpro-spark-2.0/src/main/java/org/apache/spark/sql/DefaultSparkSessionAdaptor.scala
This file was deleted.
Oops, something went wrong.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
65 changes: 0 additions & 65 deletions
65
streamingpro-spark-2.0/src/main/java/streaming/core/common/SQLContextHolder.scala
This file was deleted.
Oops, something went wrong.
90 changes: 90 additions & 0 deletions
90
....0/src/main/java/streaming/core/compositor/spark/ss/output/MultiSQLOutputCompositor.scala
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,90 @@ | ||
package streaming.core.compositor.spark.ss.output

import java.util
import java.util.concurrent.TimeUnit

import org.apache.log4j.Logger
import org.apache.spark.sql.streaming.ProcessingTime
import serviceframework.dispatcher.{Compositor, Processor, Strategy}
import streaming.core.CompositorHelper
import streaming.core.strategy.ParamsValidator

import scala.collection.JavaConversions._
import scala.util.control.NonFatal

/**
 * Starts one Structured Streaming sink per configured output entry.
 *
 * Each entry in the config list names an input table (registered elsewhere in the
 * job) plus sink settings (`path`, `format`, `mode`, optional `checkpoint`,
 * `outputFileNum`, `duration`). Per-entry values may be overridden at runtime via
 * `params` keys of the form `streaming.sql.out.<name>.<key>`. After launching all
 * queries, this blocks on `awaitAnyTermination()`.
 *
 * 5/11/16 WilliamZhu([email protected])
 */
class MultiSQLOutputCompositor[T] extends Compositor[T] with CompositorHelper with ParamsValidator {

  private var _configParams: util.List[util.Map[Any, Any]] = _
  val logger = Logger.getLogger(classOf[MultiSQLOutputCompositor[T]].getName)

  override def initialize(typeFilters: util.List[String], configParams: util.List[util.Map[Any, Any]]): Unit = {
    this._configParams = configParams
  }

  /** Configured sink path, if any (first config entry). */
  def path = {
    config[String]("path", _configParams)
  }

  /** Configured sink format, if any (first config entry). */
  def format = {
    config[String]("format", _configParams)
  }

  /** Configured output mode, if any (first config entry). */
  def mode = {
    config[String]("mode", _configParams)
  }

  override def result(alg: util.List[Processor[T]], ref: util.List[Strategy[T]], middleResult: util.List[T], params: util.Map[Any, Any]): util.List[T] = {

    val spark = sparkSession(params)
    _configParams.foreach { config =>

      try {
        val name = config.getOrElse("name", "").toString
        // Runtime overrides win over static config: streaming.sql.out.<name>.<key>
        val _cfg = config.map(f => (f._1.toString, f._2.toString)).map { f =>
          (f._1, params.getOrElse(s"streaming.sql.out.${name}.${f._1}", f._2).toString)
        }.toMap

        val tableName = _cfg("inputTableName")
        // Everything except the reserved keys is passed through as sink options.
        val options = _cfg - "path" - "mode" - "format"
        val mode = _cfg.getOrElse("mode", "ErrorIfExists")
        val format = _cfg("format")
        // -1 (the default) means "do not repartition".
        val outputFileNum = _cfg.getOrElse("outputFileNum", "-1").toInt

        var newTableDF = spark.table(tableName)

        if (outputFileNum != -1) {
          newTableDF = newTableDF.repartition(outputFileNum)
        }

        val ssStream = newTableDF.writeStream

        // BUG FIX: the original used `_cfg.get("checkpoint").toString`, which on a
        // Scala Map stringifies the Option and sets the location to "Some(<dir>)".
        // Use apply() to get the raw value, and idiomatic `contains` instead of the
        // JavaConversions-backed `containsKey`.
        if (_cfg.contains("checkpoint")) {
          ssStream.option("checkpointLocation", _cfg("checkpoint"))
        }

        val query = ssStream.options(options).outputMode(mode).format(format)
        query.trigger(ProcessingTime(_cfg.getOrElse("duration", "10").toInt, TimeUnit.SECONDS)).start()

      } catch {
        // Log through the class logger instead of printStackTrace, and let fatal
        // errors (OOM, interrupts) propagate.
        case NonFatal(e) => logger.error(s"failed to start streaming output", e)
      }

    }
    // Block until any of the launched streaming queries terminates.
    spark.streams.awaitAnyTermination()

    params.remove("sql")
    new util.ArrayList[T]()
  }

  /** No preconditions are enforced; always reports valid. */
  override def valid(params: util.Map[Any, Any]): (Boolean, String) = {
    (true, "")
  }
}
84 changes: 0 additions & 84 deletions
84
...ark-2.0/src/main/java/streaming/core/compositor/spark/ss/output/SQLOutputCompositor.scala
This file was deleted.
Oops, something went wrong.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -12,8 +12,8 @@ import streaming.core.strategy.platform.SparkStructuredStreamingRuntime | |
import scala.collection.JavaConversions._ | ||
|
||
/** | ||
* 11/21/16 WilliamZhu([email protected]) | ||
*/ | ||
* 11/21/16 WilliamZhu([email protected]) | ||
*/ | ||
class MockSQLSourceCompositor[T] extends Compositor[T] with CompositorHelper { | ||
private var _configParams: util.List[util.Map[Any, Any]] = _ | ||
|
||
|
@@ -24,17 +24,17 @@ class MockSQLSourceCompositor[T] extends Compositor[T] with CompositorHelper { | |
} | ||
|
||
def data = { | ||
_configParams(0).map(f => f._2.asInstanceOf[JSONArray].map(k => k.asInstanceOf[String]).toSeq).toSeq | ||
_configParams(1).map(f => f._2.asInstanceOf[JSONArray].map(k => k.asInstanceOf[String]).toSeq).toSeq | ||
} | ||
|
||
override def result(alg: util.List[Processor[T]], ref: util.List[Strategy[T]], middleResult: util.List[T], params: util.Map[Any, Any]): util.List[T] = { | ||
val sparkSSRt = params.get("_runtime_").asInstanceOf[SparkStructuredStreamingRuntime] | ||
val ss = sparkSSRt.sparkSessionAdaptor.sparkSession | ||
val ss = sparkSession(params) | ||
import ss.implicits._ | ||
implicit val sqlContext = ss.sqlContext | ||
val inputData = MemoryStream[String] | ||
inputData.addData(data.flatMap(f => f).seq) | ||
val df = inputData.toDS() | ||
df.createOrReplaceTempView(_configParams(0)("outputTable").toString) | ||
List(df.asInstanceOf[T]) | ||
} | ||
} |
Oops, something went wrong.