forked from byzer-org/byzer-lang
-
Notifications
You must be signed in to change notification settings - Fork 0
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
- Loading branch information
1 parent
52dd982
commit 5c28090
Showing
5 changed files
with
57 additions
and
10 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
15 changes: 15 additions & 0 deletions
15
streamingpro-opencv/src/main/java/streaming/dsl/mmlib/algs/processing/UDFFunctions.scala
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,15 @@ | ||
package streaming.dsl.mmlib.algs.processing

import org.apache.spark.sql.{Row, UDFRegistration}
import streaming.dsl.mmlib.algs.processing.image.ImageSchema

/**
 * Spark SQL UDF registration helpers for image processing.
 *
 * Created by allwefantasy on 29/5/2018.
 */
object UDFFunctions {

  /**
   * Registers the `vec_image` UDF, which converts an image row (in the
   * [[ImageSchema]] layout) into a numeric sequence via `ImageSchema.toSeq`.
   *
   * @param uDFRegistration the session's UDF registry to register into
   */
  def imageVec(uDFRegistration: UDFRegistration) =
    uDFRegistration.register("vec_image", (row: Row) => ImageSchema.toSeq(row))
}
39 changes: 39 additions & 0 deletions
39
...ark-2.0/src/main/java/org/apache/spark/sql/execution/streaming/NewFileStreamSinkLog.scala
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,39 @@ | ||
package org.apache.spark.sql.execution.streaming

import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.internal.SQLConf
import org.json4s.NoTypeHints
import org.json4s.jackson.Serialization

/**
 * A file-sink metadata log whose compaction drops entries that were marked
 * with the DELETE action.
 *
 * Created by allwefantasy on 29/5/2018.
 */
class NewFileStreamSinkLog(
    metadataLogVersion: Int,
    sparkSession: SparkSession,
    path: String)
  extends CompactibleFileStreamLog[SinkFileStatus](metadataLogVersion, sparkSession, path) {

  // JSON serialization settings used by the parent log's (de)serialization.
  private implicit val formats = Serialization.formats(NoTypeHints)

  // Delay before an expired log file is physically deleted (from session conf).
  protected override val fileCleanupDelayMs = sparkSession.sessionState.conf.fileSinkLogCleanupDelay

  // Whether expired sink-log entries are deleted at all (from session conf).
  protected override val isDeletingExpiredLog = sparkSession.sessionState.conf.fileSinkLogDeletion

  // Number of batches between two compactions of the sink log (from session conf).
  protected override val defaultCompactInterval =
    sparkSession.sessionState.conf.fileSinkLogCompactInterval

  require(defaultCompactInterval > 0,
    s"Please set ${SQLConf.FILE_SINK_LOG_COMPACT_INTERVAL.key} (was $defaultCompactInterval) " +
      "to a positive value.")

  /**
   * Compacts the given log entries by removing every entry whose path was
   * marked with [[FileStreamSinkLog.DELETE_ACTION]]. When nothing was
   * deleted, the input sequence is returned unchanged.
   */
  override def compactLogs(logs: Seq[SinkFileStatus]): Seq[SinkFileStatus] = {
    // Collect the paths of all delete-marked entries in one pass.
    val removedPaths = logs.collect {
      case status if status.action == FileStreamSinkLog.DELETE_ACTION => status.path
    }.toSet
    if (removedPaths.isEmpty) {
      logs
    } else {
      logs.filterNot(status => removedPaths.contains(status.path))
    }
  }
}
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters