Commit aa03fbc: update

allwefantasy committed Nov 28, 2018
1 parent ce5f359
Showing 2 changed files with 32 additions and 5 deletions.
4 changes: 1 addition & 3 deletions examples/sklearn_elasticnet_wine/conda.yaml
@@ -1,12 +1,10 @@
 name: tutorial
 channels:
   - defaults
 dependencies:
   - python=3.6
   - pip:
-    - numpy==1.14.3
-    - kafka-python==1.4.3
-    - pyspark==2.3.2
+    - pyspark==${SPARK_VERSION}
     - pandas==0.22.0
     - scikit-learn==0.19.1
     - scipy==1.1.0
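
The pinned pyspark dependency is replaced by a ${SPARK_VERSION} placeholder, so the test suite can render conda.yaml against whatever Spark version it is actually running. A minimal sketch of that substitution in plain Scala, assuming a literal string replacement is enough; the tests below do the same job with the project's TemplateMerge helper, and the renderCondaYaml name here is purely illustrative:

import java.io.{File, PrintWriter}
import scala.io.Source

// Replace the ${SPARK_VERSION} placeholder in conda.yaml with the Spark
// version the tests run against, writing the rendered file back in place.
def renderCondaYaml(condaYamlPath: String, sparkVersion: String): Unit = {
  val template = Source.fromFile(new File(condaYamlPath)).getLines().mkString("\n")
  val rendered = template.replace("${SPARK_VERSION}", sparkVersion)
  val writer = new PrintWriter(new File(condaYamlPath))
  try writer.write(rendered) finally writer.close()
}
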
@@ -1,14 +1,23 @@
package streaming.test.pythonalg

import java.io.File
import java.nio.charset.Charset
import java.util.UUID

import com.google.common.io.Files
import net.sf.json.JSONArray
import org.apache.http.client.fluent.{Form, Request}
import org.apache.spark.SparkCoreVersion
import org.apache.spark.streaming.BasicSparkOperation
import streaming.core.strategy.platform.SparkRuntime
import streaming.core.{BasicMLSQLConfig, SpecFunctions}
import streaming.dsl.ScriptSQLExec
import streaming.dsl.template.TemplateMerge
import streaming.test.pythonalg.code.ScriptCode
import streaming.common.ScalaMethodMacros._
import streaming.common.shell.ShellCommand

import scala.io.Source

/**
* Created by allwefantasy on 26/5/2018.
@@ -30,9 +39,20 @@ class PythonMLSpec2 extends BasicSparkOperation with SpecFunctions with BasicMLSQLConfig
withBatchContext(setupBatchContext(batchParamsWithAPI, "classpath:///test/empty.json")) { runtime: SparkRuntime =>
// execute sql
implicit val spark = runtime.sparkSession

//SPARK_VERSION
val sq = createSSEL(spark, "")
val projectName = "sklearn_elasticnet_wine"
val projectPath = getExampleProject(projectName)
var projectPath = getExampleProject(projectName)

var newpath = s"/tmp/${UUID.randomUUID().toString}"
ShellCommand.execCmd(s"cp -r ${projectPath} $newpath")

val newcondafile = TemplateMerge.merge(Source.fromFile(new File(newpath + "/conda.yaml")).getLines().mkString("\n"), Map("SPARK_VERSION" -> SparkCoreVersion.exactVersion))
Files.write(newcondafile, new File(newpath + "/conda.yaml"), Charset.forName("utf-8"))

projectPath = newpath

val scriptCode = ScriptCode(s"/tmp/${projectName}", projectPath)

val config = Map(
@@ -80,7 +100,16 @@ class PythonMLSpec2 extends BasicSparkOperation with SpecFunctions with BasicMLSQLConfig
implicit val spark = runtime.sparkSession
val sq = createSSEL(spark, "")
val projectName = "sklearn_elasticnet_wine"
val projectPath = getExampleProject(projectName)
var projectPath = getExampleProject(projectName)

var newpath = s"/tmp/${UUID.randomUUID().toString}"
ShellCommand.execCmd(s"cp -r ${projectPath} $newpath")

val newcondafile = TemplateMerge.merge(Source.fromFile(new File(newpath + "/conda.yaml")).getLines().mkString("\n"), Map("SPARK_VERSION" -> SparkCoreVersion.exactVersion))
Files.write(newcondafile, new File(newpath + "/conda.yaml"), Charset.forName("utf-8"))

projectPath = newpath

val scriptCode = ScriptCode(s"/tmp/${projectName}", projectPath)

val config = Map(
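Both test cases now share the same setup: copy the example project into a unique directory under /tmp, render its conda.yaml with SparkCoreVersion.exactVersion via TemplateMerge, and point projectPath at the copy so the checked-in example is never modified. A hedged sketch of how that duplicated block could be hoisted into a helper, using only classes the file already imports; the prepareProject name is hypothetical and not part of this commit:

// Hypothetical helper: copy an example project to a scratch directory and
// render its conda.yaml for the Spark version the current build runs on.
def prepareProject(projectPath: String): String = {
  val newPath = s"/tmp/${UUID.randomUUID().toString}"
  ShellCommand.execCmd(s"cp -r ${projectPath} ${newPath}")
  val rendered = TemplateMerge.merge(
    Source.fromFile(new File(newPath + "/conda.yaml")).getLines().mkString("\n"),
    Map("SPARK_VERSION" -> SparkCoreVersion.exactVersion))
  Files.write(rendered, new File(newPath + "/conda.yaml"), Charset.forName("utf-8"))
  newPath
}

// Usage inside each test case:
// val projectPath = prepareProject(getExampleProject(projectName))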
