Skip to content

Commit

Permalink
Fix a few build warnings; move test files in wrong dir; up surefire memory
Browse files Browse the repository at this point in the history
  • Loading branch information
srowen authored and qiuxin2012 committed Apr 21, 2017
1 parent 9adab50 commit 92f3317
Show file tree
Hide file tree
Showing 6 changed files with 28 additions and 39 deletions.
1 change: 1 addition & 0 deletions spark/dl/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -142,6 +142,7 @@
<systemProperties>
<bigdl.disableCheckSysEnv>true</bigdl.disableCheckSysEnv>
</systemProperties>
<argLine>-Xmx6g</argLine>
</configuration>
<executions>
<execution>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@
package com.intel.analytics.bigdl.utils

import java.io.InputStream
import java.util.concurrent.atomic.AtomicInteger
import java.util.concurrent.atomic.AtomicBoolean
import org.apache.log4j.Logger
import org.apache.spark.{SparkConf, SparkContext}

Expand All @@ -30,12 +30,14 @@ case object MklBlas extends EngineType


object Engine {
@deprecated
@deprecated(
"See https://github.com/intel-analytics/BigDL/wiki/Programming-Guide#engine",
"0.1.0")
def init(nExecutor: Int,
executorCores: Int,
onSpark: Boolean): Option[SparkConf] = {
logger.warn("Engine.init(nExecutor, executorCores, onSpark) is deprecated. " +
"Please refer " +
"Please refer to " +
"https://github.com/intel-analytics/BigDL/wiki/Programming-Guide#engine")
setNodeAndCore(nExecutor, executorCores)
val res = if (onSpark) {
Expand All @@ -53,15 +55,15 @@ object Engine {
}

/**
* BigDL need some spark conf values to be set correctly to have a better performance.
* BigDL need some Spark conf values to be set correctly to have better performance.
*
* This method will create a spark conf, or use the existing one if you provide.
* This method will create a SparkConf, or use the existing one if you provide one.
* Populate it with correct values.
*
* We recommand you use this method instead of setting spark conf values directly. This can
* make the spark conf values changes transparent to you. However, if you use spark-shell or
* Jupiter notebook, as the spark context is created before your code, you have to
* set them directly(through command line options or properties-file)
* We recommend you use this method instead of setting spark conf values directly. This can
* make the Spark conf values changes transparent to you. However, if you use spark-shell or
* Jupyter notebook, as the Spark context is created before your code, you have to
* set them directly (through command line options or properties file)
*
* @return
*/
Expand All @@ -75,9 +77,9 @@ object Engine {
}

/**
* This method should be call before any BigDL procedure and after spark context created.
* This method should be call before any BigDL procedure and after the Spark context is created.
*
* BigDL need some spark conf values to be set correctly to have a better performance. There's
* BigDL needs some Spark conf values to be set correctly to have a better performance. There's
* also multi-thread engines so executor number and core number per executor need to be known
* to set the parameter of these engines correctly.
*
Expand All @@ -102,15 +104,8 @@ object Engine {
}

private val logger = Logger.getLogger(getClass)
private val singletonCounter: AtomicInteger = new AtomicInteger(0)
private[bigdl] var localMode: Boolean = {
val env = System.getenv("BIGDL_LOCAL_MODE")
if(env == null) {
false
} else {
true
}
}
private val singletonCounter = new AtomicBoolean()
private[bigdl] var localMode = System.getenv("BIGDL_LOCAL_MODE") != null
private var physicalCoreNumber = -1
private var nodeNum: Int = -1

Expand Down Expand Up @@ -160,21 +155,15 @@ object Engine {
}

/**
* Check if current execution is a singleton on the JVM
*
* @return
* @return true if current execution is a singleton on the JVM
*/
private[bigdl] def checkSingleton(): Boolean = {
val count = singletonCounter.incrementAndGet()
(count == 1)
}
private[bigdl] def checkSingleton(): Boolean = singletonCounter.compareAndSet(false, true)

/**
* Reset the singleton flag
*/
private[bigdl] def resetSingletonFlag(): Unit = {
singletonCounter.set(0)
}
private[bigdl] def resetSingletonFlag(): Unit = singletonCounter.set(false)

/**
* Return number of cores, the engine.init must be called before use this method or an exception
* will be thrown
Expand Down Expand Up @@ -367,7 +356,7 @@ object Engine {
val patternLocalStar = "local\\[\\*\\]".r
master match {
case patternLocalN(n) => Some(1, n.toInt)
case patternLocalStar => Some(1, getCoreNumberFromEnv)
case patternLocalStar(_*) => Some(1, getCoreNumberFromEnv)
case _ => throw new IllegalArgumentException(s"Can't parser master $master")
}
} else if (master.toLowerCase.startsWith("spark")) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -18,12 +18,11 @@ package com.intel.analytics.bigdl.dataset.text

import java.io.PrintWriter

import com.intel.analytics.bigdl.dataset.{DataSet, LocalArrayDataSet}
import com.intel.analytics.bigdl.dataset.DataSet
import com.intel.analytics.bigdl.utils.Engine
import org.apache.spark.SparkContext
import org.scalatest.{BeforeAndAfter, FlatSpec, Matchers}

import scala.collection.mutable.ArrayBuffer
import scala.io.Source

@com.intel.analytics.bigdl.tags.Serial
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -15,10 +15,9 @@
*/
package com.intel.analytics.bigdl.nn

import com.intel.analytics.bigdl.nn.JoinTable
import com.intel.analytics.bigdl.tensor.Tensor
import com.intel.analytics.bigdl.utils.{RandomGenerator, T, Table}
import org.scalatest.{BeforeAndAfter, FlatSpec, Matchers}
import com.intel.analytics.bigdl.utils.RandomGenerator
import org.scalatest.{FlatSpec, Matchers}

import scala.collection.mutable

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@
package com.intel.analytics.bigdl.python.api

import java.util
import java.util.{ArrayList => JArrayList, HashMap => JHashMap, List => JList, Map => JMap}
import java.util.{List => JList, Map => JMap}

import com.intel.analytics.bigdl._
import com.intel.analytics.bigdl.nn._
Expand Down Expand Up @@ -127,12 +127,13 @@ class PythonSpec extends FlatSpec with Matchers with BeforeAndAfter {
valRdd = data.toJavaRDD(),
vMethods = util.Arrays.asList("Top1Accuracy", "Loss"))

val trainSummary = TrainSummary(sc.appName, "lenet")
val logdir = com.google.common.io.Files.createTempDir()
val trainSummary = TrainSummary(logdir.getPath, "lenet")
.setSummaryTrigger("LearningRate", Trigger.severalIteration(1))
.setSummaryTrigger("Loss", Trigger.severalIteration(1))
.setSummaryTrigger("Throughput", Trigger.severalIteration(1))
.setSummaryTrigger("Parameters", Trigger.severalIteration(20))
val validationSummary = ValidationSummary(sc.appName, "lenet")
val validationSummary = ValidationSummary(logdir.getPath, "lenet")

pp.setTrainSummary(optimizer, trainSummary)
pp.setValSummary(optimizer, validationSummary)
Expand Down

0 comments on commit 92f3317

Please sign in to comment.