Commit

Support Tensorflow Preprocessing (intel#1572)
* fix tensor bug

* preprocessing

* test

* fix tests

* refine

* fix tests

* fix style

* fix tests

* meet code review

* meet code review

* meet code review

* add doc
yangw1234 authored Sep 25, 2017
1 parent 67435cc commit d341813
Showing 17 changed files with 17,830 additions and 107 deletions.
@@ -95,7 +95,7 @@ class ColorJitter extends Transformer[LabeledBGRImage, LabeledBGRImage] {
     val order = Tensor.randperm[Float](3)
     var i = 1
     while (i <= order.size(1)) {
-      val idx = order(i).valueAt(1).toInt
+      val idx = order(i).value().toInt
       ts(idx)(input)
       i += 1
     }
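This is the first user of the new scalar-tensor semantics: indexing a 1-d tensor with order(i) now yields a 0-dimensional scalar, read with value() rather than valueAt(1). A minimal sketch, assuming only the Tensor API visible in this diff:

import com.intel.analytics.bigdl.tensor.Tensor

// randperm gives a 1-d tensor holding a random permutation of 1..3
val order = Tensor.randperm[Float](3)
// order(1) is now a 0-d scalar tensor, so its element is read with value()
val idx = order(1).value().toInt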
@@ -297,8 +297,7 @@ class Graph[T: ClassTag](val inputs : Seq[ModuleNode[T]],
   /**
    * Execution plan
    */
-  private val forwardNodes = backGraph.DFS
-    .filterNot(_.element.isInstanceOf[ControlDependency[T]]).toArray
+  private val forwardNodes = backGraph.DFS.toArray
   private val forwardScheduler = new Scheduler(
     forwardNodes.filter(_.prevNodes.length == 0),
     Seq(dummyOutput)
@@ -347,7 +346,8 @@ class Graph[T: ClassTag](val inputs : Seq[ModuleNode[T]],
     require(forwardNodes.map(_.element.getName()).distinct.length == forwardNodes.length,
       "the name of node in the graph should be unique")
     val roots = forwardNodes.filter(_.prevNodes.size == 0)
-      .filter(node => !node.element.isInstanceOf[WithoutInput])
+      .filter(node => !node.element.isInstanceOf[WithoutInput]
+        && !node.element.isInstanceOf[ControlDependency[_]])
     require(roots.size == inputs.length,
       s"There're ${inputs.length} inputs, but graph has ${roots.size} roots")
     inputs.foreach(n =>
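Taken together, the two Graph edits keep ControlDependency nodes in the execution plan while excluding them from the root check, so every remaining root must match a declared input. A hedged sketch of that invariant with the functional graph API (Input, inputs and Graph from the nn package; exact imports assumed):

import com.intel.analytics.bigdl.nn.{Graph, Input, Linear}

// one declared input, so the graph must expose exactly one root
val in = Input[Float]()
val out = Linear[Float](4, 2).inputs(in)
val model = Graph[Float](in, out)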
@@ -25,8 +25,11 @@ class Rank[T: ClassTag]()
   (implicit ev: TensorNumeric[T]) extends Operation[Tensor[_], Tensor[Int], T] {

   override def updateOutput(input: Tensor[_]): Tensor[Int] = {
-    output.resizeAs(input(1))
-    output.setValue(1, input.nDimension())
+    if (output.getType() != IntType) {
+      output = Tensor[Int]()
+    }
+    output.resize(Array[Int]())
+    output.setValue(input.nDimension())

     output
   }
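Rank now resizes its output to an empty shape, i.e. a 0-d scalar tensor, matching TensorFlow's convention that rank is a scalar. A usage sketch, assuming Rank lives in the nn.ops package and is driven through Operation's forward:

import com.intel.analytics.bigdl.nn.ops.Rank
import com.intel.analytics.bigdl.tensor.Tensor

val rank = new Rank[Float]()
// a 2 x 3 input has rank 2; the output is a scalar Int tensor
val out = rank.forward(Tensor[Float](2, 3))
assert(out.value() == 2)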
@@ -1716,12 +1716,13 @@ class PythonBigDL[T: ClassTag](implicit ev: TensorNumeric[T]) extends Serializab
       samples: JavaRDD[Sample],
       optMethod: OptimMethod[T],
       criterion: Criterion[T],
-      batchSize: Int, endWhen: Trigger): AbstractModule[Activity, Activity, T] = {
+      batchSize: Int,
+      endWhen: Trigger): AbstractModule[Activity, Activity, T] = {
     val nodeList = parse(modelPath)

     val context =
       new mutable.HashMap[String, (Tensor[T], Tensor[T], Option[Seq[(Int, Int)]])]()
-    val session = new BigDLSessionImpl[T](nodeList.asScala, context)
+    val session = new BigDLSessionImpl[T](nodeList.asScala, samples.sparkContext, context)
     val dataset = batching(samples, batchSize)

     val model = session.train(Seq(output), dataset,
@@ -37,9 +37,7 @@ private[tensor] class DenseTensor[@specialized(Float, Double) T: ClassTag](

   override def isEmpty: Boolean = this.storage() == null || this.storage().length() == 0

-  override def isScalar: Boolean =
-    this.nDimension == 0 &&
-      this._storage.length() == 1
+  override def isScalar: Boolean = !this.isEmpty && this.nDimension == 0

   override def storage(): Storage[T] = _storage

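The new definition makes empty and scalar mutually exclusive: a scalar has zero dimensions but one stored element, while an unallocated tensor has no storage at all. A small sketch of the distinction, constructing the scalar directly since the Tensor.scalar factory only appears later in this commit:

import com.intel.analytics.bigdl.tensor.Tensor

val empty = Tensor[Float]()                     // no storage: empty, not a scalar
val s = Tensor[Float](Array(3f), Array[Int]())  // one element, zero dimensions: a scalar
assert(!empty.isScalar && empty.isEmpty)
assert(s.isScalar && s.nDimension() == 0)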
@@ -397,14 +395,10 @@ private[tensor] class DenseTensor[@specialized(Float, Double) T: ClassTag](
     val _dimension = dim - 1
     val _sliceIndex = index - 1

-    if (this.nDimension > 1) {
-      val result = DenseTensor.newWithTensor(this)
-      DenseTensor.select(result, null, _dimension, _sliceIndex)
-      result
-    } else {
-      require(this.nDimension == 1, "empty tensor")
-      this.narrow(1, index, 1)
-    }
+    require(this.nDimension > 0, "empty or scalar tensor cannot be selected")
+    val result = DenseTensor.newWithTensor(this)
+    DenseTensor.select(result, null, _dimension, _sliceIndex)
+    result
   }

   override def clone(): Tensor[T] = {
@@ -491,13 +485,9 @@ private[tensor] class DenseTensor[@specialized(Float, Double) T: ClassTag](
     require(_index >= 0 && _index < this._size(0),
       s"out of range, ${_index}: 0 to ${this._size(0)}")

-    if (this.nDimension == 1) {
-      this.narrow(1, index, 1)
-    } else {
-      val result = DenseTensor.newWithTensor(this)
-      DenseTensor.select(result, null, 0, _index)
-      result
-    }
+    val result = DenseTensor.newWithTensor(this)
+    DenseTensor.select(result, null, 0, _index)
+    result
   }

   override def apply(table: Table): Tensor[T] = {
@@ -2289,7 +2279,7 @@ object DenseTensor {
     self: DenseTensor[T], source: Tensor[T], _dimension: Int, _sliceIndex: Int): Unit = {
     var src = source
     if (src == null) src = self
-    require(src.nDimension() > 1, "cannot select on a vector")
+    require(src.nDimension() > 0, "cannot select on a scalar")
     require(_dimension >= 0 && _dimension < src.nDimension(), "out of range")
     require(_sliceIndex >= 0 && _sliceIndex < src.size(_dimension + 1),
       s"${_sliceIndex} out of range 0 to ${src.size(_dimension + 1)}")
@@ -2362,7 +2352,7 @@ object DenseTensor {
   private[tensor] def copy[@specialized(Float, Double) T](
     self: DenseTensor[T], src: Tensor[T]): Unit = {
     require(self.nElement() == src.nElement())
-    if (self.nDimension == 0) {
+    if (self.isEmpty) {
       return
     }
     if (self.isContiguous() && src.isContiguous() && sameStride(self.stride(), src.stride())) {
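The net effect of these DenseTensor changes is that select and apply now work on 1-d tensors and return 0-d scalars where they previously narrowed to 1-element vectors. A behavior sketch under the indexing API shown above:

import com.intel.analytics.bigdl.tensor.Tensor

// index into a vector: the result is now a 0-d scalar tensor
val v = Tensor[Float](3).fill(1f)
val elem = v(1)
assert(elem.isScalar && elem.value() == 1f)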
@@ -1016,6 +1016,15 @@ object Tensor {
     apply(Storage(matrix.toArray), 1, Array(matrix.numRows, matrix.numCols), strides)
   }

+  /**
+   * Create a scalar tensor holding the given value
+   * @return the created scalar tensor
+   */
+  def scalar[T: ClassTag](value: T)(
+    implicit ev: TensorNumeric[T]): Tensor[T] = {
+    Tensor[T](Array(value), Array[Int]())
+  }
+
   /**
    * This is equivalent to DenseTensor.randperm[T](size)
    *
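A usage sketch of the new factory, relying only on the scalar accessors shown earlier in this diff:

import com.intel.analytics.bigdl.tensor.Tensor

val s = Tensor.scalar(2.5)  // a 0-d Tensor[Double] holding one element
assert(s.isScalar)
assert(s.value() == 2.5)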
@@ -289,6 +289,25 @@ class Table private[bigdl](
     new Table(newState)
   }

+  /**
+   * Return the elements of this table as a Seq.
+   * This method assumes the keys of this table are all
+   * the integers between 1 and this.length(),
+   * and that the values are all Tensor[T].
+   */
+  def toSeq[T]: Seq[Tensor[T]] = {
+    for (i <- 0 until this.length()) yield {
+      try {
+        this(i + 1).asInstanceOf[Tensor[T]]
+      } catch {
+        case e: NoSuchElementException =>
+          throw new UnsupportedOperationException("toSeq requires the keys of this table to be" +
+            " all the integers between 1 and this.length()", e)
+      }
+    }
+  }
+
   override def toTensor[D]
     (implicit ev: TensorNumeric[D]): Tensor[D] =
     throw new IllegalArgumentException("Table cannot be cast to Tensor")
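A sketch of toSeq under its stated contract, building the table with the varargs T constructor so the keys are 1..n:

import com.intel.analytics.bigdl.tensor.Tensor
import com.intel.analytics.bigdl.utils.T

val table = T(Tensor[Float](2), Tensor[Float](3))
val tensors = table.toSeq[Float]  // the two tensors, in key order
assert(tensors.length == 2)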
@@ -324,6 +343,18 @@ object T {
     new Table(data)
   }

+  /**
+   * Construct a table from a sequence.
+   *
+   * The index + 1 will be used as the key.
+   *
+   * @param data the elements to store
+   * @return a table mapping key i + 1 to data(i)
+   */
+  def seq(data: Seq[Any]): Table = {
+    new Table(data.toArray)
+  }
+
   /**
    * Construct a table from a sequence of pair.
    */
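And a matching sketch for T.seq:

import com.intel.analytics.bigdl.tensor.Tensor
import com.intel.analytics.bigdl.utils.T

val elems = Seq(Tensor[Float](2), Tensor[Float](3))
val table = T.seq(elems)  // elems(i) is stored at key i + 1
assert(table.length() == 2)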