
Commit aecdfc7

Merge commit '9a04db5536154b9cf26eded9bab34f267ab7446f' into mahilleb/cuDNN5
mahilleb-msft committed Aug 26, 2016
2 parents: f076227 + 9a04db5 · commit aecdfc7
Showing 6 changed files with 17 additions and 17 deletions.
2 changes: 1 addition & 1 deletion Source/CNTK/BrainScript/CNTKCoreLib/CNTK.core.bs
@@ -189,7 +189,7 @@ DelayLayer {T=1, defaultHiddenActivation=0} =
# BatchNormalizationLayer -- create a batch-normalization layer
BatchNormalizationLayer {spatialRank = 0, # reduce over these dims. E.g. 2 to reduce over (w,h) in a [W x H x C]-shaped input
initialScale = 1,
- normalizationTimeConstant = 0, blendTimeConstant = 0, # TODO: normTimeConst should be INF, not 0
+ normalizationTimeConstant = 5000, blendTimeConstant = 0,
epsilon = 0.00001, useCntkEngine = true} =
{
#normShape = _ConcatArrays (Repeat (spatialRank, 1), 0) # spatial dims get a dimension of 1 (broadcasting, while all others are inferred from input)
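With this change, BatchNormalizationLayer's default normalizationTimeConstant becomes 5000, so the tutorial configs below can simply drop their explicit normalizationTimeConstant = 4096 arguments and rely on the default. A minimal BrainScript sketch of the two call styles (illustrative only; the input node conv and the names bnDefault/bnExplicit are hypothetical, not from the diff):

    # picks up the new default, normalizationTimeConstant = 5000
    bnDefault  = BatchNormalizationLayer {spatialRank = 2} (conv)
    # overrides the default explicitly, e.g. to keep the previous tutorial setting
    bnExplicit = BatchNormalizationLayer {spatialRank = 2, normalizationTimeConstant = 4096} (conv)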
2 changes: 1 addition & 1 deletion Tutorials/ImageHandsOn/ImageHandsOn_Solution3.cntk
@@ -22,7 +22,7 @@ TrainConvNet = {
{
c = ConvolutionalLayer {depth, (5:5), pad = true, ##### no activation=ReLU
init = "gaussian", initValueScale = initValueScale} (x)
- b = BatchNormalizationLayer {spatialRank = 2, normalizationTimeConstant = 4096} (c)
+ b = BatchNormalizationLayer {spatialRank = 2} (c)
r = ReLU (b) ##### now called explicitly
p = MaxPoolingLayer {(3:3), stride = (2:2)} (r)
}.p
4 changes: 2 additions & 2 deletions Tutorials/ImageHandsOn/ImageHandsOn_Solution4.cntk
@@ -20,13 +20,13 @@ TrainConvNet = {
MySubSampleBN (x, depth, stride) =
{
s = Splice ((MaxPoolingLayer {(1:1), stride = (stride:stride)} (x) : ConstantTensor (0, (1:1:depth/stride))), axis = 3) # sub-sample and pad: [W x H x depth/2] --> [W/2 x H/2 x depth]
- b = BatchNormalizationLayer {spatialRank = 2, normalizationTimeConstant = 4096} (s)
+ b = BatchNormalizationLayer {spatialRank = 2} (s)
}.b
MyConvBN (x, depth, initValueScale, stride) =
{
c = ConvolutionalLayer {depth, (3:3), pad = true, stride = (stride:stride), bias = false,
init = "gaussian", initValueScale = initValueScale} (x)
- b = BatchNormalizationLayer {spatialRank = 2, normalizationTimeConstant = 4096} (c)
+ b = BatchNormalizationLayer {spatialRank = 2} (c)
}.b
ResNetNode (x, depth) =
{
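For orientation, MySubSampleBN builds the shortcut branch of a strided ResNet node: it sub-samples the input spatially and pads the channel axis with zero feature maps, as the inline comment "[W x H x depth/2] --> [W/2 x H/2 x depth]" states, so that its shape matches the strided convolution path it is combined with. A worked shape trace under assumed values (a CIFAR-10-style [32 x 32 x 16] input with depth = 32 and stride = 2; the specific sizes are illustrative, not from the diff):

    # shortcut = MySubSampleBN (input, 32, 2)                 # input:                    [32 x 32 x 16]
    # MaxPoolingLayer {(1:1), stride = (2:2)} (input)         # sub-sample every 2nd px:  [16 x 16 x 16]
    # Splice (... : ConstantTensor (0, (1:1:16)), axis = 3)   # append 16 zero maps:      [16 x 16 x 32]
    # BatchNormalizationLayer {spatialRank = 2} (...)         # per-map BN, shape kept:   [16 x 16 x 32]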
4 changes: 2 additions & 2 deletions Tutorials/ImageHandsOn/ImageHandsOn_Solution5.cntk
@@ -20,13 +20,13 @@ TrainConvNet = {
MySubSampleBN (x, depth, stride) =
{
s = Splice ((MaxPoolingLayer {(1:1), stride = (stride:stride)} (x) : ConstantTensor (0, (1:1:depth/stride))), axis = 3) # sub-sample and pad: [W x H x depth/2] --> [W/2 x H/2 x depth]
- b = BatchNormalizationLayer {spatialRank = 2, normalizationTimeConstant = 4096} (s)
+ b = BatchNormalizationLayer {spatialRank = 2} (s)
}.b
MyConvBN (x, depth, initValueScale, stride) =
{
c = ConvolutionalLayer {depth, (3:3), pad = true, stride = (stride:stride), bias = false,
init = "gaussian", initValueScale = initValueScale} (x)
- b = BatchNormalizationLayer {spatialRank = 2, normalizationTimeConstant = 4096} (c)
+ b = BatchNormalizationLayer {spatialRank = 2} (c)
}.b
ResNetNode (x, depth) =
{
4 changes: 2 additions & 2 deletions Tutorials/ImageHandsOn/ImageHandsOn_Task4_Start.cntk
@@ -20,13 +20,13 @@ TrainConvNet = {
MySubSampleBN (x, depth, stride) =
{
s = Splice ((MaxPoolingLayer {(1:1), stride = (stride:stride)} (x) : ConstantTensor (0, (1:1:depth/stride))), axis = 3) # sub-sample and pad: [W x H x depth/2] --> [W/2 x H/2 x depth]
- b = BatchNormalizationLayer {spatialRank = 2, normalizationTimeConstant = 4096} (s)
+ b = BatchNormalizationLayer {spatialRank = 2} (s)
}.b
MyConvBN (x, depth, initValueScale, stride) =
{
c = ConvolutionalLayer {depth, (3:3), pad = true, stride = (stride:stride), bias = false,
init = "gaussian", initValueScale = initValueScale} (x)
- b = BatchNormalizationLayer {spatialRank = 2, normalizationTimeConstant = 4096} (c)
+ b = BatchNormalizationLayer {spatialRank = 2} (c)
}.b
ResNetNode (x, depth) =
{
18 changes: 9 additions & 9 deletions Tutorials/ImageHandsOn/ImageHandsOn_Task6.cntk
@@ -20,13 +20,13 @@ TrainConvNet = {
MySubSampleBN (x, depth, stride) =
{
s = Splice ((MaxPoolingLayer {(1:1), stride = (stride:stride)} (x) : ConstantTensor (0, (1:1:depth/stride))), axis = 3) # sub-sample and pad: [W x H x depth/2] --> [W/2 x H/2 x depth]
- b = BatchNormalizationLayer {spatialRank = 2, normalizationTimeConstant = 4096} (s)
+ b = BatchNormalizationLayer {spatialRank = 2} (s)
}.b
MyConvBN (x, depth, initValueScale, stride) =
{
c = ConvolutionalLayer {depth, (3:3), pad = true, stride = (stride:stride), bias = false,
init = "gaussian", initValueScale = initValueScale} (x)
- b = BatchNormalizationLayer {spatialRank = 2, normalizationTimeConstant = 4096} (c)
+ b = BatchNormalizationLayer {spatialRank = 2} (c)
}.b
ResNetNode (x, depth) =
{
@@ -99,14 +99,14 @@ TrainConvNet = {
parallelizationMethod = "DataParallelSGD"
parallelizationStartEpoch = 1
distributedMBReading = true
- dataParallelSGD = { gradientBits = 64 }
+ dataParallelSGD = { gradientBits = 2 }
}
- AutoAdjust = {
- autoAdjustMinibatch = true # enable automatic growing of minibatch size
- minibatchSizeTuningFrequency = 10 # try to enlarge after this many epochs
- numMiniBatch4LRSearch = 200
- minibatchSizeTuningMax = 15000 # out of memory above this
- }
+ #AutoAdjust = {
+ # autoAdjustMinibatch = true # enable automatic growing of minibatch size
+ # minibatchSizeTuningFrequency = 5 # try to enlarge after this many epochs
+ # numMiniBatch4LRSearch = 500
+ # minibatchSizeTuningMax = 15000 # out of memory above this
+ #}
}

reader = {
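The second hunk in this file switches the data-parallel SGD block from an effectively unquantized 64-bit gradient exchange to 2-bit quantization. Those settings are restated below with explanatory comments; the comments are added here for illustration and are not part of the file, and quantized exchange (gradientBits below 32) typically requires a CNTK build that includes the 1-bit-SGD component:

    parallelizationMethod = "DataParallelSGD"  # aggregate gradients across all workers every minibatch
    parallelizationStartEpoch = 1              # parallelize from the first epoch onwards
    distributedMBReading = true                # each worker reads only its own slice of every minibatch
    dataParallelSGD = { gradientBits = 2 }     # quantize exchanged gradients to 2 bits; the quantization
                                               # error is carried over into the next minibatch

Such a configuration is usually launched through MPI, for example mpiexec -n 4 cntk configFile=ImageHandsOn_Task6.cntk, where the worker count of 4 is shown only for illustration.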
