Skip to content

Commit

Permalink
Merge branch 'master' of https://github.com/Microsoft/CNTK
Browse files Browse the repository at this point in the history
  • Loading branch information
Dong Yu committed Feb 11, 2016
2 parents cb5529a + 7652ef0 commit f05935c
Show file tree
Hide file tree
Showing 108 changed files with 1,467 additions and 1,607 deletions.
1 change: 1 addition & 0 deletions .gitattributes
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,7 @@ Makefile text
*.vcxproj.filters text
*.vssettings text
*.csproj text
*.props text

*.h text
*.cpp text
Expand Down
23 changes: 23 additions & 0 deletions CNTK.Cpp.props
Original file line number Diff line number Diff line change
@@ -0,0 +1,23 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- Shared C++ property sheet for CNTK projects: derives output paths and
     Debug/Release and GPU/CPU-only flags from $(Platform)/$(Configuration). -->
<Project ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
<PropertyGroup>
<!-- Note: SolutionDir / RepoRootPath are the same in current setup -->
<RepoRootPath>$(MSBuildThisFileDirectory)</RepoRootPath>
<!-- Project directory relative to the repo root; keeps each project's
     intermediates separated under the shared .build tree below. -->
<RelativeProjectPath>$(MSBuildProjectDirectory.Substring($(MSBuildThisFileDirectory.Length)))</RelativeProjectPath>

<!-- Final binaries go to <repo>\<Platform>\<Configuration>\ ;
     intermediates to <repo>\<Platform>\.build\<Configuration>\<project>\ -->
<OutDir>$(RepoRootPath)$(Platform)\$(Configuration)\</OutDir>
<IntDir>$(RepoRootPath)$(Platform)\.build\$(Configuration)\$(RelativeProjectPath)\</IntDir>

<!-- A configuration whose name starts with 'Debug' is a debug build;
     every other configuration counts as a release build. -->
<DebugBuild>false</DebugBuild>
<DebugBuild Condition="$(Configuration.StartsWith('Debug'))">true</DebugBuild>

<ReleaseBuild>false</ReleaseBuild>
<ReleaseBuild Condition="!$(DebugBuild)">true</ReleaseBuild>

<!-- Configurations whose name ends in '_CpuOnly' build without GPU support. -->
<GpuBuild>true</GpuBuild>
<GpuBuild Condition="$(Configuration.EndsWith('_CpuOnly'))">false</GpuBuild>

<CpuOnlyBuild>true</CpuOnlyBuild>
<CpuOnlyBuild Condition="$(GpuBuild)">false</CpuOnlyBuild>
</PropertyGroup>
</Project>
14 changes: 5 additions & 9 deletions Examples/Image/MNIST/Config/01_OneHidden.cntk
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@ imageLayout = "cudnn"
# deviceId = -1
# imageLayout = "legacy"

command = train:test
command = MNISTtrain:MNISTtest

precision = "float"
modelPath = "$ModelDir$/01_OneHidden"
Expand All @@ -30,7 +30,7 @@ numMBsToShowResult=500
# TRAINING CONFIG #
#######################################

train = [
MNISTtrain = [
action = "train"

NDLNetworkBuilder = [
Expand All @@ -47,6 +47,8 @@ train = [

reader = [
readerType = "UCIFastReader"
# To get the data (Train-28x28.txt) please run `python mnist_convert.py`
# from the 'AdditionalFiles' folder. See README.md for details.
file = "$DataDir$/Train-28x28.txt"

features = [
Expand All @@ -67,18 +69,12 @@ train = [
# TEST CONFIG #
#######################################

test = [
MNISTtest = [
action = "test"
minibatchSize = 16

NDLNetworkBuilder=[
networkDescription = "$ConfigDir$/01_OneHidden.ndl"
]

reader = [
readerType = "UCIFastReader"
# To get the data (Train-28x28.txt) please run `python mnist_convert.py`
# from the 'AdditionalFiles' folder. See README.md for details.
file = "$DataDir$/Test-28x28.txt"

features = [
Expand Down
18 changes: 16 additions & 2 deletions Examples/Image/MNIST/Config/03_ConvBatchNorm.cntk
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@ imageLayout = "cudnn"
# deviceId = -1
# imageLayout = "legacy"

command = train:test
command = train:CreateEvalModel:test

precision = "float"
modelPath = "$ModelDir$/03_ConvBatchNorm"
Expand All @@ -40,7 +40,7 @@ train = [
minibatchSize = 32
learningRatesPerMB = 0.5
momentumPerMB = 0*10:0.7
maxEpochs = 8
maxEpochs = 2
]

reader = [
Expand All @@ -63,12 +63,26 @@ train = [
]
]

#######################################
# Edit model #
#######################################

CreateEvalModel=[
action=edit
CurModel=$ModelDir$/03_ConvBatchNorm
NewModel=$ModelDir$/03_ConvBatchNorm.Eval
editPath=$ConfigDir$/03_ConvBatchNorm.mel
]

#######################################
# TEST CONFIG #
#######################################

test = [
action = "test"
minibatchSize = 32

modelPath=$ModelDir$/03_ConvBatchNorm.Eval

NDLNetworkBuilder = [
networkDescription = "$ConfigDir$/03_ConvBatchNorm.ndl"
Expand Down
6 changes: 6 additions & 0 deletions Examples/Image/MNIST/Config/03_ConvBatchNorm.mel
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
# Model-editing script (invoked by the CreateEvalModel step of
# 03_ConvBatchNorm.cntk): loads the trained model, prepares it for
# evaluation, and saves it under a new name.
m=LoadModel($CurModel$, format=cntk)
SetDefaultModel(m)

# Set batchNormEvalMode=true on every node in the subtree rooted at CE,
# switching batch-normalization nodes to evaluation mode (presumably so they
# use accumulated statistics instead of per-minibatch ones -- TODO confirm).
SetPropertyForSubTree(CE, batchNormEvalMode, true)

SaveModel(m, $NewModel$, format=cntk)
24 changes: 16 additions & 8 deletions Examples/Image/MNIST/Config/03_ConvBatchNorm.ndl
Original file line number Diff line number Diff line change
Expand Up @@ -9,10 +9,16 @@ ndlMnistMacros = [
imageH = 28
labelDim = 10

features = ImageInput(imageW, imageH, 1)
features = ImageInput(imageW, imageH, 1, imageLayout=$imageLayout$)
featScale = Const(0.00390625)
featScaled = Scale(featScale, features)
labels = Input(labelDim)

scValue = 1
expAvg = 1

convWScale = 10
convBValue = 0
]

DNN = [
Expand All @@ -23,15 +29,15 @@ DNN = [
hStride1 = 1
vStride1 = 1
# weight[cMap1, kW1 * kH1 * inputChannels]
# ConvReLUBNLayer is defined in Macros.ndl
conv1 = ConvReLUBNLayer(featScaled, cMap1, 25, kW1, kH1, hStride1, vStride1, 10)
# ConvBNReLULayer is defined in Macros.ndl
conv1 = ConvBNReLULayer(featScaled, cMap1, 25, kW1, kH1, hStride1, vStride1, convWScale, convBValue, scValue, expAvg)

# pool1
pool1W = 2
pool1H = 2
pool1hStride = 2
pool1vStride = 2
pool1 = MaxPooling(conv1, pool1W, pool1H, pool1hStride, pool1vStride)
pool1 = MaxPooling(conv1, pool1W, pool1H, pool1hStride, pool1vStride, imageLayout=$imageLayout$)

# conv2
kW2 = 5
Expand All @@ -40,18 +46,20 @@ DNN = [
hStride2 = 1
vStride2 = 1
# weight[cMap2, kW2 * kH2 * cMap1]
conv2 = ConvReLUBNLayer(pool1, cMap2, 400, kW2, kH2, hStride2, vStride2, 10)
conv2 = ConvBNReLULayer(pool1, cMap2, 400, kW2, kH2, hStride2, vStride2, convWScale, convBValue, scValue, expAvg)

# pool2
pool2W = 2
pool2H = 2
pool2hStride = 2
pool2vStride = 2
pool2 = MaxPooling(conv2, pool2W, pool2H, pool2hStride, pool2vStride)
pool2 = MaxPooling(conv2, pool2W, pool2H, pool2hStride, pool2vStride, imageLayout=$imageLayout$)

h1Dim = 128
# DnnBNSigmoidLayer is defined in Macros.ndl
h1 = DnnBNSigmoidLayer(512, h1Dim, pool2, 1)
fcWScale = 1
fcBValue = 0
# DnnBNReLULayer is defined in Macros.ndl
h1 = DnnBNReLULayer(1568, h1Dim, pool2, fcWScale, fcBValue, scValue, expAvg)
ol = DNNLayer(h1Dim, labelDim, h1, 1)

ce = CrossEntropyWithSoftmax(labels, ol)
Expand Down
31 changes: 30 additions & 1 deletion Examples/Image/MNIST/Config/Macros.ndl
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,17 @@ DNNLayer(inDim, outDim, x, parmScale) = [
z = Plus(t, b)
]

# Fully-connected layer followed by batch normalization and ReLU:
# y = ReLU(BN(W * x)). wScale is the Gaussian init scale for W; bValue and
# scValue initialize the BN bias and scale; expAvg is the BN expAvgFactor.
DnnBNReLULayer(inDim, outDim, x, wScale, bValue, scValue, expAvg) = [
W = Parameter(outDim, inDim, init = Gaussian, initValueScale = wScale)
b = Parameter(outDim, 1, init = fixedValue, value = bValue)
sc = Parameter(outDim, 1, init = fixedValue, value = scValue)
# m/isd: batch-norm statistics parameters, excluded from gradient updates
# (needGradient = false); presumably running mean and inverse stddev -- TODO confirm.
m = Parameter(outDim, 1, init = fixedValue, value = 0, needGradient = false)
isd = Parameter(outDim, 1, init = fixedValue, value = 0, needGradient = false)
t = Times(W, x)
bn = BatchNormalization(t, sc, b, m, isd, eval = false, spatial = false, expAvgFactor = expAvg)
y = RectifiedLinear(bn)
]

ConvReLULayer(inp, outMap, inWCount, kW, kH, hStride, vStride, wScale, bValue) = [
convW = Parameter(outMap, inWCount, init="uniform", initValueScale=wScale)
convB = ImageParameter(1, 1, outMap, init="fixedValue", value=bValue, imageLayout=$imageLayout$)
Expand All @@ -21,4 +32,22 @@ ConvReLULayer(inp, outMap, inWCount, kW, kH, hStride, vStride, wScale, bValue) =
act = RectifiedLinear(convPlusB);
]

# BUGBUG: need to re-add DnnBNSigmoidLayer macro
# Convolution followed by spatial batch normalization; the convolution weight
# matrix W is supplied by the caller rather than created here.
ConvBNLayerW(W, inp, outMap, kW, kH, hStride, vStride, bValue, scValue, expAvg) = [
b = Parameter(outMap, 1, init=fixedValue, value=bValue)
sc = Parameter(outMap, 1, init=fixedValue, value=scValue)
# BN statistics parameters, excluded from gradient updates (needGradient=false).
m = Parameter(outMap, 1, init=fixedValue, value=0, needGradient=false)
isd = Parameter(outMap, 1, init=fixedValue, value=0, needGradient=false)

c = Convolution(W, inp, kW, kH, outMap, hStride, vStride, zeroPadding=true, imageLayout=$imageLayout$)
y = BatchNormalization(c, sc, b, m, isd, eval=false, spatial=true, expAvgFactor=expAvg, imageLayout=$imageLayout$)
]

# Convolution + batch normalization: creates a Gaussian-initialized weight
# matrix W and delegates to ConvBNLayerW.
ConvBNLayer(inp, outMap, inWCount, kW, kH, hStride, vStride, wScale, bValue, scValue, expAvg) = [
W = Parameter(outMap, inWCount, init=Gaussian, initValueScale=wScale)
c = ConvBNLayerW(W, inp, outMap, kW, kH, hStride, vStride, bValue, scValue, expAvg)
]

# Convolution + batch normalization + ReLU activation.
ConvBNReLULayer(inp, outMap, inWCount, kW, kH, hStride, vStride, wScale, bValue, scValue, expAvg) = [
c = ConvBNLayer(inp, outMap, inWCount, kW, kH, hStride, vStride, wScale, bValue, scValue, expAvg)
y = RectifiedLinear(c)
]
5 changes: 2 additions & 3 deletions Examples/Image/MNIST/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -69,9 +69,8 @@ To run the sample, navigate to the Data folder and run the following command:
`cntk configFile=../Config/02_Convolution.cntk`

3. 03_ConvBatchNorm.ndl is almost identical to 02_Convolution.ndl
except that it uses batch normalization for the fully connected layer h1.
Note that batch normalization is implemented using just NDL (see Macros.ndl for details).
As a result, it uses less epochs (8 vs 15 in 02_Convolution) to achieve the same accuracy.
except that it uses batch normalization for the convolutional and fully connected layers.
As a result, it achieves an error rate of around 0.92% after training for just 2 epochs (in less than 30 seconds).
To run the sample, navigate to the Data folder and run the following command:
`cntk configFile=../Config/03_ConvBatchNorm.cntk`

Expand Down
12 changes: 12 additions & 0 deletions Examples/Image/Miscellaneous/ImageNet/ResNet/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,12 @@
# CNTK example: ImageNet ResNet

**Disclaimer: network configurations and experiment settings in this folder try to follow those published in the [ResNet paper](http://arxiv.org/abs/1512.03385) as closely as possible. However, these samples are NOT endorsed or verified by the researchers who published the original work. It is NOT guaranteed that you will get the same (or even close) results as those in the paper.**

## Overview

|Data: |The ILSVRC2012 dataset (http://www.image-net.org/challenges/LSVRC/2012/) of images.
|:---------|:---
|Purpose |This example demonstrates usage of the NDL (Network Description Language) to define networks similar to ResNet.
|Network |NDLNetworkBuilder, deep convolutional networks resembling ResNet networks.
|Training |Stochastic gradient descent with momentum.

10 changes: 5 additions & 5 deletions Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -395,13 +395,13 @@ SPARSEPCREADER_SRC =\
$(SOURCEDIR)/Readers/SparsePCReader/Exports.cpp \
$(SOURCEDIR)/Readers/SparsePCReader/SparsePCReader.cpp \
LIBSPARSEPCREADER_OBJ := $(patsubst %.cpp, $(OBJDIR)/%.o, $(LIBSPARCEPCREADER_SRC))
SPARSEPCREADER_OBJ := $(patsubst %.cpp, $(OBJDIR)/%.o, $(SPARSEPCREADER_SRC))
LIBSPARSEPCREADER:=$(LIBDIR)/SparsePCReader.so
ALL += $(LIBSPARSEPCREADER)
SRC+=$(LIBSPARSEPCREADER_SRC)
SPARSEPCREADER:=$(LIBDIR)/SparsePCReader.so
ALL += $(SPARSEPCREADER)
SRC+=$(SPARSEPCREADER_SRC)
$(LIBSPARSEPCREADER): $(LIBSPARSEPCREADER_OBJ) | $(CNTKMATH_LIB)
$(SPARSEPCREADER): $(SPARSEPCREADER_OBJ) | $(CNTKMATH_LIB)
@echo $(SEPARATOR)
$(CXX) $(LDFLAGS) -shared $(patsubst %,-L%, $(LIBDIR) $(LIBPATH)) $(patsubst %,$(RPATH)%, $(ORIGINDIR) $(LIBPATH)) -o $@ $^ -l$(CNTKMATH)
Expand Down
Loading

0 comments on commit f05935c

Please sign in to comment.