Skip to content

Commit

Permalink
Merge remote-tracking branch 'cntk/master'
Browse files Browse the repository at this point in the history
  • Loading branch information
diyessi committed Aug 24, 2015
2 parents e896c41 + 4124018 commit c066e48
Show file tree
Hide file tree
Showing 44 changed files with 4,015 additions and 2,139 deletions.
1 change: 1 addition & 0 deletions .gitattributes
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
run-test text eol=lf
561 changes: 296 additions & 265 deletions CNTK.sln

Large diffs are not rendered by default.

Empty file modified DataReader/LMSequenceReader/SequenceReader.cpp
100755 → 100644
Empty file.
66 changes: 66 additions & 0 deletions ExampleSetups/Image/MNIST/01_OneHidden.config
Original file line number Diff line number Diff line change
@@ -0,0 +1,66 @@
WorkDir=.
ModelDir=$WorkDir$\_out\$ConfigName$
stderr=$WorkDir$\_out\$ConfigName$

ndlMacros=$WorkDir$\Macros.ndl

precision=float
deviceId=Auto

command=Train:Test

Train=[
action=train
modelPath=$ModelDir$\01_OneHidden

NDLNetworkBuilder=[
networkDescription=$WorkDir$\01_OneHidden.ndl
]

SGD=[
epochSize=60000
minibatchSize=32
learningRatesPerMB=0.1
momentumPerMB=0
maxEpochs=30
]

reader=[
readerType=UCIFastReader
file=$WorkDir$\Train-28x28.txt
features=[
dim=784
start=1
]
labels=[
dim=1
start=0
labelDim=10
labelMappingFile=$WorkDir$\labelsmap.txt
]
]
]

Test=[
action=test
modelPath=$ModelDir$\01_OneHidden

NDLNetworkBuilder=[
networkDescription=$WorkDir$\01_OneHidden.ndl
]

reader=[
readerType=UCIFastReader
file=$WorkDir$\Test-28x28.txt
features=[
dim=784
start=1
]
labels=[
dim=1
start=0
labelDim=10
labelMappingFile=$WorkDir$\labelsmap.txt
]
]
]
24 changes: 24 additions & 0 deletions ExampleSetups/Image/MNIST/01_OneHidden.ndl
Original file line number Diff line number Diff line change
@@ -0,0 +1,24 @@
load=ndlMnistMacros
run=DNN

ndlMnistMacros = [
FeatDim = 784
LabelDim = 10

features = Input(FeatDim, tag = feature)
featScale = Const(0.00390625)
featScaled = Scale(featScale, features)
labels = Input(LabelDim, tag = label)
]

DNN=[
hiddenDim = 200

h1=DNNLayer(FeatDim, hiddenDim, featScaled, 1)
ol=DNNLastLayer(labelDim, hiddenDim, h1, 1)

CE = CrossEntropyWithSoftmax(labels, ol, tag = Criteria)
Err = ErrorPrediction(labels, ol, tag = Eval)
OutputNodes = ol
]

66 changes: 66 additions & 0 deletions ExampleSetups/Image/MNIST/02_Conv.config
Original file line number Diff line number Diff line change
@@ -0,0 +1,66 @@
WorkDir=.
ModelDir=$WorkDir$\_out\$ConfigName$
stderr=$WorkDir$\_out\$ConfigName$

ndlMacros=$WorkDir$\Macros.ndl

precision=float
deviceId=Auto

command=Train:Test

Train=[
action=train
modelPath=$ModelDir$\02_Convolution

NDLNetworkBuilder=[
networkDescription=$WorkDir$\02_Convolution.ndl
]

SGD=[
epochSize=60000
minibatchSize=32
learningRatesPerMB=0.5
momentumPerMB=0*10:0.7
maxEpochs=20
]

reader=[
readerType=UCIFastReader
file=$WorkDir$\Train-28x28.txt
features=[
dim=784
start=1
]
labels=[
dim=1
start=0
labelDim=10
labelMappingFile=$WorkDir$\labelsmap.txt
]
]
]

Test=[
action=test
modelPath=$ModelDir$\02_Convolution

NDLNetworkBuilder=[
networkDescription=$WorkDir$\02_Convolution.ndl
]

reader=[
readerType=UCIFastReader
file=$WorkDir$\Test-28x28.txt
features=[
dim=784
start=1
]
labels=[
dim=1
start=0
labelDim=10
labelMappingFile=$WorkDir$\labelsmap.txt
]
]
]
63 changes: 63 additions & 0 deletions ExampleSetups/Image/MNIST/02_Convolution.ndl
Original file line number Diff line number Diff line change
@@ -0,0 +1,63 @@
load=ndlMnistMacros
run=DNN

ndlMnistMacros = [
ImageW = 28
ImageH = 28
LabelDim = 10

features = ImageInput(ImageW, ImageH, 1, tag = feature)
featScale = Const(0.00390625)
featScaled = Scale(featScale, features)
labels = Input(LabelDim, tag = label)
]

DNN=[
# conv1
kW1 = 5
kH1 = 5
cMap1 = 16
hStride1 = 1
vStride1 = 1
# weight[cMap1, kW1 * kH1 * inputChannels]
conv1W = Parameter(cMap1, 25, init = Uniform, initValueScale = 10)
conv1 = Convolution(conv1W, featScaled, kW1, kH1, cMap1, hStride1, vStride1, zeroPadding = false)
conv1bias = Parameter(cMap1, 1)
conv1plusbias = Plus(conv1, conv1bias);
conv1_act = Sigmoid(conv1plusbias);

# pool1
pool1W = 2
pool1H = 2
pool1hStride = 2
pool1vStride = 2
pool1 = MaxPooling(conv1_act, pool1W, pool1H, pool1hStride, pool1vStride)

# conv2
kW2 = 5
kH2 = 5
cMap2 = 32
hStride2 = 1
vStride2 = 1
# weight[cMap2, kW2 * kH2 * cMap1]
conv2W = Parameter(cMap2, 400, initValueScale = 10)
conv2 = Convolution(conv2W, pool1, kW2, kH2, cMap2, hStride2, vStride2, zeroPadding = false)
conv2bias = Parameter(cMap2, 1)
conv2plusbias = Plus(conv2, conv2bias);
conv2_act = Sigmoid(conv2plusbias);

# pool2
pool2W = 2
pool2H = 2
pool2hStride = 2
pool2vStride = 2
pool2 = MaxPooling(conv2_act, pool2W, pool2H, pool2hStride, pool2vStride)

h1 = DNNLayer(512, 128, pool2, 1)
ol = DNNLastLayer(labelDim, 128, h1, 1)

CE = CrossEntropyWithSoftmax(labels, ol, tag = Criteria)
Err = ErrorPrediction(labels, ol, tag = Eval)
OutputNodes = ol
]

16 changes: 16 additions & 0 deletions ExampleSetups/Image/MNIST/Macros.ndl
Original file line number Diff line number Diff line change
@@ -0,0 +1,16 @@
# DNNLayer: fully-connected layer with a sigmoid nonlinearity, y = sigmoid(W*x + b).
#   inDim     - dimension of the input x
#   outDim    - number of output units
#   x         - input node
#   parmScale - scale for the uniform random initialization of W and b
# The last assignment (y) is the macro's return value.
DNNLayer(inDim, outDim, x, parmScale)
{
W = Parameter(outDim, inDim, init = Uniform, initValueScale = parmScale)
b = Parameter(outDim, init = Uniform, initValueScale = parmScale)
t = Times(W, x)
z = Plus(t, b)
# NOTE(review): 'sigmoid' is lower-case here while other NDL names are
# capitalized — presumably NDL identifiers are case-insensitive; confirm.
y = sigmoid(z)
}

# DNNLastLayer: final affine layer, z = W*x + b, with NO nonlinearity.
#   LabelDim  - number of output classes
#   hiddenDim - dimension of the input x
#   x         - input node
#   parmScale - scale for the uniform random initialization of W and b
# The softmax is applied by the CrossEntropyWithSoftmax criterion node in the
# calling network, so this macro deliberately stops at the linear output z.
DNNLastLayer(LabelDim, hiddenDim, x, parmScale)
{
W = Parameter(LabelDim, hiddenDim, init = Uniform, initValueScale = parmScale)
b = Parameter(LabelDim, init = Uniform, initValueScale = parmScale)
t = Times(W, x)
z = Plus(t, b)
}
12 changes: 12 additions & 0 deletions ExampleSetups/Image/MNIST/labelsmap.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1,12 @@
0
1
2
3
4
5
6
7
8
9


66 changes: 66 additions & 0 deletions ExampleSetups/Image/MNIST/mnist_convert.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,66 @@
import sys
import urllib
import gzip
import shutil
import os
import struct
import numpy as np

def loadData(src, cimg):
    # Download the gzipped MNIST IDX3 image file from 'src', validate its
    # header, and return the pixels as a (cimg, rows*cols) uint8 array.
    #   src  - URL of the .gz image file
    #   cimg - expected number of images (raises if the header disagrees)
    print('Downloading ' + src)
    gzfname, h = urllib.urlretrieve(src, './delete.me')
    print('Done.')
    try:
        with gzip.open(gzfname) as gz:
            # Read magic number. The IDX format is big-endian; 0x803 marks an
            # image (IDX3) file. Reading with '>I' keeps this check correct on
            # big-endian hosts too (the original native-endian read compared
            # against the byte-swapped constant 0x3080000).
            n = struct.unpack('>I', gz.read(4))[0]
            if n != 0x803:
                raise Exception('Invalid file: unexpected magic number.')
            # Read number of entries.
            n = struct.unpack('>I', gz.read(4))[0]
            if n != cimg:
                raise Exception('Invalid file: expected {0} entries.'.format(cimg))
            crow = struct.unpack('>I', gz.read(4))[0]
            ccol = struct.unpack('>I', gz.read(4))[0]
            if crow != 28 or ccol != 28:
                raise Exception('Invalid file: expected 28 rows/cols per image.')
            # Read data. frombuffer replaces the deprecated np.fromstring.
            res = np.frombuffer(gz.read(cimg * crow * ccol), dtype=np.uint8)
    finally:
        # Always remove the temporary download, even if validation failed.
        os.remove(gzfname)
    return res.reshape((cimg, crow * ccol))

def loadLabels(src, cimg):
    # Download the gzipped MNIST IDX1 label file from 'src', validate its
    # header, and return the labels as a (cimg, 1) uint8 array.
    #   src  - URL of the .gz label file
    #   cimg - expected number of labels (raises if the header disagrees)
    print('Downloading ' + src)
    gzfname, h = urllib.urlretrieve(src, './delete.me')
    print('Done.')
    try:
        with gzip.open(gzfname) as gz:
            # Read magic number. The IDX format is big-endian; 0x801 marks a
            # label (IDX1) file. Reading with '>I' keeps this check correct on
            # big-endian hosts too (the original native-endian read compared
            # against the byte-swapped constant 0x1080000).
            n = struct.unpack('>I', gz.read(4))[0]
            if n != 0x801:
                raise Exception('Invalid file: unexpected magic number.')
            # Read number of entries.
            n = struct.unpack('>I', gz.read(4))[0]
            if n != cimg:
                raise Exception('Invalid file: expected {0} rows.'.format(cimg))
            # Read labels. frombuffer replaces the deprecated np.fromstring.
            res = np.frombuffer(gz.read(cimg), dtype=np.uint8)
    finally:
        # Always remove the temporary download, even if validation failed.
        os.remove(gzfname)
    return res.reshape((cimg, 1))


if __name__ == "__main__":
    # Download the MNIST train and test sets and convert each to a CNTK
    # UCIFastReader text file: one row per image, the label first, followed
    # by 784 tab-separated pixel values (matching start=0/start=1 in the
    # accompanying .config readers).
    # print(...) is used instead of the py2-only print statement so the
    # script also runs unchanged under Python 3 (single-argument form prints
    # identically on both).
    trnData = loadData('http://yann.lecun.com/exdb/mnist/train-images-idx3-ubyte.gz', 60000)
    trnLbl = loadLabels('http://yann.lecun.com/exdb/mnist/train-labels-idx1-ubyte.gz', 60000)
    # Prepend the label column to the pixel columns.
    trn = np.hstack((trnLbl, trnData))
    print('Writing train text file...')
    np.savetxt(r'./Train-28x28.txt', trn, fmt = '%u', delimiter='\t')
    print('Done.')
    testData = loadData('http://yann.lecun.com/exdb/mnist/t10k-images-idx3-ubyte.gz', 10000)
    testLbl = loadLabels('http://yann.lecun.com/exdb/mnist/t10k-labels-idx1-ubyte.gz', 10000)
    test = np.hstack((testLbl, testData))
    print('Writing test text file...')
    np.savetxt(r'./Test-28x28.txt', test, fmt = '%u', delimiter='\t')
    print('Done.')
17 changes: 17 additions & 0 deletions ExampleSetups/Image/MNIST/readme.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1,17 @@
This example demonstrates how to use NDL to train 2 neural networks on the MNIST dataset (http://yann.lecun.com/exdb/mnist/).
The MNIST dataset is not included in the CNTK distribution, but it can be easily downloaded and converted by running the following command from this folder:
python mnist_convert.py
The script will download all required files and convert them to CNTK-supported format.

Short description of the networks:

1. 01_OneHidden.ndl is a simple network with one hidden layer that achieves a 2.3% error rate.
To run the sample, navigate to this folder and run the following command:
<path to CNTK executable> configFile=01_OneHidden.config configName=01_OneHidden

2. 02_Convolution.ndl is a more interesting convolutional network, with 2 convolutional and 2 max-pooling layers. The network achieves a 0.87% error rate after training for about 2 minutes on a GPU.
To run the sample, navigate to this folder and run the following command:
<path to CNTK executable> configFile=02_Conv.config configName=02_Conv

For more details, refer to .ndl and corresponding .config files.

4 changes: 2 additions & 2 deletions MachineLearning/CNTK/CNTK.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -1475,8 +1475,8 @@ int wmain(int argc, wchar_t* argv[])
fprintf(fp, "successfully finished at %s on %s\n", TimeDateStamp().c_str(), GetHostName().c_str());
fcloseOrDie(fp);
}
fprintf(stderr, "COMPLETED\n");
}
fprintf(stderr, "COMPLETED\n"), fflush(stderr);
}
catch (const std::exception &err)
{
fprintf(stderr, "EXCEPTION occurred: %s\n", err.what());
Expand Down
Loading

0 comments on commit c066e48

Please sign in to comment.