Updating the config + baselines
Committed by eldakms on Feb 1, 2016
Commit da0f233 (1 parent: 7818cad)
Showing 6 changed files with 27,276 additions and 8,850 deletions.
Tests/EndToEndTests/Image/AlexNet/AlexNet.config (4 changes: 2 additions, 2 deletions)
@@ -25,7 +25,7 @@ Train=[
minibatchSize=8
learningRatesPerMB=0.01*20:0.003*12:0.001*28:0.0003
momentumPerMB=0.9
-maxEpochs=10
+maxEpochs=3
gradUpdateType=None
L2RegWeight=0.0005
dropoutRate=0*5:0.5
@@ -89,7 +89,7 @@ Test=[
action=test
modelPath=$ModelDir$/AlexNet.Top5
# Set minibatch size for testing.
-minibatchSize=128
+minibatchSize=8

NDLNetworkBuilder=[
networkDescription=$ConfigDir$/AlexNet.ndl
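The learningRatesPerMB and dropoutRate lines above use CNTK's schedule syntax: each value*count segment holds for that many epochs, and the final segment applies to all remaining epochs. The sketch below is only an illustrative parser for that reading of the syntax, not CNTK source; the function expand_schedule and its arguments are hypothetical names.

```python
# Illustrative sketch (not CNTK code): expand a CNTK-style schedule string
# such as "0.01*20:0.003*12:0.001*28:0.0003" into one value per epoch.
# Assumed reading: "value*count" holds for count epochs; a trailing value
# without a count applies to every remaining epoch.

def expand_schedule(spec, max_epochs):
    values = []
    for segment in spec.split(":"):
        if "*" in segment:
            value, count = segment.split("*")
            values.extend([float(value)] * int(count))
        else:
            values.append(float(segment))
    # Pad with the last value so every epoch up to max_epochs has a rate.
    while len(values) < max_epochs:
        values.append(values[-1])
    return values[:max_epochs]

# AlexNet.config schedule: 0.01 for epochs 1-20, 0.003 for 21-32,
# 0.001 for 33-60, then 0.0003 for the rest.
print(expand_schedule("0.01*20:0.003*12:0.001*28:0.0003", 70))
```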
Tests/EndToEndTests/Image/ResNet/ResNet_34.config (8 changes: 4 additions, 4 deletions)
@@ -9,7 +9,7 @@ command=Train:CreateEval:Test
parallelTrain=false

traceLevel=1
-numMBsToShowResult=500
+numMBsToShowResult=1

Proj64to128Filename = $ConfigDir$/64to128.txt
Proj128to256Filename = $ConfigDir$/128to256.txt
@@ -25,12 +25,12 @@ Train=[

SGD=[
epochSize=0
-minibatchSize=64
+minibatchSize=8
# Note that learning rates are 10x more than in the paper due to a different
# momentum update rule in CNTK: v{t + 1} = lr*(1 - momentum)*g{t + 1} + momentum*v{t}
learningRatesPerMB=1.0*35:0.1*35:0.01
momentumPerMB=0.9
-maxEpochs=125
+maxEpochs=3
gradUpdateType=None
L2RegWeight=0.0001
dropoutRate=0
@@ -92,7 +92,7 @@ Test=[
action=test
modelPath=$RunDir$/models/ResNet_34.Eval
# Set minibatch size for testing.
-minibatchSize=64
+minibatchSize=8

NDLNetworkBuilder=[
networkDescription=$ConfigDir$/ResNet_34.ndl
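The comment in the ResNet_34.config hunk above quotes CNTK's momentum update rule, v{t+1} = lr*(1 - momentum)*g{t+1} + momentum*v{t}, and notes that the configured learning rates are 10x those in the paper. The sketch below is a minimal numerical illustration of why, assuming classic momentum SGD (v{t+1} = lr*g{t+1} + momentum*v{t}) as the baseline used in the paper; variable names are illustrative and this is not CNTK source code.

```python
# Illustrative sketch: compare classic momentum SGD with the update rule
# quoted in the config comment. With momentum = 0.9, the extra (1 - momentum)
# factor scales the gradient by 0.1, so the configured rate must be 10x larger
# to produce the same step. lr_classic, lr_cntk, grad are assumed names.

momentum = 0.9
lr_classic = 0.1            # hypothetical rate for the classic rule
lr_cntk = lr_classic * 10   # 10x larger, as the config comment describes

grad = 1.0                  # constant gradient, just to compare steady states
v_classic = v_cntk = 0.0

for step in range(50):
    # Classic momentum: v <- momentum*v + lr*g
    v_classic = momentum * v_classic + lr_classic * grad
    # Rule from the config comment: v <- momentum*v + lr*(1 - momentum)*g
    v_cntk = momentum * v_cntk + lr_cntk * (1.0 - momentum) * grad

# With these settings the two updates are identical step by step and both
# approach lr_classic*grad/(1 - momentum) = 1.0.
print(v_classic, v_cntk)
```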
(Diffs for the remaining 4 changed files were not loaded in this view.)
