addressed review feedback
frankseide committed Aug 8, 2016
1 parent d6fb378 commit 595c7ab
Showing 4 changed files with 13 additions and 9 deletions.
1 change: 1 addition & 0 deletions .gitattributes
@@ -6,6 +6,7 @@ Dockerfile-GPU text
 *.counts text
 *.labels text
 *.feats text
+*.ctf text
 *.post text
 *.cpu text
 *.gpu text
6 changes: 4 additions & 2 deletions Source/CNTK/BrainScript/BrainScriptParser.cpp
@@ -666,7 +666,9 @@ class Parser : public Lexer
         return id;
     }

-    map<wstring, int> infixPrecedence; // precedence level of infix operators
+    map<wstring, int> infixPrecedence;     // precedence level of infix operators
+    static const int unaryPrecedence = 90; // for unary "-" and "!". 90 is below x., x[, x(, and x{, but above all others
+    // TODO: Would be more direct to fold this into the table below as well.
 public:
     Parser(SourceFile&& sourceFile, vector<wstring>&& includePaths)
         : Lexer(move(includePaths))
@@ -719,7 +721,7 @@ class Parser : public Lexer
         {
             operand = make_shared<Expression>(tok.beginLocation, tok.symbol + L"("); // encoded as +( -( !(
             ConsumeToken();
-            operand->args.push_back(ParseExpression(90, stopAtNewline)); // 90 is below x., x[, x(, and x{, but above all others
+            operand->args.push_back(ParseExpression(unaryPrecedence, stopAtNewline));
         }
         else if (tok.symbol == L"new") // === new class instance
         {
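For context, a hedged sketch of what the new unaryPrecedence constant means at the BrainScript level; the expressions and names below are illustrative only, not taken from this commit:

a = -m.w         # parses as -(m.w):   '.', '[', '(' and '{' bind tighter than unary '-'
b = -2 * 3       # parses as (-2) * 3: unary '-' binds tighter than every infix operator
c = !flag && ok  # parses as (!flag) && ok, for the same reason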
13 changes: 7 additions & 6 deletions Source/CNTK/BrainScript/CNTKCoreLib/CNTK.core.bs
@@ -150,10 +150,10 @@ StabilizerLayer {} =
     f(x) = Stabilize (x)
 }.f

-# CorpusNormalizationLayer -- create a corpus-level feature-normalization layer
+# FeatureMVNLayer -- create a corpus-level feature-normalization layer
 # This can only be applied to features. Statistics are not shared across invocations,
 # which is semantically OK because the values are the same. However, it is not efficient.
-CorpusNormalizationLayer {} = MeanVarNorm
+FeatureMVNLayer {} = MeanVarNorm

 # Layers that exist in other tools that we will not have:
 # FlattenLayer{}: Not needed since DenseLayer() can handle tensors just fine.
@@ -202,8 +202,8 @@ Log = CNTK2.Log
 Minus = CNTK2.Minus
 Pass = CNTK2.Identity
 Plus = CNTK2.Plus
-RectifiedLinear = CNTK2.Relu
-ReLU = CNTK2.Relu
+RectifiedLinear = CNTK2.ReLU # deprecated
+ReLU = CNTK2.ReLU
 ReduceSum = CNTK2.ReduceSum
 ReduceLogSum = CNTK2.ReduceLogSum
 ReduceMin = CNTK2.ReduceMin
@@ -315,8 +315,9 @@ CNTK2 = [
     PastValue(_, shape, timeStep = 1, defaultHiddenActivation = 0.1, tag='') = new ComputationNode [ operation = 'PastValue' ; inputs = _ ; shape = new TensorShape [ /*shape*/ ] /*plus the function args*/ ]
     // 10. NN-specific operations
-    // Changes: input -> _, RectifiedLinear -> Relu. [Use Relu to arrive at relu() in snake_case]
-    Relu(_, tag='') = new ComputationNode [ operation = 'RectifiedLinear' ; inputs = _ /*plus the function args*/ ]
+    // Changes: input -> _, RectifiedLinear -> ReLU
+    ReLU(_, tag='') = new ComputationNode [ operation = 'RectifiedLinear' ; inputs = _ /*plus the function args*/ ]
+    Relu = ReLU // [Use Relu to arrive at relu() in snake_case]
     Sigmoid(_, tag='') = new ComputationNode [ operation = 'Sigmoid' ; inputs = _ /*plus the function args*/ ]
     Softmax(_, tag='') = new ComputationNode [ operation = 'Softmax' ; inputs = _ /*plus the function args*/ ]
     Dropout(_, tag='') = new ComputationNode [ operation = 'Dropout' ; inputs = _ /*plus the function args*/ ]
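A hedged sketch of how the spellings relate after this rename; W, b, and x are assumed to be defined elsewhere, and all of these resolve to the same 'RectifiedLinear' node:

h1 = ReLU (W * x + b)             # preferred spelling
h2 = RectifiedLinear (W * x + b)  # deprecated alias, kept for backward compatibility
# CNTK2.Relu remains an alias of CNTK2.ReLU so that a snake_case relu() binding can still be derived from it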
2 changes: 1 addition & 1 deletion Tests/EndToEndTests/Speech/QuickE2E/cntk.cntk
@@ -46,7 +46,7 @@ speechTrain = [

     # --- define the model
     model = Sequential (
-        CorpusNormalizationLayer{} :
+        FeatureMVNLayer{} :
         DenseLayer {H, activation=Sigmoid} :
         DenseLayer {H, activation=Sigmoid} :
         DenseLayer {J}
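A hedged sketch of how such a Sequential model is typically wired up; the Input dimension featDim is an assumption, and H and J follow the snippet above:

features = Input {featDim}               # acoustic feature vector; featDim is assumed
model = Sequential (
    FeatureMVNLayer{} :                  # corpus-level mean/variance normalization of the input features
    DenseLayer {H, activation=Sigmoid} :
    DenseLayer {H, activation=Sigmoid} :
    DenseLayer {J}                       # J = number of output classes
)
z = model (features)                     # network output, fed to the training criterion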
