
Commit 778b900
further renaming:
GradientValues -> Gradient;
Output -> Value (Input(i)->Output didn't look good);
frankseide committed Dec 5, 2015
1 parent f0eed2f commit 778b900
Showing 27 changed files with 814 additions and 835 deletions.
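
For orientation, the rename amounts to the following accessor change on the computation-node class. A minimal stand-in sketch (the class and member names below are illustrative, not CNTK's actual ComputationNode; the diff shows m_output becoming m_value, while the gradient member's name is assumed):

#include <vector>

template <class ElemType>
struct Matrix { std::vector<ElemType> data; }; // toy stand-in for CNTK's Matrix<ElemType>

template <class ElemType>
class ToyNode // hypothetical stand-in for ComputationNode<ElemType>
{
public:
    Matrix<ElemType>& Value()    { return m_value; }    // was Output() (and FunctionValues() before that)
    Matrix<ElemType>& Gradient() { return m_gradient; } // was GradientValues()
private:
    Matrix<ElemType> m_value;    // forward result; was m_output
    Matrix<ElemType> m_gradient; // gradient accumulator; member name assumed
};

int main() { ToyNode<float> n; (void)n.Value(); (void)n.Gradient(); return 0; }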
2 changes: 1 addition & 1 deletion DataReader/LUSequenceReader/LUSequenceReader.h
@@ -114,7 +114,7 @@ class LUSequenceReader : public IDataReader<ElemType>

// feature and label data are parallel arrays
// The following two hold the actual MB data internally, created by EnsureDataAvailable().
-std::vector<std::vector<vector<LabelIdType>>> m_featureWordContext; // [parSeq + t * numParSeq] word n-tuple in order of storage in m_output matrix
+std::vector<std::vector<vector<LabelIdType>>> m_featureWordContext; // [parSeq + t * numParSeq] word n-tuple in order of storage in m_value matrix
std::vector<LabelIdType> m_labelIdData;

std::vector<ElemType> m_labelData;
@@ -3118,7 +3118,7 @@
The Validate function is used to validate the inputs and outputs of the
node.
It also sets the function value matrix's size and copies the image size
information from its inputs.
-Note that here Output() function returns the matrix that stores
+Note that here Value() function returns the matrix that stores
the value of the current computation node.
\end_layout

@@ -3164,7 +3164,7 @@ virtual void Validate()

\begin_layout Plain Layout

-if (Input(0)->Output().GetNumElements() == 0 || Input(1)->FunctionV
+if (Input(0)->Value().GetNumElements() == 0 || Input(1)->FunctionV
alues().GetNumElements() == 0)
\end_layout

@@ -3180,7 +3180,7 @@

\begin_layout Plain Layout

-if (Input(0)->Output().GetNumRows() != 1 || Input(0)->FunctionValue
+if (Input(0)->Value().GetNumRows() != 1 || Input(0)->FunctionValue
s().GetNumCols() != 1)
\end_layout

@@ -3197,7 +3197,7 @@

\begin_layout Plain Layout

-Output().Resize(Input(1)->Output().GetNumRows(), Input(1)->F
+Value().Resize(Input(1)->Value().GetNumRows(), Input(1)->F
unctionValues().GetNumCols());
\end_layout
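
Joined across the hard-wrapped lines, the Validate body in this excerpt reads roughly as below. This is a reconstruction for readability, not the full source: the untouched continuation lines still carry the even older FunctionValues() name, and the bodies of the two error checks are elided in the excerpt.

virtual void Validate()
{
    if (Input(0)->Value().GetNumElements() == 0 || Input(1)->FunctionValues().GetNumElements() == 0)
        ; // error path elided in the excerpt
    if (Input(0)->Value().GetNumRows() != 1 || Input(0)->FunctionValues().GetNumCols() != 1)
        ; // error path elided; input 0 is expected to be a 1 x 1 scalar
    // Size this node's value matrix from input 1:
    Value().Resize(Input(1)->Value().GetNumRows(), Input(1)->FunctionValues().GetNumCols());
}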

@@ -3263,8 +3263,8 @@ virtual void ForwardProp()

\begin_layout Plain Layout

-ForwardPropS(Output(), Input(0)->Output(), Input(1)->
-Output());
+ForwardPropS(Value(), Input(0)->Value(), Input(1)->
+Value());
\end_layout

\begin_layout Plain Layout
@@ -3288,20 +3288,20 @@ virtual void ForwardProp(const size_t timeIdxInSeq)

\begin_layout Plain Layout

-Matrix<ElemType> sliceInput1Value = Input(1)->Output().ColumnSlice(t
+Matrix<ElemType> sliceInput1Value = Input(1)->Value().ColumnSlice(t
imeIdxInSeq * m_samplesInRecurrentStep, m_samplesInRecurrentStep);

\end_layout

\begin_layout Plain Layout

-Matrix<ElemType> sliceOutputValue = m_output.ColumnSlice(timeIdxInSeq
+Matrix<ElemType> sliceOutputValue = m_value.ColumnSlice(timeIdxInSeq
* m_samplesInRecurrentStep, m_samplesInRecurrentStep);
\end_layout

\begin_layout Plain Layout

-ForwardPropS(sliceOutputValue, Input(0)->Output(), sliceInput1
+ForwardPropS(sliceOutputValue, Input(0)->Value(), sliceInput1
Value);
\end_layout
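
Joined up, the two ForwardProp overloads in the excerpt read as follows. ColumnSlice(start, n) takes the n consecutive columns beginning at column start; since minibatch columns are laid out as [parSeq + t * numParSeq] (see the LUSequenceReader comment above), each slice is exactly one time step across all parallel sequences. A reconstruction of the excerpt:

virtual void ForwardProp()
{
    // whole minibatch at once
    ForwardPropS(Value(), Input(0)->Value(), Input(1)->Value());
}

virtual void ForwardProp(const size_t timeIdxInSeq)
{
    // one time step: the m_samplesInRecurrentStep columns of step timeIdxInSeq
    Matrix<ElemType> sliceInput1Value = Input(1)->Value().ColumnSlice(timeIdxInSeq * m_samplesInRecurrentStep, m_samplesInRecurrentStep);
    Matrix<ElemType> sliceOutputValue = m_value.ColumnSlice(timeIdxInSeq * m_samplesInRecurrentStep, m_samplesInRecurrentStep);
    ForwardPropS(sliceOutputValue, Input(0)->Value(), sliceInput1Value);
}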

@@ -3435,8 +3435,8 @@ virtual void BackpropTo(const size_t inputIndex)

\begin_layout Plain Layout

-BackpropToLeft(Input(1)->Output(), Input(0)->Gradient
-Values(), GradientValues());
+BackpropToLeft(Input(1)->Value(), Input(0)->Gradient
+Values(), Gradient());
\end_layout

\begin_layout Plain Layout
@@ -3456,8 +3456,8 @@

\begin_layout Plain Layout

-BackpropToRight(Input(0)->Output(), Input(1)->GradientVal
-ues(), GradientValues());
+BackpropToRight(Input(0)->Value(), Input(1)->GradientVal
+ues(), Gradient());
\end_layout

\begin_layout Plain Layout
@@ -3519,19 +3519,19 @@

\begin_layout Plain Layout

-Matrix<ElemType> sliceOutputGrad = GradientValues().ColumnSlice(timeIdxIn
+Matrix<ElemType> sliceOutputGrad = Gradient().ColumnSlice(timeIdxIn
Seq * m_samplesInRecurrentStep, m_samplesInRecurrentStep);
\end_layout

\begin_layout Plain Layout

-Matrix<ElemType> sliceInput1Value = Input(1)->Output().ColumnSlic
+Matrix<ElemType> sliceInput1Value = Input(1)->Value().ColumnSlic
e(timeIdxInSeq * m_samplesInRecurrentStep, m_samplesInRecurrentStep);
\end_layout

\begin_layout Plain Layout

-BackpropToLeft(sliceInput1Value, Input(0)->GradientValues(),
+BackpropToLeft(sliceInput1Value, Input(0)->Gradient(),
sliceOutputGrad);
\end_layout

@@ -3552,19 +3552,19 @@

\begin_layout Plain Layout

-Matrix<ElemType> sliceInput1Grad = Input(1)->GradientValues().ColumnSlic
+Matrix<ElemType> sliceInput1Grad = Input(1)->Gradient().ColumnSlic
e(timeIdxInSeq * m_samplesInRecurrentStep, m_samplesInRecurrentStep);
\end_layout

\begin_layout Plain Layout

-Matrix<ElemType> sliceOutputGrad = GradientValues().ColumnSlice(timeIdxIn
+Matrix<ElemType> sliceOutputGrad = Gradient().ColumnSlice(timeIdxIn
Seq * m_samplesInRecurrentStep, m_samplesInRecurrentStep);
\end_layout

\begin_layout Plain Layout

-BackpropToRight(Input(0)->Output(), sliceInput1Grad,
+BackpropToRight(Input(0)->Value(), sliceInput1Grad,
sliceOutputGrad);
\end_layout
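
Read together, the BackpropTo excerpts dispatch on inputIndex: BackpropToLeft accumulates the gradient for input 0 (the scalar) and BackpropToRight for input 1, with the per-timestep variant working on column slices. A sketch assembled from the excerpt (the if/else structure and the two-argument signature are inferred from the Left/Right pairing; note also that the whole-minibatch variant above still reads Input(0)->GradientValues(), a remnant the line-wrapped rename missed):

virtual void BackpropTo(const size_t inputIndex, const size_t timeIdxInSeq) // signature partly inferred
{
    Matrix<ElemType> sliceOutputGrad = Gradient().ColumnSlice(timeIdxInSeq * m_samplesInRecurrentStep, m_samplesInRecurrentStep);
    if (inputIndex == 0) // gradient w.r.t. the scalar input
    {
        Matrix<ElemType> sliceInput1Value = Input(1)->Value().ColumnSlice(timeIdxInSeq * m_samplesInRecurrentStep, m_samplesInRecurrentStep);
        BackpropToLeft(sliceInput1Value, Input(0)->Gradient(), sliceOutputGrad);
    }
    else // gradient w.r.t. the matrix input
    {
        Matrix<ElemType> sliceInput1Grad = Input(1)->Gradient().ColumnSlice(timeIdxInSeq * m_samplesInRecurrentStep, m_samplesInRecurrentStep);
        BackpropToRight(Input(0)->Value(), sliceInput1Grad, sliceOutputGrad);
    }
}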

@@ -4431,7 +4431,7 @@ for (size_t i=0; i<labelNodes.size(); i++)

\begin_layout Plain Layout

-inputMatrices[labelNodes[i]->NodeName()] = &labelNodes[i]->Output();
+inputMatrices[labelNodes[i]->NodeName()] = &labelNodes[i]->Value();

\end_layout

@@ -4703,7 +4703,7 @@

\begin_layout Plain Layout

-//compute the gradients, which will be stored in the GradientValues() of
+//compute the gradients, which will be stored in the Gradient() of
each node
\end_layout

2 changes: 1 addition & 1 deletion MachineLearning/CNTK/ModelEditLanguage.h
@@ -170,7 +170,7 @@ class MELScript: public ConfigParser
auto nodePtr = builder.CreateLearnableParameter(name, 1, 1);
ndlNode->SetEvalValue(nodePtr.get());
ElemType val = ndlNode->GetScalar();
-nodePtr->Output().SetValue(val);
+nodePtr->Value().SetValue(val);
}
}
}
58 changes: 29 additions & 29 deletions MachineLearning/CNTK/SimpleNetworkBuilder.cpp
@@ -576,10 +576,10 @@ namespace Microsoft { namespace MSR { namespace CNTK {
m_net->InitLearnableParameters(e, m_uniformInit, randomSeed++, m_initValueScale);

columnStride = builder.CreateLearnableParameter(L"columnStride", 1, 1);
-columnStride->Output().SetValue(1);
+columnStride->Value().SetValue(1);
columnStride->SetParameterUpdateRequired(false);
rowStride = builder.CreateLearnableParameter(L"rowStride", 1, 1);
-rowStride->Output().SetValue(0);
+rowStride->Value().SetValue(0);
rowStride->SetParameterUpdateRequired(false);
alignoutput = builder.StrideTimes(encoderOutput, builder.Softmax(builder.StrideTimes(builder.Times(builder.Transpose(encoderOutput), e), pastValue, rowStride)), columnStride);

@@ -705,10 +705,10 @@ namespace Microsoft { namespace MSR { namespace CNTK {
m_net->InitLearnableParameters(e, m_uniformInit, randomSeed++, m_initValueScale);

columnStride = builder.CreateLearnableParameter(L"columnStride", 1, 1);
-columnStride->Output().SetValue(1);
+columnStride->Value().SetValue(1);
columnStride->SetParameterUpdateRequired(false);
rowStride = builder.CreateLearnableParameter(L"rowStride", 1, 1);
-rowStride->Output().SetValue(0);
+rowStride->Value().SetValue(0);
rowStride->SetParameterUpdateRequired(false);
alignoutput = builder.StrideTimes(encoderOutput, builder.Softmax(builder.StrideTimes(builder.Times(builder.Transpose(encoderOutput), e), pastValue, rowStride)), columnStride);

@@ -1044,7 +1044,7 @@ namespace Microsoft { namespace MSR { namespace CNTK {
directOutput = ApplyNonlinearFunction(builder.Times(directWIO, input),i);

ComputationNodePtr scalar = builder.CreateLearnableParameter(msra::strfun::wstrprintf(L"SV%d", i), 1, 1);
-scalar->Output().SetValue((ElemType)0.01);
+scalar->Value().SetValue((ElemType)0.01);
ComputationNodePtr scaled = builder.Scale(scalar, directOutput, msra::strfun::wstrprintf(L"S%d", i));

mergedNode = builder.Plus(toNode, scaled);
@@ -1086,11 +1086,11 @@ namespace Microsoft { namespace MSR { namespace CNTK {
bi = builder.CreateLearnableParameter(msra::strfun::wstrprintf (L"bi%d", iLayer), outputDim, 1);
bf = builder.CreateLearnableParameter(msra::strfun::wstrprintf (L"bf%d", iLayer), outputDim, 1);
//if (m_forgetGateInitVal > 0)
-bf->Output().SetValue(m_forgetGateInitVal);
+bf->Value().SetValue(m_forgetGateInitVal);
//if (m_inputGateInitVal > 0)
-bi->Output().SetValue(m_inputGateInitVal);
+bi->Value().SetValue(m_inputGateInitVal);
//if (m_outputGateInitVal > 0)
-bo->Output().SetValue(m_outputGateInitVal);
+bo->Value().SetValue(m_outputGateInitVal);

Whi = builder.CreateLearnableParameter(msra::strfun::wstrprintf (L"WHI%d", iLayer), outputDim, outputDim);
m_net->InitLearnableParameters(Whi, m_uniformInit, randomSeed++, m_initValueScale);
@@ -1124,7 +1124,7 @@ namespace Microsoft { namespace MSR { namespace CNTK {
{
//it = builder.CreateLearnableParameter(msra::strfun::wstrprintf (L"CONSTIT%d", iLayer), outputDim, mbSize);
//it->SetParameterUpdateRequired(false);
-//it->Output().SetValue(m_constInputGateValue);
+//it->Value().SetValue(m_constInputGateValue);
it = nullptr;
}
else
@@ -1314,7 +1314,7 @@ namespace Microsoft { namespace MSR { namespace CNTK {
output = builder.Times(w, input, L"outputsBeforeSoftmax");

trans = builder.CreateLearnableParameter(msra::strfun::wstrprintf(L"TransProb%d", numHiddenLayers), m_layerSizes[numHiddenLayers + 1], m_layerSizes[numHiddenLayers + 1]);
-trans->Output().SetValue((ElemType)1.0 / m_layerSizes[numHiddenLayers + 1]);
+trans->Value().SetValue((ElemType)1.0 / m_layerSizes[numHiddenLayers + 1]);
// m_net->InitLearnableParameters(trans, m_uniformInit, randomSeed++, m_initValueScale);
trans->SetParameterUpdateRequired(true);
label = builder.CreateInputNode(L"labels", m_layerSizes[numHiddenLayers + 1], mbSize);
@@ -1446,24 +1446,24 @@ namespace Microsoft { namespace MSR { namespace CNTK {
size_t nDim = inputDim + outputDim + 2;
wInputGate = builder.CreateLearnableParameter(msra::strfun::wstrprintf(L"WINPUTGATE%d", iLayer), outputDim, nDim);
m_net->InitLearnableParameters(wInputGate, m_uniformInit, randomSeed++, m_initValueScale);
-wInputGate->Output().ColumnSlice(0, 1).SetValue(m_inputGateInitVal); /// init to input gate bias
+wInputGate->Value().ColumnSlice(0, 1).SetValue(m_inputGateInitVal); /// init to input gate bias
wForgetGate = builder.CreateLearnableParameter(msra::strfun::wstrprintf(L"WFORGETGATE%d", iLayer), outputDim, nDim);
m_net->InitLearnableParameters(wForgetGate, m_uniformInit, randomSeed++, m_initValueScale);
-wForgetGate->Output().ColumnSlice(0, 1).SetValue(m_forgetGateInitVal); /// init to forget gate bias
+wForgetGate->Value().ColumnSlice(0, 1).SetValue(m_forgetGateInitVal); /// init to forget gate bias
wOutputGate = builder.CreateLearnableParameter(msra::strfun::wstrprintf(L"WOUTPUTGATE%d", iLayer), outputDim, nDim);
m_net->InitLearnableParameters(wOutputGate, m_uniformInit, randomSeed++, m_initValueScale);
-wOutputGate->Output().ColumnSlice(0, 1).SetValue(m_outputGateInitVal);/// init to output gate bias
+wOutputGate->Value().ColumnSlice(0, 1).SetValue(m_outputGateInitVal);/// init to output gate bias
wMemoryCellMatrix = builder.CreateLearnableParameter(msra::strfun::wstrprintf(L"WMEMORYCELLWEIGHT%d", iLayer), outputDim, inputDim + outputDim + 1);
m_net->InitLearnableParameters(wMemoryCellMatrix, m_uniformInit, randomSeed++, m_initValueScale);
-wMemoryCellMatrix->Output().ColumnSlice(0, 1).SetValue(0);/// init to memory cell bias
+wMemoryCellMatrix->Value().ColumnSlice(0, 1).SetValue(0);/// init to memory cell bias
output = builder.LSTM(inputObs, wInputGate, wForgetGate, wOutputGate, wMemoryCellMatrix, msra::strfun::wstrprintf(L"LSTM%d", iLayer));
#ifdef DEBUG_DECODER
-wInputGate->Output().SetValue((ElemType)0.01);
-wForgetGate->Output().SetValue((ElemType)0.01);
-wOutputGate->Output().SetValue((ElemType)0.01);
-wMemoryCellMatrix->Output().SetValue((ElemType)0.01);
+wInputGate->Value().SetValue((ElemType)0.01);
+wForgetGate->Value().SetValue((ElemType)0.01);
+wOutputGate->Value().SetValue((ElemType)0.01);
+wMemoryCellMatrix->Value().SetValue((ElemType)0.01);
#endif
if (m_addDropoutNodes)
@@ -1518,7 +1518,7 @@ namespace Microsoft { namespace MSR { namespace CNTK {
m_net->InitLearnableParameters(e, m_uniformInit, randomSeed++, m_initValueScale);
output = builder.LookupTable(e, input, L"LookupTable");
#ifdef DEBUG_DECODER
-e->Output().SetValue((ElemType)0.01);
+e->Value().SetValue((ElemType)0.01);
#endif

if (m_addDropoutNodes)
@@ -1573,7 +1573,7 @@ namespace Microsoft { namespace MSR { namespace CNTK {
w = builder.CreateLearnableParameter(msra::strfun::wstrprintf(L"W%d", numHiddenLayers), m_layerSizes[numHiddenLayers + 1], m_layerSizes[numHiddenLayers]);
m_net->InitLearnableParameters(w, m_uniformInit, randomSeed++, m_initValueScale);
#ifdef DEBUG_DECODER
-w->Output().SetValue((ElemType)0.01);
+w->Value().SetValue((ElemType)0.01);
#endif
label = builder.CreateInputNode(L"labels", m_layerSizes[numHiddenLayers + 1], mbSize);
AddTrainAndEvalCriterionNodes(input, label, w);
@@ -1649,7 +1649,7 @@ namespace Microsoft { namespace MSR { namespace CNTK {
m_net->InitLearnableParameters(e, m_uniformInit, randomSeed++, m_initValueScale);
output = builder.LookupTable(e, input, L"EncoderLookupTable");
#ifdef DEBUG_DECODER
-e->Output().SetValue((ElemType)0.01);
+e->Value().SetValue((ElemType)0.01);
#endif

if (m_addDropoutNodes)
@@ -1870,11 +1870,11 @@ namespace Microsoft { namespace MSR { namespace CNTK {
bi = builder.CreateLearnableParameter(msra::strfun::wstrprintf(L"bi%d", iLayer), outputDim, 1);
bf = builder.CreateLearnableParameter(msra::strfun::wstrprintf(L"bf%d", iLayer), outputDim, 1);
//if (m_forgetGateInitVal > 0)
-bf->Output().SetValue(m_forgetGateInitVal);
+bf->Value().SetValue(m_forgetGateInitVal);
//if (m_inputGateInitVal > 0)
-bi->Output().SetValue(m_inputGateInitVal);
+bi->Value().SetValue(m_inputGateInitVal);
//if (m_outputGateInitVal > 0)
-bo->Output().SetValue(m_outputGateInitVal);
+bo->Value().SetValue(m_outputGateInitVal);

Whi = builder.CreateLearnableParameter(msra::strfun::wstrprintf(L"WHI%d", iLayer), outputDim, outputDim);
m_net->InitLearnableParameters(Whi, m_uniformInit, randomSeed++, m_initValueScale);
@@ -1908,7 +1908,7 @@ namespace Microsoft { namespace MSR { namespace CNTK {
{
//it = builder.CreateLearnableParameter(msra::strfun::wstrprintf (L"CONSTIT%d", iLayer), outputDim, mbSize);
//it->SetParameterUpdateRequired(false);
-//it->Output().SetValue(m_constInputGateValue);
+//it->Value().SetValue(m_constInputGateValue);
it = nullptr;
}
else
@@ -2271,7 +2271,7 @@ namespace Microsoft { namespace MSR { namespace CNTK {
label = builder.CreateInputNode(L"labels", 2 * (this->nce_noises + 1), mbSize);

bias = builder.CreateLearnableParameter(L"BiasVector", 1, m_layerSizes[m_layerSizes.size() - 1]);
-bias->Output().SetValue((ElemType)-std::log(m_layerSizes[m_layerSizes.size() - 1]));
+bias->Value().SetValue((ElemType)-std::log(m_layerSizes[m_layerSizes.size() - 1]));
//m_net->InitLearnableParameters(bias, m_uniformInit, randomSeed++, std::log(m_layerSizes[m_layerSizes.size() - 1])* m_initValueScale);
//clslogpostprob = builder.Times(clsweight, input, L"ClassPostProb");
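
The bias initialization above sets every output bias to -log(N), i.e. log(1/N) for N output classes, so the initial scores correspond to a roughly uniform output distribution, a common trick to stabilize early NCE training. A self-contained check of that arithmetic (N = 10000 is an arbitrary example size):

#include <cassert>
#include <cmath>

int main()
{
    const double N = 10000.0;         // hypothetical output vocabulary size
    const double bias = -std::log(N); // ~ -9.2103, cf. the SetValue call above
    // exp(bias) recovers the uniform probability 1/N:
    assert(std::abs(std::exp(bias) - 1.0 / N) < 1e-12);
    return 0;
}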

@@ -2385,10 +2385,10 @@ namespace Microsoft { namespace MSR { namespace CNTK {
wstring nameOfH = msra::strfun::wstrprintf(L"H%d", i + 1);

w = builder.Parameter(wts.GetNumRows(), wts.GetNumCols(), nameOfW);
-w->Output().SetValue(wts);
+w->Value().SetValue(wts);

b = builder.Parameter(bias.GetNumRows(), 1, nameOfB);
-b->Output().SetValue(bias);
+b->Value().SetValue(bias);

if (layerType == "perceptron")
{
@@ -2460,7 +2460,7 @@ namespace Microsoft { namespace MSR { namespace CNTK {
Matrix<ElemType> zeros = Matrix<ElemType>::Zeros(outputLayerSize, 1, m_deviceId);
prior = builder.Mean(label, L"Prior");
static_pointer_cast<PreComputedNode<ElemType>>(prior)->MarkComputed(false);
-prior->Output().SetValue(zeros);
+prior->Value().SetValue(zeros);
}
}
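
The DBN-pretraining hunk above shows the general seeding pattern: size a Parameter node from an externally computed matrix, then copy the contents in with Value().SetValue(...). A hedged sketch of the pattern (LoadPretrainedMatrix is a hypothetical helper, and SetValue is read here as a deep copy into the node's own m_value):

// Hypothetical loader; the actual DBN file parsing is not shown in this diff.
Matrix<ElemType> wts  = LoadPretrainedMatrix(layerIdx, "W");
Matrix<ElemType> bias = LoadPretrainedMatrix(layerIdx, "b");

auto w = builder.Parameter(wts.GetNumRows(), wts.GetNumCols(), nameOfW);
w->Value().SetValue(wts);  // copies the pretrained weights into the node

auto b = builder.Parameter(bias.GetNumRows(), 1, nameOfB);
b->Value().SetValue(bias);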

8 changes: 4 additions & 4 deletions MachineLearning/CNTK/SynchronousExecutionEngine.cpp
@@ -158,7 +158,7 @@ namespace Microsoft { namespace MSR { namespace CNTK {
int forcedRandomSeed = node->GetOptionalParameter("randomSeed", "-1"/*disabled*/);

if (!_wcsicmp(initString.c_str(), L"fixedValue"))
-nodePtr->Output().SetValue(value);
+nodePtr->Value().SetValue(value);
else if (!_wcsicmp(initString.c_str(), L"uniform"))
m_net->InitLearnableParameters(nodePtr, true, forcedRandomSeed < 0 ? randomSeed++ : (unsigned long)forcedRandomSeed, initValueScale, initOnCPUOnly);
else if (!_wcsicmp(initString.c_str(), L"gaussian"))
@@ -206,7 +206,7 @@ namespace Microsoft { namespace MSR { namespace CNTK {
ElemType value = node->GetOptionalParameter("value", "0");

if (!_wcsicmp(initString.c_str(), L"fixedValue"))
-nodePtr->Output().SetValue(value);
+nodePtr->Value().SetValue(value);
else if (!_wcsicmp(initString.c_str(), L"uniform"))
m_net->InitLearnableParameters(nodePtr, true, randomSeed++, initValueScale);
else if (!_wcsicmp(initString.c_str(), L"gaussian"))
@@ -241,10 +241,10 @@ namespace Microsoft { namespace MSR { namespace CNTK {
nodePtr = builder.CreateLearnableParameter(name, rows, cols);
nodePtr->SetParameterUpdateRequired(false);
}
-else if (pass == ndlPassFinal || nodePtr->Output().GetNumElements() != 0)
+else if (pass == ndlPassFinal || nodePtr->Value().GetNumElements() != 0)
{
ElemType val = parameter[0]->GetScalar();
-nodePtr->Output().SetValue(val);
+nodePtr->Value().SetValue(val);
}
}
else if (cnNodeType == OperationNameOf(RowSliceNode))
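Both hunks in this file show the same three-way initialization dispatch on initString; consolidated, it reads roughly as below (the gaussian branch is cut off in the diff, so the uniformInit = false argument is an assumption):

if (!_wcsicmp(initString.c_str(), L"fixedValue"))
    nodePtr->Value().SetValue(value);                                  // constant init
else if (!_wcsicmp(initString.c_str(), L"uniform"))
    m_net->InitLearnableParameters(nodePtr, true, randomSeed++, initValueScale);
else if (!_wcsicmp(initString.c_str(), L"gaussian"))
    m_net->InitLearnableParameters(nodePtr, false /*assumed*/, randomSeed++, initValueScale);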
(The remaining changed files in this commit were not loaded in this view.)
