Skip to content

Commit

Permalink
Whitespace
Browse files Browse the repository at this point in the history
  • Loading branch information
Clemens Marschner committed Sep 14, 2016
1 parent c0c4c5d commit 1a199c5
Showing 3 changed files with 10 additions and 10 deletions.
10 changes: 5 additions & 5 deletions Source/ComputationNetworkLib/LinearAlgebraNodes.h
Original file line number Diff line number Diff line change
@@ -143,7 +143,7 @@ class MinusNode : public BinaryElementWiseNode<ElemType>
virtual void /*ComputationNode::*/ ForwardProp(const FrameRange& fr) override
{
size_t rank = DetermineElementwiseTensorRank();
auto result = ValueTensorFor(rank, fr);
auto result = ValueTensorFor(rank, fr);
auto input0 = InputRef(0).ValueTensorFor(rank, fr.AllowBroadcast());
auto input1 = InputRef(1).ValueTensorFor(rank, fr.AllowBroadcast());
result.AssignDifferenceOf(input0, input1);
@@ -193,7 +193,7 @@ class ElementTimesNode : public BinaryElementWiseNode<ElemType>
virtual void /*ComputationNode::*/ ForwardProp(const FrameRange& fr) override
{
size_t rank = DetermineElementwiseTensorRank();
auto result = ValueTensorFor(rank, fr);
auto result = ValueTensorFor(rank, fr);
auto input0 = InputRef(0).ValueTensorFor(rank, fr.AllowBroadcast());
auto input1 = InputRef(1).ValueTensorFor(rank, fr.AllowBroadcast());
result.AssignElementwiseProductOf(input0, input1);
@@ -203,7 +203,7 @@ class ElementTimesNode : public BinaryElementWiseNode<ElemType>
{
size_t rank = DetermineElementwiseTensorRank();
auto gradient = GradientTensorFor(rank, fr);
auto inputGradient = Input(inputIndex)->GradientTensorFor(rank, fr.AllowBroadcast());
auto inputGradient = Input(inputIndex)->GradientTensorFor(rank, fr.AllowBroadcast());
auto otherInputValue = Input(1 - inputIndex)->ValueTensorFor(rank, fr.AllowBroadcast());

// if reduction then mask the respective input(s) (zero out the gaps)
@@ -689,15 +689,15 @@ class TransposeDimensionsNode : public ComputationNode /*ComputationNode*/<ElemT
virtual void /*ComputationNode::*/ ForwardProp(const FrameRange& fr) override
{
    // Forward pass of TransposeDimensions: materialize the output as a copy of the
    // input viewed through a transposed tensor slice, so the dimension permutation
    // happens via the TensorView striding rather than an explicit shuffle loop.
    size_t rank = DetermineElementwiseTensorRank();
    // Fixed: the diff artifact duplicated this declaration (old/new whitespace pair);
    // only one 'output' may be declared. Also normalized the stray space after '('.
    auto output = ValueTensorFor(rank, fr);
    // Non-owning view over the input's value buffer with the transposed layout for this frame.
    auto input = TensorView<ElemType>(InputRef(0).ValuePtr(), GetTransposedTensorSliceFor(rank, fr));
    output.AssignCopyOf(input);
}

virtual void /*ComputationNode::*/ BackpropTo(const size_t inputIndex, const FrameRange& fr) override
{
    // Backward pass of TransposeDimensions: accumulate this node's output gradient
    // into the input's gradient through the same transposed view used in ForwardProp
    // (transpose is its own adjoint up to the inverse permutation of axes).
    size_t rank = DetermineElementwiseTensorRank();
    // Fixed: removed the duplicated declaration left by the rendered diff (old/new
    // whitespace pair) and normalized the space after '('.
    auto outputGradient = GradientTensorFor(rank, fr);
    // Non-owning transposed view over the input's gradient buffer for this frame.
    auto inputGradient = TensorView<ElemType>(InputRef(0).GradientPtr(), GetTransposedTensorSliceFor(rank, fr));
    // AddCopyOf accumulates (+=) rather than assigns, as gradients may already hold contributions.
    inputGradient.AddCopyOf(outputGradient);
}
8 changes: 4 additions & 4 deletions Source/ComputationNetworkLib/NonlinearityNodes.h
Original file line number Diff line number Diff line change
@@ -50,7 +50,7 @@ class UnaryElementWiseWithOpCodeNodeBase : public ComputationNode<ElemType>, pub
virtual void /*ComputationNode::*/ ForwardProp(const FrameRange& fr) override
{
    // Forward pass shared by all unary element-wise op-code nodes: applies the
    // node's 'opForward' element-wise op to the input tensor and writes the result
    // into this node's value, using the TensorView unary-op kernel.
    size_t rank = DetermineElementwiseTensorRank();
    // Fixed: the rendered diff duplicated this declaration (old/new whitespace pair);
    // only a single 'result' declaration is valid C++.
    auto result = ValueTensorFor(rank, fr);
    auto input = InputRef(0).ValueTensorFor(rank, fr);
    // DoUnaryOpOf(beta, in, alpha, op, reduceOp): beta=0 overwrites, alpha=1 scales by one.
    result.DoUnaryOpOf(0, input, 1, opForward, opSum);
}
@@ -61,7 +61,7 @@ class UnaryElementWiseWithOpCodeNodeBase : public ComputationNode<ElemType>, pub

// get the args
size_t rank = DetermineElementwiseTensorRank();
auto sliceOutputGrad = GradientTensorFor(rank, fr); // propagate from this one...
auto sliceOutputGrad = GradientTensorFor(rank, fr); // propagate from this one...
auto sliceInputGrad = InputRef(0).GradientTensorFor(rank, fr); // ...to this one

GradientOperationType opTypeHolder = opType; // preventing pragma warning C4127
@@ -544,10 +544,10 @@ class ClipNode : public ComputationNode<ElemType>, public NumInputs<3>
if (inputIndex == 2)
{
size_t rank = DetermineElementwiseTensorRank();
auto gradient = GradientTensorFor(rank, fr);
auto gradient = GradientTensorFor(rank, fr);
auto inputGradient = InputRef(inputIndex).GradientTensorFor(rank, fr.AllowBroadcast());
auto input = InputRef(inputIndex).ValueTensorFor(rank, fr.AllowBroadcast());
auto output = ValueTensorFor(rank, fr.AllowBroadcast());
auto output = ValueTensorFor(rank, fr.AllowBroadcast());

inputGradient.AddCopyIfEqualOf(input, output, gradient);
}
2 changes: 1 addition & 1 deletion Source/ComputationNetworkLib/SpecialPurposeNodes.cpp
Original file line number Diff line number Diff line change
@@ -126,7 +126,7 @@ template <class ElemType>
fprintf(stderr, "] %ls %s--> %s\n", m_message.c_str(), logGradientInstead ? "(gradient) " : "", InputRef(0).FormatOperationPrototype("").c_str());
InputRef(0).WriteMinibatchWithFormatting(stderr, fr, m_onlyUpToRow, m_onlyUpToT, m_formattingOptions.transpose, m_formattingOptions.isCategoryLabel, m_formattingOptions.isSparse, m_labelMapping,
sequenceSeparator, sequencePrologue, sequenceEpilogue, elementSeparator, sampleSeparator,
valueFormatString, logGradientInstead);
valueFormatString, logGradientInstead);
}
}

0 comments on commit 1a199c5

Please sign in to comment.