Commit
Formatting
Mark Hillebrand authored and Mark Hillebrand committed Jan 22, 2016
1 parent 50c0e8e commit 6f59eb1
Showing 58 changed files with 6,060 additions and 6,060 deletions.
2 changes: 1 addition & 1 deletion Source/CNTK/BrainScript/BrainScriptEvaluator.cpp
@@ -1022,4 +1022,4 @@ static ScriptableObjects::ConfigurableRuntimeTypeRegister::Add<Debug> registerDe
// - macro arg expressions get their path assigned when their thunk is created, the thunk remembers it
// - however, really, the thunk should get the expression path from the context it is executed in, not the context it was created in
// - maybe there is some clever scheme of overwriting when a result comes back? E.g. we retrieve a value but its name is not right, can we patch it up? Very tricky to find the right rules/conditions
-} } } // namespaces
+} } } // namespaces
2 changes: 1 addition & 1 deletion Source/CNTK/BrainScript/BrainScriptEvaluator.h
@@ -24,4 +24,4 @@ shared_ptr<Object> EvaluateField(ExpressionPtr e, const wstring& id); // for exp

// some simple tests
void SomeTests();
-} } } // end namespaces
+} } } // end namespaces
2 changes: 1 addition & 1 deletion Source/CNTK/BrainScript/BrainScriptParser.cpp
@@ -952,4 +952,4 @@ ExpressionPtr ParseConfigExpression(const wstring& sourceText, vector<wstring>&&
parser.VerifyAtEnd();
return expr;
}
-} } } // namespaces
+} } } // namespaces
2 changes: 1 addition & 1 deletion Source/CNTK/BrainScript/BrainScriptParser.h
@@ -137,4 +137,4 @@ typedef Expression::ExpressionPtr ExpressionPtr; // circumvent some circular def
ExpressionPtr ParseConfigDictFromString(wstring text, vector<wstring>&& includePaths); // parses a list of dictionary members, returns a dictionary expression
ExpressionPtr ParseConfigDictFromFile(wstring path, vector<wstring>&& includePaths); // likewise, but from a file path
ExpressionPtr ParseConfigExpression(const wstring& sourceText, vector<wstring>&& includePaths); // parses a single expression from sourceText, which is meant to contain an include statement, hence includePaths
-} } } // namespaces
+} } } // namespaces
2 changes: 1 addition & 1 deletion Source/CNTK/BrainScript/BrainScriptTest.cpp
@@ -190,4 +190,4 @@ void SomeTests()
err.PrintError();
}
}
-} } } // namespaces
+} } } // namespaces
2 changes: 1 addition & 1 deletion Source/CNTK/ModelEditLanguage.cpp
@@ -138,7 +138,7 @@ void MELScript<ElemType>::CallFunction(const std::string& p_name, const ConfigPa
std::wstring modelFormat = GetOptionalModelFormat(params, numFixedParams);

auto cn = make_shared<ComputationNetwork>(CPUDEVICE);
-#if 1 // support for a specific kind of legacy format, for the sole purpose of allowing users to convert (=load & save) them
+#if 1 // support for a specific kind of legacy format, for the sole purpose of allowing users to convert (=load & save) them
if (modelFormat == L"cntk_legacy_no_tensorlib")
{
cn->Read<ElemType>(params[1]);
4 changes: 1 addition & 3 deletions Source/CNTK/NDLUtil.h
@@ -173,6 +173,4 @@ class NDLUtil

template class NDLUtil<float>;
template class NDLUtil<double>;
-}
-}
-}
+} } }
4 changes: 2 additions & 2 deletions Source/Common/BestGpu.cpp
@@ -265,9 +265,9 @@ void BestGpu::Init()
// get the count of objects
cudaError_t err = cudaGetDeviceCount(&m_deviceCount);
if (err != cudaSuccess)
-m_deviceCount = 0; // if this fails, we have no GPUs
+m_deviceCount = 0; // if this fails, we have no GPUs

-ProcessorData pdEmpty = { 0 };
+ProcessorData pdEmpty = {0};
for (int i = 0; i < m_deviceCount; i++)
{
ProcessorData* data = new ProcessorData();
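For reference, a minimal standalone sketch (not CNTK's BestGpu class) of the pattern this hunk shows: query the CUDA runtime for the device count and treat any error as having no GPUs. The helper name GetDeviceCountOrZero is made up for illustration.

#include <cuda_runtime.h>
#include <cstdio>

int GetDeviceCountOrZero()
{
    int deviceCount = 0;
    cudaError_t err = cudaGetDeviceCount(&deviceCount);
    if (err != cudaSuccess)
        deviceCount = 0; // treat a failed query as "no GPUs", as in the hunk above
    return deviceCount;
}

int main()
{
    printf("CUDA devices visible: %d\n", GetDeviceCountOrZero());
    return 0;
}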
4 changes: 1 addition & 3 deletions Source/Common/Config.cpp
@@ -304,6 +304,4 @@ void TrimQuotes(std::string& str)
if (str.front() == '"' && str.back() == '"')
str = str.substr(1, str.size() - 2);
}
-}
-}
-}
+} } }
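The hunk above only changes the closing braces, but for context, a self-contained sketch of the quote-trimming behavior it sits in (the length guard is an assumption; the visible lines show only the trimming itself, and the sketch name is made up):

#include <iostream>
#include <string>

void TrimQuotesSketch(std::string& str)
{
    // strip one pair of surrounding double quotes, if present
    if (str.size() >= 2 && str.front() == '"' && str.back() == '"')
        str = str.substr(1, str.size() - 2);
}

int main()
{
    std::string s = "\"some value\"";
    TrimQuotesSketch(s);
    std::cout << s << "\n"; // prints: some value
    return 0;
}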
4 changes: 1 addition & 3 deletions Source/Common/DebugUtil.cpp
@@ -136,6 +136,4 @@ void DebugUtil::PrintCallStack()
free(symbolList);
#endif
}
-}
-}
-}
+} } }
1 change: 0 additions & 1 deletion Source/Common/Include/ProgressTracing.h
@@ -44,7 +44,6 @@ namespace Microsoft { namespace MSR { namespace CNTK {
} // wrap static state in an accessor, so we won't need a CPP file

public:
-
static bool IsEnabled()
{
return GetStaticInstance().m_enabled;
2 changes: 1 addition & 1 deletion Source/Common/Include/ScriptableObjects.h
@@ -896,4 +896,4 @@ template <class V>
{
return static_cast<const std::vector<typename V::value_type> &>(vec);
} // use this specifically for XXXargvector
-} } } // end namespaces
+} } } // end namespaces
9 changes: 6 additions & 3 deletions Source/Common/Include/TensorShape.h
@@ -383,7 +383,10 @@ struct TensorShape
{
return m_dims == other.m_dims;
}
-bool operator!=(const TensorShape& other) const { return !operator==(other); } // duh!
+bool operator!=(const TensorShape& other) const
+{
+    return !operator==(other);
+} // duh!

// verify that this refers to a dense matrix (no strides)
void VerifyIsDense() const
@@ -622,13 +625,13 @@ struct TensorShape
}

// compare two TensorShapes, whether they are compatible, considering padding and broadcasting
-bool IsElementwiseCompatibleWith(const TensorShape & other) const
+bool IsElementwiseCompatibleWith(const TensorShape& other) const
{
for (size_t i = 0; i < m_dims.size(); i++)
{
size_t dim = m_dims[i];
size_t otherDim = i < other.size() ? other[i] : 1;
-if (dim != otherDim && dim != 1 && otherDim != 1) // dims mismatch, and neither is broadcasting
+if (dim != otherDim && dim != 1 && otherDim != 1) // dims mismatch, and neither is broadcasting
return false;
}
return true;
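The IsElementwiseCompatibleWith hunk above encodes the broadcasting rule: two shapes are elementwise-compatible if each dimension either matches or is 1, with missing trailing dimensions treated as 1. A standalone sketch of that rule, using plain vectors instead of CNTK's TensorShape (the function name is made up):

#include <iostream>
#include <vector>

bool IsElementwiseCompatibleSketch(const std::vector<size_t>& dims, const std::vector<size_t>& other)
{
    for (size_t i = 0; i < dims.size(); i++)
    {
        size_t dim = dims[i];
        size_t otherDim = i < other.size() ? other[i] : 1; // missing trailing dims broadcast as 1
        if (dim != otherDim && dim != 1 && otherDim != 1)  // mismatch, and neither side broadcasts
            return false;
    }
    return true;
}

int main()
{
    std::cout << IsElementwiseCompatibleSketch({13, 42}, {13, 1}) << "\n"; // 1: second dim broadcasts
    std::cout << IsElementwiseCompatibleSketch({13, 42}, {13, 2}) << "\n"; // 0: 42 vs 2, neither is 1
    return 0;
}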
3 changes: 1 addition & 2 deletions Source/Common/Include/latticesource.h
@@ -76,5 +76,4 @@ class latticesource
denlattices.setverbosity(veb);
}
};
-}
-}
+} }