Skip to content
This repository has been archived by the owner on Jan 27, 2021. It is now read-only.

Commit

Permalink
Merge remote-tracking branch 'origin/master' into nikosk/keras_support2
Browse files Browse the repository at this point in the history
  • Loading branch information
n17s committed Feb 2, 2017
2 parents 6a1ed29 + 62654a1 commit cb4ef42
Show file tree
Hide file tree
Showing 24 changed files with 351 additions and 132 deletions.
3 changes: 3 additions & 0 deletions CNTK.sln
Original file line number Diff line number Diff line change
Expand Up @@ -1452,6 +1452,9 @@ Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "PerformanceProfilerDll", "S
EndProjectSection
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "CNTKLibraryCSEvalExamplesTest", "Tests\EndToEndTests\EvalClientTests\CNTKLibraryCSEvalExamplesTest\CNTKLibraryCSEvalExamplesTest.csproj", "{3500A847-E024-4E7D-92DD-CC587C17460B}"
ProjectSection(ProjectDependencies) = postProject
{50EF9EE6-5018-453E-A063-F77044EF1A97} = {50EF9EE6-5018-453E-A063-F77044EF1A97}
EndProjectSection
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "GoogLeNet", "GoogLeNet", "{789B4AB8-40F1-4A37-823A-BC20D80C8BF1}"
ProjectSection(SolutionItems) = preProject
Expand Down
1 change: 1 addition & 0 deletions Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -506,6 +506,7 @@ CNTKLIBRARY_TESTS_SRC =\
$(CNTKLIBRARY_TESTS_SRC_PATH)/BlockTests.cpp \
$(CNTKLIBRARY_TESTS_SRC_PATH)/TensorTests.cpp \
$(CNTKLIBRARY_TESTS_SRC_PATH)/ValueTests.cpp \
$(CNTKLIBRARY_TESTS_SRC_PATH)/LoadLegacyModelTests.cpp \
$(CNTKLIBRARY_TESTS_SRC_PATH)/TrainerTests.cpp \
$(CNTKLIBRARY_TESTS_SRC_PATH)/CifarResNet.cpp \
$(CNTKLIBRARY_TESTS_SRC_PATH)/SerializationTests.cpp \
Expand Down
29 changes: 21 additions & 8 deletions Source/CNTKv2LibraryDll/API/CNTKLibrary.h
Original file line number Diff line number Diff line change
Expand Up @@ -1861,8 +1861,11 @@ namespace CNTK

private:
explicit Parameter(const NDArrayViewPtr& value, const std::wstring& name, const std::wstring& uid)
: Variable(value->Shape(), VariableKind::Parameter, value->GetDataType(), value->DeepClone(false), true, {}, name, uid)
{}
: Variable(value->Shape(), VariableKind::Parameter, value->GetDataType(), value, true, {}, name, uid)
{
if (value->IsReadOnly())
InvalidArgument("Parameter cannot be constructed from a read-only NDArrayView value; you can create a non read-only clone of the value and use that instead!");
}
};

// Implementation note: The Variable type is a value type and not polymorphic in nature.
Expand Down Expand Up @@ -1943,7 +1946,7 @@ namespace CNTK

private:
Constant(const NDArrayViewPtr& value, const std::wstring& name, const std::wstring& uid)
: Variable(value->Shape(), VariableKind::Constant, value->GetDataType(), value->DeepClone(), false, {}, name, uid)
: Variable(value->Shape(), VariableKind::Constant, value->GetDataType(), value, false, {}, name, uid)
{}

///
Expand Down Expand Up @@ -2603,6 +2606,7 @@ namespace CNTK
CNTK_API virtual void Backward(const BackPropStatePtr& state,
const std::unordered_map<Variable, ValuePtr>& rootGradientValues,
std::unordered_map<Variable, ValuePtr>& backPropagatedGradientValuesForInputs);

///
/// Returns the name of the operation that this Function denotes
///
Expand Down Expand Up @@ -2631,8 +2635,11 @@ namespace CNTK
///
/// Infers the shape, data type and dynamic axes of the outputs of 'this' function based on the
/// Function's inputs, and returns Output Variable objects containing the inferred information
/// The result cannot exceed the maximum number of outputs (MaxNumOutputs, currently 64).
/// The passed "outputs" vector should also reserve MaxNumOutputs elements up front, to avoid
/// memory allocation while crossing the DLL boundary.
///
CNTK_API virtual std::vector<Variable> InferOutputs() = 0;
CNTK_API virtual void InferOutputs(std::vector<Variable>& outputs) = 0;

public:

Expand Down Expand Up @@ -2737,9 +2744,9 @@ namespace CNTK
///
/// Returns all Input variables of 'this' Function.
///
std::vector<Variable> Inputs() const
std::vector<Variable> Inputs(bool pythonOperandOrder = false) const
{
return *(InputsImpl().get());
return *(InputsImpl(pythonOperandOrder).get());
}

///
Expand Down Expand Up @@ -2837,6 +2844,11 @@ namespace CNTK
///
CNTK_API void PrintGraph() const;

///
/// Maximum number of outputs that is currently supported.
///
static const int MaxNumOutputs = 64;

protected:
///
/// Protected constructor for derived 'Function' types to specify the actual input and output variables for the (primitive) Function instance.
Expand All @@ -2858,6 +2870,7 @@ namespace CNTK

// Returns the outputs without ref-counting the owner.
CNTK_API std::vector<Variable>& RawOutputs() const;

private:
CNTK_API std::shared_ptr<std::vector<std::pair<Variable, Variable>>> BlockArgumentsMappingImpl() const;

Expand All @@ -2882,10 +2895,10 @@ namespace CNTK
return filteredInputs;
}

CNTK_API std::shared_ptr<std::vector<Variable>> InputsImpl() const;
CNTK_API std::shared_ptr<std::vector<Variable>> InputsImpl(bool pythonOperandOrder = false) const;
CNTK_API std::shared_ptr<std::vector<Variable>> OutputsImpl() const;

void ValidateOrUpdateOutputs(std::unordered_map<const Function*, size_t>& visitedFunctions, bool& recurrentNodeOutputModified);
void ValidateOrUpdateOutputs(std::unordered_map<const Function*, size_t>& visitedFunctions, bool& recurrentNodeOutputModified, std::vector<Variable>& buffer);

static void ReplacePlaceholderInPlace(Variable& var,
const std::unordered_map<Variable, Variable>& placeholderReplacements,
Expand Down
56 changes: 42 additions & 14 deletions Source/CNTKv2LibraryDll/BackCompat.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,8 @@
#include "ReshapingNodes.h"
#include "DeprecatedNodes.h"
#include "RNNNodes.h"
#include "PreComputeNodes.h"
#include "DeprecatedNodes.h"

using namespace Microsoft::MSR::CNTK;

Expand Down Expand Up @@ -49,9 +51,7 @@ namespace CNTK

Variable var;
if (node->IsLeaf())
{
var = ResolveLeaf<ElementType>(node);
}
else
{
// This is a non-leaf node and maps to a primitive Function
Expand All @@ -78,6 +78,22 @@ namespace CNTK
}

private:

// Wraps the value (weight) matrix of a legacy CNTK v1 node in an NDArrayView and returns it
// as a V2 Constant (isConstant == true) or Parameter variable. The returned variable's
// uid/name are derived from the legacy node's internal node name.
// NOTE(review): the view is built over matrix.AsReference(), i.e. it presumably aliases the
// node's storage rather than copying it — confirm the legacy node outlives the variable.
template<class ElementType>
Variable CreateParameterOrConstantFromNodeValue(const ComputationNodeBasePtr& node, bool isConstant)
{
auto& matrix = node->As<ComputationNode<ElementType>>()->Value();
// Raw 'new' here: ownership of the TensorView is presumably assumed by the NDArrayView
// constructed below — TODO confirm NDArrayView takes ownership of the passed pointer.
auto tensorView = new TensorView<ElementType>(std::make_shared<Matrix<ElementType>>(matrix.AsReference()), AsTensorViewShape(node->GetSampleLayout()));
NDArrayViewPtr value = MakeSharedObject<NDArrayView>(AsDataType<ElementType>(), AsDeviceDescriptor(matrix.GetDeviceId()), AsStorageFormat(matrix.GetFormat()), AsNDShape(node->GetSampleLayout()), false, tensorView);

auto kind = isConstant ? VariableKind::Constant : VariableKind::Parameter;

// Translate the internal V1 node name into a (uid, name) pair appropriate for this kind.
std::wstring varUid, varName;
std::tie(varUid, varName) = UidAndNameFromCNTKInternalNodeName(node->NodeName(), kind);

return isConstant ? (Variable)Constant(value, varName, varUid) : Parameter(value, varName, varUid);
}

template<class ElementType>
Variable ResolveLeaf(const ComputationNodeBasePtr& node)
{
Expand All @@ -104,13 +120,7 @@ namespace CNTK
if (node->Is<LearnableParameter<ElementType>>())
{
bool isConstant = (node->GetLearningRateMultiplier() == 0);
auto& matrix = node->As<ComputationNode<ElementType>>()->Value();
auto tensorView = new TensorView<ElementType>(std::make_shared<Matrix<ElementType>>(matrix.AsReference()), AsTensorViewShape(node->GetSampleLayout()));
NDArrayViewPtr value = MakeSharedObject<NDArrayView>(AsDataType<ElementType>(), AsDeviceDescriptor(matrix.GetDeviceId()), AsStorageFormat(matrix.GetFormat()), variableShape, false, tensorView);

auto kind = isConstant ? VariableKind::Constant : VariableKind::Parameter;
std::tie(varUid, varName) = UidAndNameFromCNTKInternalNodeName(node->NodeName(), kind);
return isConstant ? (Variable)Constant(value, varName, varUid) : Parameter(value, varName, varUid);
return CreateParameterOrConstantFromNodeValue<ElementType>(node, isConstant);
}

LogicError("CNTK::LoadLegacyModel: Unsupported legacy CNTK node named '%S'", node->NodeName().c_str());
Expand Down Expand Up @@ -311,7 +321,7 @@ namespace CNTK
// tensor dimensions flattened into the column dimension of the 2D parameter matrix
// We need to recover the actual tensor shape of the parameter in this case
auto& convolutionMapVar = inputVars[0];
if (convolutionNode->IsConvolution2D())
if (convolutionNode->IsConvolution2D() || (convolutionMapVar.Shape().Rank() == 2))
{
assert(convolutionMapVar.Shape().Rank() == 2);
assert(convolutionMapVar.IsConstant() || convolutionMapVar.IsParameter());
Expand Down Expand Up @@ -444,6 +454,24 @@ namespace CNTK

opType = PrimitiveOpType::EditDistanceError;
}
else if ((node->OperationName() == OperationNameOf(MeanNode)) || (node->OperationName() == OperationNameOf(InvStdDevNode)))
{
auto precomputeNode = node->As<MeanInvStdDevNodeBase<ElementType>>();
if (!precomputeNode->HasComputed())
InvalidArgument("Loading a CNTK legacy V1 model containing a Mean/InvStdDev precompute node, whose computation is unfinished, is not supported!");

return CreateParameterOrConstantFromNodeValue<ElementType>(node, /* isConstant =*/ true);
}
else if (node->OperationName() == OperationNameOf(PerDimMeanVarNormalizationNode))
{
auto meanValue = Constant(inputVars[1]).Value();
auto invStdDevValue = Constant(inputVars[2]).Value();

std::wstring uid, name;
std::tie(uid, name) = UidAndNameFromCNTKInternalNodeName(node->NodeName());

return PerDimMeanVarianceNormalize(inputVars[0], meanValue, invStdDevValue, name);
}
else
LogicError("Unsupported ComputationNode with OperationName='%S' found when loading legacy CNTK model", node->OperationName().c_str());

Expand Down Expand Up @@ -500,16 +528,16 @@ namespace CNTK

if (ComputationNetwork::IsNodePtr<ComputationNode<float>>(rootNode))
{
rootVariables.push_back(resolver.GetVariable<float>(rootNode).Owner());
auto var = resolver.GetVariable<float>(rootNode);
rootVariables.push_back(var.IsOutput() ? (Variable)var.Owner() : var);
}
else if (ComputationNetwork::IsNodePtr<ComputationNode<double>>(rootNode))
{
rootVariables.push_back(resolver.GetVariable<double>(rootNode).Owner());
auto var = resolver.GetVariable<double>(rootNode);
rootVariables.push_back(var.IsOutput() ? (Variable)var.Owner() : var);
}
else
{
LogicError("LoadLegacyModel(): invalid computation node element type.");
}
}

auto rootComposite = Combine(rootVariables);
Expand Down
7 changes: 2 additions & 5 deletions Source/CNTKv2LibraryDll/BlockFunction.h
Original file line number Diff line number Diff line change
Expand Up @@ -131,7 +131,7 @@ namespace CNTK
return blockFunctionInputs;
}

virtual std::vector<Variable> InferOutputs() override
void InferOutputs(std::vector<Variable>& outputs) override
{
// We determine the outputs by replacing the arguments of the composite with new placeholders with updated
// shape etc. information matching the corresponding mapped input
Expand All @@ -148,17 +148,14 @@ namespace CNTK

m_composite->ReplacePlaceholders(replacementMap);

std::vector<Variable> blockFunctionOutputs;
auto compositeOutputs = m_composite->RawOutputs();
for (auto compositeOutput : compositeOutputs)
{
auto output = OutputVariable(compositeOutput.Shape(), compositeOutput.GetDataType(), compositeOutput.DynamicAxes(), Name());
output.m_dataFields->m_blockFunctionVariableMapping = compositeOutput;

blockFunctionOutputs.push_back(output);
outputs.push_back(output);
}

return blockFunctionOutputs;
}

private:
Expand Down
23 changes: 12 additions & 11 deletions Source/CNTKv2LibraryDll/CompositeFunction.h
Original file line number Diff line number Diff line change
Expand Up @@ -93,9 +93,10 @@ namespace CNTK
NOT_IMPLEMENTED;
}

virtual std::vector<Variable> InferOutputs() override
void InferOutputs(std::vector<Variable>& outputs) override
{
return m_rootFunction->InitOutputs();
auto& inferred = m_rootFunction->InitOutputs();
outputs.assign(inferred.begin(), inferred.end());
}

virtual void Backward(const BackPropStatePtr& state,
Expand Down Expand Up @@ -146,29 +147,29 @@ namespace CNTK
}

template <typename FunctionType>
static void PreorderTraverseVariables(const FunctionPtr& rootFunction, const FunctionType& functor)
static void PreorderTraverseVariables(const FunctionPtr& rootFunction, const FunctionType& functor, bool pythonOperandOrder = false)
{
std::unordered_set<FunctionPtr> visitedFunctions;
PreorderTraverseVariables(rootFunction, visitedFunctions, functor);
PreorderTraverseVariables(rootFunction, visitedFunctions, functor, pythonOperandOrder);
}

// Recursively traverses the Function graph underlying the 'rootFunction' invoking the provided functor for all visited nodes in the graph.
template <typename FunctionType>
static void PreorderTraverseVariables(const FunctionPtr& rootFunction, std::unordered_set<FunctionPtr>& visitedFunctions, const FunctionType& functor)
static void PreorderTraverseVariables(const FunctionPtr& rootFunction, std::unordered_set<FunctionPtr>& visitedFunctions, const FunctionType& functor, bool pythonOperandOrder = false)
{
visitedFunctions.insert(rootFunction);
auto rootFunctionOutputs = rootFunction->InitOutputs();
for (const auto& rootOutput : rootFunctionOutputs)
functor(rootOutput);

auto rootFunctionInputs = rootFunction->Inputs();
auto rootFunctionInputs = rootFunction->Inputs(pythonOperandOrder);
for (const auto& rootInput : rootFunctionInputs)
{
functor(rootInput);
if (rootInput.IsOutput() && visitedFunctions.find(rootInput.Owner()) == visitedFunctions.end())
{
const auto& function = rootInput.Owner();
PreorderTraverseVariables(function, visitedFunctions, functor);
PreorderTraverseVariables(function, visitedFunctions, functor, pythonOperandOrder);
}
}
}
Expand Down Expand Up @@ -201,11 +202,11 @@ namespace CNTK
m_allPrimitiveFunctions(std::move(allPrimitiveFunctions)), m_networkMatricesAllocated(false)
{}

std::vector<Variable> DetermineInputs() const
std::vector<Variable> DetermineInputs(bool pythonOperandOrder = false) const
{
const auto& root = RootFunction();
std::unordered_set<FunctionPtr> visitedFunctions;
return DetermineInputs(root, visitedFunctions);
return DetermineInputs(root, visitedFunctions, pythonOperandOrder);
}

// Recursively traverses the Function graph and populates the provided set of functions.
Expand All @@ -216,7 +217,7 @@ namespace CNTK
}

// Recursively traverses the Function graph underlying the 'rootFunction' to determine all the leaves (aka inputs) of the graph
static std::vector<Variable> DetermineInputs(const FunctionPtr& rootFunction, std::unordered_set<FunctionPtr>& visitedFunctions)
static std::vector<Variable> DetermineInputs(const FunctionPtr& rootFunction, std::unordered_set<FunctionPtr>& visitedFunctions, bool pythonOperandOrder = false)
{
vector<FunctionPtr> functions;
std::vector<Variable> inputs;
Expand All @@ -227,7 +228,7 @@ namespace CNTK
inputs.push_back(var);
uniqueInputs.insert(var);
}
});
}, pythonOperandOrder);

return inputs;
}
Expand Down
Loading

0 comments on commit cb4ef42

Please sign in to comment.