Enable PrintBuildInfo for both Windows and Linux
Yongqiang Wang committed Nov 25, 2015
1 parent 2043cd1 commit 33cf6a0
Showing 3 changed files with 250 additions and 123 deletions.
250 changes: 129 additions & 121 deletions MachineLearning/CNTK/CNTK.cpp
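The heart of the change is PrintBuiltInfo(): instead of compiling the whole function (and its call sites) only under #ifdef _WIN32, each optional build-description macro — _BUILDTYPE_, _MATHLIB_, _CUDA_PATH_, _CUB_PATH_, _GIT_EXIST with _BUILDBRANCH_/_BUILDSHA1_, _BUILDER_/_BUILDMACHINE_, _BUILDPATH_ — now gets its own #ifdef, and the buildinfo.h include moves out of the Windows-only block. That way the function compiles against whatever subset of macros a platform's buildinfo.h defines. For orientation only, a Linux-side buildinfo.h might look roughly like the sketch below; the macro names are the ones used in the diff, but every value here is invented and the real header is presumably emitted by a prebuild step, not written by hand.

// Hypothetical generated buildinfo.h -- illustration only, all values are made up.
// Any macro a platform does not provide is simply left undefined, and the
// rewritten PrintBuiltInfo() skips the corresponding output line.
#ifndef _BUILDINFO_H_
#define _BUILDINFO_H_

#define _BUILDTYPE_    "release"
#define _MATHLIB_      "acml"
#define _CUDA_PATH_    "/usr/local/cuda-7.0"
#define _CUB_PATH_     "/usr/local/cub-1.4.1"
#define _GIT_EXIST
#define _BUILDBRANCH_  "master"
#define _BUILDSHA1_    "33cf6a0"
#define _BUILDER_      "builduser"
#define _BUILDMACHINE_ "buildhost"
#define _BUILDPATH_    "/home/builduser/cntk"

#endif // _BUILDINFO_H_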
@@ -17,8 +17,8 @@
#include <algorithm>
#if defined(_WIN32)
#include "io.h"
-#include "buildinfo.h"
#endif
+#include "buildinfo.h"
#include "hostname.h"
#ifdef LEAKDETECT
#include "vld.h" // for memory leak detection
@@ -50,12 +50,12 @@
#include "ProgressTracing.h"
#include "fileutil.h"
#include "ScriptableObjects.h"
#include "BrainScriptEvaluator.h"
#include "BrainScriptParser.h"

#ifndef let
#define let const auto
#endif

// TODO: Get rid of this global
Microsoft::MSR::CNTK::MPIWrapper *g_mpi = nullptr;
@@ -775,80 +775,80 @@ void DoWriteWordAndClassInfo(const ConfigParameters& config)
    }
}

template<class ElemType>
class BrainScriptNetworkBuilder : public IComputationNetBuilder<ElemType>
{
    typedef shared_ptr<ComputationNetwork> ComputationNetworkPtr;
    ComputationNetworkPtr m_net;
    ScriptableObjects::ConfigLambdaPtr m_createNetworkFn;
    DEVICEID_TYPE m_deviceId;
public:
    // the constructor remembers the config lambda
    // TODO: Really this should just take the lambda itself, or rather, this class should just be replaced by a lambda. But we need the IConfigRecord for templates to be compile-compatible with old CNTK config.
    BrainScriptNetworkBuilder(const ScriptableObjects::IConfigRecord & config)
    {
        m_deviceId = config[L"deviceId"]; // TODO: only needed for LoadNetworkFromFile() which should go away anyway
        m_createNetworkFn = config[L"createNetwork"].AsPtr<ScriptableObjects::ConfigLambda>();
    }
    // not supported for old CNTK
    BrainScriptNetworkBuilder(const ConfigParameters & config) { NOT_IMPLEMENTED; }

    // build a ComputationNetwork from description language
    virtual /*IComputationNetBuilder::*/ComputationNetworkPtr BuildNetworkFromDescription(ComputationNetwork* = nullptr) override
    {
        vector<ScriptableObjects::ConfigValuePtr> args; // this lambda has no arguments
        ScriptableObjects::ConfigLambda::NamedParams namedArgs;
        let netValue = m_createNetworkFn->Apply(move(args), move(namedArgs), L"BuildNetworkFromDescription");
        m_net = netValue.AsPtr<ComputationNetwork>();
        if (m_net->GetDeviceId() < 0)
            fprintf(stderr, "BrainScriptNetworkBuilder using CPU\n");
        else
            fprintf(stderr, "BrainScriptNetworkBuilder using GPU %d\n", (int)m_net->GetDeviceId());
        return m_net;
    }

    // load an existing file--this is the same code as for NDLNetworkBuilder.h (OK to copy it here because this is temporary code anyway)
    // TODO: This does not belong into NetworkBuilder, since the code is the same for all. Just create the network and load the darn thing.
    virtual /*IComputationNetBuilder::*/ComputationNetwork* LoadNetworkFromFile(const wstring& modelFileName, bool forceLoad = true,
                                                                                bool bAllowNoCriterionNode = false, ComputationNetwork* anotherNetwork = nullptr) override
    {
        if (!m_net || m_net->GetTotalNumberOfNodes() == 0 || forceLoad) //not built or force load --TODO: why all these options?
        {
            auto net = make_shared<ComputationNetwork>(m_deviceId);
            net->LoadFromFile<ElemType>(modelFileName, FileOptions::fileOptionsBinary, bAllowNoCriterionNode, anotherNetwork);
            m_net = net;
        }
        m_net->ResetEvalTimeStamp();
        return m_net.get();
    }
};

// TODO: decide where these should go. Also, do we need three variables?
extern wstring standardFunctions;
extern wstring commonMacros;
extern wstring computationNodes;

// helper that returns 'float' or 'double' depending on ElemType
template<class ElemType> static const wchar_t * ElemTypeName();
template<> /*static*/ const wchar_t * ElemTypeName<float>() { return L"float"; }
template<> /*static*/ const wchar_t * ElemTypeName<double>() { return L"double"; }

function<ComputationNetworkPtr(DEVICEID_TYPE)> GetCreateNetworkFn(const ScriptableObjects::IConfigRecord & config)
{
    // createNetwork() is a BrainScript lambda that creates the model
    // We create a C++ wrapper around it, which we then pass to Train().
    auto createNetworkConfigLambda = config[L"createNetwork"].AsPtr<ScriptableObjects::ConfigLambda>();
    return [createNetworkConfigLambda](DEVICEID_TYPE /*deviceId*/)
    {
        // execute the lambda
        vector<ScriptableObjects::ConfigValuePtr> args; // this lambda has no arguments
        ScriptableObjects::ConfigLambda::NamedParams namedArgs;
        let netValue = createNetworkConfigLambda->Apply(move(args), move(namedArgs), L"BuildNetworkFromDescription");
        // typecast the result to the desired type
        return netValue.AsPtr<ComputationNetwork>();
    };
}
function<ComputationNetworkPtr(DEVICEID_TYPE)> GetCreateNetworkFn(const ConfigParameters &) { NOT_IMPLEMENTED; } // old CNTK config does not support lambdas

// function to create an object of a certain type, using both old CNTK config and BrainScript
template<class C>
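Aside (not part of the diff): the factory returned by GetCreateNetworkFn is a plain std::function, so callers such as DoTrain() in the next hunk can stash it and build the network only when it is actually needed. A minimal usage sketch, assuming a ScriptableObjects::IConfigRecord named config and a DEVICEID_TYPE named deviceId are in scope:

// Sketch only: fetch the network factory from the BrainScript config record,
// then instantiate the network on demand. Note that the lambda above ignores
// its deviceId argument (the parameter name is commented out in its signature).
function<ComputationNetworkPtr(DEVICEID_TYPE)> createNetworkFn = GetCreateNetworkFn(config);
ComputationNetworkPtr net = createNetworkFn(deviceId); // runs the BrainScript createNetwork() lambda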
@@ -901,26 +901,26 @@ void DoTrain(const ConfigRecordType & config)
    // legacy test mode for BrainScript. Will go away once we fully integrate with BS.
    else if (config.Exists(L"ExperimentalNetworkBuilder"))
    {
        // We interface with outer old CNTK config by taking the inner part, which we get as a string, as BrainScript.
        // We prepend a few standard definitions, and also definition of deviceId and precision, which all objects will pull out again when they are being constructed.
        // BUGBUG: We are not getting TextLocations right in this way! Do we need to inject location markers into the source? Moot once we fully switch to BS
        wstring sourceCode = config(L"ExperimentalNetworkBuilder");
        let expr = BS::ParseConfigDictFromString(standardFunctions + computationNodes + commonMacros
            + msra::strfun::wstrprintf(L"deviceId = %d ; precision = '%ls' ; network = new ComputationNetwork ", (int)deviceId, ElemTypeName<ElemType>()) // TODO: check if typeid needs postprocessing
            + sourceCode, vector<wstring>()); // source code has the form [ ... ]
        createNetworkFn = [expr](DEVICEID_TYPE /*deviceId*/)
        {
            // evaluate the parse tree--specifically the top-level field 'network'--which will create the network
            let object = EvaluateField(expr, L"network"); // this comes back as a BS::Object
            let network = dynamic_pointer_cast<ComputationNetwork>(object); // cast it
            // This should not really fail since we constructed the source code above such that this is the right type.
            // However, it is possible (though currently not meaningful) to locally declare a different 'precision' value.
            // In that case, the network might come back with a different element type. We need a runtime check for that.
            if (!network)
                RuntimeError("BuildNetworkFromDescription: network has the wrong element type (float vs. double)");
            // success
            network->ResetEvalTimeStamp();
            return network;
        };
    }
    else
@@ -948,32 +948,32 @@ void DoTrain(const ConfigRecordType & config)
    optimizer->Train(createNetworkFn, deviceId, dataReader.get(), cvDataReader.get(), makeMode);
}

namespace Microsoft { namespace MSR { namespace ScriptableObjects {

    using namespace Microsoft::MSR::CNTK;

    // -----------------------------------------------------------------------
    // register ComputationNode with the ScriptableObject system
    // -----------------------------------------------------------------------

    class TrainAction { };
    template<> shared_ptr<Object> MakeRuntimeObject<TrainAction>(const IConfigRecordPtr configp)
    {
        const IConfigRecord & config = *configp;
        wstring precision = config[L"precision"]; // dispatch on ElemType
        if (precision == L"float")
            DoTrain<IConfigRecord, float>(config);
        else if (precision == L"double")
            DoTrain<IConfigRecord, double>(config);
        else
            RuntimeError("invalid value '%ls' for 'precision', must be 'float' or 'double'", precision.c_str());

        return make_shared<Object>(); // return a dummy object
    }

    // register ComputationNode with the ScriptableObject system
    ScriptableObjects::ConfigurableRuntimeTypeRegister::Add<TrainAction> registerTrainAction(L"TrainAction");
}}}

template <typename ElemType>
void DoAdapt(const ConfigParameters& config)
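The TrainAction block above shows the general recipe for exposing an action to BrainScript: declare a tag class, specialize MakeRuntimeObject<> to read the config record and dispatch on 'precision', and register the type under a name. Purely as an illustration of that shape — EvalAction and DoEval below are invented for this sketch and are not part of this commit — a second action would be wired up the same way:

// Hypothetical EvalAction mirroring the TrainAction registration above.
// DoEval<ConfigRecordType, ElemType>(config) is assumed to exist for illustration only.
class EvalAction { };
template<> shared_ptr<Object> MakeRuntimeObject<EvalAction>(const IConfigRecordPtr configp)
{
    const IConfigRecord & config = *configp;
    wstring precision = config[L"precision"]; // dispatch on ElemType, exactly as TrainAction does
    if (precision == L"float")
        DoEval<IConfigRecord, float>(config);
    else if (precision == L"double")
        DoEval<IConfigRecord, double>(config);
    else
        RuntimeError("invalid value '%ls' for 'precision', must be 'float' or 'double'", precision.c_str());
    return make_shared<Object>(); // dummy object, as above
}
ScriptableObjects::ConfigurableRuntimeTypeRegister::Add<EvalAction> registerEvalAction(L"EvalAction");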
@@ -1373,7 +1373,7 @@ void DoConvertFromDbn(const ConfigParameters& config)
    wstring dbnModelPath = config(L"dbnModelPath");

    auto netBuilder = make_shared<SimpleNetworkBuilder<ElemType>>(config);
    ComputationNetworkPtr net = netBuilder->BuildNetworkFromDbnFile(dbnModelPath);
    net->SaveToFile(modelPath);
}

@@ -1643,24 +1643,36 @@ std::string TimeDateStamp()
    return buf;
}

-#ifdef _WIN32
void PrintBuiltInfo()
{
    fprintf(stderr, "-------------------------------------------------------------------\n");
    fprintf(stderr, "Build info: \n\n");
    fprintf(stderr, "\t\tBuilt time: %s %s\n", __DATE__, __TIME__);
    fprintf(stderr, "\t\tLast modified date: %s\n", __TIMESTAMP__);
-    fprintf(stderr, "\t\tBuilt by %s on %s\n", _BUILDER_, _BUILDMACHINE_);
-    fprintf(stderr, "\t\tBuild Path: %s\n", _BUILDPATH_);
#ifdef _BUILDTYPE_
    fprintf(stderr, "\t\tBuild type: %s\n", _BUILDTYPE_);
#endif
#ifdef _MATHLIB_
    fprintf(stderr, "\t\tMath lib: %s\n", _MATHLIB_);
#endif
+#ifdef _CUDA_PATH_
+    fprintf(stderr, "\t\tCUDA_PATH: %s\n", _CUDA_PATH_);
+#endif
+#ifdef _CUB_PATH_
+    fprintf(stderr, "\t\tCUB_PATH: %s\n", _CUB_PATH_);
+#endif
+#ifdef _GIT_EXIST
+    fprintf(stderr, "\t\tBuild Branch: %s\n", _BUILDBRANCH_);
+    fprintf(stderr, "\t\tBuild SHA1: %s\n", _BUILDSHA1_);
+#endif
+#ifdef _BUILDER_
+    fprintf(stderr, "\t\tBuilt by %s on %s\n", _BUILDER_, _BUILDMACHINE_);
+#endif
+#ifdef _BUILDPATH_
+    fprintf(stderr, "\t\tBuild Path: %s\n", _BUILDPATH_);
+#endif
    fprintf(stderr, "-------------------------------------------------------------------\n");

}
-#endif

void PrintUsageInfo()
{
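Stripped of the CNTK specifics, the rewritten PrintBuiltInfo() relies on one simple pattern: guard both the test and the use of every optional macro, so the same translation unit builds whether the definitions come from a Windows project file or a Linux Makefile. A self-contained toy version of that pattern (none of this is CNTK code, and the -D values in the comment are invented):

// Toy illustration of the per-macro #ifdef pattern used by PrintBuiltInfo() above.
// Example build (macro values are made up):
//   g++ -D_BUILDTYPE_='"release"' -D_MATHLIB_='"mkl"' buildinfo_demo.cpp -o buildinfo_demo
#include <cstdio>

static void PrintBuildInfoDemo()
{
    std::fprintf(stderr, "Build info:\n");
#ifdef _BUILDTYPE_
    std::fprintf(stderr, "\tBuild type: %s\n", _BUILDTYPE_); // emitted only if the macro is defined
#endif
#ifdef _MATHLIB_
    std::fprintf(stderr, "\tMath lib: %s\n", _MATHLIB_);
#endif
#ifdef _CUDA_PATH_
    std::fprintf(stderr, "\tCUDA_PATH: %s\n", _CUDA_PATH_);
#endif
    // Macros that are not defined simply drop out at preprocessing time,
    // so no platform has to provide the full set.
}

int main()
{
    PrintBuildInfoDemo();
    return 0;
}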
@@ -1733,7 +1745,7 @@ int wmainWithBS(int argc, wchar_t* argv[]) // called from wmain which is a wra

    // change working directory
    if (workingDir != L"")
        _wchdir(workingDir.c_str());

    // compile the BrainScript
    wstring bs = L"[\n";
@@ -1748,7 +1760,7 @@

    let expr = BS::ParseConfigExpression(bs, move(includePaths)); // parse
    let valp = BS::Evaluate(expr); // evaluate parse into a dictionary
    let & config = valp.AsRef<ScriptableObjects::IConfigRecord>(); // this is the dictionary

    // legacy parameters that have changed spelling
    if (config.Find(L"DoneFile")) // variables follow camel case (start with lower-case letters)
@@ -1779,9 +1791,7 @@ int wmainWithBS(int argc, wchar_t* argv[]) // called from wmain which is a wra
    }

    // echo config info to log
-#ifdef _WIN32
    PrintBuiltInfo();
-#endif

    // execute the actions
    //std::string type = config(L"precision", "float");
@@ -1867,9 +1877,7 @@ int wmainOldCNTKConfig(int argc, wchar_t* argv[]) // called from wmain which i
        RedirectStdErr(logpath);
    }

-#ifdef _WIN32
    PrintBuiltInfo();
-#endif
    std::string timestamp = TimeDateStamp();

    //dump config info