removed #if-0'ed out code
frankseide committed Jan 23, 2016
1 parent 6dff1a8 commit af97079
Showing 27 changed files with 34 additions and 2,098 deletions.
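For context: #if 0 ... #endif is the C/C++ preprocessor idiom this commit cleans up. The preprocessor discards everything between the two directives before compilation, so the block acts as a comment that may itself contain code, /* */ comments, and nested #if/#endif pairs. A minimal self-contained sketch of the idiom (illustrative only, not code from this repository):

    #include <cstdio>

    int main()
    {
    #if 0
        // Dead code: the preprocessor drops this whole block, so it is
        // never compiled -- it may even reference undefined symbols.
        this_function_does_not_exist();
    #endif
        std::printf("only this line runs\n");
        return 0;
    }

Such blocks are convenient while debugging but rot quickly, since nothing ever compiles them; deleting them outright, as done here, is the usual cleanup.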
55 changes: 0 additions & 55 deletions Source/ActionsLib/TrainActions.cpp
@@ -272,58 +272,3 @@ void DoEdit(const ConfigParameters& config)

template void DoEdit<double>(const ConfigParameters& config);
template void DoEdit<float>(const ConfigParameters& config);
-
-#if 0
-// ===========================================================================
-// DoSequenceTrain() - implements CNTK "trainSequence" command
-// ===========================================================================
-
-// TODO: per discussion with Dong Yu, Guoguo Chen, and Yu Zhang, this function can be removed.
-template <typename ElemType>
-void DoSequenceTrain(const ConfigParameters& config)
-{
-    DEVICEID_TYPE deviceId = DeviceFromConfig(config);
-
-    ConfigParameters configSGD(config(L"SGD"));
-    bool makeMode = config(L"makeMode", "true");
-
-    ConfigParameters readerConfig(config(L"reader"));
-    readerConfig.Insert("traceLevel", config(L"traceLevel", "0"));
-
-    IComputationNetBuilder<ElemType>* netBuilder = NULL;
-    if (config.Exists("NDLNetworkBuilder"))
-    {
-        ConfigParameters configNDL(config(L"NDLNetworkBuilder"));
-        netBuilder = (IComputationNetBuilder<ElemType>*)new NDLBuilder<ElemType>(configNDL);
-    }
-    else if (config.Exists("SimpleNetworkBuilder"))
-    {
-        ConfigParameters configSNB(config(L"SimpleNetworkBuilder"));
-        netBuilder = (IComputationNetBuilder<ElemType>*)new SimpleNetworkBuilder<ElemType>(configSNB);
-    }
-    else
-    {
-        RuntimeError("No network builder found in the config file. NDLNetworkBuilder or SimpleNetworkBuilde must be specified");
-    }
-
-    DataReader<ElemType>* dataReader = new DataReader<ElemType>(readerConfig);
-
-    DataReader<ElemType>* cvDataReader = nullptr;
-    ConfigParameters cvReaderConfig(config(L"cvReader", L""));
-
-    if (cvReaderConfig.size() != 0)
-    {
-        cvReaderConfig.Insert("traceLevel", config(L"traceLevel", "0"));
-        cvDataReader = new DataReader<ElemType>(cvReaderConfig);
-    }
-
-    wstring origModelFileName = config(L"origModelFileName", L"");
-
-    SGD<ElemType> sgd(configSGD);
-
-    sgd.SequenceTrain(netBuilder, origModelFileName, dataReader, cvDataReader, deviceId, makeMode);
-
-    delete dataReader;
-    delete cvDataReader;
-}
-#endif
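A side note on the deleted body above: dataReader and cvDataReader were owned through bare new/delete, so an exception inside SGD::SequenceTrain would have leaked both. Purely as an illustration of the alternative shape (a generic sketch, not CNTK code; Reader and haveCvConfig are made-up stand-ins), the same cleanup expressed with std::unique_ptr:

    #include <cstdio>
    #include <memory>

    struct Reader // stand-in for DataReader<ElemType>
    {
        Reader() { std::puts("reader opened"); }
        ~Reader() { std::puts("reader closed"); }
    };

    int main()
    {
        auto dataReader = std::make_unique<Reader>();

        std::unique_ptr<Reader> cvDataReader; // stays null when no cvReader section exists
        bool haveCvConfig = false;            // stand-in for cvReaderConfig.size() != 0
        if (haveCvConfig)
            cvDataReader = std::make_unique<Reader>();

        // ... training would run here; any exception still runs the destructors ...

        return 0; // no explicit delete: both readers are released on scope exit
    }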
23 changes: 1 addition & 22 deletions Source/CNTK/CNTK.cpp
@@ -175,11 +175,7 @@ void DoCommands(const ConfigParameters& config)
        // determine the action to perform, and do it
        for (int j = 0; j < action.size(); j++)
        {
-            if (action[j] == "train" || action[j] == "trainRNN"
-#if 0
-                || action[j] == "trainSequence" || action[j] == "trainSequenceRNN"
-#endif
-                )
+            if (action[j] == "train" || action[j] == "trainRNN")
            {
                wstring modelPath = commandParams("modelPath");
                std::wcerr << "CNTKModelPath: " << modelPath << endl;
@@ -221,15 +217,6 @@ void DoCommands(const ConfigParameters& config)
                std::cerr << "CNTKCommandTrainEnd: " + command[i] << endl;
                fullEpochsOffset += GetMaxEpochs(commandParams);
            }
-#if 0
-            else if (action[j] == "trainSequence" || action[j] == "trainSequenceRNN")
-            {
-                std::cerr << "CNTKCommandTrainBegin: " + command[i] << endl;
-                DoSequenceTrain<ElemType>(commandParams);
-                std::cerr << "CNTKCommandTrainEnd: " + command[i] << endl;
-                fullEpochsOffset += GetMaxEpochs(commandParams);
-            }
-#endif
            else if (action[j] == "adapt")
            {
                DoAdapt<ElemType>(commandParams);
@@ -291,16 +278,8 @@

std::string TimeDateStamp()
{
-#if 0 // "safe" version for Windows, not needed it seems
-    __time64_t localtime;
-
-    _time64(&localtime); // get current time and date
-    struct tm now;
-    _localtime64_s(&now, &localtime); // convert
-#else
    time_t t = time(NULL);
    struct tm now = *localtime(&t);
-#endif
    char buf[30];
    sprintf(buf, "%04d/%02d/%02d %02d:%02d:%02d", now.tm_year + 1900, now.tm_mon + 1, now.tm_mday, now.tm_hour, now.tm_min, now.tm_sec);
    return buf;
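The branch that survives in TimeDateStamp calls localtime(), which returns a pointer to static storage shared across threads; the deleted #if 0 branch was the Windows reentrant variant (_localtime64_s). If thread safety ever matters here, a portable reentrant form could look like this (a sketch with a hypothetical name, not code from this commit):

    #include <cstdio>
    #include <ctime>
    #include <string>

    // Hypothetical thread-safe variant of TimeDateStamp(); not part of this commit.
    std::string TimeDateStampSafe()
    {
        time_t t = time(NULL);
        struct tm now;
    #ifdef _WIN32
        localtime_s(&now, &t);  // Windows reentrant variant (destination comes first)
    #else
        localtime_r(&t, &now);  // POSIX reentrant variant (fills the caller's struct)
    #endif
        char buf[30];
        snprintf(buf, sizeof(buf), "%04d/%02d/%02d %02d:%02d:%02d",
                 now.tm_year + 1900, now.tm_mon + 1, now.tm_mday,
                 now.tm_hour, now.tm_min, now.tm_sec);
        return buf;
    }

The sprintf in the committed code is safe as written (the stamp is 19 characters plus the terminator, within buf[30]), so the difference is purely about reentrancy.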
4 changes: 1 addition & 3 deletions Source/CNTK/SimpleNetworkBuilder.cpp
@@ -1559,9 +1559,7 @@ ComputationNetworkPtr SimpleNetworkBuilder<ElemType>::BuildNetworkFromDbnFile(co
        if (layerType == "perceptron") // complete network
        {
            m_net->RenameNode(output, L"HLast");
-#if 0
-            assert(numLayers + 1 == m_layerSizes.size());
-#endif
+
            Matrix<ElemType> priorVals = ReadMatrixFromDbnFile(fstream, std::string("Pu"));
            assert(priorVals.GetNumCols() == 1 && priorVals.GetNumRows() == m_outputLayerSize);

49 changes: 0 additions & 49 deletions Source/CNTK/SynchronousExecutionEngine.cpp
@@ -161,55 +161,6 @@ void SynchronousNodeEvaluator<ElemType>::Evaluate(NDLNode<ElemType>* node, const
                RuntimeError("'init' must be one of the values of [ uniform | gaussian | fixedValue ]");
            }
        }
-#if 0 // not functional at present
-        else if (OperationNameOf(SparseLearnableParameter) == cnNodeType)
-        {
-            if (parameter.size() < 1 || parameter.size() > 2)
-                RuntimeError("%ls should have 1 or 2 parameters[rows, [cols=1]] plus other optional parameters (needGradient=[true|false], init=[uniform|gaussian|fixedvalue], initValueScale=[1|float], value=[0|float]).", cnNodeType.c_str());
-
-            if (pass == ndlPassInitial)
-            {
-                // evaluate only scalar parameters
-                vector<void*> params = EvaluateParameters(node, baseName, 0, parameter.size(), pass);
-                size_t rows = ((NDLNode<ElemType>*)params[0])->GetScalar();
-                size_t cols = params.size() > 1 ? ((NDLNode<ElemType>*)params[1])->GetScalar() : 1;
-
-                bool needGradient = node->GetOptionalParameter("needGradient", "true");
-
-                nodePtr = builder.CreateSparseLearnableParameter(name, rows, cols);
-
-                nodePtr->SetParameterUpdateRequired(needGradient);
-            }
-            else if (pass == ndlPassFinal)
-            {
-                static int randomSeed = 1;
-                wstring initString = node->GetOptionalParameter("init", "uniform");
-                ElemType initValueScale = node->GetOptionalParameter("initValueScale", "1");
-                ElemType value = node->GetOptionalParameter("value", "0");
-
-                if (!_wcsicmp(initString.c_str(), L"fixedValue"))
-                    nodePtr->Value().SetValue(value);
-                else if (!_wcsicmp(initString.c_str(), L"uniform"))
-                    m_net->InitLearnableParameters(nodePtr, true, randomSeed++, initValueScale);
-                else if (!_wcsicmp(initString.c_str(), L"gaussian"))
-                    m_net->InitLearnableParameters(nodePtr, false, randomSeed++, initValueScale);
-                else if (!_wcsicmp(initString.c_str(), L"fromFile"))
-                {
-                    std::string initFromFilePath = node->GetOptionalParameter("initFromFilePath", "");
-                    if (initFromFilePath == "")
-                        RuntimeError("initFromFilePath must be set when using \"fromFile\" initialization method");
-                    if(initFromFilePath[0] == '\"' && initFromFilePath[initFromFilePath.size()-1] == '\"')
-                        // remove the opening and closing double quotes
-                        initFromFilePath = initFromFilePath.substr(1, initFromFilePath.size()-2);
-                    if(!fexists(initFromFilePath))
-                        RuntimeError("File pointed to by initFromFilePath does not exist: %s", initFromFilePath.c_str());
-                    dynamic_pointer_cast<SparseLearnableParameter<ElemType>>(nodePtr)->InitFromFile(msra::strfun::utf16(initFromFilePath));
-                }
-                else
-                    RuntimeError("init must be one of the values of [ uniform | gaussian | fixedValue ]");
-            }
-        }
-#endif
        else if (cnNodeType == L"Constant")
        {
            if (parameter.size() != 1)
9 changes: 3 additions & 6 deletions Source/Common/Include/Config.h
@@ -112,13 +112,10 @@ class ConfigValue : public std::string
    {
    }

-// it auto-casts to the common types
-// Note: This is meant to read out a parameter once to assign it, instead of over again.
+    // it auto-casts to the common types
+    // Note: This is meant to read out a parameter once to assign it, instead of over again.
#if 0
-    operator std::string() const
-    {
-        return *this;
-    } // TODO: does not seem to work
+    operator std::string() const { return *this; } // TODO: does not seem to work
#endif

    operator const char*() const
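The TODO in the #if 0 block above ("does not seem to work") has a standard-C++ explanation worth recording: ConfigValue derives from std::string, and a user-defined conversion function is never used to convert an object to its own base class; the built-in derived-to-base conversion always wins. A minimal demonstration (illustrative, not CNTK code):

    #include <cstdio>
    #include <string>

    struct Value : std::string
    {
        using std::string::string;
        operator std::string() const // never selected: derived-to-base conversion wins
        {
            std::puts("conversion operator called");
            return "converted";
        }
    };

    int main()
    {
        Value v("raw");
        std::string s = v;             // copies the base subobject, not the operator's result
        std::printf("%s\n", s.c_str()); // prints "raw", and nothing else
        return 0;
    }

So the operator is dead weight whether or not it compiles, which is presumably why it was left inside #if 0.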
56 changes: 5 additions & 51 deletions Source/Common/Include/Sequences.h
@@ -127,12 +127,6 @@ struct MBLayout
        // remember the dimensions
        m_numParallelSequences = numParallelSequences;
        m_numTimeSteps = numTimeSteps;
-        // allocate lookup tables (note: except at the start, these don't really allocate new memory most of the time)
-#if 0
-        if ((m_distanceToStart.GetNumRows() != m_numParallelSequences || m_distanceToStart.GetNumCols() != m_numTimeSteps) && m_numTimeSteps > 0) // sanity check for debugging a regression
-            fprintf(stderr, "MBLayout::Init: Resizing m_distanceToStart from %d x %d to %d x %d\n",
-                    (int)m_distanceToStart.GetNumRows(), (int)m_distanceToStart.GetNumCols(), (int)m_numParallelSequences, (int)m_numTimeSteps); // (I really want to know about actual allocations, but this is a necessary condition for them)
-#endif
        m_distanceToStart.Resize(m_numParallelSequences, m_numTimeSteps);
        m_distanceToEnd.Resize(m_numParallelSequences, m_numTimeSteps);
        m_distanceToNearestStart.assign(m_numTimeSteps, PTRDIFF_MAX);
@@ -175,13 +169,7 @@ struct MBLayout
    // This is used by MeanNode and InvStdDevNode, and by statistics reporting.
    size_t GetActualNumSamples() const;

-    const Matrix<char> &GetColumnsValidityMask(DEVICEID_TYPE deviceId) const;
-
-#if 0 // in the future we can use the tensor lib to implement this
-    template<class ElemType> const Matrix<ElemType> GetColumnsValidMask() const;
-    template<> const Matrix<float> GetColumnsValidMask<float>() const { return m_distanceToStart.Reshaped(1, m_distanceToStart.GetNumElements()); }
-    template<> const Matrix<double> GetColumnsValidMask<double>() const { NOT_IMPLEMENTED; }
-#endif
+    const Matrix<char>& GetColumnsValidityMask(DEVICEID_TYPE deviceId) const;

    // compare whether two layouts are the same
    bool operator==(const MBLayout &other) const
@@ -231,15 +219,8 @@ struct MBLayout
        if (beginTime >= (ptrdiff_t) m_numTimeSteps) // no need to test endTime since it is always non-negative (size_t)
            LogicError("AddSequence: Sequence added to an MBLayout must overlap with minibatch.");

-        // remember it
-#if 0 // def _DEBUG
-        auto cap = m_sequences.capacity(); // Some sanity check for debugging a speed regression. This should only show up during the first minibatches, and growing only.
-        m_sequences.push_back(seqDesc);
-        if (cap != m_sequences.capacity())
-            fprintf(stderr, "AddSequence: m_sequences was reallocated from capacity %d to %d\n", (int)cap, (int)m_sequences.capacity());
-#else
+        // remember it
        m_sequences.push_back(seqDesc);
-#endif

        // create all the cached fast-lookup information
        const auto seqId = seqDesc.seqId;
@@ -424,15 +405,12 @@ struct MBLayout
    mutable bool m_writable;

public:
-    // -------------------------------------------------------------------
-    // special deprecated functions that are result of refactoring (to go away)
-    // -------------------------------------------------------------------

-    // only used in sequence training, must be replaced by a different mechanism
+    // special accessor for sequence training --TODO: must be replaced by a different mechanism
    bool IsEnd(size_t s, size_t t) const
    {
        auto distanceToStart = (ptrdiff_t) m_distanceToStart(s, t);
-#if 1 // I don't exactly know what this does, so try assert() fifst
+#if 1 // I don't exactly know what this does, so try assert() first
        assert(distanceToStart != -1);
        distanceToStart;
#else
@@ -683,31 +661,7 @@ inline bool MBLayout::IsBeyondStartOrEnd(const FrameRange &fr) const
}

// TODO: Remove this version (with sanity checks) after this has been tested. Then the function can be inlined above.
-inline size_t MBLayout::GetActualNumSamples() const
-{
-#if 0 // sanity check --TODO: delete this after a while
-    size_t n = GetNumCols();
-    if (HasGaps())
-    {
-        for (size_t t = 0; t < GetNumTimeSteps(); t++)
-        {
-            FrameRange fr(nullptr, t);
-            if (IsGap(fr))
-            {
-                for (size_t s = 0; s < GetNumParallelSequences(); s++)
-                {
-                    if (IsGap(fr.Sequence(s)))
-                        n--;
-                }
-            }
-        }
-    }
-    if (m_numGapFrames != GetNumCols() - n)
-        LogicError("GetActualNumSamples: Gap counting broken, measured %d vs. originally counted %d", (int)(GetNumCols() - n), (int)m_numGapFrames);
-    assert(m_numFramesDeclared - m_numGapFrames == n);
-#endif
-    return m_numFramesDeclared - m_numGapFrames;
-}
+inline size_t MBLayout::GetActualNumSamples() const { return m_numFramesDeclared - m_numGapFrames; }

// return m_columnsValidityMask(,), which is lazily created here upon first call
// only called from MaskMissingColumnsTo()
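The surviving one-liner relies on an invariant that the deleted sanity check verified by brute force: the number of usable samples equals the frames declared minus the frames marked as gaps. A toy illustration of that counter-based bookkeeping (not CNTK code; all names are made up):

    #include <cassert>
    #include <cstddef>

    struct ToyLayout
    {
        size_t framesDeclared = 0;
        size_t gapFrames = 0;

        void addFrames(size_t n, size_t gaps) // gaps <= n
        {
            framesDeclared += n;
            gapFrames += gaps;
        }

        // the O(1) query the commit keeps
        size_t actualNumSamples() const { return framesDeclared - gapFrames; }
    };

    int main()
    {
        ToyLayout layout;
        layout.addFrames(10, 0); // one full sequence of 10 frames
        layout.addFrames(10, 4); // a shorter sequence padded with 4 gap frames
        assert(layout.actualNumSamples() == 16);
        return 0;
    }

The deleted version recomputed the same number by scanning every (sequence, time-step) cell, which is O(s*t) per call; maintaining the two counters makes the query a constant-time subtraction.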
20 changes: 0 additions & 20 deletions Source/Common/Include/latticearchive.h
@@ -5,8 +5,6 @@
// latticearchive.h -- managing lattice archives
//

-#if 0
-#endif
#pragma once

#undef HACK_IN_SILENCE // [v-hansu] hack to simulate DEL in the lattice
@@ -449,18 +447,6 @@ class lattice
        align.swap(newalign);
        edges.shrink_to_fit(); // [v-hansu] might be useful when RAM is out of use
        align.shrink_to_fit();
-
-#if 0 // [v-hansu] to dump lattice for checking
-        static size_t countdump = 0;
-        FILE *f = fopen ("lattice", "a");
-        foreach_index (j, edges)
-            fprintf (f, "S=%d\tE=%d\tunused=%d\n", edges[j].S, edges[j].E, edges[j].unused);
-        countdump++;
-        fflush(f);
-        fclose(f);
-        if (countdump == 10)
-            exit(0);
-#endif
    }
    // go back from V2 format to edges and align, so old code can still run
    // This will go away one we updated all code to use the new data structures.
@@ -995,12 +981,6 @@ class lattice
        // map align ids to user's symmap --the lattice gets updated in place here
        foreach_index (k, align)
            align[k].updateunit(idmap); // updates itself
-#if 0 // TODO: this is not complete. Enable once we move to more compact5 data structure.
-        // showstats();
-        // version 1 is outdated --we build the compact version now
-        // TODO: once all is converted, edges() will become a local variable here
-        buildedgegroupstorage();
-#endif
    }
    else if (version == 2)
    {
[Diffs for the remaining 20 changed files are not shown here.]
