This repository has been archived by the owner on Jan 27, 2021. It is now read-only.

Address comments from CR
ivrodr-msft committed Oct 24, 2016
1 parent d1d02cb commit 8d97413
Showing 5 changed files with 44 additions and 46 deletions.
16 changes: 8 additions & 8 deletions bindings/python/cntk/initializer.py
@@ -18,7 +18,7 @@ def uniform(scale=DefaultParamInitScale, seed=None):
         seed (`int`): random seed

     Returns:
-        initializer for `:class:cntk.variables.Parameter`
+        initializer for :class:`cntk.variables.Parameter`
     '''
     if seed is None:
         seed = SentinelValueForAutoSelectRandomSeed
@@ -36,7 +36,7 @@ def gaussian(output_rank=SentinelValueForInferParamInitRank, filter_rank=Sentine
         seed (`int`): random seed

     Returns:
-        initializer for `:class:cntk.variables.Parameter`
+        initializer for :class:`cntk.variables.Parameter`
     '''
     if seed is None:
         seed = SentinelValueForAutoSelectRandomSeed
@@ -54,7 +54,7 @@ def xavier(output_rank=SentinelValueForInferParamInitRank, filter_rank=SentinelV
         seed (`int`): random seed

     Returns:
-        initializer for `:class:cntk.variables.Parameter`
+        initializer for :class:`cntk.variables.Parameter`
     '''
     if seed is None:
         seed = SentinelValueForAutoSelectRandomSeed
@@ -72,7 +72,7 @@ def glorot_uniform(output_rank=SentinelValueForInferParamInitRank, filter_rank=S
         seed (`int`): random seed

     Returns:
-        initializer for `:class:cntk.variables.Parameter`
+        initializer for :class:`cntk.variables.Parameter`
     '''
     if seed is None:
         seed = SentinelValueForAutoSelectRandomSeed
@@ -90,7 +90,7 @@ def glorot_normal(output_rank=SentinelValueForInferParamInitRank, filter_rank=Se
         seed (`int`): random seed

     Returns:
-        initializer for `:class:cntk.variables.Parameter`
+        initializer for :class:`cntk.variables.Parameter`
     '''
     if seed is None:
         seed = SentinelValueForAutoSelectRandomSeed
@@ -108,7 +108,7 @@ def he_uniform(output_rank=SentinelValueForInferParamInitRank, filter_rank=Senti
         seed (`int`): random seed

     Returns:
-        initializer for `:class:cntk.variables.Parameter`
+        initializer for :class:`cntk.variables.Parameter`
     '''
     if seed is None:
         seed = SentinelValueForAutoSelectRandomSeed
@@ -126,7 +126,7 @@ def he_normal(output_rank=SentinelValueForInferParamInitRank, filter_rank=Sentin
         seed (`int`): random seed

     Returns:
-        initializer for `:class:cntk.variables.Parameter`
+        initializer for :class:`cntk.variables.Parameter`
     '''
     if seed is None:
         seed = SentinelValueForAutoSelectRandomSeed
@@ -142,7 +142,7 @@ def bilinear(kernel_width, kernel_height):
         kernel_height (`int`): kernel height

     Returns:
-        initializer for `:class:cntk.variables.Parameter`
+        initializer for :class:`cntk.variables.Parameter`
     '''
     return cntk_py.bilinear_initializer(kernel_width, kernel_height)

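All eight hunks above correct the same Sphinx cross-reference, and all eight functions are used the same way: the returned initializer object is handed to a parameter at construction time. A minimal sketch of that usage, assuming the beta-era `cntk.ops.parameter` factory and its `init` keyword (neither appears in this diff):

    from cntk.initializer import glorot_uniform, he_normal
    from cntk.ops import parameter

    # `parameter` and its `init` keyword are assumptions about the
    # beta-era ops module; only the initializer functions themselves
    # are shown in the diff above.
    w = parameter(shape=(784, 200), init=glorot_uniform(seed=1))
    b = parameter(shape=(200,), init=he_normal())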
16 changes: 8 additions & 8 deletions bindings/python/cntk/ops/functions.py
@@ -158,12 +158,12 @@ def eval(self, arguments=None, device=None):
         Evaluate the node using the specified ``arguments`` as input.

         Args:
-            arguments (`dict` or `list` or `tuple`): maps variables to their
-                input data. The interpretation depends on the input type:
+            arguments: maps variables to their input data. The interpretation depends on
+                the input type:
               * `dict`: keys are input variable or names, and values are the input data.
               * `list`: elements are input data in the order their respective variables have been defined in the network.
               * any other type: if node has a unique input, ``arguments`` is mapped to this input.
                 For nodes with more than one input, only `dict` is allowed.
              In both cases, every sample in the data will be interpreted
              as a new sequence. To mark samples as continuations of the
              previous sequence, specify ``arguments`` as `tuple`: the
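A sketch of the `arguments` conventions this hunk documents, assuming the beta-era `input_variable` factory (variable names here are illustrative, not from the diff):

    import numpy as np
    from cntk.ops import input_variable, plus

    x = input_variable(shape=(2,))
    y = input_variable(shape=(2,))
    z = plus(x, y)

    # Two inputs, so only the `dict` form is allowed; each value is a
    # batch given as a list of samples, one sample per new sequence.
    result = z.eval({x: [np.array([1., 2.], dtype=np.float32)],
                     y: [np.array([3., 4.], dtype=np.float32)]})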
@@ -209,12 +209,12 @@ def forward(self, arguments, outputs, keep_for_backward=None, device=None):
            array([[[[ 1. , 0.5 , 0.25]]]], dtype=float32)

         Args:
-            arguments (`dict` or `list` or `tuple`): maps variables to their
+            arguments: maps variables to their
                input data. The interpretation depends on the input type:
               * `dict`: keys are input variable or names, and values are the input data.
               * `list`: elements are input data in the order their respective variables have been defined in the network.
               * any other type: if node has a unique input, ``arguments`` is mapped to this input.
                 For nodes with more than one input, only `dict` is allowed.
              In both cases, every sample in the data will be interpreted
              as a new sequence. To mark samples as continuations of the
              previous sequence, specify ``arguments`` as ``tuple``: the
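`forward` takes the same `arguments` plus an explicit set of output variables. A sketch, assuming it returns a `(state, output_map)` pair and that `op.outputs` lists the output variables (only `op.outputs` is confirmed elsewhere in this diff; `exp` is assumed from the beta-era ops module):

    import numpy as np
    from cntk.ops import input_variable, exp

    x = input_variable(shape=(3,))
    op = exp(x)

    batch = [np.array([0., -0.7, -1.4], dtype=np.float32)]
    # Keep the output values around so a backward pass could follow.
    state, output_map = op.forward({x: batch}, outputs=op.outputs,
                                   keep_for_backward=op.outputs)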
@@ -387,7 +387,7 @@ def restore_from_model(self, filename):
         Restore the model's parameters from a saved model file

         Args:
-            filename (`str`): saved model path
+            filename (`str`): saved model path
         Returns:
            `None`: this method only has the side-effect of loading the model parameters from the file
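Per the docstring above, `restore_from_model` loads parameters in place and returns `None`; a one-line sketch (the path is illustrative, and `z` stands for some root Function):

    z.restore_from_model('model.cnn')   # side effect only; returns None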
2 changes: 1 addition & 1 deletion bindings/python/cntk/persist.py
@@ -12,7 +12,7 @@ def save_model(root_op, filename):
     Save the network of ``root_op`` in ``filename``.

     Args:
-        root_op (`:class:cntk.functions.Function`): op of the graph to save
+        root_op (:class:`cntk.functions.Function`): op of the graph to save
         filename (`str`): filename to store the model in
     '''
     cntk_py.save_as_legacy_model(root_op, filename)
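Usage follows directly from the corrected signature; a sketch (the path is illustrative):

    from cntk.persist import save_model

    save_model(z, 'model.cnn')   # `z` is the root Function of the graph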
25 changes: 12 additions & 13 deletions bindings/python/cntk/trainer.py
@@ -23,7 +23,7 @@ class Trainer(cntk_py.Trainer):
     Args:
         model (:class:`cntk.ops.functions.Function`): root node of the function to train
-        loss_function (:class:`cntk.ops.functions.Function`): loss function
+        loss_function (:class:`cntk.ops.functions.Function`): loss function
         eval_function (:class:`cntk.ops.functions.Function`): evaluation function
         parameter_learners (`list`): list of learners from :mod:`cntk.learner`
     '''
@@ -41,12 +41,12 @@ def train_minibatch(self, arguments, outputs=None, device=None):
         Optimize model parameters using the specified 'arguments' minibatch of training samples.

         Args:
-            arguments (`dict` or `list` or `tuple`): maps variables to their
+            arguments: maps variables to their
                input data. The interpretation depends on the input type:
-               * `dict`: keys are input variable or names, and values are the input data.
                * `list`: elements are input data in the order their respective variables have been defined in the network.
+               * `dict`: keys are input variable or names, and values are the input data.
                * any other type: if node has a unique input, ``arguments`` is mapped to this input.
                  For nodes with more than one input, only `dict` is allowed.
               In both cases, every sample in the data will be interpreted
               as a new sequence. To mark samples as continuations of the
               previous sequence, specify ``arguments`` as `tuple`: the
@@ -61,7 +61,7 @@ def train_minibatch(self, arguments, outputs=None, device=None):
               to be performed.

         Returns:
-            `bool` or `tuple`:
+            `bool` or `tuple`:
             If ``outputs`` have not been provided, the returned value is `True`
             if updates have been performed, `False` if all parameter learners
             indicate end of learning (through their update). Otherwise, the
@@ -89,16 +89,15 @@ def train_minibatch(self, arguments, outputs=None, device=None):
     def test_minibatch(self, arguments, device=None):
         '''
         Test the model on the specified batch of samples using the evaluation
-        Function specified during construction of the Trainer.
-        of samples.
+        Function specified during construction of the Trainer.

         Args:
-            arguments (`dict` or `list` or `tuple`): maps variables to their
+            arguments: maps variables to their
                input data. The interpretation depends on the input type:
-               * `dict`: keys are input variable or names, and values are the input data.
                * `list`: elements are input data in the order their respective variables have been defined in the network.
+               * `dict`: keys are input variable or names, and values are the input data.
                * any other type: if node has a unique input, ``arguments`` is mapped to this input.
                  For nodes with more than one input, only `dict` is allowed.
               In both cases, every sample in the data will be interpreted
               as a new sequence. To mark samples as continuations of the
               previous sequence, specify ``arguments`` as `tuple`: the
@@ -149,7 +148,7 @@ def model(self):
         The model that the trainer is training.
         '''
         return super(Trainer, self).model()
-
+
     @property
     @typemap
     def loss_function(self):
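Tying the corrected `Trainer` docstrings together: construction takes the model, the loss and evaluation Functions, and a list of learners, and the minibatch methods accept the same `arguments` conventions as `Function.eval`. A hedged sketch — `times`, `cross_entropy_with_softmax`, `classification_error`, and the exact `sgd` call are assumptions about the beta-era API, not part of this diff:

    import numpy as np
    from cntk import Trainer
    from cntk.initializer import glorot_uniform
    from cntk.learner import sgd
    from cntk.ops import (input_variable, parameter, times,
                          cross_entropy_with_softmax, classification_error)

    features = input_variable(shape=(2,))
    label = input_variable(shape=(2,))
    z = times(features, parameter(shape=(2, 2), init=glorot_uniform()))

    trainer = Trainer(z, cross_entropy_with_softmax(z, label),
                      classification_error(z, label),
                      [sgd(z.parameters(), lr=0.01)])

    batch_x = [np.array([1., 0.], dtype=np.float32)] * 4
    batch_y = [np.array([0., 1.], dtype=np.float32)] * 4

    # Returns False once every learner signals end of learning.
    keep_training = trainer.train_minibatch({features: batch_x, label: batch_y})
    avg_error = trainer.test_minibatch({features: batch_x, label: batch_y})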
31 changes: 15 additions & 16 deletions bindings/python/cntk/utils/__init__.py
@@ -35,14 +35,13 @@ def sanitize_precision(precision):

 def cntk_device(device_id):
     '''
-    Converts the legacy device ID as it was used in CNTK 1 to CNTK
-    DeviceDescriptor instance.
+    Converts the legacy device ID as it was used in CNTK 1 to a :class:`cntk.device.DeviceDescriptor` instance.

     Args:
        device_id (int): device id, -1 for CPU, 0 or higher for GPU

    Returns:
-        CNTK DeviceDescriptor
+        :class:`cntk.device.DeviceDescriptor`
    '''
    if device_id == -1:
        return cpu()
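The mapping is exactly as documented; a two-line sketch:

    from cntk.utils import cntk_device

    cpu_dev = cntk_device(-1)   # legacy CNTK 1 CPU id -> CPU descriptor
    gpu_dev = cntk_device(0)    # 0 or higher -> GPU descriptor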
@@ -282,7 +281,7 @@ def get_data_type(*args):
                dtypes.add(np.float64)
            else:
                dtypes.add(np.float32)
-        else:
+        else:
            # We don't know anything so we convert everything to float32. If it
            # works, we know the type.
            # TODO figure out a better/faster way.
@@ -366,7 +365,7 @@ def sanitize_batch(var, batch, seq_starts=None, data_type=None, device=None):
     use_mask = len(var.dynamic_axes) > 1

     if device is None:
-        device = cntk_py.DeviceDescriptor.use_default_device()
+        device = use_default_device()

     if not use_mask and seq_starts is not None:
         raise ValueError('specification of individual sequence begins does not'
@@ -475,19 +474,18 @@ def sanitize_function(arg):
 def sanitize_var_map(op_arguments, arguments, precision=None,
                      device=None):
     '''
-    Sanitizes a dictionary of `Variable`s to input data such that it can be
-    handed off to the :meth:`cntk.ops.functions.Function.forward` method.
+    Sanitizes a dictionary of `Variable` s to input data such that it can be
+    handed off to the evaluation methods (:meth:`cntk.ops.functions.Function.forward`, :meth:`cntk.ops.functions.Function.backward`, :meth:`cntk.Trainer.train_minibatch` and
+    :meth:`cntk.Trainer.test_minibatch`).

     Args:
         op_arguments (:class:`cntk.ops.functions.Function`): arguments of the root function. In
-            forward pass it is typically `op.arguments`, in backward mode it is
+            :meth:`cntk.ops.functions.Function.forward` pass it is typically `op.arguments`, in :meth:`cntk.ops.functions.Function.backward` pass it is
             `op.outputs`
         arguments: maps variables to their
            input data. The interpretation depends on the input type:
-           * `dict`: keys are input variable or names and values are the input data.
-           * any other type: if node has a unique input, argument is mapped to this input.
+           * `dict`: keys are input variable or names and values are the input data.
+           * any other type: if node has a unique input, ``arguments`` is mapped to this input.
             For nodes with more than one input, only `dict` is allowed.
             In both cases, every sample in the data will be interpreted
             as a new sequence. To mark samples as continuations of the
@@ -499,7 +497,7 @@ def sanitize_var_map(op_arguments, arguments, precision=None,
            :class:`cntk.io.MinibatchData` instance.
         precision (`str` or `np.float32` or `np.float64`): if string it can be
            one of 'float', 'float32', 'double', 'float64', or `None`
-        device (`DeviceDescriptor` or `None`): CNTK DeviceDescriptor
+        device (:class:`cntk.device.DeviceDescriptor` or `None`): CNTK DeviceDescriptor

     Returns:
        `dict` that maps variables to sanitized batches
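A sketch of the sanitization step, using the single-input shortcut the bullets above describe (a bare batch maps onto the unique input; `exp` is assumed from the ops module, while `op.arguments` is confirmed by the docstring itself):

    import numpy as np
    from cntk.ops import input_variable, exp
    from cntk.utils import sanitize_var_map

    x = input_variable(shape=(3,))
    op = exp(x)

    # One input only, so a bare batch is accepted instead of a dict.
    batch = [np.array([1., 2., 3.], dtype=np.float32)]
    var_map = sanitize_var_map(op.arguments, batch)   # Variable -> sanitized batch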
@@ -683,6 +681,7 @@ def sanitize_dynamic_axes(axes):
 def get_train_loss(trainer):
     '''
     Fetch the train loss from the last minibatch and copy it to the CPU in case it is on the GPU.
+
     Args:
         trainer (:class:`Trainer`): the trainer used.
     Returns:
@@ -696,6 +695,7 @@ def get_train_eval_criterion(trainer):
 def get_train_eval_criterion(trainer):
     '''
     Fetch the train evaluation criterion (e.g., classification error) from the last minibatch and copy it to the CPU in case it is on the GPU.
+
     Args:
         trainer (:class:`Trainer`): the trainer used.
     Returns:
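Both helpers read a scalar back from the device after a minibatch; a sketch, reusing the `trainer` from the trainer.py sketch above:

    from cntk.utils import get_train_loss, get_train_eval_criterion

    loss = get_train_loss(trainer)                 # average loss, last minibatch
    criterion = get_train_eval_criterion(trainer)  # e.g. classification error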
Expand Down Expand Up @@ -746,12 +746,11 @@ def eval(op, arguments=None, precision=None, device=None, backward_pass=False, e
Args:
op (:class:`Function`): operation to evaluate
arguments (`dict` or `list` or `tuple`): maps variables to their
input data. The interpretation depends on the input type:
arguments: maps variables to their input data. The
interpretation depends on the input type:
* `dict`: keys are input variable or names, and values are the input data.
* `list`: elements are input data in the order their respective variables have been defined in the network.
* any other type: if node has an unique input, ``arguments`` is mapped to this input.
For nodes with more than one input, only `dict` is allowed.
In both cases, every every sample in the data will be interpreted
as a new sequence. To mark samples as continuations of the
previous sequence, specify ``arguments`` as `tuple`: the
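Finally, the module-level `eval` wraps a one-shot forward (and, with `backward_pass=True`, backward) evaluation of an op; a sketch of the forward-only case, with the backward-pass return shape left unspecified since this hunk is truncated:

    import numpy as np
    from cntk.ops import input_variable, exp
    from cntk.utils import eval as cntk_eval

    x = input_variable(shape=(3,))
    batch = [np.array([0., 1., 2.], dtype=np.float32)]

    forward_only = cntk_eval(exp(x), arguments={x: batch})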
