Update NN
carefree0910 committed Aug 11, 2017
1 parent 749a431 commit 46e618f
Showing 5 changed files with 27 additions and 28 deletions.
NN/Basic/Layers.py (12 changes: 6 additions & 6 deletions)

@@ -570,14 +570,14 @@ def _derivative(self, y, *args):
 # Special Layer
 
 class Dropout(SubLayer):
-    def __init__(self, parent, shape, prob=0.5):
-        if prob < 0 or prob >= 1:
+    def __init__(self, parent, shape, keep_prob=0.5):
+        if keep_prob < 0 or keep_prob >= 1:
             raise BuildLayerError("Probability of Dropout should be a positive float smaller than 1")
         SubLayer.__init__(self, parent, shape)
         self._mask = None
-        self._prob = prob
-        self._prob_inv = 1 / (1 - prob)
-        self.description = "(Drop prob: {})".format(prob)
+        self._prob = keep_prob
+        self._prob_inv = 1 / keep_prob
+        self.description = "(Keep prob: {})".format(keep_prob)
 
     def get_params(self):
         return self._prob,
@@ -586,7 +586,7 @@ def _activate(self, x, predict):
         if not predict:
             # noinspection PyTypeChecker
             self._mask = np.random.binomial(
-                [np.ones(x.shape)], 1 - self._prob
+                [np.ones(x.shape)], self._prob
             )[0].astype(np.float32) * self._prob_inv
             return x * self._mask
         return x
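For reference, a minimal standalone sketch of the inverted-dropout behaviour the updated NumPy layer implements (the function and variable names here are illustrative, not from the repository): each unit survives with probability keep_prob and the survivors are scaled by 1 / keep_prob, so no rescaling is needed at inference time.

import numpy as np

def inverted_dropout(x, keep_prob=0.5, training=True):
    # Sketch of inverted dropout with keep-probability semantics.
    if not training:
        return x
    # Keep each unit with probability keep_prob ...
    mask = np.random.binomial(1, keep_prob, size=x.shape).astype(np.float32)
    # ... and scale the survivors by 1 / keep_prob so the expected output matches the input.
    return x * mask / keep_prob

# With keep_prob=0.5, roughly half the activations are zeroed and the rest are doubled.
out = inverted_dropout(np.ones((2, 4), dtype=np.float32), keep_prob=0.5)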
NN/PyTorch/Auto/Layers.py (6 changes: 3 additions & 3 deletions)

@@ -182,16 +182,16 @@ def _activate(self, x, predict):
 class Dropout(SubLayer):
     def __init__(self, parent, shape, prob=0.5):
         if prob < 0 or prob >= 1:
-            raise BuildLayerError("Probability of Dropout should be a positive float smaller than 1")
+            raise BuildLayerError("Keep probability of Dropout should be a positive float smaller than 1")
         SubLayer.__init__(self, parent, shape)
         self._prob = prob
-        self.description = "(Drop prob: {})".format(prob)
+        self.description = "(Keep prob: {})".format(prob)
 
     def get_params(self):
         return self._prob,
 
     def _activate(self, x, predict):
-        return F.dropout(x, self._prob, not predict)
+        return F.dropout(x, 1 - self._prob, not predict)
 
 
 class Normalize(SubLayer):
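Note that torch.nn.functional.dropout takes the probability of zeroing an element, which is why the layer now passes 1 - self._prob once prob is interpreted as a keep probability. A minimal sketch (tensor shapes and variable names are illustrative):

import torch
import torch.nn.functional as F

keep_prob = 0.8
x = torch.randn(4, 8)

# F.dropout's `p` is the probability of *dropping* an element, so a keep
# probability has to be converted before the call.
train_out = F.dropout(x, p=1 - keep_prob, training=True)   # drops with prob 0.2, rescales survivors
eval_out = F.dropout(x, p=1 - keep_prob, training=False)   # identity at inference time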
NN/PyTorch/Basic/Layers.py (16 changes: 7 additions & 9 deletions)

@@ -199,22 +199,20 @@ def _derivative(self, y, delta=None):
 # Special Layer
 
 class Dropout(SubLayer):
-    def __init__(self, parent, shape, prob=0.5):
-        if prob < 0 or prob >= 1:
-            raise BuildLayerError("Probability of Dropout should be a positive float smaller than 1")
+    def __init__(self, parent, shape, keep_prob=0.5):
+        if keep_prob < 0 or keep_prob >= 1:
+            raise BuildLayerError("Keep probability of Dropout should be a positive float smaller than 1")
         SubLayer.__init__(self, parent, shape)
-        self._prob = prob
-        self._prob_inv = 1 / (1 - prob)
-        self.description = "(Drop prob: {})".format(prob)
+        self._prob = keep_prob
+        self._prob_inv = 1 / keep_prob
+        self.description = "(Keep prob: {})".format(keep_prob)
 
     def get_params(self):
         return self._prob,
 
     def _activate(self, x, predict):
         if not predict:
-            return x.mm(torch.diag(
-                (torch.rand(x.size()[1]) >= self._prob).float() * self._prob_inv
-            ))
+            return x * (torch.rand(x.size()) < self._prob).float() * self._prob_inv
         return x
 
     def _derivative(self, y, delta=None):
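Besides the keep_prob rename, the rewritten _activate changes the masking granularity: the old torch.diag version dropped whole feature columns shared by every sample in the batch, while the new version draws an independent mask per element. A small sketch contrasting the two (shapes and names are illustrative):

import torch

x = torch.rand(3, 5)           # 3 samples, 5 features
keep_prob, scale = 0.5, 2.0    # scale = 1 / keep_prob

# Old behaviour: one mask per feature column, shared across the batch.
col_mask = (torch.rand(x.size(1)) < keep_prob).float() * scale
per_column = x.mm(torch.diag(col_mask))

# New behaviour: an independent mask per element.
elem_mask = (torch.rand(x.size()) < keep_prob).float() * scale
per_element = x * elem_mask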
NN/TF/Layers.py (15 changes: 7 additions & 8 deletions)

@@ -350,20 +350,19 @@ def _activate(self, x, *args):
 # Special Layers
 
 class Dropout(SubLayer):
-    def __init__(self, parent, shape, drop_prob=0.5, **kwargs):
-        if drop_prob < 0 or drop_prob >= 1:
-            raise BuildLayerError("(Dropout) Probability of Dropout should be a positive float smaller than 1")
+    def __init__(self, parent, shape, keep_prob=0.5, **kwargs):
+        if keep_prob < 0 or keep_prob >= 1:
+            raise BuildLayerError("(Dropout) Keep probability of Dropout should be a positive float smaller than 1")
         SubLayer.__init__(self, parent, shape, **kwargs)
-        self._drop_prob = drop_prob
-        self._prob = 1 - tf.constant(self._drop_prob, dtype=tf.float32)
-        self.description = "(Drop prob: {})".format(drop_prob)
+        self._keep_prob = keep_prob
+        self.description = "(Keep prob: {})".format(keep_prob)
 
     def get_params(self):
-        return self._drop_prob,
+        return self._keep_prob,
 
     def _activate(self, x, predict):
         if not predict:
-            return tf.nn.dropout(x, self._prob)
+            return tf.nn.dropout(x, self._keep_prob)
         return x
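In the TF 1.x API this repository targets, tf.nn.dropout's second argument is the keep probability and the kept elements are scaled by 1 / keep_prob internally, so storing keep_prob lets the layer pass it straight through (in TF 2.x the argument is rate, i.e. the drop probability). A minimal sketch, assuming TF 1.x and illustrative tensor values:

import tensorflow as tf  # TF 1.x semantics assumed, matching this repository

x = tf.constant([[1.0, 2.0, 3.0, 4.0]])
keep_prob = 0.75
# Each element is kept with probability keep_prob and scaled by 1 / keep_prob.
dropped = tf.nn.dropout(x, keep_prob)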


NN/TF/Networks.py (6 changes: 4 additions & 2 deletions)

@@ -71,6 +71,8 @@ def name(self):
 
     @NNTiming.timeit(level=4)
     def _get_w(self, shape):
+        if self._w_stds[-1] is None:
+            self._w_stds[-1] = sqrt(2 / sum(shape))
         initial = tf.truncated_normal(shape, stddev=self._w_stds[-1])
         return tf.Variable(initial, name="w")
 
@@ -223,8 +225,8 @@ def add(self, layer, *args, **kwargs):
         kwargs["apply_bias"] = kwargs.get("apply_bias", True)
         kwargs["position"] = kwargs.get("position", len(self._layers) + 1)
 
-        self._w_stds.append(Util.get_and_pop(kwargs, "std", 0.1))
-        self._b_inits.append(Util.get_and_pop(kwargs, "init", 0.1))
+        self._w_stds.append(Util.get_and_pop(kwargs, "w_std", None))
+        self._b_inits.append(Util.get_and_pop(kwargs, "b_init", 0.1))
         if Util.get_and_pop(kwargs, "pop_last_init", False):
             self._w_stds.pop()
             self._b_inits.pop()
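The _get_w change above gives weights a data-dependent default: when no explicit w_std is supplied (the new default is None), the standard deviation falls back to sqrt(2 / sum(shape)), which for a dense weight matrix of shape (fan_in, fan_out) is the Glorot (Xavier) normal standard deviation. A quick sketch of that fallback, assuming 2-D weight shapes (the helper name is illustrative):

from math import sqrt

def default_w_std(shape):
    # sqrt(2 / (fan_in + fan_out)): Glorot / Xavier normal std for a (fan_in, fan_out) matrix.
    return sqrt(2 / sum(shape))

print(default_w_std((784, 128)))  # ~0.0468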
