Update relu to use Theano's implementation
matsuyamax committed Sep 26, 2015
1 parent 200948c commit 7a2e8ce
Showing 2 changed files with 12 additions and 10 deletions.
2 changes: 1 addition & 1 deletion keras/activations.py
@@ -17,7 +17,7 @@ def softplus(x):


def relu(x):
return (x + abs(x)) / 2.0
return T.nnet.relu(x)


def tanh(x):
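Side note (not part of this commit): the new call and the old expression compute the same thing. A minimal check, assuming a Theano version that ships T.nnet.relu (0.7.1 or later):

import numpy as np
import theano
import theano.tensor as T

x = T.vector('x')
old_relu = (x + abs(x)) / 2.0   # previous Keras expression
new_relu = T.nnet.relu(x)       # Theano's built-in op

f = theano.function([x], [old_relu, new_relu])
values = np.array([-2.0, -0.5, 0.0, 0.5, 2.0], dtype=theano.config.floatX)
old_out, new_out = f(values)
assert np.allclose(old_out, new_out)   # both are max(0, x) element-wise

Using the built-in op lets Theano choose its own (potentially fused) implementation instead of assembling the expression from add/abs/div nodes.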
20 changes: 11 additions & 9 deletions keras/layers/advanced_activations.py
@@ -12,7 +12,7 @@ def __init__(self, alpha=0.3):

def get_output(self, train):
X = self.get_input(train)
return ((X + abs(X)) / 2.0) + self.alpha * ((X - abs(X)) / 2.0)
return T.nnet.relu(X, self.alpha)

def get_config(self):
return {"name": self.__class__.__name__,
@@ -37,8 +37,8 @@ def __init__(self, input_shape, init='zero', weights=None):

def get_output(self, train):
X = self.get_input(train)
pos = ((X + abs(X)) / 2.0)
neg = self.alphas * ((X - abs(X)) / 2.0)
pos = T.nnet.relu(X)
neg = self.alphas * (X - abs(X)) * 0.5
return pos + neg

def get_config(self):
@@ -78,6 +78,7 @@ def get_config(self):
"alpha_init": self.alpha_init,
"beta_init": self.beta_init}


class ThresholdedLinear(MaskedLayer):
'''
Thresholded Linear Activation
@@ -89,14 +90,15 @@ class ThresholdedLinear(MaskedLayer):
def __init__(self, theta=1.0):
super(ThresholdedLinear, self).__init__()
self.theta = theta

def get_output(self, train):
X = self.get_input(train)
return T.switch( abs(X) < self.theta, 0, X )
return T.switch(abs(X) < self.theta, 0, X)

def get_config(self):
return {"name": self.__class__.__name__,
"theta": self.theta}
"theta": self.theta}


class ThresholdedReLu(MaskedLayer):
'''
@@ -109,11 +111,11 @@ def __init__(self, theta=1.0):
def __init__(self, theta=1.0):
super(ThresholdedReLu, self).__init__()
self.theta = theta

def get_output(self, train):
X = self.get_input(train)
return T.switch( X > self.theta, X, 0 )
return T.switch(X > self.theta, X, 0)

def get_config(self):
return {"name": self.__class__.__name__,
"theta": self.theta}
"theta": self.theta}
