From 2b349696e6845fff9903e06f24c32f122fc01e0b Mon Sep 17 00:00:00 2001
From: Francois Chollet
Date: Fri, 5 Jan 2018 15:38:39 -0800
Subject: [PATCH] Add compute_output_shape methods explicitly when shape unchanged.

---
 keras/layers/advanced_activations.py | 17 ++++++++++++++++-
 keras/layers/core.py                 | 12 ++++++++++++
 keras/layers/noise.py                | 15 ++++++++++++---
 keras/layers/normalization.py        |  3 +++
 4 files changed, 43 insertions(+), 4 deletions(-)

diff --git a/keras/layers/advanced_activations.py b/keras/layers/advanced_activations.py
index 53b05253b31..38163df1869 100644
--- a/keras/layers/advanced_activations.py
+++ b/keras/layers/advanced_activations.py
@@ -1,5 +1,5 @@
 # -*- coding: utf-8 -*-
-"""Advanced activation layers.
+"""Layers that act as activation functions.
 """
 from __future__ import absolute_import
 from __future__ import division
@@ -50,6 +50,9 @@ def get_config(self):
         base_config = super(LeakyReLU, self).get_config()
         return dict(list(base_config.items()) + list(config.items()))
 
+    def compute_output_shape(self, input_shape):
+        return input_shape
+
 
 class PReLU(Layer):
     """Parametric Rectified Linear Unit.
@@ -142,6 +145,9 @@ def get_config(self):
         base_config = super(PReLU, self).get_config()
         return dict(list(base_config.items()) + list(config.items()))
 
+    def compute_output_shape(self, input_shape):
+        return input_shape
+
 
 class ELU(Layer):
     """Exponential Linear Unit.
@@ -178,6 +184,9 @@ def get_config(self):
         base_config = super(ELU, self).get_config()
         return dict(list(base_config.items()) + list(config.items()))
 
+    def compute_output_shape(self, input_shape):
+        return input_shape
+
 
 class ThresholdedReLU(Layer):
     """Thresholded Rectified Linear Unit.
@@ -214,6 +223,9 @@ def get_config(self):
         base_config = super(ThresholdedReLU, self).get_config()
         return dict(list(base_config.items()) + list(config.items()))
 
+    def compute_output_shape(self, input_shape):
+        return input_shape
+
 
 class Softmax(Layer):
     """Softmax activation function.
@@ -242,3 +254,6 @@ def get_config(self):
         config = {'axis': self.axis}
         base_config = super(Softmax, self).get_config()
         return dict(list(base_config.items()) + list(config.items()))
+
+    def compute_output_shape(self, input_shape):
+        return input_shape
diff --git a/keras/layers/core.py b/keras/layers/core.py
index 3ea47fe1056..4af435449ee 100644
--- a/keras/layers/core.py
+++ b/keras/layers/core.py
@@ -71,6 +71,9 @@ def get_config(self):
         base_config = super(Masking, self).get_config()
         return dict(list(base_config.items()) + list(config.items()))
 
+    def compute_output_shape(self, input_shape):
+        return input_shape
+
 
 class Dropout(Layer):
     """Applies Dropout to the input.
@@ -127,6 +130,9 @@ def get_config(self):
         base_config = super(Dropout, self).get_config()
         return dict(list(base_config.items()) + list(config.items()))
 
+    def compute_output_shape(self, input_shape):
+        return input_shape
+
 
 class SpatialDropout1D(Dropout):
     """Spatial 1D version of Dropout.
@@ -301,6 +307,9 @@ def get_config(self):
         base_config = super(Activation, self).get_config()
         return dict(list(base_config.items()) + list(config.items()))
 
+    def compute_output_shape(self, input_shape):
+        return input_shape
+
 
 class Reshape(Layer):
     """Reshapes an output to a certain shape.
@@ -902,3 +911,6 @@ def get_config(self):
                   'l2': self.l2}
         base_config = super(ActivityRegularization, self).get_config()
         return dict(list(base_config.items()) + list(config.items()))
+
+    def compute_output_shape(self, input_shape):
+        return input_shape
diff --git a/keras/layers/noise.py b/keras/layers/noise.py
index 193d374bd8e..ab8cfadab3a 100644
--- a/keras/layers/noise.py
+++ b/keras/layers/noise.py
@@ -1,5 +1,5 @@
 # -*- coding: utf-8 -*-
-"""Noise regularization layers.
+"""Layers that operate regularization via the addition of noise.
 """
 from __future__ import absolute_import
 from __future__ import division
@@ -9,7 +9,6 @@
 from .. import backend as K
 import numpy as np
 from ..legacy import interfaces
-from ..engine import InputSpec
 
 
 class GaussianNoise(Layer):
@@ -52,6 +51,9 @@ def get_config(self):
         base_config = super(GaussianNoise, self).get_config()
         return dict(list(base_config.items()) + list(config.items()))
 
+    def compute_output_shape(self, input_shape):
+        return input_shape
+
 
 class GaussianDropout(Layer):
     """Apply multiplicative 1-centered Gaussian noise.
@@ -96,6 +98,9 @@ def get_config(self):
         base_config = super(GaussianDropout, self).get_config()
         return dict(list(base_config.items()) + list(config.items()))
 
+    def compute_output_shape(self, input_shape):
+        return input_shape
+
 
 class AlphaDropout(Layer):
     """Applies Alpha Dropout to the input.
@@ -142,7 +147,8 @@ def dropped_inputs(inputs=inputs, rate=self.rate, seed=self.seed):
             scale = 1.0507009873554804934193349852946
             alpha_p = -alpha * scale
 
-            kept_idx = K.greater_equal(K.random_uniform(noise_shape, seed=seed), rate)
+            kept_idx = K.greater_equal(K.random_uniform(noise_shape,
+                                                        seed=seed), rate)
             kept_idx = K.cast(kept_idx, K.floatx())
 
             # Get affine transformation params
@@ -162,3 +168,6 @@ def get_config(self):
         config = {'rate': self.rate}
         base_config = super(AlphaDropout, self).get_config()
         return dict(list(base_config.items()) + list(config.items()))
+
+    def compute_output_shape(self, input_shape):
+        return input_shape
diff --git a/keras/layers/normalization.py b/keras/layers/normalization.py
index 692ef0c510f..61e14aef3b1 100644
--- a/keras/layers/normalization.py
+++ b/keras/layers/normalization.py
@@ -211,3 +211,6 @@ def get_config(self):
         }
         base_config = super(BatchNormalization, self).get_config()
         return dict(list(base_config.items()) + list(config.items()))
+
+    def compute_output_shape(self, input_shape):
+        return input_shape
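
For context, a minimal sketch of the pattern this patch applies, using a
hypothetical custom layer (ScaleBy is illustrative only and not part of the
patch; it assumes the Keras 2.x custom-layer API). A layer whose output has
the same shape as its input returns input_shape unchanged, which lets the
functional API infer downstream shapes without evaluating the layer:

    from keras.engine.topology import Layer


    class ScaleBy(Layer):
        """Hypothetical layer: multiplies its input by a fixed scalar.

        The output shape equals the input shape, so
        compute_output_shape returns input_shape as-is.
        """

        def __init__(self, factor=2.0, **kwargs):
            super(ScaleBy, self).__init__(**kwargs)
            self.factor = factor

        def call(self, inputs):
            return inputs * self.factor

        def compute_output_shape(self, input_shape):
            # Element-wise scaling does not alter the shape.
            return input_shape

        def get_config(self):
            config = {'factor': self.factor}
            base_config = super(ScaleBy, self).get_config()
            return dict(list(base_config.items()) + list(config.items()))

Note that in Keras 2.x the base Layer class's default compute_output_shape
already returns input_shape, so the methods added above make the shape
contract explicit per layer rather than changing behavior.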