diff --git a/keras/backend/cntk_backend.py b/keras/backend/cntk_backend.py
index 84ae05343a6..bc202f0b04a 100644
--- a/keras/backend/cntk_backend.py
+++ b/keras/backend/cntk_backend.py
@@ -447,7 +447,7 @@ def random_normal(shape, mean=0.0, stddev=1.0, dtype=None, seed=None):
                              'Please provide fixed dimension '
                              'instead of `None`.')
     # how to apply mean and stddev
-    return random_normal_variable(shape=shape, mean=mean, scale=1.0)
+    return random_normal_variable(shape=shape, mean=mean, scale=1.0, seed=seed)
 
 
 def truncated_normal(shape, mean=0.0, stddev=1.0, dtype=None, seed=None):
diff --git a/keras/engine/topology.py b/keras/engine/topology.py
index a06a8001967..0c45f82394f 100644
--- a/keras/engine/topology.py
+++ b/keras/engine/topology.py
@@ -1943,7 +1943,10 @@ def losses(self):
                 losses += layer.get_losses_for(None)
         # Add any potential unconditional model-level loss.
         losses += self.get_losses_for(None)
-        return losses
+
+        unique_tensors = list(set(x for x in losses if not isinstance(x, (float, int))))
+        non_tensors = [x for x in losses if isinstance(x, (float, int))]
+        return unique_tensors + non_tensors
 
     @property
     def uses_learning_phase(self):
diff --git a/tests/keras/backend/backend_test.py b/tests/keras/backend/backend_test.py
index 1bc47435a4a..7604b802fb3 100644
--- a/tests/keras/backend/backend_test.py
+++ b/tests/keras/backend/backend_test.py
@@ -885,7 +885,7 @@ def test_random_normal(self):
         mean = 0.
         std = 1.
         for k in BACKENDS:
-            rand = k.eval(k.random_normal((300, 200), mean=mean, stddev=std))
+            rand = k.eval(k.random_normal((300, 200), mean=mean, stddev=std, seed=1337))
             assert rand.shape == (300, 200)
             assert np.abs(np.mean(rand) - mean) < 0.015
             assert np.abs(np.std(rand) - std) < 0.015
diff --git a/tests/keras/regularizers_test.py b/tests/keras/regularizers_test.py
index 61e6cec2eb3..aaa8402ddf1 100644
--- a/tests/keras/regularizers_test.py
+++ b/tests/keras/regularizers_test.py
@@ -1,10 +1,11 @@
 import pytest
 
-from keras.models import Sequential
-from keras.layers import Dense
+from keras.models import Sequential, Model
+from keras.layers import Dense, Input, Average
 from keras.utils import np_utils
 from keras.utils import test_utils
 from keras import regularizers
+from keras import backend as K
 
 data_dim = 5
 num_classes = 2
@@ -32,6 +33,19 @@ def create_model(kernel_regularizer=None, activity_regularizer=None):
     return model
 
 
+def create_multi_input_model_from(layer1, layer2):
+    input_1 = Input(shape=(data_dim,))
+    input_2 = Input(shape=(data_dim,))
+    out1 = layer1(input_1)
+    out2 = layer2(input_2)
+    out = Average()([out1, out2])
+    model = Model([input_1, input_2], out)
+    model.add_loss(K.mean(out2))
+    model.add_loss(1)
+    model.add_loss(1)
+    return model
+
+
 def test_kernel_regularization():
     x_train, y_train = get_data()
     for reg in [regularizers.l1(),
@@ -52,5 +66,44 @@ def test_activity_regularization():
         model.train_on_batch(x_train, y_train)
 
 
+def test_regularization_shared_layer():
+    dense_layer = Dense(num_classes,
+                        kernel_regularizer=regularizers.l1(),
+                        activity_regularizer=regularizers.l1())
+
+    model = create_multi_input_model_from(dense_layer, dense_layer)
+    model.compile(loss='categorical_crossentropy', optimizer='sgd')
+    assert len(model.losses) == 6
+
+
+def test_regularization_shared_model():
+    dense_layer = Dense(num_classes,
+                        kernel_regularizer=regularizers.l1(),
+                        activity_regularizer=regularizers.l1())
+
+    input_tensor = Input(shape=(data_dim,))
+    dummy_model = Model(input_tensor, dense_layer(input_tensor))
+
+    model = create_multi_input_model_from(dummy_model, dummy_model)
+    model.compile(loss='categorical_crossentropy', optimizer='sgd')
+    assert len(model.losses) == 6
+
+
+def test_regularization_shared_layer_in_different_models():
+    shared_dense = Dense(num_classes,
+                         kernel_regularizer=regularizers.l1(),
+                         activity_regularizer=regularizers.l1())
+    models = []
+    for _ in range(2):
+        input_tensor = Input(shape=(data_dim,))
+        unshared_dense = Dense(num_classes, kernel_regularizer=regularizers.l1())
+        out = unshared_dense(shared_dense(input_tensor))
+        models.append(Model(input_tensor, out))
+
+    model = create_multi_input_model_from(*models)
+    model.compile(loss='categorical_crossentropy', optimizer='sgd')
+    assert len(model.losses) == 8
+
+
 if __name__ == '__main__':
     pytest.main([__file__])
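
The core of the topology.py change, pulled out for clarity: tensor-like losses are de-duplicated by identity, while raw float/int losses added via add_loss are kept verbatim, which is consistent with the counts asserted in the shared-layer tests above. A minimal standalone sketch, using plain Python objects as stand-ins for backend loss tensors (names here are illustrative only, not part of the patch):

class FakeLossTensor(object):
    """Hypothetical stand-in for a backend loss tensor (hashable by identity)."""
    pass

shared_reg, activity_reg = FakeLossTensor(), FakeLossTensor()
# A shared layer can contribute the same regularization tensor more than once,
# and create_multi_input_model_from also adds two literal add_loss(1) entries.
losses = [shared_reg, activity_reg, shared_reg, 1, 1]

# Same filtering as the new Model.losses property:
unique_tensors = list(set(x for x in losses if not isinstance(x, (float, int))))
non_tensors = [x for x in losses if isinstance(x, (float, int))]

assert len(unique_tensors + non_tensors) == 4  # shared_reg counted once, both 1s kept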