Commit

Use mean L2 regularization loss rather than sum.
waleedka committed Nov 12, 2017
1 parent 31feebc commit 547f1f7
Showing 1 changed file with 3 additions and 3 deletions.
model.py: 3 additions & 3 deletions

@@ -1930,10 +1930,10 @@ def compile(self, learning_rate, momentum):
         self.keras_model.add_loss(tf.reduce_mean(layer.output, keep_dims=True))
 
         # Add L2 Regularization
-        reg_losses = [keras.regularizers.l2(self.config.WEIGHT_DECAY)(w)
-                      for w in self.keras_model.trainable_weights]
+        reg_losses = [keras.regularizers.l2(self.config.WEIGHT_DECAY)(w) / tf.cast(tf.size(w), tf.float32)
+                      for w in self.keras_model.trainable_weights]
         self.keras_model.add_loss(tf.add_n(reg_losses))
 
         # Compile
         self.keras_model.compile(optimizer=optimizer, loss=[None]*len(self.keras_model.outputs))

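For context, keras.regularizers.l2(WEIGHT_DECAY)(w) evaluates to WEIGHT_DECAY * sum(square(w)), so the penalty grows with the number of elements in each weight tensor; dividing by tf.size(w) turns the per-tensor penalty into a mean, so large layers no longer dominate the regularization term. A minimal NumPy sketch of the effect for a single tensor (the WEIGHT_DECAY value and tensor shape below are illustrative assumptions, not taken from this commit):

    import numpy as np

    WEIGHT_DECAY = 0.0001            # assumed value of config.WEIGHT_DECAY
    w = np.random.randn(1024, 256)   # hypothetical weight tensor

    l2_sum = WEIGHT_DECAY * np.sum(w ** 2)            # before: scales with tensor size
    l2_mean = WEIGHT_DECAY * np.sum(w ** 2) / w.size  # after: per-element mean

    # l2_mean equals l2_sum divided by the element count, which is exactly the
    # "/ tf.cast(tf.size(w), tf.float32)" factor added to each entry of reg_losses.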

0 comments on commit 547f1f7
