Add verbose flag to adam optimizer
ianwilliamson committed Jan 22, 2020
1 parent e6ceacd commit 139506f
Showing 1 changed file with 3 additions and 2 deletions.
5 changes: 3 additions & 2 deletions ceviche/optimizers.py
@@ -2,7 +2,7 @@
import time
from autograd.numpy.numpy_boxes import ArrayBox

-def adam_optimize(objective, params, jac, step_size=1e-2, Nsteps=100, bounds=None, direction='min', beta1=0.9, beta2=0.999, callback=None):
+def adam_optimize(objective, params, jac, step_size=1e-2, Nsteps=100, bounds=None, direction='min', beta1=0.9, beta2=0.999, callback=None, verbose=True):
"""Performs Nsteps steps of ADAM minimization of function `objective` with gradient `jac`.
The `bounds` are set abruptly by rejecting an update step out of bounds."""
of_list = []
@@ -24,7 +24,8 @@ def adam_optimize(objective, params, jac, step_size=1e-2, Nsteps=100, bounds=Non

        of_list.append(of._value if type(of) is ArrayBox else of)

print("Epoch: %3d/%3d | Duration: %.2f secs | Value: %5e" %(iteration+1, Nsteps, t_elapsed, of_list[-1]))
if verbose:
print("Epoch: %3d/%3d | Duration: %.2f secs | Value: %5e" %(iteration+1, Nsteps, t_elapsed, of_list[-1]))

        if iteration == 0:
            mopt = np.zeros(grad.shape)
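
For readers trying out the new flag, here is a minimal usage sketch. The quadratic objective, its gradient, and the starting point are illustrative assumptions, not from the repository, and the function's return value is not visible in this diff, so it is not captured here.

import autograd.numpy as np
from autograd import grad
from ceviche.optimizers import adam_optimize

# Hypothetical objective for illustration only: a simple quadratic bowl.
def objective(params):
    return np.sum(params ** 2)

jac = grad(objective)  # autograd builds the gradient of the scalar objective

x0 = np.array([1.0, -2.0, 3.0])  # illustrative starting point

# verbose=True (the default) keeps the per-epoch "Epoch: ..." progress line;
# verbose=False, added in this commit, silences that output.
adam_optimize(objective, x0, jac, step_size=1e-2, Nsteps=50, verbose=False)

Because the new parameter defaults to True, existing callers keep the previous printing behavior unchanged.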
