Skip to content

Commit

Permalink
Enable Adam optimizer update
Browse files Browse the repository at this point in the history
  • Loading branch information
Ricardicus committed Oct 20, 2017
1 parent e35892b commit 9eebece
Showing 1 changed file with 2 additions and 2 deletions.
4 changes: 2 additions & 2 deletions python implementation/recurrent_neural_net.py
Original file line number Diff line number Diff line change
Expand Up @@ -260,9 +260,9 @@ def adam_update(self, M, R, grad, t, beta1=.9, beta2=.999):

def gradient_update(self, grads, t, M, R):
    """Apply one parameter update step using the Adam optimizer.

    Parameters
    ----------
    grads : dict
        Gradients keyed like the dict returned from backward_propagate.
    t : int
        Current time step (used by Adam's bias correction).
    M : dict
        First-moment (mean) accumulators, updated in place by adam_update.
    R : dict
        Second-moment (uncentered variance) accumulators, updated in place.
    """
    # grads must have the keys and values like those returned from backward_propagate
    # NOTE: this commit switches from plain gradient descent to Adam;
    # the old self.gradient_descent(grads, t) call was removed as dead code.
    self.adam_update(M, R, grads, t)

def print_max_min_parameter(self, key):

Expand Down

0 comments on commit 9eebece

Please sign in to comment.