updated eps value

Chintan Shah 2019-10-08 02:44:13 -04:00
parent 02fb2430f0
commit 46b552e075
1 changed file with 3 additions and 3 deletions


@@ -124,11 +124,11 @@ class DCRNNSupervisor:
     def _train(self, base_lr,
                steps, patience=50, epochs=100, lr_decay_ratio=0.1, log_every=1, save_model=1,
-               test_every_n_epochs=10, **kwargs):
+               test_every_n_epochs=10, epsilon=1e-8, **kwargs):
         # steps is used in learning rate - will see if need to use it?
         min_val_loss = float('inf')
         wait = 0
-        optimizer = torch.optim.Adam(self.dcrnn_model.parameters(), lr=base_lr)
+        optimizer = torch.optim.Adam(self.dcrnn_model.parameters(), lr=base_lr, eps=epsilon)
         lr_scheduler = torch.optim.lr_scheduler.MultiStepLR(optimizer, milestones=steps,
                                                             gamma=lr_decay_ratio)
@@ -159,7 +159,7 @@ class DCRNNSupervisor:
                 if batches_seen == 0:
                     # this is a workaround to accommodate dynamically registered parameters in DCGRUCell
-                    optimizer = torch.optim.Adam(self.dcrnn_model.parameters(), lr=base_lr)
+                    optimizer = torch.optim.Adam(self.dcrnn_model.parameters(), lr=base_lr, eps=epsilon)
                 loss = self._compute_loss(y, output)
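
For context, a minimal, self-contained sketch (not part of the commit) of what the new eps argument controls: torch.optim.Adam adds this small constant to the denominator of its update, param -= lr * m_hat / (sqrt(v_hat) + eps), for numerical stability. The Linear model and random data below are placeholders standing in for self.dcrnn_model and a training batch.

import torch

model = torch.nn.Linear(4, 1)  # placeholder for self.dcrnn_model
# Same construction as in the patched _train(), with an explicit epsilon
optimizer = torch.optim.Adam(model.parameters(), lr=0.01, eps=1e-8)

x, y = torch.randn(8, 4), torch.randn(8, 1)
loss = torch.nn.functional.mse_loss(model(x), y)  # stand-in for self._compute_loss(y, output)
loss.backward()
optimizer.step()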