From 5509e9aae5862b73ab725368acbe3a49f54db60c Mon Sep 17 00:00:00 2001
From: Chintan Shah
Date: Mon, 7 Oct 2019 09:47:38 -0400
Subject: [PATCH] Ensured all parameters are added to the optimizer

---
 model/pytorch/dcrnn_supervisor.py | 8 ++++++++
 1 file changed, 8 insertions(+)

diff --git a/model/pytorch/dcrnn_supervisor.py b/model/pytorch/dcrnn_supervisor.py
index 5f78ab3..cb16561 100644
--- a/model/pytorch/dcrnn_supervisor.py
+++ b/model/pytorch/dcrnn_supervisor.py
@@ -147,6 +147,11 @@ class DCRNNSupervisor:
                 x, y = self._prepare_data(x, y)
 
                 output = self.dcrnn_model(x, y, batches_seen)
+
+                if batches_seen == 0:
+                    # this is a workaround to accommodate dynamically registered parameters in DCGRUCell
+                    optimizer = torch.optim.Adam(self.dcrnn_model.parameters(), lr=base_lr)
+
                 loss = self._compute_loss(y, output)
 
                 self._logger.debug(loss.item())
@@ -165,6 +170,9 @@ class DCRNNSupervisor:
             self._logger.info("evaluating now!")
 
             val_loss = self.evaluate(dataset='val', batches_seen=batches_seen)
+
+            self.dcrnn_model = self.dcrnn_model.train()
+
             end_time = time.time()
 
             self._writer.add_scalar('training loss',