From a1c9af2bad21d49ee5cc258308bafd3e04dae6cb Mon Sep 17 00:00:00 2001
From: Chintan Shah
Date: Mon, 30 Sep 2019 21:58:55 -0400
Subject: [PATCH] Setup curriculum learning framework

---
 model/pytorch/dcrnn_model.py | 15 +++++++++++++++
 1 file changed, 15 insertions(+)

diff --git a/model/pytorch/dcrnn_model.py b/model/pytorch/dcrnn_model.py
index 6b8b489..5612b1d 100644
--- a/model/pytorch/dcrnn_model.py
+++ b/model/pytorch/dcrnn_model.py
@@ -1,3 +1,4 @@
+import numpy as np
 import torch
 import torch.nn as nn
 from abc import ABC, abstractmethod
@@ -122,6 +123,20 @@ class DecoderModel(nn.Module, DCRNNModel):
                                           bias=True) for _ in range(self.num_rnn_layers - 1)]
 
+    def t_step_forward_pass(self, hidden_state, inputs, output, t):
+        cell_input = inputs[:, t, :]  # (batch_size, input_size)
+
+        if self.is_training:
+            if t > 0 and self.use_curriculum_learning:
+                c = np.random.uniform(0, 1)
+                if c >= self._compute_sampling_threshold():  #todo
+                    cell_input = output[
+                        t - 1]  # todo: this won't work because the linear layer is applied after forward_impl
+
+        cell_output, hidden_state = self._forward_cell(cell_input, hidden_state)
+        output[t] = cell_output
+        return hidden_state
+
     def forward(self, inputs, hidden_state=None):
         """
         Decoder forward pass.
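
Note on the _compute_sampling_threshold() todo in this patch: in scheduled-sampling (curriculum learning) setups like the one DCRNN describes, that threshold is typically an inverse-sigmoid decay in the number of training batches seen, so early in training c >= threshold almost never fires (the decoder keeps consuming ground truth) and later it fires more often (the decoder consumes its own previous output). A minimal sketch of that decay is below; the batches_seen and cl_decay_steps names and the default value are assumptions for illustration, not identifiers taken from this patch.

    import numpy as np

    def compute_sampling_threshold(batches_seen, cl_decay_steps=2000):
        # Inverse-sigmoid decay: close to 1.0 when batches_seen is small
        # (almost always feed ground truth), falling toward 0.0 as training
        # progresses (feed the model's previous prediction instead).
        # cl_decay_steps controls how fast the threshold decays; 2000 is an
        # assumed placeholder, not a value from the patch.
        return cl_decay_steps / (cl_decay_steps + np.exp(batches_seen / cl_decay_steps))

With the convention used in the patch, ground truth is consumed with probability equal to this threshold, since the previous prediction is only substituted when c >= threshold.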