How to use the sru.sru_functional.SRUCell function in sru

To help you get started, we’ve selected a few sru examples based on popular ways the library is used in public projects.

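Before working through the excerpts below, here is a minimal sketch of constructing and running a single SRUCell. It assumes the cell consumes a (length, batch, input_size) tensor and returns an output sequence plus a final state; that matches common usage of the library, but check it against your installed version:

import torch
from sru.sru_functional import SRUCell

length, batch, input_size, hidden_size = 16, 4, 32, 64

# A single unidirectional cell; the arguments mirror the constructor shown in the second excerpt.
cell = SRUCell(input_size, hidden_size, dropout=0.1, use_tanh=1)

x = torch.randn(length, batch, input_size)

# Assumption: the cell returns (output sequence, final state).
h, c = cell(x)
print(h.shape)  # expected: (length, batch, hidden_size)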

From asappresearch/sru, sru/sru_functional.py (view on GitHub):
        # Fragment of a constructor that stacks SRUCell layers into an nn.ModuleList.
        self.input_size = input_size
        self.hidden_size = hidden_size
        self.output_size = hidden_size * 2 if bidirectional else hidden_size
        self.num_layers = num_layers
        self.dropout = dropout
        self.rnn_dropout = rnn_dropout
        self.projection_size = projection_size
        self.rnn_lst = nn.ModuleList()
        self.bidirectional = bidirectional
        self.use_layer_norm = layer_norm
        self.has_skip_term = has_skip_term
        self.num_directions = 2 if bidirectional else 1
        self.nn_rnn_compatible_return = nn_rnn_compatible_return

        for i in range(num_layers):
            l = SRUCell(
                self.input_size if i == 0 else self.output_size,
                self.hidden_size,
                dropout=dropout if i + 1 != num_layers else 0,
                rnn_dropout=rnn_dropout,
                bidirectional=bidirectional,
                n_proj=projection_size,
                use_tanh=use_tanh,
                #is_input_normalized=is_input_normalized or (i > 0 and self.use_layer_norm),
                layer_norm=layer_norm,
                highway_bias=highway_bias,
                has_skip_term=has_skip_term,
                rescale=rescale,
                v1=v1
            )
            self.rnn_lst.append(l)
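
The loop above creates one SRUCell per layer and registers it in an nn.ModuleList, with the first layer consuming input_size and every later layer consuming the previous layer's output_size. A rough sketch of driving such a stack layer by layer is shown below; the per-cell forward signature (output sequence plus final state) is assumed rather than taken from this excerpt:

import torch
import torch.nn as nn
from sru.sru_functional import SRUCell

input_size, hidden_size, num_layers = 32, 64, 2

# Mirror the loop above for the unidirectional case, where output_size == hidden_size.
cells = nn.ModuleList(
    [SRUCell(input_size if i == 0 else hidden_size, hidden_size)
     for i in range(num_layers)]
)

x = torch.randn(10, 4, input_size)  # (length, batch, input_size)
final_states = []
for cell in cells:
    x, c = cell(x)          # assumption: each cell returns (output, state)
    final_states.append(c)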

From asappresearch/sru, sru/sru_functional.py (view on GitHub):
    def __init__(self,
                 input_size,
                 hidden_size,
                 dropout=0,
                 rnn_dropout=0,
                 bidirectional=False,
                 n_proj=0,
                 use_tanh=0,
                 #is_input_normalized=False,
                 highway_bias=0,
                 has_skip_term=True,
                 layer_norm=False,
                 rescale=True,
                 v1=False):

        super(SRUCell, self).__init__()
        self.input_size = input_size
        self.hidden_size = hidden_size  # hidden size per direction
        self.output_size = hidden_size * 2 if bidirectional else hidden_size
        self.rnn_dropout = rnn_dropout
        self.dropout = dropout
        self.bidirectional = bidirectional
        #self.is_input_normalized = is_input_normalized
        self.has_skip_term = has_skip_term
        self.highway_bias = highway_bias
        self.v1 = v1
        self.rescale = rescale
        self.activation_type = 0
        self.activation = 'none'
        if use_tanh:
            self.activation_type = 1
            self.activation = 'tanh'
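
This constructor matches the SRUCell(...) call in the first excerpt's loop. A small example of toggling a few of its options follows; the doubling of output_size for bidirectional cells and the tanh activation flag follow directly from the assignments shown above, while everything else should be verified against the installed version:

from sru.sru_functional import SRUCell

cell = SRUCell(
    input_size=128,
    hidden_size=256,
    dropout=0.2,
    bidirectional=True,
    layer_norm=True,
    use_tanh=1,        # sets activation_type = 1 and activation = 'tanh' as above
    highway_bias=-1.0,
)

# Per the constructor, output_size doubles when bidirectional=True.
assert cell.output_size == 512
assert cell.activation == 'tanh'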