How to use the dnn.pytorch.layer.CNN_layer function in dnn

To help you get started, we’ve selected a few dnn examples, based on popular ways it is used in public projects.


github NUSTM / pytorch-dnnnlp / pytorch / contrib.py
def __init__(self, emb_matrix, args, n_time):
        nn.Module.__init__(self)
        base.base.__init__(self, args)

        self.n_time = n_time
        self.bi_direction_num = 2 if self.bi_direction else 1
        out_n_hidden = self.n_hidden * self.bi_direction_num
        self.drop_out = nn.Dropout(self.drop_prob)
        self.embedding_layer(emb_matrix)

        self.extractors = nn.ModuleList()
        self.attentions = nn.ModuleList()
        self.predictors = nn.ModuleList()
        for _ in range(n_time):
            self.extractors.append(
                nn.ModuleList([layer.CNN_layer(self.emb_dim, 1, self.n_hidden, kw) for kw in range(1, 3)])
            )  # index 0 -> (nt-1)
            self.attentions.append(layer.self_attention_layer(out_n_hidden))
            self.predictors.append(layer.softmax_layer(out_n_hidden, self.n_class))  # index 0 -> (nt-1)
        self.connections = nn.ModuleList()
        self.connections.append(None)
        for _ in range(n_time - 1):
            self.connections.append(
                nn.Sequential(
                    nn.Linear(2 * out_n_hidden, out_n_hidden, bias=False),
                    nn.Sigmoid()
                )
            )
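
The connections list built above pairs a bias-free linear layer with a sigmoid, i.e. a gate that maps the concatenated features of two adjacent time steps back to the hidden size. The snippet does not show the forward pass, so the following is only a minimal sketch of how such a gate could be applied; the batch size, hidden size, and fusion formula are assumptions for illustration:

import torch
import torch.nn as nn

out_n_hidden = 128  # hypothetical hidden size
gate = nn.Sequential(
    nn.Linear(2 * out_n_hidden, out_n_hidden, bias=False),
    nn.Sigmoid(),
)

prev = torch.randn(4, out_n_hidden)        # features from time step t-1 (assumed shape)
curr = torch.randn(4, out_n_hidden)        # features from time step t
g = gate(torch.cat([prev, curr], dim=-1))  # gate values in (0, 1)
fused = g * curr + (1 - g) * prev          # one plausible gated fusion
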
github NUSTM / pytorch-dnnnlp / pytorch / model.py
def init_weights(self):
        for m in self.modules():
            if isinstance(m, layer.CNN_layer):
                m.init_weights()
            if isinstance(m, layer.softmax_layer):
                m.init_weights()
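
Here init_weights walks every submodule and delegates to the layer's own initializer, so CNN_layer and softmax_layer are expected to define an init_weights method themselves. Their internals are not shown on this page; purely as a hypothetical sketch, a CNN_layer that wraps a Conv1d might implement it like this:

import torch.nn as nn

def init_weights(self):
    # Hypothetical: assumes CNN_layer wraps a Conv1d (not shown above).
    for m in self.modules():
        if isinstance(m, nn.Conv1d):
            nn.init.xavier_uniform_(m.weight)
            if m.bias is not None:
                nn.init.zeros_(m.bias)
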
github NUSTM / pytorch-dnnnlp / pytorch / model.py
def __init__(self, emb_matrix, args, kernel_widths):
        """
        Initialize the model data and layers
        * emb_matrix [np.array]: word embedding matrix
        * args [dict]: all model arguments
        * kernel_widths [list]: list of kernel widths for the CNN kernels
        """
        nn.Module.__init__(self)
        base.base.__init__(self, args)

        self.emb_mat = layer.embedding_layer(emb_matrix, self.emb_type)
        self.drop_out = nn.Dropout(self.drop_prob)
        self.cnn = nn.ModuleList()
        for kw in kernel_widths:
            self.cnn.append(layer.CNN_layer(self.emb_dim, 1, self.n_hidden, kw))
        self.predict = layer.softmax_layer(self.n_hidden * len(kernel_widths), self.n_class)
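
This constructor follows the familiar TextCNN pattern: one CNN_layer per kernel width, whose pooled outputs are concatenated (hence n_hidden * len(kernel_widths)) before the softmax classifier. Since CNN_layer's forward signature is not shown here, the following is a self-contained sketch of the same pattern using plain torch.nn.Conv1d as a stand-in, with made-up dimensions:

import torch
import torch.nn as nn

emb_dim, n_hidden, n_class = 100, 50, 2  # hypothetical sizes
kernel_widths = [2, 3, 4]

convs = nn.ModuleList(nn.Conv1d(emb_dim, n_hidden, kw) for kw in kernel_widths)
predict = nn.Linear(n_hidden * len(kernel_widths), n_class)

x = torch.randn(8, 20, emb_dim)  # (batch, seq_len, emb_dim) embedded input
x = x.transpose(1, 2)            # Conv1d expects (batch, channels, seq_len)
feats = [conv(x).max(dim=-1).values for conv in convs]  # max-pool over time
logits = predict(torch.cat(feats, dim=-1))              # (batch, n_class)

Concatenating one max-pooled feature vector per kernel width is what makes the classifier's input size n_hidden * len(kernel_widths), matching the softmax_layer construction in the snippet above.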