How to use the `nlp.layers.attention.Attention` layer class from the `nlp` package

To help you get started, we’ve selected a few nlp examples, based on popular ways it is used in public projects.

Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.

github msgi / nlp-journey / nlp / classfication / han_classifier.py View on Github external
def __build_model(self):
        """Build a hierarchical attention network (HAN) classifier.

        A word-level Bi-LSTM + attention encoder is wrapped in
        ``TimeDistributed`` and applied per sentence; a sentence-level
        Bi-LSTM + attention then produces a document vector that feeds
        the final dense classifier.

        Returns:
            A compiled Keras ``Model`` taking integer word-id tensors of
            shape (max_len_sentence, max_len_word).
        """
        # Word-level encoder: embed each word, run a Bi-LSTM over the
        # word sequence, then apply the project's Attention layer
        # (presumably pools the timesteps into one vector — confirm
        # against the Attention implementation).
        input_word = Input(shape=(self.max_len_word,))
        x_word = Embedding(self.max_features, self.embedding_dims, input_length=self.max_len_word)(input_word)
        x_word = Bidirectional(CuDNNLSTM(128, return_sequences=True))(x_word)
        x_word = Attention()(x_word)
        model_word = Model(input_word, x_word)

        # Sentence part: encode every sentence with the shared word-level
        # model, then attend over the resulting sentence vectors.
        # Renamed from `input` to avoid shadowing the builtin.
        sentence_input = Input(shape=(self.max_len_sentence, self.max_len_word))
        x_sentence = TimeDistributed(model_word)(sentence_input)
        x_sentence = Bidirectional(CuDNNLSTM(128, return_sequences=True))(x_sentence)
        x_sentence = Attention()(x_sentence)

        output = Dense(self.class_num, activation=self.last_activation)(x_sentence)
        model = Model(inputs=sentence_input, outputs=output)
        model.compile('adam', 'binary_crossentropy', metrics=['accuracy'])
        return model
github msgi / nlp-journey / nlp / classfication / bilstm_att_classifier.py View on Github external
def __build_model(self):
        """Build a Bi-LSTM + attention binary classifier.

        Uses a frozen pretrained embedding matrix, a Bi-LSTM encoder, the
        project's Attention layer, and a small dense head ending in a
        sigmoid unit.

        Returns:
            A compiled Keras ``Model`` taking integer sequences of length
            ``self.maxlen`` and producing a single sigmoid probability.
        """
        # Renamed from `input` to avoid shadowing the builtin.
        model_input = Input(shape=(self.maxlen,))
        # Frozen embedding initialized from the pretrained matrix.
        output = Embedding(len(self.embedding_matrix),
                           self.embed_size,
                           weights=[self.embedding_matrix],
                           trainable=False)(model_input)
        output = Bidirectional(LSTM(150, return_sequences=True, dropout=0.25, recurrent_dropout=0.25))(output)
        # Project-defined Attention layer — presumably pools the timestep
        # outputs into a single vector; confirm against its implementation.
        output = Attention()(output)
        output = Dense(128, activation="relu")(output)
        output = Dropout(0.25)(output)
        output = Dense(1, activation="sigmoid")(output)
        model = Model(inputs=model_input, outputs=output)
        model.compile(loss='binary_crossentropy',
                      optimizer='adam',
                      metrics=['accuracy'])
        return model
github msgi / nlp-journey / nlp / classfication / dl / rnn_attention_classifier.py View on Github external
def build_model(self):
        """Assemble and compile the Bi-LSTM + attention classifier.

        The network embeds token ids with frozen pretrained vectors,
        encodes them with a bidirectional LSTM, applies the project's
        Attention layer, and finishes with a dense head and a single
        sigmoid output.

        Returns:
            A compiled Keras ``Model`` for binary classification.
        """
        inputs = Input(shape=(self.maxlen,))

        # Frozen pretrained embeddings (300-dim vectors).
        embedded = Embedding(len(self.embeddings),
                             300,
                             weights=[self.embeddings],
                             trainable=False)(inputs)

        # Bi-LSTM encoder followed by the attention layer.
        encoded = Bidirectional(
            LSTM(150, return_sequences=True, dropout=0.25, recurrent_dropout=0.25)
        )(embedded)
        attended = Attention()(encoded)

        # Dense classification head.
        hidden = Dense(128, activation="relu")(attended)
        hidden = Dropout(0.25)(hidden)
        prediction = Dense(1, activation="sigmoid")(hidden)

        model = Model(inputs=inputs, outputs=prediction)
        model.compile(loss='binary_crossentropy',
                      optimizer='adam',
                      metrics=['accuracy'])
        return model
github msgi / nlp-journey / nlp / classfication / han_classifier.py View on Github external
def __build_model(self):
        """Build a hierarchical attention network (HAN) classifier.

        A word-level Bi-LSTM + attention encoder is wrapped in
        ``TimeDistributed`` and applied per sentence; a sentence-level
        Bi-LSTM + attention then produces a document vector that feeds
        the final dense classifier.

        Returns:
            A compiled Keras ``Model`` taking integer word-id tensors of
            shape (max_len_sentence, max_len_word).
        """
        # Word-level encoder: embed each word, run a Bi-LSTM over the
        # word sequence, then apply the project's Attention layer
        # (presumably pools the timesteps into one vector — confirm
        # against the Attention implementation).
        input_word = Input(shape=(self.max_len_word,))
        x_word = Embedding(self.max_features, self.embedding_dims, input_length=self.max_len_word)(input_word)
        x_word = Bidirectional(CuDNNLSTM(128, return_sequences=True))(x_word)
        x_word = Attention()(x_word)
        model_word = Model(input_word, x_word)

        # Sentence part: encode every sentence with the shared word-level
        # model, then attend over the resulting sentence vectors.
        # Renamed from `input` to avoid shadowing the builtin.
        sentence_input = Input(shape=(self.max_len_sentence, self.max_len_word))
        x_sentence = TimeDistributed(model_word)(sentence_input)
        x_sentence = Bidirectional(CuDNNLSTM(128, return_sequences=True))(x_sentence)
        x_sentence = Attention()(x_sentence)

        output = Dense(self.class_num, activation=self.last_activation)(x_sentence)
        model = Model(inputs=sentence_input, outputs=output)
        model.compile('adam', 'binary_crossentropy', metrics=['accuracy'])
        return model