How to use the text2vec.embeddings.bert_embedding.BERTEmbedding class in text2vec

To help you get started, we’ve selected a few text2vec examples based on popular ways BERTEmbedding is used in public projects.


shibing624/text2vec: tests/test_embedding.py
def setUpClass(cls):
    # Build one shared BERTEmbedding for all tests in this class.
    from text2vec.embeddings.bert_embedding import BERTEmbedding
    cls.embedding = BERTEmbedding(sequence_length=SEQUENCE_LENGTH)
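For context, this setUpClass hook sits inside a unittest.TestCase subclass. A minimal self-contained version might look like the sketch below; the SEQUENCE_LENGTH value and the test method are placeholders, not taken from the project's test suite.

import unittest

SEQUENCE_LENGTH = 128  # placeholder; the real test module defines its own constant


class BERTEmbeddingTestCase(unittest.TestCase):
    @classmethod
    def setUpClass(cls):
        from text2vec.embeddings.bert_embedding import BERTEmbedding
        cls.embedding = BERTEmbedding(sequence_length=SEQUENCE_LENGTH)

    def test_sequence_length(self):
        # Assumption: the configured length is exposed as an attribute.
        self.assertEqual(self.embedding.sequence_length, SEQUENCE_LENGTH)


if __name__ == '__main__':
    unittest.main()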
shibing624/text2vec: text2vec/vector.py
def load_model(self):
    # Lazily build the embedding backend on first use, importing only the
    # implementation that matches the configured embedding type.
    if not self.model:
        if self.embedding_type == EmbType.BERT:
            from text2vec.embeddings.bert_embedding import BERTEmbedding
            self.model = BERTEmbedding(model_folder=self.bert_model_folder,
                                       layer_nums=self.bert_layer_nums,
                                       trainable=self.trainable,
                                       sequence_length=self.sequence_length,
                                       processor=self.processor)
        elif self.embedding_type == EmbType.W2V:
            from text2vec.embeddings.word_embedding import WordEmbedding
            self.model = WordEmbedding(w2v_path=self.w2v_path,
                                       w2v_kwargs=self.w2v_kwargs,
                                       sequence_length=self.sequence_length,
                                       processor=self.processor,
                                       trainable=self.trainable)
        else:
            raise ValueError('Unsupported embedding type.')
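load_model defers both the import and the construction of the backend until first use, switching on embedding_type. Below is a hedged sketch of the same lazy-selection pattern in isolation; the keyword arguments are simply forwarded, and the EmbType enum here is illustrative, not the library's own definition.

from enum import Enum


class EmbType(Enum):
    # Illustrative stand-in for the library's embedding-type constants.
    BERT = 'bert'
    W2V = 'w2v'


def build_embedding(embedding_type, **kwargs):
    """Lazily construct an embedding backend, mirroring load_model above."""
    if embedding_type == EmbType.BERT:
        # Import only when a BERT backend is actually requested.
        from text2vec.embeddings.bert_embedding import BERTEmbedding
        return BERTEmbedding(**kwargs)
    if embedding_type == EmbType.W2V:
        from text2vec.embeddings.word_embedding import WordEmbedding
        return WordEmbedding(**kwargs)
    raise ValueError('Unsupported embedding type: {}'.format(embedding_type))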
shibing624/text2vec: text2vec/embeddings/bert_embedding.py
def analyze_corpus(self,
                   x: Union[Tuple[List[List[str]], ...], List[List[str]]],
                   y: Union[List[List[Any]], List[Any]]):
    """
    Prepare the embedding layer and pre-processor for a labeling task.

    Args:
        x: tokenized corpus, one list of tokens per sentence
        y: labels (or label sequences) aligned with x

    Returns:
        None
    """
    # Build the token-to-index mapping from the BERT vocabulary on the first
    # call, then defer to the base class implementation.
    if len(self.processor.token2idx) == 0:
        self._build_token2idx_from_bert()
    super(BERTEmbedding, self).analyze_corpus(x, y)
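As the type annotations show, analyze_corpus takes pre-tokenized sentences and their labels. The following is a minimal sketch of calling it; the sequence length, tokens, and labels are placeholder data rather than values from the project.

from text2vec.embeddings.bert_embedding import BERTEmbedding

# Placeholder configuration; the project sets these via its own constants.
embedding = BERTEmbedding(sequence_length=128)

# Tokenized sentences and one label per sentence, matching the
# List[List[str]] / List[Any] annotations of analyze_corpus.
x = [['hello', 'world'], ['bert', 'embedding', 'example']]
y = ['greeting', 'nlp']

# The first call builds token2idx from the BERT vocabulary, then delegates
# to the base class (see the method body above).
embedding.analyze_corpus(x, y)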