def __init__(self,
k=constants.DEFAULT_EMBEDDING_SIZE,
eta=constants.DEFAULT_ETA,
epochs=constants.DEFAULT_EPOCH,
batches_count=constants.DEFAULT_BATCH_COUNT,
seed=constants.DEFAULT_SEED,
embedding_model_params={'negative_corruption_entities': constants.DEFAULT_CORRUPTION_ENTITIES,
'corrupt_sides': constants.DEFAULT_CORRUPT_SIDE_TRAIN},
optimizer=constants.DEFAULT_OPTIM,
optimizer_params={'lr': constants.DEFAULT_LR},
loss=constants.DEFAULT_LOSS,
loss_params={},
regularizer=constants.DEFAULT_REGULARIZER,
regularizer_params={},
initializer=constants.DEFAULT_INITIALIZER,
initializer_params={'uniform': DEFAULT_XAVIER_IS_UNIFORM},
verbose=constants.DEFAULT_VERBOSE):
"""Initialize an EmbeddingModel
Also creates a new Tensorflow session for training.
Parameters
----------
k : int
Embedding space dimensionality
eta : int
The number of negatives that must be generated at runtime during training for each positive.
epochs : int
The iterations of the training loop.
batches_count : int
The number of batches in which the training set must be split during the training loop.
seed : int
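These fragments are knowledge-graph embedding model constructors; the parameter names and defaults match the AmpliGraph 1.x API. Assuming that library, a minimal instantiation sketch for the signature above follows. The class name (ComplEx) and all hyperparameter values are illustrative assumptions, not values taken from this code.

import numpy as np
from ampligraph.latent_features import ComplEx  # assumed library/class; any model with this signature is constructed the same way

# Toy triples: (subject, predicate, object). Real datasets are numpy arrays of shape (n, 3).
X = np.array([['a', 'likes', 'b'],
              ['b', 'likes', 'c'],
              ['c', 'works_with', 'a']])

model = ComplEx(k=100,                 # embedding space dimensionality
                eta=5,                 # negatives generated per positive triple
                epochs=50,             # iterations of the training loop
                batches_count=2,       # training set is split into this many batches
                seed=0,                # seed for the internal random number generator
                optimizer='adam',
                optimizer_params={'lr': 1e-3},
                verbose=True)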
def __init__(self,
k=constants.DEFAULT_EMBEDDING_SIZE,
eta=constants.DEFAULT_ETA,
epochs=constants.DEFAULT_EPOCH,
batches_count=constants.DEFAULT_BATCH_COUNT,
seed=constants.DEFAULT_SEED,
embedding_model_params={'num_filters': 32,
'filter_sizes': [1],
'dropout': 0.1},
optimizer=constants.DEFAULT_OPTIM,
optimizer_params={'lr': constants.DEFAULT_LR},
loss=constants.DEFAULT_LOSS,
loss_params={},
regularizer=constants.DEFAULT_REGULARIZER,
regularizer_params={},
initializer=constants.DEFAULT_INITIALIZER,
initializer_params={'uniform': DEFAULT_XAVIER_IS_UNIFORM},
large_graphs=False,
verbose=constants.DEFAULT_VERBOSE):
"""Initialize an EmbeddingModel
Parameters
----------
k : int
Embedding space dimensionality.
eta : int
The number of negatives that must be generated at runtime during training for each positive.
epochs : int
The iterations of the training loop.
batches_count : int
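The num_filters, filter_sizes and dropout defaults above point to a ConvKB-style convolutional model. A hedged sketch of overriding those convolution hyperparameters at construction time, again assuming AmpliGraph 1.x naming; the class name and values are assumptions.

from ampligraph.latent_features import ConvKB  # assumed class; see note above

model = ConvKB(k=100, eta=5, epochs=50, batches_count=2, seed=0,
               embedding_model_params={'num_filters': 64,    # number of convolution filters applied per triple
                                       'filter_sizes': [1],  # filter heights over the stacked (s, p, o) embeddings
                                       'dropout': 0.2},      # dropout rate on the convolution feature map
               optimizer='adam',
               optimizer_params={'lr': 1e-3})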
def __init__(self,
k=DEFAULT_EMBEDDING_SIZE,
eta=DEFAULT_ETA,
epochs=DEFAULT_EPOCH,
batches_count=DEFAULT_BATCH_COUNT,
seed=DEFAULT_SEED,
embedding_model_params={'negative_corruption_entities': DEFAULT_CORRUPTION_ENTITIES,
'corrupt_sides': DEFAULT_CORRUPT_SIDE_TRAIN},
optimizer=DEFAULT_OPTIM,
optimizer_params={'lr': DEFAULT_LR},
loss=DEFAULT_LOSS,
loss_params={},
regularizer=DEFAULT_REGULARIZER,
regularizer_params={},
initializer=DEFAULT_INITIALIZER,
initializer_params={'uniform': DEFAULT_XAVIER_IS_UNIFORM},
verbose=DEFAULT_VERBOSE):
"""Initialize an EmbeddingModel
Also creates a new Tensorflow session for training.
Parameters
----------
k : int
Embedding space dimensionality
eta : int
The number of negatives that must be generated at runtime during
training for each positive.
epochs : int
The iterations of the training loop.
batches_count : int
The number of batches in which the training set must be split
def __init__(self,
k=constants.DEFAULT_EMBEDDING_SIZE,
eta=constants.DEFAULT_ETA,
epochs=constants.DEFAULT_EPOCH,
batches_count=constants.DEFAULT_BATCH_COUNT,
seed=constants.DEFAULT_SEED,
embedding_model_params={'normalize_ent_emb': constants.DEFAULT_NORMALIZE_EMBEDDINGS,
'negative_corruption_entities': constants.DEFAULT_CORRUPTION_ENTITIES,
'corrupt_sides': constants.DEFAULT_CORRUPT_SIDE_TRAIN},
optimizer=constants.DEFAULT_OPTIM,
optimizer_params={'lr': constants.DEFAULT_LR},
loss=constants.DEFAULT_LOSS,
loss_params={},
regularizer=constants.DEFAULT_REGULARIZER,
regularizer_params={},
initializer=constants.DEFAULT_INITIALIZER,
initializer_params={'uniform': DEFAULT_XAVIER_IS_UNIFORM},
verbose=constants.DEFAULT_VERBOSE):
"""Initialize an EmbeddingModel
Also creates a new Tensorflow session for training.
Parameters
----------
k : int
Embedding space dimensionality
eta : int
The number of negatives that must be generated at runtime during training for each positive.
epochs : int
The iterations of the training loop.
batches_count : int
The number of batches in which the training set must be split during the training loop.
seed : int
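This variant exposes a normalize_ent_emb switch alongside the corruption controls, which matches the DistMult-style constructor in AmpliGraph 1.x. A hedged sketch follows, with the class name and values as assumptions.

from ampligraph.latent_features import DistMult  # assumed class; see note above

model = DistMult(k=100, eta=5, epochs=50, batches_count=2, seed=0,
                 embedding_model_params={'normalize_ent_emb': False,              # assumed: whether to normalise entity embeddings during training
                                         'negative_corruption_entities': 'all'},  # corrupt negatives against every entity in the graph
                 optimizer='adam',
                 optimizer_params={'lr': 1e-3})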
def __init__(self,
k=constants.DEFAULT_EMBEDDING_SIZE,
eta=constants.DEFAULT_ETA,
epochs=constants.DEFAULT_EPOCH,
batches_count=constants.DEFAULT_BATCH_COUNT,
seed=constants.DEFAULT_SEED,
embedding_model_params={'norm': constants.DEFAULT_NORM_TRANSE,
'normalize_ent_emb': constants.DEFAULT_NORMALIZE_EMBEDDINGS,
'negative_corruption_entities': constants.DEFAULT_CORRUPTION_ENTITIES,
'corrupt_sides': constants.DEFAULT_CORRUPT_SIDE_TRAIN},
optimizer=constants.DEFAULT_OPTIM,
optimizer_params={'lr': constants.DEFAULT_LR},
loss=constants.DEFAULT_LOSS,
loss_params={},
regularizer=constants.DEFAULT_REGULARIZER,
regularizer_params={},
initializer=constants.DEFAULT_INITIALIZER,
initializer_params={'uniform': DEFAULT_XAVIER_IS_UNIFORM},
verbose=constants.DEFAULT_VERBOSE):
"""
Initialize an EmbeddingModel.
Also creates a new TensorFlow session for training.
Parameters
----------
k : int
Embedding space dimensionality.
eta : int
The number of negatives that must be generated at runtime during training for each positive.
epochs : int
The number of iterations of the training loop.
batches_count : int
The number of batches in which the training set must be split during the training loop.
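The norm entry (DEFAULT_NORM_TRANSE) marks this as the TransE-style constructor. A hedged sketch of the translation-distance specifics, assuming AmpliGraph 1.x naming; the margin-based loss settings are illustrative.

from ampligraph.latent_features import TransE  # assumed class; see note above

model = TransE(k=100, eta=5, epochs=50, batches_count=2, seed=0,
               embedding_model_params={'norm': 1,                   # use the L1 norm in the TransE scoring function
                                       'normalize_ent_emb': False},
               loss='pairwise',
               loss_params={'margin': 5},                           # margin of the pairwise ranking loss
               optimizer='adam',
               optimizer_params={'lr': 1e-3})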
def __init__(self,
k=constants.DEFAULT_EMBEDDING_SIZE,
eta=constants.DEFAULT_ETA,
epochs=constants.DEFAULT_EPOCH,
batches_count=constants.DEFAULT_BATCH_COUNT,
seed=constants.DEFAULT_SEED,
embedding_model_params={},
optimizer=constants.DEFAULT_OPTIM,
optimizer_params={'lr': constants.DEFAULT_LR},
loss=constants.DEFAULT_LOSS,
loss_params={},
regularizer=constants.DEFAULT_REGULARIZER,
regularizer_params={},
initializer=constants.DEFAULT_INITIALIZER,
initializer_params={'uniform': DEFAULT_XAVIER_IS_UNIFORM},
large_graphs=False,
verbose=constants.DEFAULT_VERBOSE):
"""Initialize an EmbeddingModel
Also creates a new Tensorflow session for training.
Parameters
----------
k : int
Embedding space dimensionality.
eta : int
The number of negatives that must be generated at runtime during training for each positive.
epochs : int
The number of iterations of the training loop.
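The last fragment adds a large_graphs flag on top of the shared hyperparameters. To close the loop, a hedged end-to-end sketch: construct, fit on triples, then score unseen ones. The AmpliGraph 1.x API is assumed, large_graphs is presumably a memory-saving mode for graphs with many entities, and the class and values are illustrative.

import numpy as np
from ampligraph.latent_features import ConvKB  # assumed class exposing large_graphs; see note above

X = np.array([['a', 'likes', 'b'],
              ['b', 'likes', 'c'],
              ['c', 'works_with', 'a']])

model = ConvKB(k=50, eta=5, epochs=20, batches_count=2, seed=0,
               large_graphs=False,   # assumed: set True for a memory-saving training mode on very large graphs
               optimizer='adam',
               optimizer_params={'lr': 1e-3})

model.fit(X)                                              # creates the TensorFlow session and trains the embeddings
scores = model.predict(np.array([['a', 'likes', 'c']]))   # raw plausibility scores for unseen triples
print(scores)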