How to use the emmental.learner.EmmentalLearner class in emmental

To help you get started, we've selected a few emmental examples based on how EmmentalLearner is used in public projects. The snippets below all come from the emmental test suite and show how the learner is configured with different optimizers and learning rate schedulers; several are truncated, so follow the GitHub links for the complete tests.
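
Every snippet on this page relies on the same handful of imports, so here they are once, together with a minimal sketch of the usual end-to-end pattern. Treat learner.learn(model, dataloaders) as the normal entry point rather than a complete training script: the model and dataloader construction is only hinted at in comments, and the tests below instead exercise the learner's internal _set_optimizer / _set_lr_scheduler helpers on a toy nn.Linear model. (The isequal calls in a few of them come from a small dict-comparison helper used by the test suite.)

import logging

import torch.nn as nn

import emmental
from emmental import Meta
from emmental.learner import EmmentalLearner

# Fresh Emmental environment: reset global state and point logs/checkpoints
# at a scratch directory.
Meta.reset()
emmental.init("temp_test_optimizer")

# The optimizer and learning rate scheduler are chosen purely through config.
emmental.Meta.update_config(
    {
        "learner_config": {
            "n_epochs": 4,
            "optimizer_config": {"optimizer": "adam"},
            "lr_scheduler_config": {"lr_scheduler": "linear"},
        }
    }
)

learner = EmmentalLearner()

# In a real project you would build an EmmentalModel from EmmentalTask objects,
# wrap the data in EmmentalDataLoader instances, and then train with:
#
#     learner.learn(model, dataloaders)
#
# The tests below skip that step and call the internal helpers
# (_set_optimizer, _set_lr_scheduler) on an nn.Linear(1, 1) model instead.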

From SenWu/emmental: tests/optimizers/test_adadelta_optimizer.py (view on GitHub)
def test_adadelta_optimizer(caplog):
    """Unit test of Adadelta optimizer."""
    caplog.set_level(logging.INFO)

    optimizer = "adadelta"
    dirpath = "temp_test_optimizer"
    model = nn.Linear(1, 1)
    emmental_learner = EmmentalLearner()

    Meta.reset()
    emmental.init(dirpath)

    # Test default Adadelta setting
    config = {"learner_config": {"optimizer_config": {"optimizer": optimizer}}}
    emmental.Meta.update_config(config)
    emmental_learner._set_optimizer(model)

    assert isequal(
        emmental_learner.optimizer.defaults,
        {"lr": 0.001, "rho": 0.9, "eps": 1e-06, "weight_decay": 0},
    )

    # Test new Adadelta setting
    config = {
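
The Adadelta snippet is cut off right where it starts testing a non-default setting. Below is a rough sketch of what such an override looks like, assuming Emmental's convention of a per-optimizer sub-config (adadelta_config) inside optimizer_config; the numbers are illustrative, not the values from the original test:

# Hypothetical non-default Adadelta setting (illustrative values; the
# adadelta_config key is assumed from Emmental's config conventions).
config = {
    "learner_config": {
        "optimizer_config": {
            "optimizer": "adadelta",
            "lr": 0.02,
            "adadelta_config": {"rho": 0.6, "eps": 1e-05},
        }
    }
}
emmental.Meta.update_config(config)
emmental_learner._set_optimizer(model)

assert emmental_learner.optimizer.defaults["lr"] == 0.02
assert emmental_learner.optimizer.defaults["rho"] == 0.6
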
From SenWu/emmental: tests/optimizers/test_rms_prop_optimizer.py (view on GitHub)
def test_rms_prop_optimizer(caplog):
    """Unit test of RMSprop optimizer."""
    caplog.set_level(logging.INFO)

    optimizer = "rms_prop"
    dirpath = "temp_test_optimizer"
    model = nn.Linear(1, 1)
    emmental_learner = EmmentalLearner()

    Meta.reset()
    emmental.init(dirpath)

    # Test default RMSprop setting
    config = {"learner_config": {"optimizer_config": {"optimizer": optimizer}}}
    emmental.Meta.update_config(config)
    emmental_learner._set_optimizer(model)

    assert emmental_learner.optimizer.defaults == {
        "lr": 0.001,
        "alpha": 0.99,
        "eps": 1e-08,
        "momentum": 0,
        "centered": False,
        "weight_decay": 0,
From SenWu/emmental: tests/optimizers/test_asgd_optimizer.py (view on GitHub)
def test_asgd_optimizer(caplog):
    """Unit test of ASGD optimizer."""
    caplog.set_level(logging.INFO)

    optimizer = "asgd"
    dirpath = "temp_test_optimizer"
    model = nn.Linear(1, 1)
    emmental_learner = EmmentalLearner()

    Meta.reset()
    emmental.init(dirpath)

    # Test default ASGD setting
    config = {"learner_config": {"optimizer_config": {"optimizer": optimizer}}}
    emmental.Meta.update_config(config)
    emmental_learner._set_optimizer(model)

    assert isequal(
        emmental_learner.optimizer.defaults,
        {
            "lr": 0.001,
            "lambd": 0.0001,
            "alpha": 0.75,
            "t0": 1_000_000.0,
From SenWu/emmental: tests/lr_schedulers/test_plateau_scheduler.py (view on GitHub)
def test_plateau_scheduler(caplog):
    """Unit test of plateau scheduler."""
    caplog.set_level(logging.INFO)

    lr_scheduler = "plateau"
    dirpath = "temp_test_scheduler"
    model = nn.Linear(1, 1)
    emmental_learner = EmmentalLearner()

    Meta.reset()
    emmental.init(dirpath)

    config = {
        "learner_config": {
            "n_epochs": 4,
            "optimizer_config": {"optimizer": "sgd", "lr": 10},
            "lr_scheduler_config": {
                "lr_scheduler": lr_scheduler,
                "plateau_config": {
                    "metric": "model/train/all/loss",
                    "mode": "min",
                    "factor": 0.1,
                    "patience": 1,
                    "threshold": 0.0001,
From SenWu/emmental: tests/optimizers/test_adam_optimizer.py (view on GitHub)
def test_adam_optimizer(caplog):
    """Unit test of Adam optimizer."""
    caplog.set_level(logging.INFO)

    optimizer = "adam"
    dirpath = "temp_test_optimizer"
    model = nn.Linear(1, 1)
    emmental_learner = EmmentalLearner()

    Meta.reset()
    emmental.init(dirpath)

    # Test default Adam setting
    config = {"learner_config": {"optimizer_config": {"optimizer": optimizer}}}
    emmental.Meta.update_config(config)
    emmental_learner._set_optimizer(model)

    assert emmental_learner.optimizer.defaults == {
        "lr": 0.001,
        "betas": (0.9, 0.999),
        "eps": 1e-08,
        "amsgrad": False,
        "weight_decay": 0,
    }
From SenWu/emmental: tests/optimizers/test_sgd_optimizer.py (view on GitHub)
def test_sgd_optimizer(caplog):
    """Unit test of SGD optimizer."""
    caplog.set_level(logging.INFO)

    optimizer = "sgd"
    dirpath = "temp_test_optimizer"
    model = nn.Linear(1, 1)
    emmental_learner = EmmentalLearner()

    Meta.reset()
    emmental.init(dirpath)

    # Test default SGD setting
    config = {"learner_config": {"optimizer_config": {"optimizer": optimizer}}}
    emmental.Meta.update_config(config)
    emmental_learner._set_optimizer(model)

    assert emmental_learner.optimizer.defaults == {
        "lr": 0.001,
        "momentum": 0,
        "dampening": 0,
        "nesterov": False,
        "weight_decay": 0.0,
    }
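
Beyond naming the optimizer, the same optimizer_config block also carries the learning rate, L2 weight decay, gradient clipping, and a per-optimizer sub-config. Below is a hedged sketch with illustrative values; the l2, grad_clip, and sgd_config keys (and the l2-to-weight_decay mapping) are assumptions based on Emmental's config schema:

# Hypothetical non-default SGD setting (illustrative values; key names assume
# Emmental's optimizer_config schema: l2 -> weight decay, grad_clip -> gradient
# clipping, sgd_config -> options forwarded to torch.optim.SGD).
config = {
    "learner_config": {
        "optimizer_config": {
            "optimizer": "sgd",
            "lr": 0.1,
            "l2": 0.0001,
            "grad_clip": 1.0,
            "sgd_config": {"momentum": 0.9},
        }
    }
}
emmental.Meta.update_config(config)
emmental_learner._set_optimizer(model)

assert emmental_learner.optimizer.defaults["lr"] == 0.1
assert emmental_learner.optimizer.defaults["momentum"] == 0.9
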
From SenWu/emmental: tests/lr_schedulers/test_exponential_scheduler.py (view on GitHub)
def test_exponential_scheduler(caplog):
    """Unit test of exponential scheduler."""
    caplog.set_level(logging.INFO)

    lr_scheduler = "exponential"
    dirpath = "temp_test_scheduler"
    model = nn.Linear(1, 1)
    emmental_learner = EmmentalLearner()

    Meta.reset()
    emmental.init(dirpath)

    # Test step per batch
    config = {
        "learner_config": {
            "n_epochs": 4,
            "optimizer_config": {"optimizer": "sgd", "lr": 10},
            "lr_scheduler_config": {
                "lr_scheduler": lr_scheduler,
                "exponential_config": {"gamma": 0.1},
            },
        }
    }
    emmental.Meta.update_config(config)
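
The exponential test is cut off after the config update, but the gist is simple: with gamma = 0.1 every scheduler step multiplies the learning rate by 0.1. A sketch of the remaining setup (mirroring the linear scheduler test further down, and again assuming no warmup):

# Step the scheduler once per batch.
emmental_learner.n_batches_per_epoch = 1
emmental_learner._set_optimizer(model)
emmental_learner._set_lr_scheduler(model)

# Before any scheduler step the configured lr is still in effect; each
# subsequent step multiplies it by gamma, i.e. 10 -> 1 -> 0.1 -> ...
assert abs(emmental_learner.optimizer.param_groups[0]["lr"] - 10) < 1e-6
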
From SenWu/emmental: tests/optimizers/test_bert_adam_optimizer.py (view on GitHub)
def test_bert_adam_optimizer(caplog):
    """Unit test of BertAdam optimizer."""
    caplog.set_level(logging.INFO)

    optimizer = "bert_adam"
    dirpath = "temp_test_optimizer"
    model = nn.Linear(1, 1)
    emmental_learner = EmmentalLearner()

    Meta.reset()
    emmental.init(dirpath)

    # Test default BertAdam setting
    config = {"learner_config": {"optimizer_config": {"optimizer": optimizer}}}
    emmental.Meta.update_config(config)
    emmental_learner._set_optimizer(model)

    assert emmental_learner.optimizer.defaults == {
        "lr": 0.001,
        "betas": (0.9, 0.999),
        "eps": 1e-08,
        "weight_decay": 0.0,
    }
From SenWu/emmental: tests/lr_schedulers/test_linear_scheduler.py (view on GitHub)
def test_linear_scheduler(caplog):
    """Unit test of linear scheduler."""
    caplog.set_level(logging.INFO)

    lr_scheduler = "linear"
    dirpath = "temp_test_scheduler"
    model = nn.Linear(1, 1)
    emmental_learner = EmmentalLearner()

    Meta.reset()
    emmental.init(dirpath)

    # Test per batch
    config = {
        "learner_config": {
            "n_epochs": 4,
            "optimizer_config": {"optimizer": "sgd", "lr": 10},
            "lr_scheduler_config": {"lr_scheduler": lr_scheduler},
        }
    }
    emmental.Meta.update_config(config)
    emmental_learner.n_batches_per_epoch = 1
    emmental_learner._set_optimizer(model)
    emmental_learner._set_lr_scheduler(model)
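
A common variant is to put a short warmup phase in front of the linear decay. The sketch below assumes Emmental's warmup_unit / warmup_steps keys in lr_scheduler_config; the values are illustrative:

# Hypothetical linear schedule with a two-batch warmup (illustrative values;
# warmup_unit / warmup_steps are assumed to live in lr_scheduler_config).
config = {
    "learner_config": {
        "n_epochs": 4,
        "optimizer_config": {"optimizer": "sgd", "lr": 10},
        "lr_scheduler_config": {
            "lr_scheduler": "linear",
            "warmup_unit": "batch",
            "warmup_steps": 2,
        },
    }
}
emmental.Meta.update_config(config)
emmental_learner.n_batches_per_epoch = 1
emmental_learner._set_optimizer(model)
emmental_learner._set_lr_scheduler(model)
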
From SenWu/emmental: tests/lr_schedulers/test_cyclic_scheduler.py (view on GitHub)
def test_cyclic_scheduler(caplog):
    """Unit test of cyclic scheduler."""
    caplog.set_level(logging.INFO)

    lr_scheduler = "cyclic"
    dirpath = "temp_test_scheduler"
    model = nn.Linear(1, 1)
    emmental_learner = EmmentalLearner()

    Meta.reset()
    emmental.init(dirpath)

    config = {
        "learner_config": {
            "n_epochs": 4,
            "optimizer_config": {"optimizer": "sgd", "lr": 10},
            "lr_scheduler_config": {
                "lr_scheduler": lr_scheduler,
                "cyclic_config": {
                    "base_lr": 10,
                    "base_momentum": 0.8,
                    "cycle_momentum": True,
                    "gamma": 1.0,
                    "last_epoch": -1,