How to use the gluonts.trainer.Trainer class in gluonts

To help you get started, we’ve selected a few gluonts examples based on popular ways the Trainer is used in public projects.
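Across the snippets below the pattern is the same: configure a Trainer with optimization hyperparameters (epochs, learning rate, batches per epoch) and pass it to an estimator, which uses it when `train` is called. A minimal sketch, assuming gluonts 0.x where Trainer lives in gluonts.trainer (freq and prediction_length are illustrative values):

from gluonts.model.simple_feedforward import SimpleFeedForwardEstimator
from gluonts.trainer import Trainer

# the Trainer holds the optimization settings the estimator will use
trainer = Trainer(epochs=5, learning_rate=1e-3, num_batches_per_epoch=50)

estimator = SimpleFeedForwardEstimator(
    freq="H",                # hourly data (illustrative)
    prediction_length=24,    # forecast horizon (illustrative)
    trainer=trainer,
)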


github awslabs / gluon-ts / test / trainer / test_trainer.py
from typing import Any, List

import pytest

from gluonts.trainer import Trainer


def assert_valid_param(param_name: str, param_values: List[Any]) -> None:
    try:
        for x in param_values:
            Trainer(**{param_name: x})
    except Exception as e:
        # pytest.fail raises by itself, so no explicit re-raise is needed
        pytest.fail(f'Unexpected exception when initializing Trainer: "{e}"')
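For example, validating a few legal values of a Trainer argument (the chosen values are illustrative):

assert_valid_param(param_name="epochs", param_values=[1, 10, 100])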
github awslabs / gluon-ts / test / paper_examples / test_axiv_paper_examples.py
    from gluonts.dataset.artificial import constant_dataset
    from gluonts.dataset.repository.datasets import get_dataset
    from gluonts.model.deepar import DeepAREstimator
    from gluonts.trainer import Trainer
    from gluonts.evaluation import Evaluator
    from gluonts.evaluation.backtest import backtest_metrics

    # We use electricity in the paper but that would take too long to run in
    # the unit test
    dataset_info, train_ds, test_ds = constant_dataset()

    meta = dataset_info.metadata

    estimator = DeepAREstimator(
        freq=meta.freq,
        prediction_length=1,
        trainer=Trainer(epochs=1, batch_size=32),
    )
    predictor = estimator.train(train_ds)

    evaluator = Evaluator(quantiles=(0.1, 0.5, 0.9))
    agg_metrics, item_metrics = backtest_metrics(
        train_dataset=train_ds,
        test_dataset=test_ds,
        forecaster=predictor,
        evaluator=evaluator,
    )
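backtest_metrics returns aggregate metrics over the whole dataset alongside per-series metrics; for instance:

    # a couple of the aggregate metrics the Evaluator reports
    print(agg_metrics["MASE"], agg_metrics["mean_wQuantileLoss"])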
github awslabs / gluon-ts / test / model / test_deepar_auxiliary_outputs.py
def test_distribution():
    """
    Makes sure additional tensors can be accessed and have expected shapes
    """
    prediction_length = ds_info.prediction_length
    estimator = DeepAREstimator(
        freq=freq,
        prediction_length=prediction_length,
        trainer=Trainer(epochs=1, num_batches_per_epoch=1),
        distr_output=StudentTOutput(),
    )

    train_output = estimator.train_model(train_ds)

    # todo adapt loader to anomaly detection use-case
    batch_size = 2
    num_samples = 3

    training_data_loader = TrainDataLoader(
        dataset=train_ds,
        transform=train_output.transformation,
        batch_size=batch_size,
        num_batches_per_epoch=estimator.trainer.num_batches_per_epoch,
        ctx=mx.cpu(),
    )
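The excerpt relies on train_model returning more than a predictor; in gluonts 0.x it returns a TrainOutput named tuple (the attribute names below assume that version):

    # TrainOutput bundles the fitted transformation, the trained network,
    # and a ready-made predictor
    transformation = train_output.transformation
    trained_net = train_output.trained_net
    predictor = train_output.predictor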
github awslabs / gluon-ts / test / trainer / test_trainer.py
def assert_invalid_param(
    param_name: str, param_values: List[Any], exp_msg: str
) -> None:
    for x in param_values:
        with pytest.raises(AssertionError) as excinfo:
            Trainer(**{param_name: x})
        # the assertion must sit outside the `with` block; indented inside
        # it, as in the original, it is never reached once Trainer raises
        assert exp_msg in str(excinfo.value)
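A hypothetical call; matching on the parameter name keeps the expected message illustrative rather than tied to Trainer's exact wording:

assert_invalid_param(
    param_name="epochs",
    param_values=[0, -1],
    exp_msg="epochs",  # illustrative fragment of the expected message
)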
github awslabs / gluon-ts / src / gluonts / model / deep_factor / _estimator.py
def __init__(
        self,
        freq: str,
        prediction_length: int,
        num_hidden_global: int = 50,
        num_layers_global: int = 1,
        num_factors: int = 10,
        num_hidden_local: int = 5,
        num_layers_local: int = 1,
        cell_type: str = "lstm",
        trainer: Trainer = Trainer(),
        context_length: Optional[int] = None,
        num_parallel_samples: int = 100,
        cardinality: List[int] = list([1]),
        embedding_dimension: int = 10,
        distr_output: DistributionOutput = StudentTOutput(),
    ) -> None:
        super().__init__(trainer=trainer)

        assert (
            prediction_length > 0
        ), "The value of `prediction_length` should be > 0"
        assert (
            context_length is None or context_length > 0
        ), "The value of `context_length` should be > 0"
        assert num_layers_global > 0, "The value of `num_layers_global` should be > 0"
        assert num_hidden_global > 0, "The value of `num_hidden_global` should be > 0"
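A usage sketch for this constructor, assuming it is exposed as DeepFactorEstimator under gluonts.model.deep_factor in gluonts 0.x (freq and prediction_length are illustrative):

from gluonts.model.deep_factor import DeepFactorEstimator
from gluonts.trainer import Trainer

estimator = DeepFactorEstimator(
    freq="H",
    prediction_length=24,
    trainer=Trainer(epochs=2, num_batches_per_epoch=10),
)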
github awslabs / gluon-ts / src / gluonts / model / transformer / _estimator.py
def __init__(
        self,
        freq: str,
        prediction_length: int,
        context_length: Optional[int] = None,
        trainer: Trainer = Trainer(),
        dropout_rate: float = 0.1,
        cardinality: Optional[List[int]] = None,
        embedding_dimension: int = 20,
        distr_output: DistributionOutput = StudentTOutput(),
        model_dim: int = 32,
        inner_ff_dim_scale: int = 4,
        pre_seq: str = "dn",
        post_seq: str = "drn",
        act_type: str = "softrelu",
        num_heads: int = 8,
        scaling: bool = True,
        lags_seq: Optional[List[int]] = None,
        time_features: Optional[List[TimeFeature]] = None,
        use_feat_dynamic_real: bool = False,
        use_feat_static_cat: bool = False,
        num_parallel_samples: int = 100,
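The signature is truncated in this excerpt; a usage sketch, assuming the class is TransformerEstimator under gluonts.model.transformer (freq and prediction_length are illustrative):

from gluonts.model.transformer import TransformerEstimator
from gluonts.trainer import Trainer

estimator = TransformerEstimator(
    freq="H",
    prediction_length=24,
    trainer=Trainer(epochs=1, num_batches_per_epoch=10),
)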
github awslabs / gluon-ts / examples / anomaly_detection.py
import mxnet as mx

from gluonts.dataset.loader import TrainDataLoader
from gluonts.model.deepar import DeepAREstimator
from gluonts.support.util import get_hybrid_forward_input_names
from gluonts.trainer import Trainer
from gluonts.dataset.repository.datasets import get_dataset


if __name__ == "__main__":

    dataset = get_dataset(dataset_name="electricity")

    estimator = DeepAREstimator(
        prediction_length=dataset.metadata.prediction_length,
        freq=dataset.metadata.freq,
        trainer=Trainer(
            learning_rate=1e-3, epochs=50, num_batches_per_epoch=100
        ),
    )

    # instead of calling `train`, we call `train_model`, which also returns
    # the trained network and the fitted transformation
    train_output = estimator.train_model(dataset.train)

    # we construct a data loader that draws batches of 500 random windows
    batch_size = 500
    num_samples = 100
    training_data_loader = TrainDataLoader(
        dataset=dataset.train,
        transform=train_output.transformation,
        batch_size=batch_size,
        num_batches_per_epoch=estimator.trainer.num_batches_per_epoch,
        ctx=mx.cpu(),
    )
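The script continues beyond this excerpt; as a rough sketch of what the loader yields (assuming the gluonts 0.x TrainDataLoader, which iterates over dict batches):

    # each batch maps field names such as "past_target" to mx.nd arrays
    first_batch = next(iter(training_data_loader))
    print(list(first_batch.keys()))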
github awslabs / gluon-ts / src / gluonts / model / canonical / _estimator.py
def __init__(
        self,
        freq: str,
        context_length: int,
        prediction_length: int,
        trainer: Trainer = Trainer(),
        hidden_dim_sequence=list([50]),
        num_parallel_samples: int = 100,
        cardinality: List[int] = list([1]),
        embedding_dimension: int = 10,
        distr_output: DistributionOutput = StudentTOutput(),
    ) -> None:
        model = nn.HybridSequential()

        for layer, layer_dim in enumerate(hidden_dim_sequence):
            model.add(
                nn.Dense(
                    layer_dim,
                    flatten=False,
                    activation="relu",
                    prefix="mlp_%d_" % layer,
                )
            )
github awslabs / gluon-ts / src / gluonts / model / seq2seq / _mq_dnn_estimator.py
def __init__(
        self,
        prediction_length: int,
        freq: str,
        context_length: Optional[int] = None,
        # FIXME: prefix those so clients know that these are decoder params
        mlp_final_dim: int = 20,
        mlp_hidden_dimension_seq: List[int] = list(),
        quantiles: List[float] = list(
            [0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9]
        ),
        trainer: Trainer = Trainer(),
    ) -> None:
        encoder = HierarchicalCausalConv1DEncoder(
            dilation_seq=[1, 3, 9],
            kernel_size_seq=([3] * len([30, 30, 30])),
            channels_seq=[30, 30, 30],
            use_residual=True,
            prefix="encoder_",
        )
        super(MQCNNEstimator, self).__init__(
            encoder=encoder,
            mlp_final_dim=mlp_final_dim,
            mlp_hidden_dimension_seq=mlp_hidden_dimension_seq,
            freq=freq,
            prediction_length=prediction_length,
            trainer=trainer,
            context_length=context_length,
            quantiles=quantiles,
        )
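A usage sketch (in gluonts 0.x, MQCNNEstimator is importable from gluonts.model.seq2seq; freq and prediction_length are illustrative):

from gluonts.model.seq2seq import MQCNNEstimator
from gluonts.trainer import Trainer

estimator = MQCNNEstimator(
    freq="H",
    prediction_length=24,
    trainer=Trainer(epochs=1, num_batches_per_epoch=10),
)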
github awslabs / gluon-ts / examples / persist_model.py
import os

from gluonts.dataset.repository.datasets import get_dataset
from gluonts.evaluation import Evaluator
from gluonts.evaluation.backtest import make_evaluation_predictions
from gluonts.model.simple_feedforward import SimpleFeedForwardEstimator
from gluonts.support.util import get_download_path
from gluonts.trainer import Trainer
from gluonts.model.predictor import Predictor

if __name__ == "__main__":

    dataset = get_dataset("exchange_rate")

    estimator = SimpleFeedForwardEstimator(
        prediction_length=dataset.metadata.prediction_length,
        freq=dataset.metadata.freq,
        trainer=Trainer(epochs=5, num_batches_per_epoch=10),
    )

    predictor = estimator.train(dataset.train)

    # save the trained model in a path ~/.mxnet/gluon-ts/feedforward/
    # or $MXNET_HOME/feedforward if MXNET_HOME is defined
    model_path = get_download_path() / "feedforward"
    os.makedirs(model_path, exist_ok=True)

    predictor.serialize(model_path)

    # load the model back and evaluate prediction accuracy with the
    # deserialized predictor
    predictor_deserialized = Predictor.deserialize(model_path)

    forecast_it, ts_it = make_evaluation_predictions(
        dataset.test, predictor=predictor_deserialized, num_samples=100
    )
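The excerpt ends here; a plausible continuation with the already-imported Evaluator (a sketch, since the original file's remaining lines are cut off):

    evaluator = Evaluator(quantiles=[0.1, 0.5, 0.9])
    agg_metrics, item_metrics = evaluator(
        ts_it, forecast_it, num_series=len(dataset.test)
    )
    print(agg_metrics)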