How to use the poutyne.framework.callbacks.EarlyStopping class in Poutyne

To help you get started, we’ve selected a few examples based on popular ways EarlyStopping is used in public Poutyne projects.

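Before diving into the examples, here is a minimal, self-contained sketch of the typical pattern: attach an EarlyStopping callback to a Poutyne Model so that training stops once the monitored quantity stops improving. The network, synthetic data, and hyperparameters below are illustrative placeholders, not taken from the projects listed further down.

import torch
from torch import nn, optim
from torch.utils.data import DataLoader, TensorDataset

from poutyne.framework import Model
from poutyne.framework.callbacks import EarlyStopping

# Synthetic regression data so the sketch runs on its own.
x = torch.randn(256, 10)
y = x.sum(dim=1, keepdim=True)
train_loader = DataLoader(TensorDataset(x[:200], y[:200]), batch_size=32)
valid_loader = DataLoader(TensorDataset(x[200:], y[200:]), batch_size=32)

network = nn.Sequential(nn.Linear(10, 32), nn.ReLU(), nn.Linear(32, 1))
optimizer = optim.Adam(network.parameters(), lr=1e-3)

# Stop training once 'val_loss' has failed to improve by more than min_delta
# for `patience` consecutive epochs.
early_stopping = EarlyStopping(monitor='val_loss', mode='min', min_delta=0, patience=5, verbose=True)

model = Model(network, optimizer, nn.MSELoss())
model.fit_generator(train_loader, valid_loader, epochs=100, callbacks=[early_stopping])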

Example from GRAAL-Research/poutyne, tests/framework/callbacks/test_earlystopping.py:
def test_early_stopping_patience_of_1(self):
        earlystopper = EarlyStopping(monitor='val_loss', min_delta=0, patience=1, verbose=False)

        # val_loss worsens from 4 to 5 on epoch 3; with patience=1, a single
        # non-improving epoch is enough to stop training there.
        val_losses = [8, 4, 5, 2]
        early_stop_epoch = 3
        self._test_early_stopping(earlystopper, val_losses, early_stop_epoch)
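
The `_test_early_stopping` helper above is defined elsewhere in the test class and is not shown on this page. As a rough sketch of the idea, and assuming the callback signals a stop by setting `stop_training` on the model it is attached to (as Keras-style callbacks do), one can drive its hooks by hand with fake logs:

from unittest.mock import MagicMock

def simulate_early_stopping(earlystopper, val_losses):
    # Hypothetical stand-in for the test helper: feed fake 'val_loss' values
    # epoch by epoch and report the epoch on which the callback asks to stop.
    model = MagicMock()
    model.stop_training = False
    earlystopper.set_model(model)

    earlystopper.on_train_begin({})
    for epoch, val_loss in enumerate(val_losses, start=1):
        earlystopper.on_epoch_end(epoch, {'val_loss': val_loss})
        if model.stop_training:
            earlystopper.on_train_end({})
            return epoch
    earlystopper.on_train_end({})
    return None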

Example from GRAAL-Research/poutyne, tests/framework/callbacks/test_earlystopping.py:
def test_early_stopping_with_delta(self):
        earlystopper = EarlyStopping(monitor='val_loss', min_delta=3, patience=2, verbose=False)

        # The drop from 4 to 2 on epoch 4 is smaller than min_delta=3, so it does not
        # count as an improvement; epochs 3 and 4 exhaust patience=2 and training
        # stops on epoch 4.
        val_losses = [8, 4, 5, 2, 2]
        early_stop_epoch = 4
        self._test_early_stopping(earlystopper, val_losses, early_stop_epoch)

Example from GRAAL-Research/poutyne, tests/framework/callbacks/test_earlystopping.py:
def test_early_stopping_with_max(self):
        earlystopper = EarlyStopping(monitor='val_loss', mode='max', min_delta=0, patience=2, verbose=False)

        # In 'max' mode the monitored value must increase to count as an improvement;
        # it peaks at 8 on epoch 2 and never recovers within patience=2, so training
        # stops on epoch 4.
        val_losses = [2, 8, 4, 5, 2]
        early_stop_epoch = 4
        self._test_early_stopping(earlystopper, val_losses, early_stop_epoch)

Example from GRAAL-Research/poutyne, tests/framework/callbacks/test_earlystopping.py:
def test_integration(self):
        train_gen = some_data_generator(20)
        valid_gen = some_data_generator(20)
        earlystopper = EarlyStopping(monitor='val_loss', min_delta=0, patience=2, verbose=False)
        self.model.fit_generator(train_gen, valid_gen, epochs=10, steps_per_epoch=5, callbacks=[earlystopper])
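
`some_data_generator` and `self.model` above come from the test module's setup and are not shown on this page. A hedged stand-in for the generator, yielding random (inputs, targets) batches indefinitely, could look like this:

import torch

def some_data_generator(batch_size):
    # Endless stream of random (x, y) batches, just enough to exercise the
    # training loop in the integration test.
    while True:
        x = torch.rand(batch_size, 1)
        y = torch.rand(batch_size, 1)
        yield x, y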

Example from FrancescoSaverioZuppichini/PyTorch-Deep-Learning-Template, main.py:
# define our comet experiment
experiment = Experiment(api_key="YOUR_KEY",
                        project_name="dl-pytorch-template", workspace="francescosaveriozuppichini")
experiment.log_parameters(params)
# create our special resnet18
cnn = resnet18(2).to(device)
# print the model summary to show useful information
logging.info(summary(cnn, (3, 224, 224)))
# define custom optimizer and instantiate the trainer `Model`
optimizer = optim.Adam(cnn.parameters(), lr=params['lr'])
model = Model(cnn, optimizer, "cross_entropy", batch_metrics=["accuracy"]).to(device)
# usually you want to reduce the lr on plateau and store the best model
callbacks = [
    ReduceLROnPlateau(monitor="val_acc", mode="max", patience=5, verbose=True),  # accuracy should increase, so monitor in 'max' mode
    ModelCheckpoint(str(project.checkpoint_dir / f"{time.time()}-model.pt"), save_best_only=True, verbose=True),
    EarlyStopping(monitor="val_acc", patience=10, mode='max'),
    CometCallback(experiment)
]
model.fit_generator(
    train_dl,
    val_dl,
    epochs=50,
    callbacks=callbacks,
)
# get the results on the test set
loss, test_acc = model.evaluate_generator(test_dl)
logging.info(f'test_acc=({test_acc})')
experiment.log_metric('test_acc', test_acc)
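
Because early stopping halts training on the first epoch past the patience window, the weights left in memory are not necessarily the best ones seen. As a hedged follow-up sketch, assuming the ModelCheckpoint above saved the best network state_dict and that its timestamped filename is available in a hypothetical checkpoint_path variable, the best weights can be restored before a final evaluation:

import torch

# checkpoint_path is a hypothetical variable holding the path written by
# ModelCheckpoint above (the timestamped "...-model.pt" file).
cnn.load_state_dict(torch.load(checkpoint_path))
loss, test_acc = model.evaluate_generator(test_dl)
logging.info(f'test_acc with best checkpoint: {test_acc}')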