How to use the poutyne.framework.Model class in Poutyne

To help you get started, we've selected a few Poutyne examples based on popular ways it is used in public projects.

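The snippets below come from Poutyne's own test suite. As a starting point, here is a minimal, self-contained sketch of the workflow they exercise: wrap a PyTorch module, an optimizer, and a loss function in a Model, then train and evaluate from generators. The toy network, the data generator, and the hyperparameter values are illustrative assumptions rather than code taken from the snippets.

import torch
import torch.nn as nn
import torch.nn.functional as F
from poutyne.framework import Model

def some_data_generator(batch_size=32):
    # Endless generator of (x, y) batches, similar in spirit to the
    # generators used in the tests below.
    while True:
        x = torch.rand(batch_size, 1)
        y = 2 * x + 0.1 * torch.randn(batch_size, 1)
        yield x, y

network = nn.Linear(1, 1)
optimizer = torch.optim.SGD(network.parameters(), lr=1e-3)
loss_function = nn.MSELoss()

# batch_metrics are computed on every batch; any (y_pred, y_true) -> scalar
# callable works, here torch.nn.functional.mse_loss.
model = Model(network, optimizer, loss_function, batch_metrics=[F.mse_loss])

model.fit_generator(some_data_generator(),
                    some_data_generator(),
                    epochs=5,
                    steps_per_epoch=10,
                    validation_steps=10)

# With one batch metric, evaluate_generator returns (loss, metric).
loss, mse = model.evaluate_generator(some_data_generator(), steps=10)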

GitHub: GRAAL-Research/poutyne · tests/framework/test_model.py
        self.loss_function = nn.MSELoss()
        self.optimizer = torch.optim.SGD(self.pytorch_module.parameters(), lr=1e-3)
        self.batch_metrics = [some_batch_metric_1, some_batch_metric_2]
        self.batch_metrics_names = ['some_batch_metric_1', 'some_batch_metric_2']
        self.batch_metrics_values = [some_metric_1_value, some_metric_2_value]
        self.epoch_metrics = [SomeConstantEpochMetric()]
        self.epoch_metrics_names = ['SomeConstantEpochMetric']
        self.epoch_metrics_values = [some_constant_epoch_metric_value]

        self.model = Model(self.pytorch_module,
                           self.optimizer,
                           self.loss_function,
                           batch_metrics=self.batch_metrics,
                           epoch_metrics=self.epoch_metrics)

        self.multi_input_model = Model(MultiIOModel(num_input=1, num_output=1),
                                       self.optimizer,
                                       self.loss_function,
                                       batch_metrics=self.batch_metrics,
                                       epoch_metrics=self.epoch_metrics)

        self.multi_output_model = Model(
            MultiIOModel(num_input=1, num_output=2),
            self.optimizer,
            lambda y_pred, y_true: self.loss_function(y_pred[0], y_true[0]) + self.loss_function(y_pred[1], y_true[1]),
            batch_metrics=self.batch_metrics,
            epoch_metrics=self.epoch_metrics)

        self.multi_io_model = Model(
            MultiIOModel(num_input=2, num_output=2),
            self.optimizer,
            lambda y_pred, y_true: self.loss_function(y_pred[0], y_true[0]) + self.loss_function(y_pred[1], y_true[1]),
            batch_metrics=self.batch_metrics,
            epoch_metrics=self.epoch_metrics)
GitHub: GRAAL-Research/poutyne · tests/framework/test_model.py
        self.multi_output_model = Model(
            MultiIOModel(num_input=1, num_output=2),
            self.optimizer,
            lambda y_pred, y_true: self.loss_function(y_pred[0], y_true[0]) + self.loss_function(y_pred[1], y_true[1]),
            batch_metrics=self.batch_metrics,
            epoch_metrics=self.epoch_metrics)

        self.multi_io_model = Model(
            MultiIOModel(num_input=2, num_output=2),
            self.optimizer,
            lambda y_pred, y_true: self.loss_function(y_pred[0], y_true[0]) + self.loss_function(y_pred[1], y_true[1]),
            batch_metrics=self.batch_metrics,
            epoch_metrics=self.epoch_metrics)

        self.dict_output_model = Model(
            DictOutputModel(),
            self.optimizer,
            lambda y_p, y_t: self.loss_function(y_p['out1'], y_t[0]) + self.loss_function(y_p['out2'], y_t[1]),
            batch_metrics=self.batch_metrics,
            epoch_metrics=self.epoch_metrics)

        self.mocked_optimizer = some_mocked_optimizer()
        self.mocked_optim_model = Model(self.pytorch_module,
                                        self.mocked_optimizer,
                                        self.loss_function,
                                        batch_metrics=self.batch_metrics,
                                        epoch_metrics=self.epoch_metrics)

        self.mock_callback = MagicMock()
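The multi_output_model, multi_io_model and dict_output_model constructions above rely on Poutyne handing the network's whole output (a tuple or a dict) and the whole target to the loss function, which is then free to combine per-output losses. Below is a rough sketch of the tuple case, with a hypothetical two-headed module standing in for MultiIOModel; everything except the loss pattern itself is an illustrative assumption.

import torch
import torch.nn as nn
from poutyne.framework import Model

class TwoHeadedNet(nn.Module):
    # Hypothetical stand-in for MultiIOModel(num_input=1, num_output=2).
    def __init__(self):
        super().__init__()
        self.head1 = nn.Linear(1, 1)
        self.head2 = nn.Linear(1, 1)

    def forward(self, x):
        return self.head1(x), self.head2(x)

def two_target_generator(batch_size=32):
    # Yields (x, (y1, y2)) so that y_true is a tuple, matching the tuple
    # returned by the network's forward().
    while True:
        x = torch.rand(batch_size, 1)
        yield x, (torch.rand(batch_size, 1), torch.rand(batch_size, 1))

network = TwoHeadedNet()
optimizer = torch.optim.SGD(network.parameters(), lr=1e-3)
mse = nn.MSELoss()

# The loss function receives the full prediction tuple and the full target
# tuple, exactly as in the lambda losses of the setUp above.
model = Model(network,
              optimizer,
              lambda y_pred, y_true: mse(y_pred[0], y_true[0]) + mse(y_pred[1], y_true[1]))

model.fit_generator(two_target_generator(), epochs=1, steps_per_epoch=10)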
GitHub: GRAAL-Research/poutyne · tests/framework/callbacks/test_delay.py
def setUp(self):
        torch.manual_seed(42)
        self.pytorch_module = nn.Linear(1, 1)
        self.loss_function = nn.MSELoss()
        self.optimizer = torch.optim.SGD(self.pytorch_module.parameters(), lr=1e-3)
        self.model = Model(self.pytorch_module, self.optimizer, self.loss_function)
        self.mock_callback = MagicMock()
        self.delay_callback = DelayCallback(self.mock_callback)
        self.train_dict = {'loss': ANY, 'time': ANY}
        self.log_dict = {'loss': ANY, 'val_loss': ANY, 'time': ANY}
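DelayCallback wraps one or more other callbacks so they only start firing after a delay; in the test above it simply wraps a MagicMock to verify the dispatching. Below is a hedged sketch of combining it with EarlyStopping so that early stopping only kicks in after a warm-up period. The import path and the epoch_delay keyword are assumptions about the callback's signature; check the documentation of your Poutyne version.

import torch
import torch.nn as nn
from poutyne.framework import Model
from poutyne.framework.callbacks import DelayCallback, EarlyStopping

def some_data_generator(batch_size=32):
    while True:
        x = torch.rand(batch_size, 1)
        yield x, 2 * x

network = nn.Linear(1, 1)
optimizer = torch.optim.SGD(network.parameters(), lr=1e-3)
model = Model(network, optimizer, nn.MSELoss())

# Assumed signature: DelayCallback(callback, epoch_delay=...); the wrapped
# EarlyStopping only starts monitoring after 5 epochs.
delayed_stopping = DelayCallback(EarlyStopping(monitor='val_loss', patience=2),
                                 epoch_delay=5)

model.fit_generator(some_data_generator(),
                    some_data_generator(),
                    epochs=20,
                    steps_per_epoch=10,
                    validation_steps=10,
                    callbacks=[delayed_stopping])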
GitHub: GRAAL-Research/poutyne · tests/framework/test_model.py
def test_metrics_integration(self):
        num_steps = 10
        model = Model(self.pytorch_module, self.optimizer, self.loss_function, batch_metrics=[F.mse_loss])
        train_generator = some_data_tensor_generator(ModelTest.batch_size)
        valid_generator = some_data_tensor_generator(ModelTest.batch_size)
        model.fit_generator(train_generator,
                            valid_generator,
                            epochs=ModelTest.epochs,
                            steps_per_epoch=ModelTest.steps_per_epoch,
                            validation_steps=ModelTest.steps_per_epoch,
                            callbacks=[self.mock_callback])
        generator = some_data_tensor_generator(ModelTest.batch_size)
        loss, mse = model.evaluate_generator(generator, steps=num_steps)
        self.assertEqual(type(loss), float)
        self.assertEqual(type(mse), float)
GitHub: GRAAL-Research/poutyne · tests/framework/callbacks/test_lr_scheduler.py
def setUp(self):
        torch.manual_seed(42)
        self.pytorch_module = nn.Linear(1, 1)
        self.loss_function = nn.MSELoss()
        self.optimizer = torch.optim.SGD(self.pytorch_module.parameters(), lr=1e-3)
        self.model = Model(self.pytorch_module, self.optimizer, self.loss_function)
        self.train_gen = some_data_generator(20)
        self.valid_gen = some_data_generator(20)
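The lr_scheduler tests drive Poutyne's learning-rate scheduling callbacks, which mirror torch.optim.lr_scheduler. The sketch below assumes the StepLR wrapper is importable from poutyne.framework.callbacks and takes the same arguments as PyTorch's StepLR minus the optimizer; verify both against your Poutyne version.

import torch
import torch.nn as nn
from poutyne.framework import Model
from poutyne.framework.callbacks import StepLR

def some_data_generator(batch_size=20):
    while True:
        x = torch.rand(batch_size, 1)
        yield x, 2 * x

network = nn.Linear(1, 1)
optimizer = torch.optim.SGD(network.parameters(), lr=1e-3)
model = Model(network, optimizer, nn.MSELoss())

# Assumed to take the same arguments as torch.optim.lr_scheduler.StepLR,
# without the optimizer: the learning rate is multiplied by gamma every
# step_size epochs.
lr_schedule = StepLR(step_size=5, gamma=0.1)

model.fit_generator(some_data_generator(),
                    some_data_generator(),
                    epochs=15,
                    steps_per_epoch=10,
                    validation_steps=10,
                    callbacks=[lr_schedule])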
GitHub: GRAAL-Research/poutyne · tests/framework/callbacks/test_lr_scheduler_checkpoint.py
def setUp(self):
        torch.manual_seed(42)
        self.pytorch_module = nn.Linear(1, 1)
        self.loss_function = nn.MSELoss()
        self.optimizer = torch.optim.Adam(self.pytorch_module.parameters(), lr=1e-3)
        self.model = Model(self.pytorch_module, self.optimizer, self.loss_function)
        self.temp_dir_obj = TemporaryDirectory()
        self.checkpoint_filename = os.path.join(self.temp_dir_obj.name, 'my_checkpoint_{epoch}.optim')
GitHub: GRAAL-Research/poutyne · tests/framework/test_model.py
def test_epoch_metrics_integration(self):
        model = Model(self.pytorch_module, self.optimizer, self.loss_function, epoch_metrics=[SomeEpochMetric()])
        train_generator = some_data_tensor_generator(ModelTest.batch_size)
        valid_generator = some_data_tensor_generator(ModelTest.batch_size)
        logs = model.fit_generator(train_generator,
                                   valid_generator,
                                   epochs=1,
                                   steps_per_epoch=ModelTest.steps_per_epoch,
                                   validation_steps=ModelTest.steps_per_epoch)
        actual_value = logs[-1]['SomeEpochMetric']
        val_actual_value = logs[-1]['val_SomeEpochMetric']
        expected_value = 5
        self.assertEqual(val_actual_value, expected_value)
        self.assertEqual(actual_value, expected_value)
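As the assertions above show, fit_generator returns one log dict per epoch; epoch metrics appear under their own name and, for the validation pass, under the same name with a val_ prefix, alongside the loss, val_loss and time entries. A small sketch of reading those logs (the toy network and generator are assumptions; only the built-in entries are used, since SomeEpochMetric is a test helper):

import torch
import torch.nn as nn
from poutyne.framework import Model

def some_data_generator(batch_size=32):
    while True:
        x = torch.rand(batch_size, 1)
        yield x, 2 * x

network = nn.Linear(1, 1)
optimizer = torch.optim.SGD(network.parameters(), lr=1e-3)
model = Model(network, optimizer, nn.MSELoss())

logs = model.fit_generator(some_data_generator(),
                           some_data_generator(),
                           epochs=3,
                           steps_per_epoch=10,
                           validation_steps=10)

# One dict per epoch; custom epoch metrics would show up here under their
# name and 'val_' + name, next to 'loss', 'val_loss' and 'time'.
for epoch_number, epoch_log in enumerate(logs, 1):
    print(epoch_number, epoch_log['loss'], epoch_log['val_loss'], epoch_log['time'])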
GitHub: GRAAL-Research/poutyne · tests/framework/callbacks/test_checkpoint.py
def setUp(self):
        torch.manual_seed(42)
        self.pytorch_module = nn.Linear(1, 1)
        self.loss_function = nn.MSELoss()
        self.optimizer = torch.optim.SGD(self.pytorch_module.parameters(), lr=1e-3)
        self.model = Model(self.pytorch_module, self.optimizer, self.loss_function)
        self.temp_dir_obj = TemporaryDirectory()
        self.checkpoint_filename = os.path.join(self.temp_dir_obj.name, 'my_checkpoint_{epoch}.ckpt')
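The checkpoint tests write files from a filename template like the one above, where {epoch} is filled in at the end of each epoch. Here is a hedged sketch of the corresponding ModelCheckpoint callback in training code; the monitor and save_best_only keywords follow the usual convention, but double-check them against your Poutyne version.

import os
from tempfile import TemporaryDirectory

import torch
import torch.nn as nn
from poutyne.framework import Model
from poutyne.framework.callbacks import ModelCheckpoint

def some_data_generator(batch_size=32):
    while True:
        x = torch.rand(batch_size, 1)
        yield x, 2 * x

network = nn.Linear(1, 1)
optimizer = torch.optim.SGD(network.parameters(), lr=1e-3)
model = Model(network, optimizer, nn.MSELoss())

temp_dir = TemporaryDirectory()
checkpoint_filename = os.path.join(temp_dir.name, 'my_checkpoint_{epoch}.ckpt')

# Saves the network's weights each epoch; with save_best_only=True, only
# when the monitored quantity improves.
checkpoint = ModelCheckpoint(checkpoint_filename, monitor='val_loss', save_best_only=True)

model.fit_generator(some_data_generator(),
                    some_data_generator(),
                    epochs=5,
                    steps_per_epoch=10,
                    validation_steps=10,
                    callbacks=[checkpoint])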
GitHub: GRAAL-Research/poutyne · tests/framework/callbacks/test_earlystopping.py
def setUp(self):
        torch.manual_seed(42)
        self.pytorch_module = nn.Linear(1, 1)
        self.loss_function = nn.MSELoss()
        self.optimizer = torch.optim.SGD(self.pytorch_module.parameters(), lr=1e-3)
        self.model = Model(self.pytorch_module, self.optimizer, self.loss_function)
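Finally, the early-stopping tests exercise the EarlyStopping callback, which interrupts training when a monitored quantity stops improving. A short hedged sketch of its typical use with the Model above; the monitor and patience keywords follow the usual Keras-style convention, so verify them against your version's documentation.

import torch
import torch.nn as nn
from poutyne.framework import Model
from poutyne.framework.callbacks import EarlyStopping

def some_data_generator(batch_size=32):
    while True:
        x = torch.rand(batch_size, 1)
        yield x, 2 * x

network = nn.Linear(1, 1)
optimizer = torch.optim.SGD(network.parameters(), lr=1e-3)
model = Model(network, optimizer, nn.MSELoss())

# Stop when val_loss has not improved for 3 consecutive epochs.
early_stopping = EarlyStopping(monitor='val_loss', patience=3)

model.fit_generator(some_data_generator(),
                    some_data_generator(),
                    epochs=100,
                    steps_per_epoch=10,
                    validation_steps=10,
                    callbacks=[early_stopping])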