How to use the batchgenerators.transforms.Compose function in batchgenerators

To help you get started, we’ve selected a few batchgenerators examples, based on popular ways it is used in public projects.

Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.

github delira-dev / delira / tests / data_loading / test_numba_transforms.py View on GitHub external
def setUp(self) -> None:
    """Build the transform fixtures used by the numba-transform tests.

    Creates a plain and a numba-wrapped variant of a zoom and a pad
    transform, composed pipelines of both flavours, and a random input
    batch of shape (10, 1, 24, 24) under the ``"data"`` key.
    """
    from delira.data_loading.numba_transform import NumbaTransform, \
        NumbaCompose

    # Shared configuration for both the plain and the numba variants.
    zoom_factor = 3
    pad_size = (30, 30)

    self._basic_zoom_trafo = ZoomTransform(zoom_factor)
    self._numba_zoom_trafo = NumbaTransform(ZoomTransform,
                                            zoom_factors=zoom_factor)
    self._basic_pad_trafo = PadTransform(new_size=pad_size)
    self._numba_pad_trafo = NumbaTransform(PadTransform, new_size=pad_size)

    # Pad first, then zoom — same ordering in both composed pipelines.
    self._basic_compose_trafo = Compose([self._basic_pad_trafo,
                                         self._basic_zoom_trafo])
    self._numba_compose_trafo = NumbaCompose([self._basic_pad_trafo,
                                              self._basic_zoom_trafo])

    self._input = {"data": np.random.rand(10, 1, 24, 24)}
github frankkramer-lab / MIScnn / miscnn / processing / data_augmentation.py View on GitHub external
angle_z=self.config_rotations_angleZ,
                                    do_scale=self.scaling,
                                    scale=self.config_scaling_range,
                                    border_mode_data='constant',
                                    border_cval_data=0,
                                    border_mode_seg='constant',
                                    border_cval_seg=0,
                                    order_data=3, order_seg=0,
                                    p_el_per_sample=self.config_p_per_sample,
                                    p_rot_per_sample=self.config_p_per_sample,
                                    p_scale_per_sample=self.config_p_per_sample,
                                    random_crop=self.cropping)
            # Append spatial transformation to transformation list
            transforms.append(aug_spatial_transform)
        # Compose the batchgenerators transforms
        all_transforms = Compose(transforms)
        # Assemble transforms into an augmentation generator
        augmentation_generator = SingleThreadedAugmenter(data_generator,
                                                         all_transforms)
        # Perform the data augmentation x times (x = cycles)
        aug_img_data = None
        aug_seg_data = None
        for i in range(0, self.cycles):
            # Run the computation process for the data augmentations
            augmentation = next(augmentation_generator)
            # Access augmented data from the batchgenerators data structure
            if aug_img_data is None and aug_seg_data is None:
                aug_img_data = augmentation["data"]
                aug_seg_data = augmentation[seg_label]
            # Concatenate the newly augmented data with the cached data
            else:
                aug_img_data = np.concatenate((augmentation["data"],
github jenspetersen / probabilistic-unet / probunet / experiment / probabilistic_unet_segmentation.py View on GitHub external
test_data = self.data_val
        else:
            test_data = self.data_test

        generator = self.config.generator_val(
            test_data, self.config.batch_size_val, 3,
            number_of_threads_in_multithreaded=self.config.augmenter_val_kwargs.num_processes)
        transforms = []
        for t in sorted(self.config.transforms_val.keys()):
            if self.config.transforms_val[t]["active"]:
                cls = self.config.transforms_val[t]["type"]
                kwargs = self.config.transforms_val[t]["kwargs"]
                transforms.append(cls(**kwargs))

        augmenter = self.config.augmenter_val(generator,
                                              Compose(transforms),
                                              **self.config.augmenter_val_kwargs)
        test_scores, info = self.test_inner(augmenter, [], info, future=True)
        test_scores = np.array(test_scores)

        self.elog.save_numpy_data(test_scores, "test_future.npy")
        self.elog.save_dict(info, "test_future.json")
github MIC-DKFZ / basic_unet_example / datasets / three_dim / data_augmentation.py View on GitHub external
border_mode_data="nearest", border_mode_seg="nearest"),
                          ]

    elif mode == "val":
        transform_list = [CenterCropTransform(crop_size=target_size),
                          ResizeTransform(target_size=target_size, order=1),
                          ]

    elif mode == "test":
        transform_list = [CenterCropTransform(crop_size=target_size),
                          ResizeTransform(target_size=target_size, order=1),
                          ]

    transform_list.append(NumpyToTensor())

    return Compose(transform_list)
github frankkramer-lab / MIScnn / miscnn / preprocessing / data_augmentation.py View on GitHub external
angle_z=self.config_rotations_angleZ,
                                    do_scale=self.scaling,
                                    scale=self.config_scaling_range,
                                    border_mode_data='constant',
                                    border_cval_data=0,
                                    border_mode_seg='constant',
                                    border_cval_seg=0,
                                    order_data=3, order_seg=0,
                                    p_el_per_sample=self.config_p_per_sample,
                                    p_rot_per_sample=self.config_p_per_sample,
                                    p_scale_per_sample=self.config_p_per_sample,
                                    random_crop=self.cropping)
            # Append spatial transformation to transformation list
            transforms.append(aug_spatial_transform)
        # Compose the batchgenerators transforms
        all_transforms = Compose(transforms)
        # Assemble transforms into an augmentation generator
        augmentation_generator = MultiThreadedAugmenter(data_generator,
                                                        all_transforms,
                                                        self.config_mta_workers,
                                                        self.config_mta_queue,
                                                        seeds=None)
        # Perform the data augmentation x times (x = cycles)
        aug_img_data = None
        aug_seg_data = None
        for i in range(0, self.cycles):
            # Run the computation process for the data augmentations
            augmentation = next(augmentation_generator)
            # Access augmented data from the batchgenerators data structure
            if aug_img_data is None and aug_seg_data is None:
                aug_img_data = augmentation["data"]
                aug_seg_data = augmentation["seg"]
github delira-dev / delira / delira / data_loading / numba_transform.py View on GitHub external
self._transform = transform

    def __call__(self, **kwargs):
        """Forward all keyword arguments to the wrapped transform.

        Returns whatever the wrapped transform returns.
        """
        transform = self._transform
        return transform(**kwargs)


class NumbaTransform(NumbaTransformWrapper):
    """Numba-wrapped transform built from a transform *class*.

    Instantiates ``transform_cls`` with the remaining keyword arguments
    and hands the resulting transform instance to
    ``NumbaTransformWrapper`` together with the numba options.
    """

    def __init__(self, transform_cls, nopython=True, target="cpu",
                 parallel=False, **kwargs):
        # All extra kwargs belong to the transform class, not to numba.
        super().__init__(transform_cls(**kwargs), nopython=nopython,
                         target=target, parallel=parallel)


class NumbaCompose(Compose):
    """``Compose`` variant that numba-wraps each transform before chaining.

    Every entry of ``transforms`` is wrapped in a
    ``NumbaTransformWrapper``; the wrapped list is then passed on to the
    regular ``Compose`` machinery.
    """

    def __init__(self, transforms):
        wrapped = []
        for transform in transforms:
            wrapped.append(NumbaTransformWrapper(transform))
        super().__init__(transforms=wrapped)
github MIC-DKFZ / basic_unet_example / datasets / two_dim / data_augmentation.py View on GitHub external
]


    elif mode == "val":
        tranform_list = [# CenterCropTransform(crop_size=target_size),
                         ResizeTransform(target_size=target_size, order=1),
                         ]

    elif mode == "test":
        tranform_list = [# CenterCropTransform(crop_size=target_size),
                         ResizeTransform(target_size=target_size, order=1),
                         ]

    tranform_list.append(NumpyToTensor())

    return Compose(tranform_list)